Mirror of https://github.com/QIDITECH/klipper.git, synced 2026-01-30 23:48:43 +03:00

Commit: Delete scripts directory
@@ -1,283 +0,0 @@
# Log data analyzing functions
#
# Copyright (C) 2021 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import math, collections
import readlog


######################################################################
# Analysis code
######################################################################

# Analyzer handlers: {name: class, ...}
AHandlers = {}

# Calculate a derivative (position to velocity, or velocity to accel)
class GenDerivative:
    ParametersMin = ParametersMax = 1
    DataSets = [
        ('derivative(<dataset>)', 'Derivative of the given dataset'),
    ]
    def __init__(self, amanager, name_parts):
        self.amanager = amanager
        self.source = name_parts[1]
        amanager.setup_dataset(self.source)
    def get_label(self):
        label = self.amanager.get_label(self.source)
        lname = label['label']
        units = label['units']
        if '(mm)' in units:
            rep = [('Position', 'Velocity'), ('(mm)', '(mm/s)')]
        elif '(mm/s)' in units:
            rep = [('Velocity', 'Acceleration'), ('(mm/s)', '(mm/s^2)')]
        else:
            return {'label': 'Derivative', 'units': 'Unknown'}
        for old, new in rep:
            lname = lname.replace(old, new).replace(old.lower(), new.lower())
            units = units.replace(old, new).replace(old.lower(), new.lower())
        return {'label': lname, 'units': units}
    def generate_data(self):
        inv_seg_time = 1. / self.amanager.get_segment_time()
        data = self.amanager.get_datasets()[self.source]
        deriv = [(data[i+1] - data[i]) * inv_seg_time
                 for i in range(len(data)-1)]
        return [deriv[0]] + deriv
AHandlers["derivative"] = GenDerivative

# Calculate an integral (accel to velocity, or velocity to position)
class GenIntegral:
    ParametersMin = 1
    ParametersMax = 3
    DataSets = [
        ('integral(<dataset>)', 'Integral of the given dataset'),
        ('integral(<dataset1>,<dataset2>)',
         'Integral with dataset2 as reference'),
        ('integral(<dataset1>,<dataset2>,<half_life>)',
         'Integral with weighted half-life time'),
    ]
    def __init__(self, amanager, name_parts):
        self.amanager = amanager
        self.source = name_parts[1]
        amanager.setup_dataset(self.source)
        self.ref = None
        self.half_life = 0.015
        if len(name_parts) >= 3:
            self.ref = name_parts[2]
            amanager.setup_dataset(self.ref)
            if len(name_parts) == 4:
                self.half_life = float(name_parts[3])
    def get_label(self):
        label = self.amanager.get_label(self.source)
        lname = label['label']
        units = label['units']
        if '(mm/s)' in units:
            rep = [('Velocity', 'Position'), ('(mm/s)', '(mm)')]
        elif '(mm/s^2)' in units:
            rep = [('Acceleration', 'Velocity'), ('(mm/s^2)', '(mm/s)')]
        else:
            return {'label': 'Integral', 'units': 'Unknown'}
        for old, new in rep:
            lname = lname.replace(old, new).replace(old.lower(), new.lower())
            units = units.replace(old, new).replace(old.lower(), new.lower())
        return {'label': lname, 'units': units}
    def generate_data(self):
        seg_time = self.amanager.get_segment_time()
        src = self.amanager.get_datasets()[self.source]
        offset = sum(src) / len(src)
        total = 0.
        ref = None
        if self.ref is not None:
            ref = self.amanager.get_datasets()[self.ref]
            offset -= (ref[-1] - ref[0]) / (len(src) * seg_time)
            total = ref[0]
        src_weight = 1.
        if self.half_life:
            src_weight = math.exp(math.log(.5) * seg_time / self.half_life)
        ref_weight = 1. - src_weight
        data = [0.] * len(src)
        for i, v in enumerate(src):
            total += (v - offset) * seg_time
            if ref is not None:
                total = src_weight * total + ref_weight * ref[i]
            data[i] = total
        return data
AHandlers["integral"] = GenIntegral

# Calculate a kinematic stepper position from the toolhead requested position
class GenKinematicPosition:
    ParametersMin = ParametersMax = 1
    DataSets = [
        ('kin(<stepper>)', 'Stepper position derived from toolhead kinematics'),
    ]
    def __init__(self, amanager, name_parts):
        self.amanager = amanager
        stepper = name_parts[1]
        status = self.amanager.get_initial_status()
        kin = status['configfile']['settings']['printer']['kinematics']
        if kin not in ['cartesian', 'corexy']:
            raise amanager.error("Unsupported kinematics '%s'" % (kin,))
        if stepper not in ['stepper_x', 'stepper_y', 'stepper_z']:
            raise amanager.error("Unknown stepper '%s'" % (stepper,))
        if kin == 'corexy' and stepper in ['stepper_x', 'stepper_y']:
            self.source1 = 'trapq(toolhead,x)'
            self.source2 = 'trapq(toolhead,y)'
            if stepper == 'stepper_x':
                self.generate_data = self.generate_data_corexy_plus
            else:
                self.generate_data = self.generate_data_corexy_minus
            amanager.setup_dataset(self.source1)
            amanager.setup_dataset(self.source2)
        else:
            self.source1 = 'trapq(toolhead,%s)' % (stepper[-1:],)
            self.source2 = None
            self.generate_data = self.generate_data_passthrough
            amanager.setup_dataset(self.source1)
    def get_label(self):
        return {'label': 'Position', 'units': 'Position\n(mm)'}
    def generate_data_corexy_plus(self):
        datasets = self.amanager.get_datasets()
        data1 = datasets[self.source1]
        data2 = datasets[self.source2]
        return [d1 + d2 for d1, d2 in zip(data1, data2)]
    def generate_data_corexy_minus(self):
        datasets = self.amanager.get_datasets()
        data1 = datasets[self.source1]
        data2 = datasets[self.source2]
        return [d1 - d2 for d1, d2 in zip(data1, data2)]
    def generate_data_passthrough(self):
        return self.amanager.get_datasets()[self.source1]
AHandlers["kin"] = GenKinematicPosition

# Calculate a toolhead x/y position from corexy stepper positions
class GenCorexyPosition:
    ParametersMin = ParametersMax = 3
    DataSets = [
        ('corexy(x,<stepper>,<stepper>)', 'Toolhead x position from steppers'),
        ('corexy(y,<stepper>,<stepper>)', 'Toolhead y position from steppers'),
    ]
    def __init__(self, amanager, name_parts):
        self.amanager = amanager
        self.is_plus = name_parts[1] == 'x'
        self.source1, self.source2 = name_parts[2:]
        amanager.setup_dataset(self.source1)
        amanager.setup_dataset(self.source2)
    def get_label(self):
        axis = 'x'
        if not self.is_plus:
            axis = 'y'
        return {'label': 'Derived %s position' % (axis,),
                'units': 'Position\n(mm)'}
    def generate_data(self):
        datasets = self.amanager.get_datasets()
        data1 = datasets[self.source1]
        data2 = datasets[self.source2]
        if self.is_plus:
            return [.5 * (d1 + d2) for d1, d2 in zip(data1, data2)]
        return [.5 * (d1 - d2) for d1, d2 in zip(data1, data2)]
AHandlers["corexy"] = GenCorexyPosition

# Calculate a position deviation
class GenDeviation:
    ParametersMin = ParametersMax = 2
    DataSets = [
        ('deviation(<dataset1>,<dataset2>)', 'Difference between datasets'),
    ]
    def __init__(self, amanager, name_parts):
        self.amanager = amanager
        self.source1, self.source2 = name_parts[1:]
        amanager.setup_dataset(self.source1)
        amanager.setup_dataset(self.source2)
    def get_label(self):
        label1 = self.amanager.get_label(self.source1)
        label2 = self.amanager.get_label(self.source2)
        if label1['units'] != label2['units']:
            return {'label': 'Deviation', 'units': 'Unknown'}
        parts = label1['units'].split('\n')
        units = '\n'.join([parts[0]] + ['Deviation'] + parts[1:])
        return {'label': label1['label'] + ' deviation', 'units': units}
    def generate_data(self):
        datasets = self.amanager.get_datasets()
        data1 = datasets[self.source1]
        data2 = datasets[self.source2]
        return [d1 - d2 for d1, d2 in zip(data1, data2)]
AHandlers["deviation"] = GenDeviation


######################################################################
# Analyzer management and data generation
######################################################################

# Return a description of available analyzers
def list_datasets():
    datasets = []
    for ah in sorted(AHandlers.keys()):
        datasets += AHandlers[ah].DataSets
    return datasets

# Manage raw and generated data samples
class AnalyzerManager:
    error = None
    def __init__(self, lmanager, segment_time):
        self.lmanager = lmanager
        self.error = lmanager.error
        self.segment_time = segment_time
        self.raw_datasets = collections.OrderedDict()
        self.gen_datasets = collections.OrderedDict()
        self.datasets = {}
        self.dataset_times = []
        self.duration = 5.
    def set_duration(self, duration):
        self.duration = duration
    def get_segment_time(self):
        return self.segment_time
    def get_datasets(self):
        return self.datasets
    def get_dataset_times(self):
        return self.dataset_times
    def get_initial_status(self):
        return self.lmanager.get_initial_status()
    def setup_dataset(self, name):
        name = name.strip()
        if name in self.raw_datasets:
            return self.raw_datasets[name]
        if name in self.gen_datasets:
            return self.gen_datasets[name]
        name_parts = readlog.name_split(name)
        if name_parts[0] in self.lmanager.available_dataset_types():
            hdl = self.lmanager.setup_dataset(name)
            self.raw_datasets[name] = hdl
        else:
            cls = AHandlers.get(name_parts[0])
            if cls is None:
                raise self.error("Unknown dataset '%s'" % (name,))
            num_param = len(name_parts) - 1
            if num_param < cls.ParametersMin or num_param > cls.ParametersMax:
                raise self.error("Invalid parameters to dataset '%s'" % (name,))
            hdl = cls(self, name_parts)
            self.gen_datasets[name] = hdl
        self.datasets[name] = []
        return hdl
    def get_label(self, dataset):
        hdl = self.raw_datasets.get(dataset)
        if hdl is None:
            hdl = self.gen_datasets.get(dataset)
            if hdl is None:
                raise self.error("Unknown dataset '%s'" % (dataset,))
        return hdl.get_label()
    def generate_datasets(self):
        # Generate raw data
        list_hdls = [(self.datasets[name], hdl)
                     for name, hdl in self.raw_datasets.items()]
        initial_start_time = self.lmanager.get_initial_start_time()
        start_time = t = self.lmanager.get_start_time()
        end_time = start_time + self.duration
        while t < end_time:
            t += self.segment_time
            self.dataset_times.append(t - initial_start_time)
            for dl, hdl in list_hdls:
                dl.append(hdl.pull_data(t))
        # Generate analyzer data
        for name, hdl in self.gen_datasets.items():
            self.datasets[name] = hdl.generate_data()

@@ -1,204 +0,0 @@
#!/usr/bin/env python
# Tool to subscribe to motion data and log it to a disk file
#
# Copyright (C) 2020-2021 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import sys, os, optparse, socket, select, json, errno, time, zlib

INDEX_UPDATE_TIME = 5.0
ClientInfo = {'program': 'motan_data_logger', 'version': 'v0.1'}

def webhook_socket_create(uds_filename):
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.setblocking(0)
    sys.stderr.write("Waiting for connect to %s\n" % (uds_filename,))
    while 1:
        try:
            sock.connect(uds_filename)
        except socket.error as e:
            if e.errno == errno.ECONNREFUSED:
                time.sleep(0.1)
                continue
            sys.stderr.write("Unable to connect socket %s [%d,%s]\n"
                             % (uds_filename, e.errno,
                                errno.errorcode[e.errno]))
            sys.exit(-1)
        break
    sys.stderr.write("Connected.\n")
    return sock
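
# Illustrative sketch (not part of the original tool): the Klipper API
# server speaks JSON requests and responses over this socket, each message
# terminated by a 0x03 byte.  A minimal one-shot exchange on a fresh
# connection could look like the following (assumes a blocking socket and a
# single pending response; DataLogger below uses poll() and buffering
# instead):
def _example_query(sock):
    req = {"id": "info", "method": "info", "params": {}}
    sock.send(json.dumps(req, separators=(',', ':')).encode() + b"\x03")
    data = b""
    while not data.endswith(b"\x03"):
        data += sock.recv(4096)
    return json.loads(data[:-1])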

class LogWriter:
    def __init__(self, filename):
        self.file = open(filename, "wb")
        self.comp = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                                     zlib.DEFLATED, 31)
        self.raw_pos = self.file_pos = 0
    def add_data(self, data):
        d = self.comp.compress(data + b"\x03")
        self.file.write(d)
        self.file_pos += len(d)
        self.raw_pos += len(data) + 1
    def flush(self, flag=zlib.Z_FULL_FLUSH):
        if not self.raw_pos:
            return self.file_pos
        d = self.comp.flush(flag)
        self.file.write(d)
        self.file_pos += len(d)
        return self.file_pos
    def close(self):
        self.flush(zlib.Z_FINISH)
        self.file.close()
        self.file = None
        self.comp = None
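
# Illustrative sketch (not part of the original tool): wbits=31 above makes
# zlib emit a gzip container, so a finished log can be read back with the
# stdlib gzip module (readlog.py streams it incrementally instead, which
# also lets it resume at the Z_FULL_FLUSH points written by flush()):
def _example_read_back(filename):
    import gzip
    with gzip.open(filename, "rb") as f:
        return f.read().split(b"\x03")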

class DataLogger:
    def __init__(self, uds_filename, log_prefix):
        # IO
        self.webhook_socket = webhook_socket_create(uds_filename)
        self.poll = select.poll()
        self.poll.register(self.webhook_socket, select.POLLIN | select.POLLHUP)
        self.socket_data = b""
        # Data log
        self.logger = LogWriter(log_prefix + ".json.gz")
        self.index = LogWriter(log_prefix + ".index.gz")
        # Handlers
        self.query_handlers = {}
        self.async_handlers = {}
        # get_status databasing
        self.db = {}
        self.next_index_time = 0.
        # Start login process
        self.send_query("info", "info", {"client_info": ClientInfo},
                        self.handle_info)
    def error(self, msg):
        sys.stderr.write(msg + "\n")
    def finish(self, msg):
        self.error(msg)
        self.logger.close()
        self.index.close()
        sys.exit(0)
    # Unix Domain Socket IO
    def send_query(self, msg_id, method, params, cb):
        self.query_handlers[msg_id] = cb
        msg = {"id": msg_id, "method": method, "params": params}
        cm = json.dumps(msg, separators=(',', ':')).encode()
        self.webhook_socket.send(cm + b"\x03")
    def process_socket(self):
        data = self.webhook_socket.recv(4096)
        if not data:
            self.finish("Socket closed")
        parts = data.split(b"\x03")
        parts[0] = self.socket_data + parts[0]
        self.socket_data = parts.pop()
        for part in parts:
            try:
                msg = json.loads(part)
            except:
                self.error("ERROR: Unable to parse line")
                continue
            self.logger.add_data(part)
            msg_q = msg.get("q")
            if msg_q is not None:
                hdl = self.async_handlers.get(msg_q)
                if hdl is not None:
                    hdl(msg, part)
                continue
            msg_id = msg.get("id")
            hdl = self.query_handlers.get(msg_id)
            if hdl is not None:
                del self.query_handlers[msg_id]
                hdl(msg, part)
                if not self.query_handlers:
                    self.flush_index()
                continue
            self.error("ERROR: Message with unknown id")
    def run(self):
        try:
            while 1:
                res = self.poll.poll(1000.)
                for fd, event in res:
                    if fd == self.webhook_socket.fileno():
                        self.process_socket()
        except KeyboardInterrupt:
            self.finish("Keyboard Interrupt")
    # Query response handlers
    def send_subscribe(self, msg_id, method, params, cb=None, async_cb=None):
        if cb is None:
            cb = self.handle_dump
        if async_cb is not None:
            self.async_handlers[msg_id] = async_cb
        params["response_template"] = {"q": msg_id}
        self.send_query(msg_id, method, params, cb)
    def handle_info(self, msg, raw_msg):
        if msg["result"]["state"] != "ready":
            self.finish("Klipper not in ready state")
        self.send_query("list", "objects/list", {}, self.handle_list)
    def handle_list(self, msg, raw_msg):
        subreq = {o: None for o in msg["result"]["objects"]}
        self.send_subscribe("status", "objects/subscribe", {"objects": subreq},
                            self.handle_subscribe, self.handle_async_db)
    def handle_subscribe(self, msg, raw_msg):
        result = msg["result"]
        self.next_index_time = result["eventtime"] + INDEX_UPDATE_TIME
        self.db["status"] = status = result["status"]
        # Subscribe to trapq and stepper queue updates
        motion_report = status.get("motion_report", {})
        for trapq in motion_report.get("trapq", []):
            self.send_subscribe("trapq:" + trapq, "motion_report/dump_trapq",
                                {"name": trapq})
        for stepper in motion_report.get("steppers", []):
            self.send_subscribe("stepq:" + stepper,
                                "motion_report/dump_stepper", {"name": stepper})
        # Subscribe to additional sensor data
        config = status["configfile"]["settings"]
        for cfgname in config.keys():
            if cfgname == "adxl345" or cfgname.startswith("adxl345 "):
                aname = cfgname.split()[-1]
                self.send_subscribe("adxl345:" + aname, "adxl345/dump_adxl345",
                                    {"sensor": aname})
            if cfgname.startswith("angle "):
                aname = cfgname.split()[1]
                self.send_subscribe("angle:" + aname, "angle/dump_angle",
                                    {"sensor": aname})
    def handle_dump(self, msg, raw_msg):
        msg_id = msg["id"]
        if "result" not in msg:
            self.error("Unable to subscribe to '%s': %s"
                       % (msg_id, msg.get("error", {}).get("message", "")))
            return
        self.db.setdefault("subscriptions", {})[msg_id] = msg["result"]
    def flush_index(self):
        self.db['file_position'] = self.logger.flush()
        self.index.add_data(json.dumps(self.db, separators=(',', ':')).encode())
        self.db = {"status": {}}
    def handle_async_db(self, msg, raw_msg):
        params = msg["params"]
        db_status = self.db['status']
        for k, v in params.get("status", {}).items():
            db_status.setdefault(k, {}).update(v)
        eventtime = params['eventtime']
        if eventtime >= self.next_index_time:
            self.next_index_time = eventtime + INDEX_UPDATE_TIME
            self.flush_index()

def nice():
    try:
        # Try to re-nice writing process
        os.nice(10)
    except:
        pass

def main():
    usage = "%prog [options] <socket filename> <log name>"
    opts = optparse.OptionParser(usage)
    options, args = opts.parse_args()
    if len(args) != 2:
        opts.error("Incorrect number of arguments")

    nice()
    dl = DataLogger(args[0], args[1])
    dl.run()

if __name__ == '__main__':
    main()
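
# Example invocation (script and socket paths are illustrative): connect to
# a Klipper API server socket (e.g. klippy started with "-a /tmp/klippy_uds")
# and write mylog.json.gz plus mylog.index.gz:
#   ./data_logger.py /tmp/klippy_uds mylog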

@@ -1,149 +0,0 @@
#!/usr/bin/env python
# Script to perform motion analysis and graphing
#
# Copyright (C) 2019-2021 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import sys, optparse, ast
import matplotlib
import readlog, analyzers
try:
    import urlparse
except ImportError:
    import urllib.parse as urlparse


######################################################################
# Graphing
######################################################################

def plot_motion(amanager, graphs, log_prefix):
    # Generate data
    for graph in graphs:
        for dataset, plot_params in graph:
            amanager.setup_dataset(dataset)
    amanager.generate_datasets()
    datasets = amanager.get_datasets()
    times = amanager.get_dataset_times()
    # Build plot
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('x-small')
    fig, rows = matplotlib.pyplot.subplots(nrows=len(graphs), sharex=True)
    if len(graphs) == 1:
        rows = [rows]
    rows[0].set_title("Motion Analysis (%s)" % (log_prefix,))
    for graph, graph_ax in zip(graphs, rows):
        graph_units = graph_twin_units = twin_ax = None
        for dataset, plot_params in graph:
            label = amanager.get_label(dataset)
            ax = graph_ax
            if graph_units is None:
                graph_units = label['units']
                ax.set_ylabel(graph_units)
            elif label['units'] != graph_units:
                if graph_twin_units is None:
                    ax = twin_ax = graph_ax.twinx()
                    graph_twin_units = label['units']
                    ax.set_ylabel(graph_twin_units)
                elif label['units'] == graph_twin_units:
                    ax = twin_ax
                else:
                    graph_units = "Unknown"
                    ax.set_ylabel(graph_units)
            pparams = {'label': label['label'], 'alpha': 0.8}
            pparams.update(plot_params)
            ax.plot(times, datasets[dataset], **pparams)
        if twin_ax is not None:
            li1, la1 = graph_ax.get_legend_handles_labels()
            li2, la2 = twin_ax.get_legend_handles_labels()
            twin_ax.legend(li1 + li2, la1 + la2, loc='best', prop=fontP)
        else:
            graph_ax.legend(loc='best', prop=fontP)
        graph_ax.grid(True)
    rows[-1].set_xlabel('Time (s)')
    return fig


######################################################################
# Startup
######################################################################

def setup_matplotlib(output_to_file):
    global matplotlib
    if output_to_file:
        matplotlib.use('Agg')
    import matplotlib.pyplot, matplotlib.dates, matplotlib.font_manager
    import matplotlib.ticker

def parse_graph_description(desc):
    if '?' not in desc:
        return (desc, {})
    dataset, params = desc.split('?', 1)
    params = {k: v for k, v in urlparse.parse_qsl(params)}
    for fkey in ['alpha']:
        if fkey in params:
            params[fkey] = float(params[fkey])
    return (dataset, params)
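
# Illustrative sketch (not part of the original script): a dataset
# description may carry matplotlib plot parameters after a '?', in URL
# query-string form.  For example:
#   parse_graph_description("trapq(toolhead,velocity)?color=green&alpha=0.5")
#   -> ('trapq(toolhead,velocity)', {'color': 'green', 'alpha': 0.5})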

def list_datasets():
    datasets = readlog.list_datasets() + analyzers.list_datasets()
    out = ["\nAvailable datasets:\n"]
    for dataset, desc in datasets:
        out.append("%-24s: %s\n" % (dataset, desc))
    out.append("\n")
    sys.stdout.write("".join(out))
    sys.exit(0)

def main():
    # Parse command-line arguments
    usage = "%prog [options] <logname>"
    opts = optparse.OptionParser(usage)
    opts.add_option("-o", "--output", type="string", dest="output",
                    default=None, help="filename of output graph")
    opts.add_option("-s", "--skip", type="float", default=0.,
                    help="Set the start time to graph")
    opts.add_option("-d", "--duration", type="float", default=5.,
                    help="Number of seconds to graph")
    opts.add_option("--segment-time", type="float", default=0.000100,
                    help="Analysis segment time (default 0.000100 seconds)")
    opts.add_option("-g", "--graph", help="Graph to generate (python literal)")
    opts.add_option("-l", "--list-datasets", action="store_true",
                    help="List available datasets")
    options, args = opts.parse_args()
    if options.list_datasets:
        list_datasets()
    if len(args) != 1:
        opts.error("Incorrect number of arguments")
    log_prefix = args[0]

    # Open data files
    lmanager = readlog.LogManager(log_prefix)
    lmanager.setup_index()
    lmanager.seek_time(options.skip)
    amanager = analyzers.AnalyzerManager(lmanager, options.segment_time)
    amanager.set_duration(options.duration)

    # Default graphs to draw
    graph_descs = [
        ["trapq(toolhead,velocity)?color=green"],
        ["trapq(toolhead,accel)?color=green"],
        ["deviation(stepq(stepper_x),kin(stepper_x))?color=blue"],
    ]
    if options.graph is not None:
        graph_descs = ast.literal_eval(options.graph)
    graphs = [[parse_graph_description(g) for g in graph_row]
              for graph_row in graph_descs]

    # Draw graph
    setup_matplotlib(options.output is not None)
    fig = plot_motion(amanager, graphs, log_prefix)

    # Show graph
    if options.output is None:
        matplotlib.pyplot.show()
    else:
        fig.set_size_inches(8, 6)
        fig.savefig(options.output)

if __name__ == '__main__':
    main()
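
# Example invocations (script name and log name are illustrative), matching
# the options defined above:
#   ./motan_graph.py mylog -o graph.png
#   ./motan_graph.py mylog -g '[["trapq(toolhead,velocity)"], ["adxl345(adxl345,x)"]]'
#   ./motan_graph.py -l    (list the available datasets)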

@@ -1,629 +0,0 @@
# Code for reading data logs produced by data_logger.py
#
# Copyright (C) 2021 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import json, zlib, logging  # logging is used by JsonLogReader below

class error(Exception):
    pass


######################################################################
# Log data handlers
######################################################################

# Log data handlers: {name: class, ...}
LogHandlers = {}

# Extract status fields from log
class HandleStatusField:
    SubscriptionIdParts = 0
    ParametersMin = ParametersMax = 1
    DataSets = [
        ('status(<field>)', 'A get_status field name (separate by periods)'),
    ]
    def __init__(self, lmanager, name, name_parts):
        self.status_tracker = lmanager.get_status_tracker()
        self.field_name = name_parts[1]
        self.field_parts = name_parts[1].split('.')
        self.next_update_time = 0.
        self.result = None
    def get_label(self):
        label = '%s field' % (self.field_name,)
        return {'label': label, 'units': 'Unknown'}
    def pull_data(self, req_time):
        if req_time < self.next_update_time:
            return self.result
        db, next_update_time = self.status_tracker.pull_status(req_time)
        for fp in self.field_parts[:-1]:
            db = db.get(fp, {})
        self.result = db.get(self.field_parts[-1], 0.)
        self.next_update_time = next_update_time
        return self.result
LogHandlers["status"] = HandleStatusField

# Extract requested position, velocity, and accel from a trapq log
class HandleTrapQ:
    SubscriptionIdParts = 2
    ParametersMin = ParametersMax = 2
    DataSets = [
        ('trapq(<name>,velocity)', 'Requested velocity for the given trapq'),
        ('trapq(<name>,accel)', 'Requested acceleration for the given trapq'),
        ('trapq(<name>,<axis>)', 'Requested axis (x, y, or z) position'),
        ('trapq(<name>,<axis>_velocity)', 'Requested axis velocity'),
        ('trapq(<name>,<axis>_accel)', 'Requested axis acceleration'),
    ]
    def __init__(self, lmanager, name, name_parts):
        self.name = name
        self.jdispatch = lmanager.get_jdispatch()
        self.cur_data = [(0., 0., 0., 0., (0., 0., 0.), (0., 0., 0.))]
        self.data_pos = 0
        tq, trapq_name, datasel = name_parts
        ptypes = {}
        ptypes['velocity'] = {
            'label': '%s velocity' % (trapq_name,),
            'units': 'Velocity\n(mm/s)', 'func': self._pull_velocity
        }
        ptypes['accel'] = {
            'label': '%s acceleration' % (trapq_name,),
            'units': 'Acceleration\n(mm/s^2)', 'func': self._pull_accel
        }
        for axis, name in enumerate("xyz"):
            ptypes['%s' % (name,)] = {
                'label': '%s %s position' % (trapq_name, name), 'axis': axis,
                'units': 'Position\n(mm)', 'func': self._pull_axis_position
            }
            ptypes['%s_velocity' % (name,)] = {
                'label': '%s %s velocity' % (trapq_name, name), 'axis': axis,
                'units': 'Velocity\n(mm/s)', 'func': self._pull_axis_velocity
            }
            ptypes['%s_accel' % (name,)] = {
                'label': '%s %s acceleration' % (trapq_name, name),
                'axis': axis, 'units': 'Acceleration\n(mm/s^2)',
                'func': self._pull_axis_accel
            }
        pinfo = ptypes.get(datasel)
        if pinfo is None:
            raise error("Unknown trapq data selection '%s'" % (datasel,))
        self.label = {'label': pinfo['label'], 'units': pinfo['units']}
        self.axis = pinfo.get('axis')
        self.pull_data = pinfo['func']
    def get_label(self):
        return self.label
    def _find_move(self, req_time):
        data_pos = self.data_pos
        while 1:
            move = self.cur_data[data_pos]
            print_time, move_t, start_v, accel, start_pos, axes_r = move
            if req_time <= print_time + move_t:
                return move, req_time >= print_time
            data_pos += 1
            if data_pos < len(self.cur_data):
                self.data_pos = data_pos
                continue
            jmsg = self.jdispatch.pull_msg(req_time, self.name)
            if jmsg is None:
                return move, False
            self.cur_data = jmsg['data']
            self.data_pos = data_pos = 0
    def _pull_axis_position(self, req_time):
        move, in_range = self._find_move(req_time)
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        mtime = max(0., min(move_t, req_time - print_time))
        dist = (start_v + .5 * accel * mtime) * mtime
        return start_pos[self.axis] + axes_r[self.axis] * dist
    def _pull_axis_velocity(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return (start_v + accel * (req_time - print_time)) * axes_r[self.axis]
    def _pull_axis_accel(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return accel * axes_r[self.axis]
    def _pull_velocity(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return start_v + accel * (req_time - print_time)
    def _pull_accel(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return accel
LogHandlers["trapq"] = HandleTrapQ

# Extract positions from queue_step log
class HandleStepQ:
    SubscriptionIdParts = 2
    ParametersMin = 1
    ParametersMax = 2
    DataSets = [
        ('stepq(<stepper>)', 'Commanded position of the given stepper'),
        ('stepq(<stepper>,<time>)', 'Commanded position with smooth time'),
    ]
    def __init__(self, lmanager, name, name_parts):
        self.name = name
        self.stepper_name = name_parts[1]
        self.jdispatch = lmanager.get_jdispatch()
        self.step_data = [(0., 0., 0.), (0., 0., 0.)] # [(time, half_pos, pos)]
        self.data_pos = 0
        self.smooth_time = 0.010
        if len(name_parts) == 3:
            try:
                self.smooth_time = float(name_parts[2])
            except ValueError:
                raise error("Invalid stepq smooth time '%s'" % (name_parts[2],))
    def get_label(self):
        label = '%s position' % (self.stepper_name,)
        return {'label': label, 'units': 'Position\n(mm)'}
    def pull_data(self, req_time):
        smooth_time = self.smooth_time
        while 1:
            data_pos = self.data_pos
            step_data = self.step_data
            # Find steps before and after req_time
            next_time, next_halfpos, next_pos = step_data[data_pos + 1]
            if req_time >= next_time:
                if data_pos + 2 < len(step_data):
                    self.data_pos = data_pos + 1
                    continue
                self._pull_block(req_time)
                continue
            last_time, last_halfpos, last_pos = step_data[data_pos]
            # Perform step smoothing
            rtdiff = req_time - last_time
            stime = next_time - last_time
            if stime <= smooth_time:
                pdiff = next_halfpos - last_halfpos
                return last_halfpos + rtdiff * pdiff / stime
            stime = .5 * smooth_time
            if rtdiff < stime:
                pdiff = last_pos - last_halfpos
                return last_halfpos + rtdiff * pdiff / stime
            rtdiff = next_time - req_time
            if rtdiff < stime:
                pdiff = last_pos - next_halfpos
                return next_halfpos + rtdiff * pdiff / stime
            return last_pos
    def _pull_block(self, req_time):
        step_data = self.step_data
        del step_data[:-1]
        self.data_pos = 0
        # Read data block containing requested time frame
        while 1:
            jmsg = self.jdispatch.pull_msg(req_time, self.name)
            if jmsg is None:
                last_time, last_halfpos, last_pos = step_data[0]
                self.step_data.append((req_time + .1, last_pos, last_pos))
                return
            last_time = jmsg['last_step_time']
            if req_time <= last_time:
                break
        # Process block into (time, half_position, position) 3-tuples
        first_time = step_time = jmsg['first_step_time']
        first_clock = jmsg['first_clock']
        step_clock = first_clock - jmsg['data'][0][0]
        cdiff = jmsg['last_clock'] - first_clock
        tdiff = last_time - first_time
        inv_freq = 0.
        if cdiff:
            inv_freq = tdiff / cdiff
        step_dist = jmsg['step_distance']
        step_pos = jmsg['start_position']
        for interval, raw_count, add in jmsg['data']:
            qs_dist = step_dist
            count = raw_count
            if count < 0:
                qs_dist = -qs_dist
                count = -count
            for i in range(count):
                step_clock += interval
                interval += add
                step_time = first_time + (step_clock - first_clock) * inv_freq
                step_halfpos = step_pos + .5 * qs_dist
                step_pos += qs_dist
                step_data.append((step_time, step_halfpos, step_pos))
LogHandlers["stepq"] = HandleStepQ

# Extract stepper motor phase position
class HandleStepPhase:
    SubscriptionIdParts = 0
    ParametersMin = 1
    ParametersMax = 2
    DataSets = [
        ('step_phase(<driver>)', 'Stepper motor phase of the given stepper'),
        ('step_phase(<driver>,microstep)', 'Microstep position for stepper'),
    ]
    def __init__(self, lmanager, name, name_parts):
        self.name = name
        self.driver_name = name_parts[1]
        self.stepper_name = " ".join(self.driver_name.split()[1:])
        config = lmanager.get_initial_status()['configfile']['settings']
        if self.driver_name not in config or self.stepper_name not in config:
            raise error("Unable to find stepper driver '%s' config"
                        % (self.driver_name,))
        if len(name_parts) == 3 and name_parts[2] != "microstep":
            raise error("Unknown step_phase selection '%s'" % (name_parts[2],))
        self.report_microsteps = len(name_parts) == 3
        sconfig = config[self.stepper_name]
        self.phases = sconfig["microsteps"]
        if not self.report_microsteps:
            self.phases *= 4
        self.jdispatch = lmanager.get_jdispatch()
        self.jdispatch.add_handler(name, "stepq:" + self.stepper_name)
        # stepq tracking
        self.step_data = [(0., 0), (0., 0)] # [(time, mcu_pos)]
        self.data_pos = 0
        # driver phase tracking
        self.status_tracker = lmanager.get_status_tracker()
        self.next_status_time = 0.
        self.mcu_phase_offset = 0
    def get_label(self):
        if self.report_microsteps:
            return {'label': '%s microstep' % (self.stepper_name,),
                    'units': 'Microstep'}
        return {'label': '%s phase' % (self.stepper_name,), 'units': 'Phase'}
    def _pull_phase_offset(self, req_time):
        db, self.next_status_time = self.status_tracker.pull_status(req_time)
        mcu_phase_offset = db.get(self.driver_name, {}).get('mcu_phase_offset')
        if mcu_phase_offset is None:
            mcu_phase_offset = 0
        self.mcu_phase_offset = mcu_phase_offset
    def pull_data(self, req_time):
        if req_time >= self.next_status_time:
            self._pull_phase_offset(req_time)
        while 1:
            data_pos = self.data_pos
            step_data = self.step_data
            # Find steps before and after req_time
            next_time, next_pos = step_data[data_pos + 1]
            if req_time >= next_time:
                if data_pos + 2 < len(step_data):
                    self.data_pos = data_pos + 1
                    continue
                self._pull_block(req_time)
                continue
            step_pos = step_data[data_pos][1]
            return (step_pos - self.mcu_phase_offset) % self.phases
    def _pull_block(self, req_time):
        step_data = self.step_data
        del step_data[:-1]
        self.data_pos = 0
        # Read data block containing requested time frame
        while 1:
            jmsg = self.jdispatch.pull_msg(req_time, self.name)
            if jmsg is None:
                last_time, last_pos = step_data[0]
                self.step_data.append((req_time + .1, last_pos))
                return
            last_time = jmsg['last_step_time']
            if req_time <= last_time:
                break
        # Process block into (time, position) 2-tuples
        first_time = step_time = jmsg['first_step_time']
        first_clock = jmsg['first_clock']
        step_clock = first_clock - jmsg['data'][0][0]
        cdiff = jmsg['last_clock'] - first_clock
        tdiff = last_time - first_time
        inv_freq = 0.
        if cdiff:
            inv_freq = tdiff / cdiff
        step_pos = jmsg['start_mcu_position']
        for interval, raw_count, add in jmsg['data']:
            qs_dist = 1
            count = raw_count
            if count < 0:
                qs_dist = -1
                count = -count
            for i in range(count):
                step_clock += interval
                interval += add
                step_time = first_time + (step_clock - first_clock) * inv_freq
                step_pos += qs_dist
                step_data.append((step_time, step_pos))
LogHandlers["step_phase"] = HandleStepPhase

# Extract accelerometer data
class HandleADXL345:
    SubscriptionIdParts = 2
    ParametersMin = ParametersMax = 2
    DataSets = [
        ('adxl345(<name>,<axis>)', 'Accelerometer for given axis (x, y, or z)'),
    ]
    def __init__(self, lmanager, name, name_parts):
        self.name = name
        self.adxl_name = name_parts[1]
        self.jdispatch = lmanager.get_jdispatch()
        self.next_accel_time = self.last_accel_time = 0.
        self.next_accel = self.last_accel = (0., 0., 0.)
        self.cur_data = []
        self.data_pos = 0
        if name_parts[2] not in 'xyz':
            raise error("Unknown adxl345 data selection '%s'" % (name,))
        self.axis = 'xyz'.index(name_parts[2])
    def get_label(self):
        label = '%s %s acceleration' % (self.adxl_name, 'xyz'[self.axis])
        return {'label': label, 'units': 'Acceleration\n(mm/s^2)'}
    def pull_data(self, req_time):
        axis = self.axis
        while 1:
            if req_time <= self.next_accel_time:
                adiff = self.next_accel[axis] - self.last_accel[axis]
                tdiff = self.next_accel_time - self.last_accel_time
                rtdiff = req_time - self.last_accel_time
                return self.last_accel[axis] + rtdiff * adiff / tdiff
            if self.data_pos >= len(self.cur_data):
                # Read next data block
                jmsg = self.jdispatch.pull_msg(req_time, self.name)
                if jmsg is None:
                    return 0.
                self.cur_data = jmsg['data']
                self.data_pos = 0
                continue
            self.last_accel = self.next_accel
            self.last_accel_time = self.next_accel_time
            self.next_accel_time, x, y, z = self.cur_data[self.data_pos]
            self.next_accel = (x, y, z)
            self.data_pos += 1
LogHandlers["adxl345"] = HandleADXL345

# Extract positions from magnetic angle sensor
class HandleAngle:
    SubscriptionIdParts = 2
    ParametersMin = ParametersMax = 1
    DataSets = [
        ('angle(<name>)', 'Angle sensor position'),
    ]
    def __init__(self, lmanager, name, name_parts):
        self.name = name
        self.angle_name = name_parts[1]
        self.jdispatch = lmanager.get_jdispatch()
        self.next_angle_time = self.last_angle_time = 0.
        self.next_angle = self.last_angle = 0.
        self.cur_data = []
        self.data_pos = 0
        self.position_offset = 0.
        self.angle_dist = 1.
        # Determine angle distance from associated stepper's rotation_distance
        config = lmanager.get_initial_status()['configfile']['settings']
        aname = 'angle %s' % (self.angle_name,)
        stepper_name = config.get(aname, {}).get('stepper')
        if stepper_name is not None:
            sconfig = config.get(stepper_name, {})
            rotation_distance = sconfig.get('rotation_distance', 1.)
            gear_ratio = sconfig.get('gear_ratio', ())
            if type(gear_ratio) == str: # XXX
                gear_ratio = [[float(v.strip()) for v in gr.split(':')]
                              for gr in gear_ratio.split(',')]
            for n, d in gear_ratio:
                rotation_distance *= d / n
            self.angle_dist = rotation_distance / 65536.
    def get_label(self):
        label = '%s position' % (self.angle_name,)
        return {'label': label, 'units': 'Position\n(mm)'}
    def pull_data(self, req_time):
        while 1:
            if req_time <= self.next_angle_time:
                pdiff = self.next_angle - self.last_angle
                tdiff = self.next_angle_time - self.last_angle_time
                rtdiff = req_time - self.last_angle_time
                po = rtdiff * pdiff / tdiff
                return ((self.last_angle + po) * self.angle_dist
                        + self.position_offset)
            if self.data_pos >= len(self.cur_data):
                # Read next data block
                jmsg = self.jdispatch.pull_msg(req_time, self.name)
                if jmsg is None:
                    return (self.next_angle * self.angle_dist
                            + self.position_offset)
                self.cur_data = jmsg['data']
                position_offset = jmsg.get('position_offset')
                if position_offset is not None:
                    self.position_offset = position_offset
                self.data_pos = 0
                continue
            self.last_angle = self.next_angle
            self.last_angle_time = self.next_angle_time
            self.next_angle_time, self.next_angle = self.cur_data[self.data_pos]
            self.data_pos += 1
LogHandlers["angle"] = HandleAngle


######################################################################
# Log reading
######################################################################

# Read, uncompress, and parse messages in a log built by data_logger.py
class JsonLogReader:
    def __init__(self, filename):
        self.file = open(filename, "rb")
        self.comp = zlib.decompressobj(31)
        self.msgs = [b""]
    def seek(self, pos):
        self.file.seek(pos)
        self.comp = zlib.decompressobj(-15)
    def pull_msg(self):
        msgs = self.msgs
        while 1:
            if len(msgs) > 1:
                msg = msgs.pop(0)
                try:
                    json_msg = json.loads(msg)
                except:
                    logging.exception("Unable to parse line")
                    continue
                return json_msg
            raw_data = self.file.read(8192)
            if not raw_data:
                return None
            data = self.comp.decompress(raw_data)
            parts = data.split(b'\x03')
            parts[0] = msgs[0] + parts[0]
            self.msgs = msgs = parts

# Store messages in per-subscription queues until handlers are ready for them
class JsonDispatcher:
    def __init__(self, log_prefix):
        self.names = {}
        self.queues = {}
        self.last_read_time = 0.
        self.log_reader = JsonLogReader(log_prefix + ".json.gz")
        self.is_eof = False
    def check_end_of_data(self):
        return self.is_eof and not any(self.queues.values())
    def add_handler(self, name, subscription_id):
        self.names[name] = q = []
        self.queues.setdefault(subscription_id, []).append(q)
    def pull_msg(self, req_time, name):
        q = self.names[name]
        while 1:
            if q:
                return q.pop(0)
            if req_time + 1. < self.last_read_time:
                return None
            json_msg = self.log_reader.pull_msg()
            if json_msg is None:
                self.is_eof = True
                return None
            qid = json_msg.get('q')
            if qid == 'status':
                pt = json_msg.get('toolhead', {}).get('estimated_print_time')
                if pt is not None:
                    self.last_read_time = pt
            for mq in self.queues.get(qid, []):
                mq.append(json_msg['params'])


######################################################################
# Dataset and log tracking
######################################################################

# Tracking of get_status messages
class TrackStatus:
    def __init__(self, lmanager, name, start_status):
        self.name = name
        self.jdispatch = lmanager.get_jdispatch()
        self.next_status_time = 0.
        self.status = dict(start_status)
        self.next_update = {}
    def pull_status(self, req_time):
        status = self.status
        while 1:
            if req_time < self.next_status_time:
                return status, self.next_status_time
            for k, v in self.next_update.items():
                status.setdefault(k, {}).update(v)
            jmsg = self.jdispatch.pull_msg(req_time, self.name)
            if jmsg is None:
                self.next_status_time = req_time + 0.100
                self.next_update = {}
                return status, self.next_status_time
            self.next_update = jmsg['status']
            th = self.next_update.get('toolhead', {})
            self.next_status_time = th.get('estimated_print_time', 0.)

# Split a string by commas while keeping parenthesis intact
def param_split(line):
    out = []
    level = prev = 0
    for i, c in enumerate(line):
        if not level and c == ',':
            out.append(line[prev:i])
            prev = i+1
        elif c == '(':
            level += 1
        elif level and c == ')':
            level -= 1
    out.append(line[prev:])
    return out

# Split a dataset name (eg, "abc(def,ghi)") into parts
def name_split(name):
    if '(' not in name or not name.endswith(')'):
        raise error("Malformed dataset name '%s'" % (name,))
    aname, aparams = name.split('(', 1)
    return [aname] + param_split(aparams[:-1])
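
# Illustrative sketch (not part of the original module): parentheses nest,
# so commas inside inner datasets do not split the outer parameter list:
#   name_split("deviation(stepq(stepper_x),kin(stepper_x))")
#   -> ['deviation', 'stepq(stepper_x)', 'kin(stepper_x)']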

# Return a description of possible datasets
def list_datasets():
    datasets = []
    for lh in sorted(LogHandlers.keys()):
        datasets += LogHandlers[lh].DataSets
    return datasets

# Main log access management
class LogManager:
    error = error
    def __init__(self, log_prefix):
        self.index_reader = JsonLogReader(log_prefix + ".index.gz")
        self.jdispatch = JsonDispatcher(log_prefix)
        self.initial_start_time = self.start_time = 0.
        self.datasets = {}
        self.initial_status = {}
        self.start_status = {}
        self.log_subscriptions = {}
        self.status_tracker = None
    def setup_index(self):
        fmsg = self.index_reader.pull_msg()
        self.initial_status = status = fmsg['status']
        self.start_status = dict(status)
        start_time = status['toolhead']['estimated_print_time']
        self.initial_start_time = self.start_time = start_time
        self.log_subscriptions = fmsg.get('subscriptions', {})
    def get_initial_status(self):
        return self.initial_status
    def available_dataset_types(self):
        return {name: None for name in LogHandlers}
    def get_jdispatch(self):
        return self.jdispatch
    def seek_time(self, req_time):
        self.start_time = req_start_time = self.initial_start_time + req_time
        start_status = self.start_status
        seek_time = max(self.initial_start_time, req_start_time - 1.)
        file_position = 0
        while 1:
            fmsg = self.index_reader.pull_msg()
            if fmsg is None:
                break
            th = fmsg['status']['toolhead']
            ptime = max(th['estimated_print_time'], th.get('print_time', 0.))
            if ptime > seek_time:
                break
            for k, v in fmsg["status"].items():
                start_status.setdefault(k, {}).update(v)
            file_position = fmsg['file_position']
        if file_position:
            self.jdispatch.log_reader.seek(file_position)
    def get_initial_start_time(self):
        return self.initial_start_time
    def get_start_time(self):
        return self.start_time
    def get_status_tracker(self):
        if self.status_tracker is None:
            self.status_tracker = TrackStatus(self, "status", self.start_status)
            self.jdispatch.add_handler("status", "status")
        return self.status_tracker
    def setup_dataset(self, name):
        if name in self.datasets:
            return self.datasets[name]
        name_parts = name_split(name)
        cls = LogHandlers.get(name_parts[0])
        if cls is None:
            raise error("Unknown dataset '%s'" % (name_parts[0],))
        len_pp = len(name_parts) - 1
        if len_pp < cls.ParametersMin or len_pp > cls.ParametersMax:
            raise error("Invalid number of parameters for '%s'" % (name,))
        if cls.SubscriptionIdParts:
            subscription_id = ":".join(name_parts[:cls.SubscriptionIdParts])
            if subscription_id not in self.log_subscriptions:
                raise error("Dataset '%s' not in capture" % (subscription_id,))
            self.jdispatch.add_handler(name, subscription_id)
        self.datasets[name] = hdl = cls(self, name, name_parts)
        return hdl