mirror of https://github.com/Desuuuu/klipper.git
motan_graph: Initial support for graphing data log
Signed-off-by: Kevin O'Connor <kevin@koconnor.net>
parent 171a73e380
commit 42080751d7
@@ -0,0 +1,195 @@
# Log data analyzing functions
#
# Copyright (C) 2021 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import collections


######################################################################
# Analysis code
######################################################################

# Analyzer handlers: {name: class, ...}
AHandlers = {}

# Calculate a derivative (position to velocity, or velocity to accel)
class GenDerivative:
    DataSets = [
        ('derivative:<dataset>', 'Derivative of the given dataset'),
    ]
    def __init__(self, amanager, params):
        self.amanager = amanager
        self.source = params
        amanager.setup_dataset(self.source)
    def get_label(self):
        label = self.amanager.get_label(self.source)
        lname = label['label']
        units = label['units']
        if '(mm)' in units:
            rep = [('Position', 'Velocity'), ('(mm)', '(mm/s)')]
        elif '(mm/s)' in units:
            rep = [('Velocity', 'Acceleration'), ('(mm/s)', '(mm/s^2)')]
        else:
            return {'label': 'Derivative', 'units': 'Unknown'}
        for old, new in rep:
            lname = lname.replace(old, new).replace(old.lower(), new.lower())
            units = units.replace(old, new).replace(old.lower(), new.lower())
        return {'label': lname, 'units': units}
    def generate_data(self):
        inv_seg_time = 1. / self.amanager.get_segment_time()
        data = self.amanager.get_datasets()[self.source]
        deriv = [(data[i+1] - data[i]) * inv_seg_time
                 for i in range(len(data)-1)]
        return [deriv[0]] + deriv
AHandlers["derivative"] = GenDerivative
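
# Worked example (illustrative): with segment_time=0.1 and source samples
# [0., 1., 3.], the finite differences are [(1-0)/0.1, (3-1)/0.1] = [10., 20.];
# the first value is repeated so the result has the same length as the input.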

# Calculate a kinematic stepper position from the toolhead requested position
class GenKinematicPosition:
    DataSets = [
        ('kin:<stepper>', 'Stepper position derived from toolhead kinematics'),
    ]
    def __init__(self, amanager, params):
        self.amanager = amanager
        status = self.amanager.get_initial_status()
        kin = status['configfile']['settings']['printer']['kinematics']
        if kin not in ['cartesian', 'corexy']:
            raise amanager.error("Unsupported kinematics '%s'" % (kin,))
        if params not in ['stepper_x', 'stepper_y', 'stepper_z']:
            raise amanager.error("Unknown stepper '%s'" % (params,))
        if kin == 'corexy' and params in ['stepper_x', 'stepper_y']:
            self.source1 = 'trapq:toolhead:x'
            self.source2 = 'trapq:toolhead:y'
            if params == 'stepper_x':
                self.generate_data = self.generate_data_corexy_plus
            else:
                self.generate_data = self.generate_data_corexy_minus
            amanager.setup_dataset(self.source1)
            amanager.setup_dataset(self.source2)
        else:
            self.source1 = 'trapq:toolhead:' + params[-1:]
            self.source2 = None
            self.generate_data = self.generate_data_passthrough
            amanager.setup_dataset(self.source1)
    def get_label(self):
        return {'label': 'Position', 'units': 'Position\n(mm)'}
    def generate_data_corexy_plus(self):
        datasets = self.amanager.get_datasets()
        data1 = datasets[self.source1]
        data2 = datasets[self.source2]
        return [d1 + d2 for d1, d2 in zip(data1, data2)]
    def generate_data_corexy_minus(self):
        datasets = self.amanager.get_datasets()
        data1 = datasets[self.source1]
        data2 = datasets[self.source2]
        return [d1 - d2 for d1, d2 in zip(data1, data2)]
    def generate_data_passthrough(self):
        return self.amanager.get_datasets()[self.source1]
AHandlers["kin"] = GenKinematicPosition
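
# Note: this mirrors Klipper's corexy convention where the two motor
# positions are a = x + y and b = x - y; for cartesian (and corexy z)
# the stepper simply tracks the matching toolhead axis.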

# Calculate a position deviation
class GenDeviation:
    DataSets = [
        ('deviation:<dataset1>-<dataset2>', 'Difference between datasets'),
    ]
    def __init__(self, amanager, params):
        self.amanager = amanager
        parts = params.split('-')
        if len(parts) != 2:
            raise amanager.error("Invalid deviation '%s'" % (params,))
        self.source1, self.source2 = parts
        amanager.setup_dataset(self.source1)
        amanager.setup_dataset(self.source2)
    def get_label(self):
        label1 = self.amanager.get_label(self.source1)
        label2 = self.amanager.get_label(self.source2)
        if label1['units'] != label2['units']:
            return {'label': 'Deviation', 'units': 'Unknown'}
        parts = label1['units'].split('\n')
        units = '\n'.join([parts[0]] + ['Deviation'] + parts[1:])
        return {'label': label1['label'] + ' deviation', 'units': units}
    def generate_data(self):
        datasets = self.amanager.get_datasets()
        data1 = datasets[self.source1]
        data2 = datasets[self.source2]
        return [d1 - d2 for d1, d2 in zip(data1, data2)]
AHandlers["deviation"] = GenDeviation
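
# For example, two datasets with matching units of 'Position\n(mm)' yield
# a '... deviation' label with units 'Position\nDeviation\n(mm)'.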


######################################################################
# List datasets
######################################################################

def list_datasets():
    datasets = []
    for ah in sorted(AHandlers.keys()):
        datasets += AHandlers[ah].DataSets
    return datasets


######################################################################
# Data generation
######################################################################

# Manage raw and generated data samples
class AnalyzerManager:
    error = None
    def __init__(self, lmanager, segment_time):
        self.lmanager = lmanager
        self.error = lmanager.error
        self.segment_time = segment_time
        self.raw_datasets = collections.OrderedDict()
        self.gen_datasets = collections.OrderedDict()
        self.datasets = {}
        self.dataset_times = []
        self.duration = 5.
    def set_duration(self, duration):
        self.duration = duration
    def get_segment_time(self):
        return self.segment_time
    def get_datasets(self):
        return self.datasets
    def get_dataset_times(self):
        return self.dataset_times
    def get_initial_status(self):
        return self.lmanager.get_initial_status()
    def setup_dataset(self, name):
        name = name.strip()
        if name in self.raw_datasets:
            return self.raw_datasets[name]
        if name in self.gen_datasets:
            return self.gen_datasets[name]
        nparts = name.split(':')
        if nparts[0] in self.lmanager.available_dataset_types():
            hdl = self.lmanager.setup_dataset(name)
            self.raw_datasets[name] = hdl
        else:
            cls = AHandlers.get(nparts[0])
            if cls is None:
                raise self.error("Unknown dataset '%s'" % (name,))
            hdl = cls(self, ':'.join(nparts[1:]))
            self.gen_datasets[name] = hdl
        self.datasets[name] = []
        return hdl
    def get_label(self, dataset):
        hdl = self.raw_datasets.get(dataset)
        if hdl is None:
            hdl = self.gen_datasets.get(dataset)
        if hdl is None:
            raise self.error("Unknown dataset '%s'" % (dataset,))
        return hdl.get_label()
    def generate_datasets(self):
        # Generate raw data
        list_hdls = [(self.datasets[name], hdl)
                     for name, hdl in self.raw_datasets.items()]
        initial_start_time = self.lmanager.get_initial_start_time()
        start_time = t = self.lmanager.get_start_time()
        end_time = start_time + self.duration
        while t < end_time:
            t += self.segment_time
            self.dataset_times.append(t - initial_start_time)
            for dl, hdl in list_hdls:
                dl.append(hdl.pull_data(t))
        # Generate analyzer data
        for name, hdl in self.gen_datasets.items():
            self.datasets[name] = hdl.generate_data()
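
Dataset names compose: if the first colon-separated field names an analyzer,
the remainder is handed to it as a parameter, so generated datasets can wrap
raw ones (e.g. derivative:stepq:stepper_x). A minimal usage sketch, assuming a
capture made by data_logger.py under the hypothetical prefix "mylog" that
subscribed to the stepper_x step queue:

    import readlog, analyzers
    lmanager = readlog.LogManager("mylog")
    lmanager.setup_index()
    amanager = analyzers.AnalyzerManager(lmanager, segment_time=0.000100)
    amanager.set_duration(2.0)
    amanager.setup_dataset("derivative:stepq:stepper_x")
    amanager.generate_datasets()
    times = amanager.get_dataset_times()
    velocity = amanager.get_datasets()["derivative:stepq:stepper_x"]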

@@ -0,0 +1,144 @@
#!/usr/bin/env python
# Script to perform motion analysis and graphing
#
# Copyright (C) 2019-2021 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import sys, optparse, ast
import matplotlib
import readlog, analyzers
try:
    import urlparse
except ImportError:
    import urllib.parse as urlparse


######################################################################
# Graphing
######################################################################

def plot_motion(amanager, graphs):
    # Generate data
    for graph in graphs:
        for dataset, plot_params in graph:
            amanager.setup_dataset(dataset)
    amanager.generate_datasets()
    datasets = amanager.get_datasets()
    times = amanager.get_dataset_times()
    # Build plot
    fontP = matplotlib.font_manager.FontProperties()
    fontP.set_size('x-small')
    fig, rows = matplotlib.pyplot.subplots(nrows=len(graphs), sharex=True)
    if len(graphs) == 1:
        rows = [rows]
    rows[0].set_title("Motion Analysis")
    for graph, graph_ax in zip(graphs, rows):
        graph_units = graph_twin_units = twin_ax = None
        for dataset, plot_params in graph:
            label = amanager.get_label(dataset)
            ax = graph_ax
            if graph_units is None:
                graph_units = label['units']
                ax.set_ylabel(graph_units)
            elif label['units'] != graph_units:
                if graph_twin_units is None:
                    ax = twin_ax = graph_ax.twinx()
                    graph_twin_units = label['units']
                    ax.set_ylabel(graph_twin_units)
                elif label['units'] == graph_twin_units:
                    ax = twin_ax
                else:
                    graph_units = "Unknown"
                    ax.set_ylabel(graph_units)
            pparams = {'label': label['label'], 'alpha': 0.8}
            pparams.update(plot_params)
            ax.plot(times, datasets[dataset], **pparams)
        ax.legend(loc='best', prop=fontP)
        ax.grid(True)
    rows[-1].set_xlabel('Time (s)')
    return fig
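
# Illustrative shape of the "graphs" argument: each inner list becomes one
# subplot row, and datasets with differing units share the row via a twin
# y-axis.  For example:
#   graphs = [[('trapq:toolhead:velocity', {'color': 'green'})],
#             [('stepq:stepper_x', {}), ('kin:stepper_x', {})]]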


######################################################################
# Startup
######################################################################

def setup_matplotlib(output_to_file):
    global matplotlib
    if output_to_file:
        matplotlib.use('Agg')
    import matplotlib.pyplot, matplotlib.dates, matplotlib.font_manager
    import matplotlib.ticker

def parse_graph_description(desc):
    if '?' not in desc:
        return (desc, {})
    dataset, params = desc.split('?', 1)
    params = {k: v for k, v in urlparse.parse_qsl(params)}
    for fkey in ['alpha']:
        if fkey in params:
            params[fkey] = float(params[fkey])
    return (dataset, params)
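
# For example, parse_graph_description("stepq:stepper_x?color=red&alpha=0.4")
# returns ('stepq:stepper_x', {'color': 'red', 'alpha': 0.4}).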

def list_datasets():
    datasets = readlog.list_datasets() + analyzers.list_datasets()
    out = ["\nAvailable datasets:\n"]
    for dataset, desc in datasets:
        out.append("%-24s: %s\n" % (dataset, desc))
    out.append("\n")
    sys.stdout.write("".join(out))
    sys.exit(0)

def main():
    # Parse command-line arguments
    usage = "%prog [options] <logname>"
    opts = optparse.OptionParser(usage)
    opts.add_option("-o", "--output", type="string", dest="output",
                    default=None, help="filename of output graph")
    opts.add_option("-s", "--skip", type="float", default=0.,
                    help="Set the start time to graph")
    opts.add_option("-d", "--duration", type="float", default=5.,
                    help="Number of seconds to graph")
    opts.add_option("--segment-time", type="float", default=0.000100,
                    help="Analysis segment time (default 0.000100 seconds)")
    opts.add_option("-g", "--graph", help="Graph to generate (python literal)")
    opts.add_option("-l", "--list-datasets", action="store_true",
                    help="List available datasets")
    options, args = opts.parse_args()
    if options.list_datasets:
        list_datasets()
    if len(args) != 1:
        opts.error("Incorrect number of arguments")
    log_prefix = args[0]

    # Open data files
    lmanager = readlog.LogManager(log_prefix)
    lmanager.setup_index()
    lmanager.seek_time(options.skip)
    amanager = analyzers.AnalyzerManager(lmanager, options.segment_time)
    amanager.set_duration(options.duration)

    # Default graphs to draw
    graph_descs = [
        ["trapq:toolhead:velocity?color=green"],
        ["trapq:toolhead:accel?color=green"],
        ["deviation:stepq:stepper_x-kin:stepper_x?color=blue"],
    ]
    if options.graph is not None:
        graph_descs = ast.literal_eval(options.graph)
    graphs = [[parse_graph_description(g) for g in graph_row]
              for graph_row in graph_descs]

    # Draw graph
    setup_matplotlib(options.output is not None)
    fig = plot_motion(amanager, graphs)

    # Show graph
    if options.output is None:
        matplotlib.pyplot.show()
    else:
        fig.set_size_inches(8, 6)
        fig.savefig(options.output)

if __name__ == '__main__':
    main()
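
A typical invocation (illustrative; "mylog" is whatever prefix was passed to
data_logger.py):

    ./motan_graph.py mylog -s 10 -d 2 -o graph.png \
        -g '[["trapq:toolhead:x_velocity?color=red"], ["stepq:stepper_x"]]'

The -g literal is a Python list of rows; each row lists the dataset
descriptions drawn on one subplot.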

@@ -0,0 +1,311 @@
# Code for reading data logs produced by data_logger.py
#
# Copyright (C) 2021 Kevin O'Connor <kevin@koconnor.net>
#
# This file may be distributed under the terms of the GNU GPLv3 license.
import json, zlib, logging

class error(Exception):
    pass


######################################################################
# Log data handlers
######################################################################

# Log data handlers: {name: class, ...}
LogHandlers = {}

# Extract requested position, velocity, and accel from a trapq log
class HandleTrapQ:
    ParametersSubscriptionId = 2
    ParametersMin = ParametersMax = 3
    DataSets = [
        ('trapq:<name>:velocity', 'Requested velocity for the given trapq'),
        ('trapq:<name>:<axis>', 'Requested axis (x, y, or z) position'),
        ('trapq:<name>:<axis>_velocity', 'Requested axis velocity'),
        ('trapq:<name>:<axis>_accel', 'Requested axis acceleration'),
    ]
    def __init__(self, lmanager, name):
        self.name = name
        self.jdispatch = lmanager.get_jdispatch()
        self.cur_data = [(0., 0., 0., 0., (0., 0., 0.), (0., 0., 0.))]
        self.data_pos = 0
        tq, trapq_name, datasel = name.split(':')
        ptypes = {}
        ptypes['velocity'] = {
            'label': '%s velocity' % (trapq_name,),
            'units': 'Velocity\n(mm/s)', 'func': self._pull_velocity
        }
        ptypes['accel'] = {
            'label': '%s acceleration' % (trapq_name,),
            'units': 'Acceleration\n(mm/s^2)', 'func': self._pull_accel
        }
        for axis, axis_name in enumerate("xyz"):
            ptypes['%s' % (axis_name,)] = {
                'label': '%s %s position' % (trapq_name, axis_name),
                'axis': axis,
                'units': 'Position\n(mm)', 'func': self._pull_axis_position
            }
            ptypes['%s_velocity' % (axis_name,)] = {
                'label': '%s %s velocity' % (trapq_name, axis_name),
                'axis': axis,
                'units': 'Velocity\n(mm/s)', 'func': self._pull_axis_velocity
            }
            ptypes['%s_accel' % (axis_name,)] = {
                'label': '%s %s acceleration' % (trapq_name, axis_name),
                'axis': axis, 'units': 'Acceleration\n(mm/s^2)',
                'func': self._pull_axis_accel
            }
        pinfo = ptypes.get(datasel)
        if pinfo is None:
            raise error("Unknown trapq data selection '%s'" % (datasel,))
        self.label = {'label': pinfo['label'], 'units': pinfo['units']}
        self.axis = pinfo.get('axis')
        self.pull_data = pinfo['func']
    def get_label(self):
        return self.label
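    # Each trapq log message carries a list of moves; a move is a 6-tuple
    # (print_time, move_t, start_v, accel, start_pos, axes_r).  At a time
    # dt = req_time - print_time into the move, the distance traveled is
    # dist = start_v*dt + .5*accel*dt**2, and each axis position is
    # start_pos[axis] + axes_r[axis] * dist.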
    def _find_move(self, req_time):
        data_pos = self.data_pos
        while 1:
            move = self.cur_data[data_pos]
            print_time, move_t, start_v, accel, start_pos, axes_r = move
            if req_time <= print_time + move_t:
                return move, req_time >= print_time
            data_pos += 1
            if data_pos < len(self.cur_data):
                self.data_pos = data_pos
                continue
            jmsg = self.jdispatch.pull_msg(req_time, self.name)
            if jmsg is None:
                return move, False
            self.cur_data = jmsg['data']
            self.data_pos = data_pos = 0
    def _pull_axis_position(self, req_time):
        move, in_range = self._find_move(req_time)
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        mtime = max(0., min(move_t, req_time - print_time))
        dist = (start_v + .5 * accel * mtime) * mtime
        return start_pos[self.axis] + axes_r[self.axis] * dist
    def _pull_axis_velocity(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return (start_v + accel * (req_time - print_time)) * axes_r[self.axis]
    def _pull_axis_accel(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return accel * axes_r[self.axis]
    def _pull_velocity(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return start_v + accel * (req_time - print_time)
    def _pull_accel(self, req_time):
        move, in_range = self._find_move(req_time)
        if not in_range:
            return 0.
        print_time, move_t, start_v, accel, start_pos, axes_r = move
        return accel
LogHandlers["trapq"] = HandleTrapQ

# Extract positions from queue_step log
class HandleStepQ:
    ParametersSubscriptionId = 2
    ParametersMin = ParametersMax = 2
    DataSets = [
        ('stepq:<stepper>', 'Commanded position of the given stepper'),
    ]
    def __init__(self, lmanager, name):
        self.name = name
        self.jdispatch = lmanager.get_jdispatch()
        self.step_data = [(0., 0.), (0., 0.)]  # [(time, pos), ...]
        self.data_pos = 0
    def get_label(self):
        label = '%s position' % (self.name.split(':')[1],)
        return {'label': label, 'units': 'Position\n(mm)'}
    def pull_data(self, req_time):
        while 1:
            data_pos = self.data_pos
            step_data = self.step_data
            next_time, next_pos = step_data[data_pos + 1]
            if req_time < next_time:
                return step_data[data_pos][1]
            if data_pos + 2 < len(step_data):
                self.data_pos = data_pos + 1
                continue
            self._pull_block(req_time)
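    # A queue_step entry is an (interval, count, add) triple: "count" steps
    # are taken, the first after "interval" clock ticks, with "add" added to
    # the interval after every step; a negative count steps in the reverse
    # direction.  Step clocks are mapped to print times via a linear fit of
    # the block's first/last clock against its first/last step time.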
    def _pull_block(self, req_time):
        step_data = self.step_data
        del step_data[:-1]
        self.data_pos = 0
        # Read data block containing requested time frame
        while 1:
            jmsg = self.jdispatch.pull_msg(req_time, self.name)
            if jmsg is None:
                self.step_data.append((req_time + .1, step_data[0][1]))
                return
            last_time = jmsg['last_step_time']
            if req_time <= last_time:
                break
        # Process block into (time, position) 2-tuples
        first_time = step_time = jmsg['first_step_time']
        first_clock = jmsg['first_clock']
        step_clock = first_clock - jmsg['data'][0][0]
        cdiff = jmsg['last_clock'] - first_clock
        tdiff = last_time - first_time
        inv_freq = 0.
        if cdiff:
            inv_freq = tdiff / cdiff
        step_dist = jmsg['step_distance']
        step_pos = jmsg['start_position']
        for interval, raw_count, add in jmsg['data']:
            qs_dist = step_dist
            count = raw_count
            if count < 0:
                qs_dist = -qs_dist
                count = -count
            for i in range(count):
                step_clock += interval
                interval += add
                step_time = first_time + (step_clock - first_clock) * inv_freq
                step_pos += qs_dist
                step_data.append((step_time, step_pos))
LogHandlers["stepq"] = HandleStepQ


######################################################################
# List datasets
######################################################################

def list_datasets():
    datasets = []
    for lh in sorted(LogHandlers.keys()):
        datasets += LogHandlers[lh].DataSets
    return datasets


######################################################################
# Log reading
######################################################################

# Read, uncompress, and parse messages in a log built by data_logger.py
class JsonLogReader:
    def __init__(self, filename):
        self.file = open(filename, "rb")
        # wbits=31: decode the gzip container from the start of the file
        self.comp = zlib.decompressobj(31)
        self.msgs = [b""]
    def seek(self, pos):
        self.file.seek(pos)
        # A seek resumes mid-stream, past the gzip header - switch to a
        # raw deflate decompressor (wbits=-15)
        self.comp = zlib.decompressobj(-15)
    def pull_msg(self):
        msgs = self.msgs
        while 1:
            if len(msgs) > 1:
                msg = msgs.pop(0)
                try:
                    json_msg = json.loads(msg)
                except ValueError:
                    logging.exception("Unable to parse line")
                    continue
                return json_msg
            raw_data = self.file.read(8192)
            if not raw_data:
                return None
            data = self.comp.decompress(raw_data)
            # Messages are separated by 0x03 bytes; carry any partial
            # trailing message over to the next read
            parts = data.split(b'\x03')
            parts[0] = msgs[0] + parts[0]
            self.msgs = msgs = parts

# Store messages in per-subscription queues until handlers are ready for them
class JsonDispatcher:
    def __init__(self, log_prefix):
        self.names = {}
        self.queues = {}
        self.last_read_time = 0.
        self.log_reader = JsonLogReader(log_prefix + ".json.gz")
        self.is_eof = False
    def check_end_of_data(self):
        return self.is_eof and not any(self.queues.values())
    def add_handler(self, name, subscription_id):
        self.names[name] = q = []
        self.queues.setdefault(subscription_id, []).append(q)
    def pull_msg(self, req_time, name):
        q = self.names[name]
        while 1:
            if q:
                return q.pop(0)
            if req_time + 1. < self.last_read_time:
                return None
            json_msg = self.log_reader.pull_msg()
            if json_msg is None:
                self.is_eof = True
                return None
            qid = json_msg.get('q')
            if qid == 'status':
                pt = json_msg.get('toolhead', {}).get('estimated_print_time')
                if pt is not None:
                    self.last_read_time = pt
            for mq in self.queues.get(qid, []):
                mq.append(json_msg['params'])
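
# Routing sketch: add_handler("stepq:stepper_x", "stepq:stepper_x") registers
# a queue under that subscription id; pull_msg() then reads log messages,
# fanning each message's params out to every queue registered for its 'q'
# field, until the requested dataset's queue has data (or reading has
# progressed a second past req_time).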

# Main log access management
class LogManager:
    error = error
    def __init__(self, log_prefix):
        self.index_reader = JsonLogReader(log_prefix + ".index.gz")
        self.jdispatch = JsonDispatcher(log_prefix)
        self.initial_start_time = self.start_time = 0.
        self.datasets = {}
        self.initial_status = {}
        self.log_subscriptions = {}
        self.status = {}
    def setup_index(self):
        fmsg = self.index_reader.pull_msg()
        self.initial_status = status = fmsg['status']
        self.status = dict(status)
        start_time = status['toolhead']['estimated_print_time']
        self.initial_start_time = self.start_time = start_time
        self.log_subscriptions = fmsg.get('subscriptions', {})
    def get_initial_status(self):
        return self.initial_status
    def available_dataset_types(self):
        return {name: None for name in LogHandlers}
    def get_jdispatch(self):
        return self.jdispatch
    def seek_time(self, req_time):
        self.start_time = req_start_time = self.initial_start_time + req_time
        seek_time = max(self.initial_start_time, req_start_time - 1.)
        file_position = 0
        while 1:
            fmsg = self.index_reader.pull_msg()
            if fmsg is None:
                break
            th = fmsg['status']['toolhead']
            ptime = max(th['estimated_print_time'], th.get('print_time', 0.))
            if ptime > seek_time:
                break
            file_position = fmsg['file_position']
        if file_position:
            self.jdispatch.log_reader.seek(file_position)
    def get_initial_start_time(self):
        return self.initial_start_time
    def get_start_time(self):
        return self.start_time
    def setup_dataset(self, name):
        if name in self.datasets:
            return self.datasets[name]
        parts = name.split(':')
        cls = LogHandlers.get(parts[0])
        if cls is None:
            raise error("Unknown dataset '%s'" % (parts[0],))
        if len(parts) < cls.ParametersMin or len(parts) > cls.ParametersMax:
            raise error("Invalid number of parameters for %s" % (parts[0],))
        subscription_id = ":".join(parts[:cls.ParametersSubscriptionId])
        if subscription_id not in self.log_subscriptions:
            raise error("Dataset '%s' not in capture" % (subscription_id,))
        self.datasets[name] = hdl = cls(self, name)
        self.jdispatch.add_handler(name, subscription_id)
        return hdl
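
A minimal sketch of driving readlog directly (illustrative; "mylog" is a
hypothetical capture prefix, and it assumes the toolhead trapq was subscribed
during capture):

    import readlog
    lmanager = readlog.LogManager("mylog")
    lmanager.setup_index()
    lmanager.seek_time(5.0)  # skip the first five seconds of the capture
    hdl = lmanager.setup_dataset("trapq:toolhead:velocity")
    t = lmanager.get_start_time()
    print(hdl.pull_data(t + 0.5))  # requested velocity half a second later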