Slight improvement to the metrics class.

Major performance improvement by reducing the number of times we convert to and from Box.
Bumped to version 1.1 to stay in line with our release tags on GitHub.
This commit is contained in:
Dan
2021-11-29 17:36:31 -05:00
parent 9b5b99fd7b
commit 1fa88aa73f
3 changed files with 13 additions and 25 deletions

View File

@ -216,7 +216,9 @@ class PythonScriptEngine(object):
lcls.update(kwargs)
globals = copy.copy(self.globals) # else we pollute all later evals.
for x in lcls.keys():
if isinstance(lcls[x], dict):
if isinstance(lcls[x], Box):
pass # Stunning performance improvement with this line.
elif isinstance(lcls[x], dict):
lcls[x] = Box(lcls[x])
globals.update(lcls)
globals.update(external_methods)
@ -239,17 +241,6 @@ class PythonScriptEngine(object):
for key in data.keys():
data[key] = self.convertToBoxSub(data[key])
def convertFromBoxSub(self, data):
    """Recursively convert Box objects (and dicts) into plain dicts.

    Lists are rebuilt element by element; any dict or Box becomes a new
    plain dict with converted values; scalars pass through unchanged.
    Returns a new structure — *data* itself is not mutated.
    """
    if isinstance(data, list):
        return list(map(self.convertFromBoxSub, data))
    if isinstance(data, (dict, Box)):
        converted = {}
        for key, value in data.items():
            converted[key] = self.convertFromBoxSub(value)
        return converted
    return data
def convertFromBox(self, data):
    """In place, replace every top-level value of *data* with its
    plain-dict equivalent (see convertFromBoxSub)."""
    for key in data:
        data[key] = self.convertFromBoxSub(data[key])
def execute(self, task, script, data, external_methods=None):
"""
Execute the script, within the context of the specified task
@ -259,11 +250,7 @@ class PythonScriptEngine(object):
globals = self.globals
self.convertToBox(data)
# data.update({'task':task}) # one of our legacy tests is looking at task.
# this may cause a problem down the road if we
# actually have a variable named 'task'
globals.update(
data) # dict comprehensions cause problems when the variables are not viable.
globals.update(data)
globals.update(external_methods)
try:
exec(script, globals, data)
@ -285,5 +272,4 @@ class PythonScriptEngine(object):
error_line = script.splitlines()[line_number - 1]
raise WorkflowTaskExecException(task, detail, err, line_number,
error_line)
self.convertFromBox(data)

View File

@ -1,16 +1,17 @@
import logging
import time
from SpiffWorkflow import Task
LOG = logging.getLogger(__name__)

# Elapsed times at or below this (seconds) are considered noise and not logged.
threshold = 0.01


def firsttime():
    """Return the current wall-clock time in seconds since the epoch."""
    return time.time()


def sincetime(txt, lasttime):
    """Log the time elapsed since *lasttime* and return the current time.

    The '<elapsed> | <txt>' line is emitted only when the elapsed time
    exceeds ``threshold``, to keep fast operations out of the log.
    (Fix: the unconditional LOG.info duplicate of the guarded call was
    leftover pre-change text — only the threshold-guarded log remains.)
    """
    thistime = firsttime()
    if thistime - lasttime > threshold:
        LOG.info('%2.4f | %s' % (thistime - lasttime, txt))
    return thistime
@ -25,7 +26,8 @@ def timeit(f):
task = ""
task_type = ""
for arg in args:
if isinstance(arg, Task):
if arg.__class__.__name__ == 'Task':
task = arg.get_description()
task_type = arg.task_spec.__class__.__name__
if isinstance(arg, str):
@ -33,8 +35,8 @@ def timeit(f):
arguments.append(argument.replace("\n", " "))
else:
arguments.append(arg.__class__.__name__)
LOG.info('| %2.4f | % s | %s | %r | %s ' % (te-ts, task, task_type, f.__name__, " | ".join(arguments)))
if te-ts > threshold:
LOG.info('| %2.4f | % s | %s | %r | %s ' % (te-ts, f.__qualname__, task, task_type, " | ".join(arguments)))
return result
return timed

View File

@ -15,7 +15,7 @@ HERE = pathlib.Path(__file__).parent
README = (HERE / "README.md").read_text()
setup(name='SpiffWorkflow',
version='1.0.0',
version='1.1.0',
description='A workflow framework and BPMN/DMN Processor',
long_description=README,
long_description_content_type="text/markdown",