# File: SpiffWorkflow/specs/ThreadStart.py
# Copyright (C) 2007 Samuel Abels
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .base import TaskSpec
from SpiffWorkflow.util.task import TaskState
class ThreadStart(TaskSpec):
"""
    This class implements the task that is placed at the beginning
of each thread. It is NOT supposed to be used! It is purely internal,
and used only by the ThreadSplit task.
The task has no inputs and at least one output.
If more than one output is connected, the task does an implicit
parallel split.
"""
def __init__(self, wf_spec, name='ThreadStart', **kwargs):
"""
Constructor. The name of this task is *always* 'ThreadStart'.
:type wf_spec: WorkflowSpec
:param wf_spec: A reference to the workflow specification.
:type kwargs: dict
:param kwargs: See :class:`SpiffWorkflow.specs.TaskSpec`.
"""
TaskSpec.__init__(self, wf_spec, name, **kwargs)
self.internal = True
def _run_hook(self, my_task):
my_task._assign_new_thread_id()
my_task._sync_children(self.outputs, TaskState.READY)
return True
def serialize(self, serializer):
return serializer.serialize_thread_start(self)
@classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
return serializer.deserialize_thread_start(wf_spec, s_state)

# File: SpiffWorkflow/specs/SubWorkflow.py
# Copyright (C) 2007 Samuel Abels, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import os
from lxml import etree
from .StartTask import StartTask
from .base import TaskSpec
from ..task import TaskState
from ..exceptions import WorkflowException
from ..operators import valueof
class SubWorkflow(TaskSpec):
"""
A SubWorkflow is a task that wraps a WorkflowSpec, such that you can
re-use it in multiple places as if it were a task.
If more than one input is connected, the task performs an implicit
multi merge.
If more than one output is connected, the task performs an implicit
parallel split.
"""
def __init__(self,
wf_spec,
name,
file,
in_assign=None,
out_assign=None,
**kwargs):
"""
Constructor.
:type wf_spec: WorkflowSpec
:param wf_spec: A reference to the workflow specification.
:type name: str
:param name: The name of the task spec.
:type file: str
:param file: The name of a file containing a workflow.
:type in_assign: list(str)
:param in_assign: The names of data fields to carry over.
:type out_assign: list(str)
:param out_assign: The names of data fields to carry back.
:type kwargs: dict
:param kwargs: See :class:`SpiffWorkflow.specs.TaskSpec`.
"""
super(SubWorkflow, self).__init__(wf_spec, name, **kwargs)
self.file = None
        self.in_assign = in_assign if in_assign is not None else []
        self.out_assign = out_assign if out_assign is not None else []
if file is not None:
dirname = os.path.dirname(wf_spec.file)
self.file = os.path.join(dirname, file)
def test(self):
TaskSpec.test(self)
if self.file is not None and not os.path.exists(self.file):
raise WorkflowException('File does not exist: %s' % self.file, task_spec=self)
def _predict_hook(self, my_task):
# Modifying the task spec is a TERRIBLE idea, but if we don't do it, sync_children won't work
outputs = [task.task_spec for task in my_task.children]
for output in self.outputs:
if output not in outputs:
outputs.insert(0, output)
if my_task.has_state(TaskState.DEFINITE_MASK):
# This prevents errors with sync children
my_task._sync_children(outputs, TaskState.LIKELY)
else:
my_task._sync_children(outputs, my_task.state)
def _create_subworkflow(self, my_task):
from ..serializer.prettyxml import XmlSerializer
from ..specs.WorkflowSpec import WorkflowSpec
from ..workflow import Workflow
file_name = valueof(my_task, self.file)
serializer = XmlSerializer()
with open(file_name) as fp:
xml = etree.parse(fp).getroot()
wf_spec = WorkflowSpec.deserialize(serializer, xml, filename=file_name)
subworkflow = Workflow(wf_spec)
my_task._sync_children(self.outputs, TaskState.FUTURE)
        # I don't necessarily like this, but I can't say I like anything about how subprocesses work here
for task in subworkflow.task_tree:
my_task.workflow.tasks[task.id] = task
subworkflow.tasks[my_task.id] = my_task
subworkflow.task_tree.parent = my_task
my_task._children.insert(0, subworkflow.task_tree.id)
subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)
my_task._set_internal_data(subworkflow=subworkflow)
my_task._set_state(TaskState.WAITING)
def _run_hook(self, my_task):
# Assign variables, if so requested.
subworkflow = my_task._get_internal_data('subworkflow')
for child in subworkflow.task_tree.children:
for assignment in self.in_assign:
assignment.assign(my_task, child)
child.task_spec._update(child)
return True
def _update_hook(self, my_task):
super()._update_hook(my_task)
subworkflow = my_task._get_internal_data('subworkflow')
if subworkflow is None:
self._create_subworkflow(my_task)
elif subworkflow.completed:
my_task.complete()
def _on_subworkflow_completed(self, subworkflow, my_task):
# Assign variables, if so requested.
for child in my_task.children:
if not isinstance(child.task_spec, StartTask):
if subworkflow.last_task is not None:
child.data = subworkflow.last_task.data
for assignment in self.out_assign:
assignment.assign(subworkflow, child)
my_task.task_spec._update(my_task)
def serialize(self, serializer):
return serializer.serialize_sub_workflow(self)
@classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
return serializer.deserialize_sub_workflow(wf_spec, s_state)
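
# Example (not part of the repository): a minimal sketch of wrapping another
# workflow file with SubWorkflow. The file names and the use of Assign
# operators for in_assign/out_assign are illustrative assumptions.
from SpiffWorkflow.operators import Assign
from SpiffWorkflow.specs.SubWorkflow import SubWorkflow
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec

main = WorkflowSpec(name='main', filename='main.xml')
sub = SubWorkflow(main, 'RunSub', 'sub.xml',
                  in_assign=[Assign('order_id', 'order_id')],
                  out_assign=[Assign('result', 'result')])
main.start.connect(sub)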

# File: SpiffWorkflow/specs/Cancel.py
# Copyright (C) 2007 Samuel Abels
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from ..exceptions import WorkflowException
from .base import TaskSpec
class Cancel(TaskSpec):
"""
This class cancels a complete workflow.
If more than one input is connected, the task performs an implicit
multi merge.
If more than one output is connected, the task performs an implicit
parallel split.
"""
def __init__(self, wf_spec, name, success=False, **kwargs):
"""
Constructor.
:type wf_spec: WorkflowSpec
:param wf_spec: A reference to the workflow specification.
:type name: str
:param name: The name of the task spec.
:type success: bool
:param success: Whether to cancel successfully or unsuccessfully.
:type kwargs: dict
:param kwargs: See :class:`SpiffWorkflow.specs.TaskSpec`.
"""
TaskSpec.__init__(self, wf_spec, name, **kwargs)
self.cancel_successfully = success
def test(self):
"""
Checks whether all required attributes are set. Throws an exception
if an error was detected.
"""
TaskSpec.test(self)
if len(self.outputs) > 0:
raise WorkflowException('Cancel with an output.', task_spec=self)
def _run_hook(self, my_task):
my_task.workflow.cancel(self.cancel_successfully)
return True
def serialize(self, serializer):
return serializer.serialize_cancel(self)
@classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
return serializer.deserialize_cancel(wf_spec, s_state)
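
# Example (not part of the repository): a minimal sketch of a Cancel spec;
# reaching 'Abort' cancels the whole workflow. Names are assumptions.
from SpiffWorkflow.specs.Cancel import Cancel
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec

spec = WorkflowSpec(name='demo')
abort = Cancel(spec, 'Abort', success=False)
spec.start.connect(abort)  # Cancel specs may have inputs, but never outputs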

# File: SpiffWorkflow/specs/StartTask.py
# Copyright (C) 2007 Samuel Abels
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from ..exceptions import WorkflowException
from .base import TaskSpec
class StartTask(TaskSpec):
"""
    This class implements the task that is placed at the beginning
of each workflow. The task has no inputs and at least one output.
If more than one output is connected, the task does an implicit
parallel split.
"""
def __init__(self, wf_spec, name='Start', **kwargs):
"""
Constructor. The name of this task is *always* 'Start'.
:type wf_spec: WorkflowSpec
:param wf_spec: A reference to the workflow specification.
:type kwargs: dict
:param kwargs: See :class:`SpiffWorkflow.specs.TaskSpec`.
"""
TaskSpec.__init__(self, wf_spec, name, **kwargs)
def _connect_notify(self, task_spec):
"""
Called by the previous task to let us know that it exists.
"""
raise WorkflowException('StartTask can not have any inputs.', task_spec=self)
def test(self):
"""
Checks whether all required attributes are set. Throws an exception
if an error was detected.
"""
if len(self.inputs) != 0:
raise WorkflowException('StartTask with an input.', task_spec=self)
elif len(self.outputs) < 1:
raise WorkflowException('No output task connected.', task_spec=self)
def serialize(self, serializer):
return serializer.serialize_start_task(self)
@classmethod
def deserialize(cls, serializer, wf_spec, s_state):
return serializer.deserialize_start_task(wf_spec, s_state)

# File: SpiffWorkflow/specs/Execute.py
# Copyright (C) 2007 Samuel Abels
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import subprocess
from ..task import TaskState
from .base import TaskSpec
class Execute(TaskSpec):
"""
This class executes an external process, goes into WAITING until the
process is complete, and returns the results of the execution.
Usage:
task = Execute(spec, 'Ping', args=["ping", "-t", "1", "127.0.0.1"])
... when workflow complete
    print(workflow.get_task('Ping').results)
"""
def __init__(self, wf_spec, name, args=None, **kwargs):
"""
Constructor.
:type wf_spec: WorkflowSpec
:param wf_spec: A reference to the workflow specification.
:type name: str
:param name: The name of the task spec.
:type args: list
:param args: args to pass to process (first arg is the command).
:type kwargs: dict
:param kwargs: kwargs to pass-through to TaskSpec initializer.
"""
TaskSpec.__init__(self, wf_spec, name, **kwargs)
self.args = args
def _start(self, my_task, force=False):
"""Returns False when successfully fired, True otherwise"""
if (not hasattr(my_task, 'subprocess')) or my_task.subprocess is None:
my_task.subprocess = subprocess.Popen(self.args,
stderr=subprocess.STDOUT,
stdout=subprocess.PIPE)
if my_task.subprocess:
my_task.subprocess.poll()
if my_task.subprocess.returncode is None:
# Still waiting
return False
else:
results = my_task.subprocess.communicate()
my_task.results = results
return True
return False
def _update_hook(self, my_task):
super()._update_hook(my_task)
if not self._start(my_task):
my_task._set_state(TaskState.WAITING)
else:
return True
def serialize(self, serializer):
return serializer.serialize_execute(self)
@classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
spec = serializer.deserialize_execute(wf_spec, s_state)
return spec
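
# Example (not part of the repository): a runnable sketch of the usage shown
# in the Execute docstring. The ping flags are assumptions ('-c' counts pings
# on Unix-like systems), and the loop re-polls the WAITING task until the
# subprocess exits.
from SpiffWorkflow.specs.Execute import Execute
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
from SpiffWorkflow.workflow import Workflow

spec = WorkflowSpec(name='demo')
ping = Execute(spec, 'Ping', args=['ping', '-c', '1', '127.0.0.1'])
spec.start.connect(ping)

workflow = Workflow(spec)
workflow.run_all()
while not workflow.completed:
    workflow.refresh_waiting_tasks()  # polls the subprocess again
    workflow.run_all()
print(workflow.get_tasks(spec_name='Ping')[0].results)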

# File: SpiffWorkflow/specs/CancelTask.py
# Copyright (C) 2007 Samuel Abels
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .Trigger import Trigger
class CancelTask(Trigger):
"""
This class implements a trigger that cancels another task (branch).
If more than one input is connected, the task performs an implicit
multi merge.
If more than one output is connected, the task performs an implicit
parallel split.
"""
def _run_hook(self, my_task):
for spec_name in self.context:
for cancel_task in my_task.workflow.get_tasks(spec_name=spec_name):
cancel_task.cancel()
return True
def serialize(self, serializer):
return serializer.serialize_cancel_task(self)
@classmethod
    def deserialize(cls, serializer, wf_spec, s_state):
return serializer.deserialize_cancel_task(wf_spec, s_state)

# File: SpiffWorkflow/dmn/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

# File: SpiffWorkflow/dmn/parser/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .BpmnDmnParser import BpmnDmnParser

# File: SpiffWorkflow/dmn/parser/DMNParser.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import ast
from SpiffWorkflow.bpmn.parser.node_parser import NodeParser
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.parser.util import xpath_eval
from SpiffWorkflow.dmn.specs.model import (
Decision,
DecisionTable,
InputEntry,
OutputEntry,
Input,
Output,
Rule,
)
class DMNParser(NodeParser):
"""
Please note this DMN Parser still needs a lot of work. A few key areas
that need to be addressed:
1. it assumes that only one decision table exists within a decision
    2. it is not always namespace aware (I fixed the top level, but it could
       be cleaner all the way through)
"""
DT_FORMAT = '%Y-%m-%dT%H:%M:%S'
def __init__(self, p, node, nsmap, svg=None, filename=None):
"""
Constructor.
:param p: the owning BpmnParser instance
:param node: the XML node for the DMN document
:param svg: the SVG representation of this process as a string
(optional)
:param filename: the source BMN filename (optional)
"""
super().__init__(node, nsmap, filename=filename)
self.parser = p
self.node = node
self.decision = None
self.svg = svg
self.filename = filename
def parse(self):
self.decision = self._parse_decision(self.xpath('.//dmn:decision'))
@property
def bpmn_id(self):
"""
Returns the process ID
"""
return self.xpath('dmn:decision[1]')[0].get('id')
def get_name(self):
"""
Returns the process name (or ID, if no name is included in the file)
"""
return self.xpath('dmn:decision[1]')[0].get('name')
def _parse_decision(self, root):
decision_elements = list(root)
if len(decision_elements) == 0:
raise ValidationException('No decisions found', file_name=self.filename,
node=root)
if len(decision_elements) > 1:
            raise ValidationException('Multiple decision tables are not currently supported.',
file_name=self.filename, node=root)
decision_element = decision_elements[0]
decision = Decision(decision_element.attrib['id'],
decision_element.attrib.get('name', ''))
# Parse decision tables
self._parse_decision_tables(decision, decision_element)
return decision
def _parse_decision_tables(self, decision, decisionElement):
for decision_table_element in decisionElement.findall('dmn:decisionTable', namespaces=self.nsmap):
name = decision_table_element.attrib.get('name', '')
hitPolicy = decision_table_element.attrib.get('hitPolicy', 'UNIQUE').upper()
decision_table = DecisionTable(decision_table_element.attrib['id'],
name, hitPolicy)
decision.decisionTables.append(decision_table)
# parse inputs
self._parse_inputs_outputs(decision_table, decision_table_element)
def _parse_inputs_outputs(self, decisionTable,
decisionTableElement):
rule_counter = 0
for element in decisionTableElement:
if element.tag.endswith('input'):
e_input = self._parse_input(element)
decisionTable.inputs.append(e_input)
elif element.tag.endswith('output'):
output = self._parse_output(element)
decisionTable.outputs.append(output)
elif element.tag.endswith('rule'):
rule_counter += 1
rule = self._parse_rule(decisionTable, element, rule_counter)
decisionTable.rules.append(rule)
else:
raise ValidationException(
'Unknown type in decision table: %r' % element.tag,
node=element, file_name=self.filename)
def _parse_input(self, input_element):
type_ref = None
xpath = xpath_eval(input_element, self.nsmap)
expression = None
for input_expression in xpath('dmn:inputExpression'):
type_ref = input_expression.attrib.get('typeRef', '')
expression_node = input_expression.find('dmn:text', namespaces=self.nsmap)
if expression_node is not None:
expression = expression_node.text
return Input(input_element.attrib['id'],
input_element.attrib.get('label', ''),
input_element.attrib.get('name', ''),
expression,
type_ref)
def _parse_output(self, outputElement):
output = Output(outputElement.attrib['id'],
outputElement.attrib.get('label', ''),
outputElement.attrib.get('name', ''),
outputElement.attrib.get('typeRef', ''))
return output
def _parse_rule(self, decisionTable, ruleElement, rowNumber):
rule = Rule(ruleElement.attrib['id'])
rule.row_number = rowNumber
input_idx = 0
output_idx = 0
for child in ruleElement:
# Load description
if child.tag.endswith('description'):
rule.description = child.text
# Load input entries
elif child.tag.endswith('inputEntry'):
input_entry = self._parse_input_output_element(decisionTable,
child,
InputEntry,
input_idx)
rule.inputEntries.append(input_entry)
input_idx += 1
# Load output entries
elif child.tag.endswith('outputEntry'):
output_entry = self._parse_input_output_element(decisionTable,
child,
OutputEntry,
output_idx)
rule.outputEntries.append(output_entry)
output_idx += 1
return rule
def _parse_input_output_element(self, decision_table, element, cls, idx):
input_or_output = (decision_table.inputs if cls == InputEntry else decision_table.outputs)[idx]
entry = cls(element.attrib['id'], input_or_output)
for child in element:
if child.tag.endswith('description'):
entry.description = child.text
elif child.tag.endswith('text'):
entry.text = child.text
if cls == InputEntry:
# DMN renders 'no input specification' with '-'; assume this is intended if somebody has added '-'
entry.lhs.append(entry.text if entry.text != '-' else None)
elif cls == OutputEntry:
if entry.text and entry.text != '':
try:
ast.parse(entry.text)
except Exception as e:
raise ValidationException(
"Malformed Output Expression '%s'. %s " % (entry.text, str(e)),
node=element, file_name=self.filename)
return entry

# File: SpiffWorkflow/dmn/parser/BpmnDmnParser.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import glob
import os
from lxml import etree
from ...bpmn.parser.util import full_tag
from ...bpmn.parser.ValidationException import ValidationException
from ...bpmn.parser.BpmnParser import BpmnParser, BpmnValidator
from ...dmn.parser.DMNParser import DMNParser
from ..engine.DMNEngine import DMNEngine
XSD_DIR = os.path.join(os.path.dirname(__file__), 'schema')
SCHEMAS = {
'http://www.omg.org/spec/DMN/20151101/dmn.xsd': os.path.join(XSD_DIR, 'DMN.xsd'),
'http://www.omg.org/spec/DMN/20180521/MODEL/': os.path.join(XSD_DIR, 'DMN12.xsd'),
'https://www.omg.org/spec/DMN/20191111/MODEL/': os.path.join(XSD_DIR, 'DMN13.xsd'),
}
class BpmnDmnParser(BpmnParser):
def __init__(self, namespaces=None, validator=None, dmn_schemas=None):
super().__init__(namespaces, validator)
self.dmn_schemas = dmn_schemas or SCHEMAS
self.dmn_parsers = {}
self.dmn_parsers_by_name = {}
self.dmn_dependencies = set()
def get_engine(self, decision_ref, node):
if decision_ref not in self.dmn_parsers:
options = ', '.join(list(self.dmn_parsers.keys()))
raise ValidationException(
                'No DMN Diagram available with id "%s". Available DMN ids are: %s' % (decision_ref, options),
node=node, file_name='')
dmn_parser = self.dmn_parsers[decision_ref]
dmn_parser.parse()
decision = dmn_parser.decision
return DMNEngine(decision.decisionTables[0])
def add_dmn_xml(self, node, filename=None):
"""
Add the given lxml representation of the DMN file to the parser's set.
"""
namespaces = self.namespaces.copy()
namespaces.update(node.nsmap)
if None in namespaces:
namespaces['dmn'] = namespaces.pop(None)
# We have to create a dmn validator on the fly, because we support multiple versions
# If we have a bpmn validator, assume DMN validation should be done as well.
# I don't like this, but I don't see a better solution.
schema = self.dmn_schemas.get(namespaces.get('dmn'))
if self.validator and schema is not None:
validator = BpmnValidator(schema)
validator.validate(node, filename)
dmn_parser = DMNParser(self, node, namespaces, filename=filename)
self.dmn_parsers[dmn_parser.bpmn_id] = dmn_parser
self.dmn_parsers_by_name[dmn_parser.get_name()] = dmn_parser
def add_dmn_file(self, filename):
"""
Add the given DMN filename to the parser's set.
"""
self.add_dmn_files([filename])
def add_dmn_files_by_glob(self, g):
"""
        Add all filenames matching the provided pattern (e.g. *.dmn) to the
parser's set.
"""
self.add_dmn_files(glob.glob(g))
def add_dmn_files(self, filenames):
"""
Add all filenames in the given list to the parser's set.
"""
for filename in filenames:
with open(filename, 'r') as f:
self.add_dmn_io(f, filename=filename)
def add_dmn_io(self, file_like_object, filename=None):
"""
Add the given DMN file like object to the parser's set.
"""
self.add_dmn_xml(etree.parse(file_like_object).getroot(), filename)
def add_dmn_str(self, dmn_str, filename=None):
"""
Add the given DMN string to the parser's set.
"""
self.add_dmn_xml(etree.fromstring(dmn_str), filename)
def get_dependencies(self):
return self.process_dependencies.union(self.dmn_dependencies)
def get_dmn_dependencies(self):
return self.dmn_dependencies
def _find_dependencies(self, process):
super()._find_dependencies(process)
parser_cls, cls = self._get_parser_class(full_tag('businessRuleTask'))
for business_rule in process.xpath('.//bpmn:businessRuleTask', namespaces=self.namespaces):
self.dmn_dependencies.add(parser_cls.get_decision_ref(business_rule))
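
# Example (not part of the repository): loading a BPMN process together with
# the DMN tables it references. File names and the process id are assumptions.
from SpiffWorkflow.dmn.parser.BpmnDmnParser import BpmnDmnParser

parser = BpmnDmnParser()
parser.add_bpmn_file('process.bpmn')             # inherited from BpmnParser
parser.add_dmn_files_by_glob('decisions/*.dmn')
spec = parser.get_spec('my_process')             # id of a bpmn:process element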

# File: SpiffWorkflow/dmn/engine/DMNEngine.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import logging
import re
from SpiffWorkflow.exceptions import SpiffWorkflowException
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException
from ..specs.model import HitPolicy
logger = logging.getLogger('spiff.dmn')
class DMNEngine:
"""
Handles the processing of a decision table.
"""
def __init__(self, decision_table):
self.decision_table = decision_table
    def decide(self, task):
        rules = []
        for rule in self.decision_table.rules:
            if self.__check_rule(rule, task):
                rules.append(rule)
                # The UNIQUE hit policy stops at the first matching rule
                if self.decision_table.hit_policy == HitPolicy.UNIQUE.value:
                    return rules
        return rules
def result(self, task):
"""Returns the results of running this decision table against
a given task."""
result = {}
matched_rules = self.decide(task)
if self.decision_table.hit_policy == HitPolicy.COLLECT.value:
# each output will be an array of values, all outputs will
# be placed in a dict, which we will then merge.
for rule in matched_rules:
rule_output = rule.output_as_dict(task)
for key in rule_output.keys():
if key not in result:
result[key] = []
result[key].append(rule_output[key])
elif len(matched_rules) > 0:
result = matched_rules[0].output_as_dict(task)
return result
def __check_rule(self, rule, task):
for input_entry in rule.inputEntries:
for lhs in input_entry.lhs:
if lhs is not None:
input_val = DMNEngine.__get_input_val(input_entry, task.data)
else:
input_val = None
try:
if not self.evaluate(input_val, lhs, task):
return False
except SpiffWorkflowException as se:
se.add_note(f"Rule failed on row {rule.row_number}")
raise se
except Exception as e:
error = WorkflowTaskException(str(e), task=task, exception=e)
error.add_note(f"Failed to execute DMN Rule on row {rule.row_number}")
raise error
else:
# Empty means ignore decision value
continue # Check the other operators/columns
return True
def needs_eq(self, script_engine, text):
try:
# this should work if we can just do a straight equality
script_engine.validate(text)
return True
except SyntaxError:
# if we have problems parsing, then we introduce a variable on the left hand side
# and try that and see if that parses. If so, then we know that we do not need to
# introduce an equality operator later in the dmn
script_engine.validate(f'v {text}')
return False
def evaluate(self, input_expr, match_expr, task):
"""
Here we need to handle a few things such as if it is an equality or if
the equality has already been taken care of. For now, we just assume
it is equality.
An optional task can be included if this is being executed in the
context of a BPMN task.
"""
if match_expr is None:
return True
script_engine = task.workflow.script_engine
# NB - the question mark allows us to do a double ended test - for
# example - our input expr is 5 and the match expr is 4 < ? < 6 -
# this should evaluate as 4 < 5 < 6 and it should evaluate as 'True'
# NOTE: It should only do this replacement outside of quotes.
# for example, provided "This thing?" in quotes, it should not
# do the replacement.
match_expr = re.sub(r'(\?)(?=(?:[^\'"]|[\'"][^\'"]*[\'"])*$)', 'dmninputexpr', match_expr)
if 'dmninputexpr' in match_expr:
external_context = {
'dmninputexpr': script_engine.evaluate(task, input_expr)
}
return script_engine.evaluate(task, match_expr, external_context=external_context)
# The input expression just has to be something that can be parsed as is by the engine.
script_engine.validate(input_expr)
# If we get here, we need to check whether the match expression includes
# an operator or if can use '=='
needs_eq = self.needs_eq(script_engine, match_expr)
# Disambiguate cases like a == 0 == True when we add '=='
expr = f'({input_expr}) == ({match_expr})' if needs_eq else input_expr + match_expr
return script_engine.evaluate(task, expr)
@staticmethod
def __get_input_val(input_entry, context):
"""
        The input of the decision method should be an expression, but will
        fall back to the likely very bad idea of trying to use the label.
        :param input_entry: The input entry whose expression should be used.
        :param context: A dictionary that provides some context/local vars.
:return:
"""
if input_entry.input.expression:
return input_entry.input.expression
else:
# Backwards compatibility
return "%r" % context[input_entry.input.label]
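
# Example (not part of the repository): the '?' placeholder rewrite used by
# DMNEngine.evaluate, shown in isolation. The regex is copied from the method
# above; a '?' inside quotes is deliberately left alone.
import re

PATTERN = r'(\?)(?=(?:[^\'"]|[\'"][^\'"]*[\'"])*$)'
print(re.sub(PATTERN, 'dmninputexpr', '4 < ? < 6'))      # 4 < dmninputexpr < 6
print(re.sub(PATTERN, 'dmninputexpr', '"This thing?"'))  # "This thing?" (unchanged)
# With an input expression of 5, the first match expression is then evaluated
# as 4 < dmninputexpr < 6 with dmninputexpr bound to 5, i.e. True.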

# File: SpiffWorkflow/dmn/engine/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

# File: SpiffWorkflow/dmn/specs/business_rule_task_mixin.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.exceptions import SpiffWorkflowException
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException
from SpiffWorkflow.specs.base import TaskSpec
from SpiffWorkflow.util.deep_merge import DeepMerge
class BusinessRuleTaskMixin(TaskSpec):
"""Task Spec for a bpmn:businessTask (DMB Decision Reference) node."""
def __init__(self, wf_spec, name, dmnEngine, **kwargs):
super().__init__(wf_spec, name, **kwargs)
self.dmnEngine = dmnEngine
self.resDict = None
def _run_hook(self, my_task):
try:
my_task.data = DeepMerge.merge(my_task.data, self.dmnEngine.result(my_task))
super(BusinessRuleTaskMixin, self)._run_hook(my_task)
except SpiffWorkflowException as we:
we.add_note(f"Business Rule Task '{my_task.task_spec.bpmn_name}'.")
raise we
except Exception as e:
error = WorkflowTaskException(str(e), task=my_task)
error.add_note(f"Business Rule Task '{my_task.task_spec.bpmn_name}'.")
raise error
return True

# File: SpiffWorkflow/dmn/specs/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .business_rule_task_mixin import BusinessRuleTaskMixin

# File: SpiffWorkflow/dmn/specs/model.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from collections import OrderedDict
from enum import Enum
from ...util.deep_merge import DeepMerge
class HitPolicy(Enum):
UNIQUE = "UNIQUE"
COLLECT = "COLLECT"
# ANY = "ANY"
# PRIORITY = "PRIORITY"
# FIRST = "FIRST"
# OUTPUT_ORDER = "OUTPUT ORDER"
# RULE_ORDER = "RULE ORDER"
# class Aggregation(Enum):
# SUM = "SUM"
# COUNT = "COUNT"
# MIN = "MIN"
# MAX = "MAX"
class Decision:
def __init__(self, id, name):
self.id = id
self.name = name
self.decisionTables = []
class DecisionTable:
def __init__(self, id, name, hit_policy):
self.id = id
self.name = name
self.hit_policy = hit_policy
self.inputs = []
self.outputs = []
self.rules = []
class Input:
def __init__(self, id, label, name, expression, typeRef):
self.id = id
self.label = label
self.name = name
self.expression = expression
self.typeRef = typeRef
class InputEntry:
def __init__(self, id, input):
self.id = id
self.input = input
self.description = ''
self.lhs = []
class Output:
def __init__(self, id, label, name, typeRef):
self.id = id
self.label = label
self.name = name
self.typeRef = typeRef
class OutputEntry:
def __init__(self, id, output):
self.id = id
self.output = output
self.description = ''
self.text = ''
class Rule:
def __init__(self, id):
self.id = id
self.row_number = 0
self.description = ''
self.inputEntries = []
self.outputEntries = []
def output_as_dict(self, task):
script_engine = task.workflow.script_engine
out = OrderedDict()
for outputEntry in self.outputEntries:
            # try to use the name, but fall back to the label if no name is provided.
key = outputEntry.output.name or outputEntry.output.label
if hasattr(outputEntry, "text") and outputEntry.text:
outvalue = script_engine.evaluate(task, outputEntry.text)
else:
outvalue = ""
            if '.' in key:
                # We need to allow for dot notation in the DMN. I would use box
                # to do this, but it didn't have a feature to build a dict from
                # dot notation without eval, so we build up a dictionary
                # structure based on the key and let the parent do a deep merge.
currentout = {}
subkeylist = list(reversed(key.split('.')))
for subkey in subkeylist[:-1]:
currentout[subkey] = outvalue
outvalue = currentout
currentout = {}
basekey = subkeylist[-1]
out[basekey] = DeepMerge.merge(out.get(basekey,{}),outvalue)
else:
out[key] = outvalue
return out
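
# Example (not part of the repository): the dot-notation expansion performed
# in Rule.output_as_dict, traced by hand. The output name 'person.address.city'
# and the value 'Berlin' are assumptions.
outvalue = 'Berlin'
currentout = {}
subkeylist = list(reversed('person.address.city'.split('.')))  # ['city', 'address', 'person']
for subkey in subkeylist[:-1]:
    currentout[subkey] = outvalue
    outvalue = currentout
    currentout = {}
print({subkeylist[-1]: outvalue})  # {'person': {'address': {'city': 'Berlin'}}}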

# File: SpiffWorkflow/dmn/serializer/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .task_spec import BaseBusinessRuleTaskConverter

# File: SpiffWorkflow/dmn/serializer/task_spec.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from ...bpmn.serializer.helpers.spec import TaskSpecConverter
from ..specs.model import DecisionTable, Rule, HitPolicy
from ..specs.model import Input, InputEntry, Output, OutputEntry
from ..engine.DMNEngine import DMNEngine
class BaseBusinessRuleTaskConverter(TaskSpecConverter):
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
# We only ever use one decision table
dct['decision_table'] = self.decision_table_to_dict(spec.dmnEngine.decision_table)
return dct
def decision_table_to_dict(self, table):
return {
'id': table.id,
'name': table.name,
'hit_policy': table.hit_policy,
'inputs': [val.__dict__ for val in table.inputs],
'outputs': [val.__dict__ for val in table.outputs],
'rules': [self.rule_to_dict(rule) for rule in table.rules],
}
def input_entry_to_dict(self, entry):
return {
'id': entry.id,
'input_id': entry.input.id,
'description': entry.description,
'lhs': entry.lhs,
}
def output_entry_to_dict(self, entry):
dct = {
'id': entry.id,
'output_id': entry.output.id,
'description': entry.description,
'text': entry.text,
}
return dct
def rule_to_dict(self, rule):
return {
'id': rule.id,
'row_number': rule.row_number,
'description': rule.description,
'input_entries': [self.input_entry_to_dict(entry) for entry in rule.inputEntries],
'output_entries': [self.output_entry_to_dict(entry) for entry in rule.outputEntries],
}
def from_dict(self, dct):
table = self.decision_table_from_dict(dct.pop('decision_table'))
dct['dmnEngine'] = DMNEngine(table)
return self.task_spec_from_dict(dct)
def decision_table_from_dict(self, dct):
hit_policy = dct.get('hit_policy', HitPolicy.UNIQUE.value)
table = DecisionTable(dct['id'], dct['name'], hit_policy)
table.inputs = [ Input(**val) for val in dct['inputs'] ]
table.outputs = [ Output(**val) for val in dct['outputs'] ]
table.rules = [ self.rule_from_dict(rule, table.inputs, table.outputs)
for rule in dct['rules'] ]
return table
def input_entry_from_dict(self, dct, inputs):
input_id = dct.pop('input_id')
my_input = None
for i in inputs:
if i.id == input_id:
my_input = i
entry = InputEntry(dct['id'], my_input)
entry.description = dct['description']
entry.lhs = dct['lhs']
return entry
def output_entry_from_dict(self, dct, outputs):
output_id = dct['output_id']
my_output = None
for i in outputs:
if i.id == output_id:
my_output = i
entry = OutputEntry(dct['id'], my_output)
entry.description = dct['description']
entry.text = dct['text']
return entry
def rule_from_dict(self, dct, inputs, outputs):
rule = Rule(dct['id'])
rule.description = dct['description']
rule.row_number = dct.get('row_number', 0)
rule.inputEntries = [self.input_entry_from_dict(entry, inputs)
for entry in dct['input_entries']]
rule.outputEntries = [self.output_entry_from_dict(entry, outputs)
for entry in dct['output_entries']]
return rule
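
# Example (not part of the repository): the shape of the dict produced by
# decision_table_to_dict above. All field values are illustrative assumptions.
example = {
    'id': 'table_1',
    'name': 'Discount',
    'hit_policy': 'UNIQUE',
    'inputs': [{'id': 'in_1', 'label': 'Amount', 'name': 'amount',
                'expression': 'amount', 'typeRef': 'integer'}],
    'outputs': [{'id': 'out_1', 'label': 'Discount', 'name': 'discount',
                 'typeRef': 'integer'}],
    'rules': [{'id': 'rule_1', 'row_number': 1, 'description': '',
               'input_entries': [{'id': 'ie_1', 'input_id': 'in_1',
                                  'description': '', 'lhs': ['> 100']}],
               'output_entries': [{'id': 'oe_1', 'output_id': 'out_1',
                                   'description': '', 'text': '10'}]}],
}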

# File: SpiffWorkflow/spiff/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA

# File: SpiffWorkflow/spiff/parser/process.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import os
from SpiffWorkflow.bpmn.parser.ProcessParser import ProcessParser
from SpiffWorkflow.dmn.parser import BpmnDmnParser
from SpiffWorkflow.bpmn.parser.BpmnParser import BpmnValidator, full_tag
from SpiffWorkflow.spiff.specs.data_object import DataObject
from SpiffWorkflow.bpmn.specs.defaults import (
StartEvent,
EndEvent,
IntermediateCatchEvent,
IntermediateThrowEvent,
BoundaryEvent,
)
from SpiffWorkflow.spiff.specs.defaults import (
UserTask,
ManualTask,
NoneTask,
ScriptTask,
SendTask,
ReceiveTask,
BusinessRuleTask,
SubWorkflowTask,
CallActivity,
TransactionSubprocess,
ServiceTask
)
from SpiffWorkflow.spiff.parser.task_spec import (
SpiffTaskParser,
SubWorkflowParser,
CallActivityParser,
ServiceTaskParser,
ScriptTaskParser,
BusinessRuleTaskParser
)
from SpiffWorkflow.spiff.parser.event_parsers import (
SpiffStartEventParser,
SpiffEndEventParser,
SpiffBoundaryEventParser,
SpiffIntermediateCatchEventParser,
SpiffIntermediateThrowEventParser,
SpiffSendTaskParser,
SpiffReceiveTaskParser
)
SPIFF_XSD = os.path.join(os.path.dirname(__file__), 'schema', 'spiffworkflow.xsd')
VALIDATOR = BpmnValidator(imports={'spiffworkflow': SPIFF_XSD})
class SpiffProcessParser(ProcessParser):
def parse_data_object(self, obj):
extensions = SpiffTaskParser._parse_extensions(obj)
category = extensions.get('category')
return DataObject(category, obj.get('id'), obj.get('name'))
class SpiffBpmnParser(BpmnDmnParser):
PROCESS_PARSER_CLASS = SpiffProcessParser
OVERRIDE_PARSER_CLASSES = {
full_tag('task'): (SpiffTaskParser, NoneTask),
full_tag('userTask'): (SpiffTaskParser, UserTask),
full_tag('manualTask'): (SpiffTaskParser, ManualTask),
full_tag('scriptTask'): (ScriptTaskParser, ScriptTask),
full_tag('subProcess'): (SubWorkflowParser, SubWorkflowTask),
full_tag('transaction'): (SubWorkflowParser, TransactionSubprocess),
full_tag('callActivity'): (CallActivityParser, CallActivity),
full_tag('serviceTask'): (ServiceTaskParser, ServiceTask),
full_tag('startEvent'): (SpiffStartEventParser, StartEvent),
full_tag('endEvent'): (SpiffEndEventParser, EndEvent),
full_tag('boundaryEvent'): (SpiffBoundaryEventParser, BoundaryEvent),
full_tag('intermediateCatchEvent'): (SpiffIntermediateCatchEventParser, IntermediateCatchEvent),
full_tag('intermediateThrowEvent'): (SpiffIntermediateThrowEventParser, IntermediateThrowEvent),
full_tag('sendTask'): (SpiffSendTaskParser, SendTask),
full_tag('receiveTask'): (SpiffReceiveTaskParser, ReceiveTask),
full_tag('businessRuleTask'): (BusinessRuleTaskParser, BusinessRuleTask)
}
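
# Example (not part of the repository): parsing with schema validation using
# the spiffworkflow extensions. The file name and process id are assumptions.
from SpiffWorkflow.spiff.parser import SpiffBpmnParser, VALIDATOR

parser = SpiffBpmnParser(validator=VALIDATOR)
parser.add_bpmn_file('process.bpmn')
spec = parser.get_spec('my_process')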

# File: SpiffWorkflow/spiff/parser/event_parsers.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import warnings
from SpiffWorkflow.bpmn.parser.event_parsers import EventDefinitionParser, ReceiveTaskParser
from SpiffWorkflow.bpmn.parser.event_parsers import (
StartEventParser,
EndEventParser,
IntermediateCatchEventParser,
IntermediateThrowEventParser,
BoundaryEventParser,
SendTaskParser,
)
from SpiffWorkflow.spiff.specs.event_definitions import (
MessageEventDefinition,
SignalEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
)
from SpiffWorkflow.bpmn.parser.util import one, first
from SpiffWorkflow.bpmn.specs.event_definitions.message import CorrelationProperty
from SpiffWorkflow.spiff.parser.task_spec import SpiffTaskParser, SPIFFWORKFLOW_NSMAP
class SpiffEventDefinitionParser(SpiffTaskParser, EventDefinitionParser):
def parse_message_extensions(self, node):
expression = first(node.xpath('.//spiffworkflow:messagePayload', namespaces=SPIFFWORKFLOW_NSMAP))
variable = first(node.xpath('.//spiffworkflow:messageVariable', namespaces=SPIFFWORKFLOW_NSMAP))
if expression is not None:
expression = expression.text
if variable is not None:
variable = variable.text
return expression, variable
def parse_process_correlations(self, node):
correlations = []
for prop in node.xpath('.//spiffworkflow:processVariableCorrelation', namespaces=SPIFFWORKFLOW_NSMAP):
key = one(prop.xpath('./spiffworkflow:propertyId', namespaces=SPIFFWORKFLOW_NSMAP))
expression = one(prop.xpath('./spiffworkflow:expression', namespaces=SPIFFWORKFLOW_NSMAP))
correlations.append(CorrelationProperty(key.text, expression.text, []))
return correlations
def parse_message_event(self, message_event):
"""Parse a Spiff message event."""
message_ref = message_event.get('messageRef')
if message_ref:
try:
message = one(self.doc_xpath('.//bpmn:message[@id="%s"]' % message_ref))
except Exception:
self.raise_validation_exception('Expected a Message node', node=message_event)
name = message.get('name')
expression, variable = self.parse_message_extensions(message)
if expression is not None or variable is not None:
warnings.warn(
'spiffworkflow:messagePayload and spiffworkflow:messageVariable have been moved to the bpmn:messageDefinition element',
DeprecationWarning,
stacklevel=2,
)
else:
expression, variable = self.parse_message_extensions(message_event)
correlations = self.get_message_correlations(message_ref)
process_correlations = self.parse_process_correlations(message_event)
event_def = MessageEventDefinition(name, correlations, expression, variable, process_correlations)
else:
name = message_event.getparent().get('name')
event_def = MessageEventDefinition(name)
return event_def
def parse_signal_event(self, signal_event):
"""Parse a Spiff signal event"""
signal_ref = signal_event.get('signalRef')
if signal_ref is not None:
try:
signal = one(self.doc_xpath(f'.//bpmn:signal[@id="{signal_ref}"]'))
except Exception:
self.raise_validation_exception('Expected a Signal node', node=signal_event)
name = signal.get('name')
extensions = self.parse_extensions(signal)
expression = extensions.get('payloadExpression')
variable = extensions.get('variableName')
else:
name = signal_event.getparent().get('name')
expression, variable = None, None
return SignalEventDefinition(name, expression=expression, variable=variable)
def parse_error_event(self, error_event):
"""Parse a Spiff error event"""
error_ref = error_event.get('errorRef')
if error_ref is not None:
try:
error = one(self.doc_xpath(f'.//bpmn:error[@id="{error_ref}"]'))
except Exception:
self.raise_validation_exception('Expected an Error node', node=error_event)
name = error.get('name')
code = error.get('errorCode')
extensions = self.parse_extensions(error)
expression = extensions.get('payloadExpression')
variable = extensions.get('variableName')
else:
name = error_event.getparent().get('name')
code, expression, variable = None, None, None
return ErrorEventDefinition(name, expression=expression, variable=variable, code=code)
def parse_escalation_event(self, escalation_event):
"""Parse a Spiff error event"""
escalation_ref = escalation_event.get('escalationRef')
if escalation_ref is not None:
try:
escalation = one(self.doc_xpath(f'.//bpmn:escalation[@id="{escalation_ref}"]'))
except Exception:
self.raise_validation_exception('Expected an Escalation node', node=escalation_event)
name = escalation.get('name')
code = escalation.get('escalationCode')
extensions = self.parse_extensions(escalation)
expression = extensions.get('payloadExpression')
variable = extensions.get('variableName')
else:
name = escalation_event.getparent().get('name')
code, expression, variable = None, None, None
return EscalationEventDefinition(name, expression=expression, variable=variable, code=code)
class SpiffStartEventParser(SpiffEventDefinitionParser, StartEventParser):
def create_task(self):
return StartEventParser.create_task(self)
class SpiffEndEventParser(SpiffEventDefinitionParser, EndEventParser):
def create_task(self):
return EndEventParser.create_task(self)
class SpiffIntermediateCatchEventParser(SpiffEventDefinitionParser, IntermediateCatchEventParser):
def create_task(self):
return IntermediateCatchEventParser.create_task(self)
class SpiffIntermediateThrowEventParser(SpiffEventDefinitionParser, IntermediateThrowEventParser):
def create_task(self):
return IntermediateThrowEventParser.create_task(self)
class SpiffBoundaryEventParser(SpiffEventDefinitionParser, BoundaryEventParser):
def create_task(self):
return BoundaryEventParser.create_task(self)
class SpiffSendTaskParser(SpiffEventDefinitionParser, SendTaskParser):
def create_task(self):
task = SendTaskParser.create_task(self)
# Duplicate this code from the SpiffTask parser because of parser limitations
extensions = self.parse_extensions()
task.prescript = extensions.get('preScript')
task.postscript = extensions.get('postScript')
return task
class SpiffReceiveTaskParser(SpiffEventDefinitionParser, ReceiveTaskParser):
def create_task(self):
task = ReceiveTaskParser.create_task(self)
        # Duplicate the pre/postscript handling from SpiffTaskParser here as well (see SpiffSendTaskParser above)
extensions = self.parse_extensions()
task.prescript = extensions.get('preScript')
task.postscript = extensions.get('postScript')
        return task
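
# Illustrative wiring sketch (not part of this module): these classes are
# typically handed to the BPMN parser as (parser, spec) pairs keyed by tag.
# The mapping below is an assumption modeled on this package's process
# parser, not definitive configuration:
#
#     from SpiffWorkflow.bpmn.parser.util import full_tag
#     from SpiffWorkflow.spiff.specs.defaults import SendTask, ReceiveTask
#
#     OVERRIDE_PARSER_CLASSES = {
#         full_tag('sendTask'): (SpiffSendTaskParser, SendTask),
#         full_tag('receiveTask'): (SpiffReceiveTaskParser, ReceiveTask),
#     }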

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/parser/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .process import SpiffBpmnParser, VALIDATOR

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/parser/task_spec.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from lxml import etree
from SpiffWorkflow.bpmn.parser.TaskParser import TaskParser
from SpiffWorkflow.bpmn.parser.task_parsers import SubprocessParser
from SpiffWorkflow.bpmn.parser.util import xpath_eval
from SpiffWorkflow.spiff.specs.defaults import (
StandardLoopTask,
ParallelMultiInstanceTask,
SequentialMultiInstanceTask,
BusinessRuleTask
)
SPIFFWORKFLOW_NSMAP = {'spiffworkflow': 'http://spiffworkflow.org/bpmn/schema/1.0/core'}
class SpiffTaskParser(TaskParser):
STANDARD_LOOP_CLASS = StandardLoopTask
PARALLEL_MI_CLASS = ParallelMultiInstanceTask
SEQUENTIAL_MI_CLASS = SequentialMultiInstanceTask
def parse_extensions(self, node=None):
if node is None:
node = self.node
return SpiffTaskParser._parse_extensions(node)
@staticmethod
def _parse_extensions(node):
        # These extension elements are parsed by hand; ideally the namespace
        # would be registered so they could be mapped automatically.
extensions = {}
xpath = xpath_eval(node, SPIFFWORKFLOW_NSMAP)
        extension_nodes = xpath('./bpmn:extensionElements/spiffworkflow:*')
for node in extension_nodes:
name = etree.QName(node).localname
if name == 'properties':
extensions['properties'] = SpiffTaskParser._parse_properties(node)
elif name == 'unitTests':
extensions['unitTests'] = SpiffTaskParser._parse_script_unit_tests(node)
elif name == 'serviceTaskOperator':
extensions['serviceTaskOperator'] = SpiffTaskParser._parse_servicetask_operator(node)
else:
extensions[name] = node.text
return extensions
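
    # Illustrative markup consumed by _parse_extensions (an assumed example,
    # not taken from any specific diagram): every spiffworkflow:* child of
    # bpmn:extensionElements becomes one dictionary entry, so this snippet
    # would yield {'preScript': 'x = 1', 'postScript': 'y = x + 1'}:
    #
    #   <bpmn:extensionElements>
    #     <spiffworkflow:preScript>x = 1</spiffworkflow:preScript>
    #     <spiffworkflow:postScript>y = x + 1</spiffworkflow:postScript>
    #   </bpmn:extensionElements>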
@classmethod
def _node_children_by_tag_name(cls, node, tag_name):
xpath = cls._spiffworkflow_ready_xpath_for_node(node)
return xpath(f'.//spiffworkflow:{tag_name}')
@classmethod
def _parse_properties(cls, node):
property_nodes = cls._node_children_by_tag_name(node, 'property')
properties = {}
for prop_node in property_nodes:
properties[prop_node.attrib['name']] = prop_node.attrib['value']
return properties
@staticmethod
def _spiffworkflow_ready_xpath_for_node(node):
return xpath_eval(node, SPIFFWORKFLOW_NSMAP)
@classmethod
def _parse_script_unit_tests(cls, node):
unit_test_nodes = cls._node_children_by_tag_name(node, 'unitTest')
unit_tests = []
for unit_test_node in unit_test_nodes:
unit_test_dict = {"id": unit_test_node.attrib['id']}
unit_test_dict['inputJson'] = cls._node_children_by_tag_name(unit_test_node, 'inputJson')[0].text
unit_test_dict['expectedOutputJson'] = cls._node_children_by_tag_name(unit_test_node, 'expectedOutputJson')[0].text
unit_tests.append(unit_test_dict)
return unit_tests
@classmethod
def _parse_servicetask_operator(cls, node):
name = node.attrib['id']
result_variable = node.get('resultVariable', None)
parameter_nodes = cls._node_children_by_tag_name(node, 'parameter')
operator = {'name': name, 'resultVariable': result_variable}
parameters = {}
for param_node in parameter_nodes:
if 'value' in param_node.attrib:
parameters[param_node.attrib['id']] = {
'value': param_node.attrib['value'],
'type': param_node.attrib['type']
}
operator['parameters'] = parameters
return operator
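
    # Illustrative markup consumed by _parse_servicetask_operator (an assumed
    # example): the element id names the operation, and each parameter id maps
    # to a {'value', 'type'} pair in the returned dict:
    #
    #   <spiffworkflow:serviceTaskOperator id="http/GetRequest" resultVariable="response">
    #     <spiffworkflow:parameters>
    #       <spiffworkflow:parameter id="url" type="str" value="'https://example.com'"/>
    #     </spiffworkflow:parameters>
    #   </spiffworkflow:serviceTaskOperator>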
def _copy_task_attrs(self, original, loop_characteristics):
        # Unless spiffworkflow:scriptsOnInstances is 'true', the scripts run
        # once on the enclosing loop/multiinstance task, not on each instance.
super()._copy_task_attrs(original)
if loop_characteristics.xpath('@spiffworkflow:scriptsOnInstances', namespaces=SPIFFWORKFLOW_NSMAP) != ['true']:
self.task.prescript = original.prescript
self.task.postscript = original.postscript
original.prescript = None
original.postscript = None
def create_task(self):
        # The main task parser already parses the extensions, but
        # 1. it does so after creating the task, so the result isn't available here, and
        # 2. we want defined attributes rather than an untyped dict
        # (the base parser will still attach its dict as well).
extensions = self.parse_extensions()
prescript = extensions.get('preScript')
postscript = extensions.get('postScript')
return self.spec_class(self.spec, self.bpmn_id, prescript=prescript, postscript=postscript, **self.bpmn_attributes)
class SubWorkflowParser(SpiffTaskParser):
def create_task(self):
extensions = self.parse_extensions()
prescript = extensions.get('preScript')
postscript = extensions.get('postScript')
subworkflow_spec = SubprocessParser.get_subprocess_spec(self)
return self.spec_class(
self.spec,
self.bpmn_id,
subworkflow_spec=subworkflow_spec,
prescript=prescript,
postscript=postscript,
**self.bpmn_attributes)
class ScriptTaskParser(SpiffTaskParser):
def create_task(self):
script = None
for child_node in self.node:
if child_node.tag.endswith('script'):
script = child_node.text
return self.spec_class(self.spec, self.bpmn_id, script, **self.bpmn_attributes)
class CallActivityParser(SpiffTaskParser):
def create_task(self):
extensions = self.parse_extensions()
prescript = extensions.get('preScript')
postscript = extensions.get('postScript')
subworkflow_spec = SubprocessParser.get_call_activity_spec(self)
return self.spec_class(
self.spec,
self.bpmn_id,
subworkflow_spec=subworkflow_spec,
prescript=prescript,
postscript=postscript,
**self.bpmn_attributes)
class ServiceTaskParser(SpiffTaskParser):
def create_task(self):
extensions = self.parse_extensions()
operator = extensions.get('serviceTaskOperator')
prescript = extensions.get('preScript')
postscript = extensions.get('postScript')
return self.spec_class(
self.spec,
self.bpmn_id,
operation_name=operator['name'],
operation_params=operator['parameters'],
result_variable=operator['resultVariable'],
prescript=prescript,
postscript=postscript,
**self.bpmn_attributes)
class BusinessRuleTaskParser(SpiffTaskParser):
def create_task(self):
decision_ref = self.get_decision_ref(self.node)
extensions = self.parse_extensions()
prescript = extensions.get('preScript')
postscript = extensions.get('postScript')
return BusinessRuleTask(
self.spec,
self.bpmn_id,
dmnEngine=self.process_parser.parser.get_engine(decision_ref, self.node),
prescript=prescript,
postscript=postscript,
**self.bpmn_attributes,
)
@staticmethod
def get_decision_ref(node):
extensions = SpiffTaskParser._parse_extensions(node)
return extensions.get('calledDecisionId')

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/specs/spiff_task.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.exceptions import SpiffWorkflowException
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.bpmn.specs.mixins.bpmn_spec_mixin import BpmnSpecMixin
class SpiffBpmnTask(BpmnSpecMixin):
def __init__(self, wf_spec, name, prescript=None, postscript=None, **kwargs):
super().__init__(wf_spec, name, **kwargs)
self.prescript = prescript
self.postscript = postscript
def execute_script(self, my_task, script):
try:
my_task.workflow.script_engine.execute(my_task, script)
except Exception as exc:
my_task._set_state(TaskState.ERROR)
raise exc
def _update_hook(self, my_task):
super()._update_hook(my_task)
if self.prescript is not None:
try:
self.execute_script(my_task, self.prescript)
except SpiffWorkflowException as se:
se.add_note("Error occurred in the Pre-Script")
raise se
return True
def _on_complete_hook(self, my_task):
if self.postscript is not None:
try:
self.execute_script(my_task, self.postscript)
except SpiffWorkflowException as se:
se.add_note("Error occurred in the Post-Script")
raise se
super()._on_complete_hook(my_task)
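
# Timing sketch (illustrative): the prescript runs in _update_hook, i.e.
# before the task becomes ready/complete, and the postscript runs in
# _on_complete_hook, after the task's own behavior. A minimal wiring,
# assuming an existing WorkflowSpec `spec` (the names here are hypothetical):
#
#     task_spec = SpiffBpmnTask(spec, 'tally', prescript='count = 0',
#                               postscript='count = count + 1')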

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/specs/data_object.py
from SpiffWorkflow.bpmn.specs.data_spec import DataObject as BpmnDataObject
class DataObject(BpmnDataObject):
def __init__(self, category=None, *args, **kwargs):
super().__init__(*args, **kwargs)
self.category = category

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/specs/defaults.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.mixins import (
UserTaskMixin,
ManualTaskMixin,
NoneTaskMixin,
ScriptTaskMixin,
SubWorkflowTaskMixin,
CallActivityMixin,
TransactionSubprocessMixin,
StandardLoopTaskMixin,
ParallelMultiInstanceTaskMixin,
SequentialMultiInstanceTaskMixin,
SendTaskMixin,
ReceiveTaskMixin,
)
from SpiffWorkflow.dmn.specs import BusinessRuleTaskMixin
from .mixins.service_task import ServiceTask as ServiceTaskMixin
from .spiff_task import SpiffBpmnTask
class UserTask(UserTaskMixin, SpiffBpmnTask):
pass
class ManualTask(ManualTaskMixin, SpiffBpmnTask):
pass
class NoneTask(NoneTaskMixin, SpiffBpmnTask):
pass
class ScriptTask(ScriptTaskMixin, SpiffBpmnTask):
pass
class SendTask(SendTaskMixin, SpiffBpmnTask):
pass
class ReceiveTask(ReceiveTaskMixin, SpiffBpmnTask):
pass
class StandardLoopTask(StandardLoopTaskMixin, SpiffBpmnTask):
pass
class ParallelMultiInstanceTask(ParallelMultiInstanceTaskMixin, SpiffBpmnTask):
pass
class SequentialMultiInstanceTask(SequentialMultiInstanceTaskMixin, SpiffBpmnTask):
pass
class BusinessRuleTask(BusinessRuleTaskMixin, SpiffBpmnTask):
pass
class SubWorkflowTask(SubWorkflowTaskMixin, SpiffBpmnTask):
pass
class CallActivity(CallActivityMixin, SpiffBpmnTask):
pass
class TransactionSubprocess(TransactionSubprocessMixin, SpiffBpmnTask):
pass
class ServiceTask(ServiceTaskMixin, SpiffBpmnTask):
pass

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/specs/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .spiff_task import SpiffBpmnTask

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/specs/event_definitions.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.event_definitions import (
MessageEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
SignalEventDefinition,
)
from SpiffWorkflow.bpmn.specs.event_definitions.item_aware_event import ItemAwareEventDefinition
from SpiffWorkflow.bpmn import BpmnEvent
class MessageEventDefinition(MessageEventDefinition):
def __init__(self, name, correlation_properties=None, expression=None, message_var=None, process_correlations=None, **kwargs):
super(MessageEventDefinition, self).__init__(name, correlation_properties, **kwargs)
self.expression = expression
self.message_var = message_var
self.process_correlations = process_correlations or []
def throw(self, my_task):
if self.expression is not None:
payload = my_task.workflow.script_engine.evaluate(my_task, self.expression)
correlations = self.get_correlations(my_task, payload)
else:
payload, correlations = {}, {}
event = BpmnEvent(self, payload=payload, correlations=correlations)
my_task.workflow.correlations.update(correlations)
my_task.workflow.top_workflow.catch(event)
def update_task(self, my_task):
correlations = self.calculate_correlations(
my_task.workflow.script_engine,
self.process_correlations,
my_task.data
)
my_task.workflow.correlations.update(correlations)
def update_task_data(self, my_task):
if self.message_var is not None:
my_task.data[self.message_var] = my_task.internal_data.pop(self.name)
def reset(self, my_task):
my_task.internal_data.pop(self.message_var, None)
super(MessageEventDefinition, self).reset(my_task)
class SpiffItemAwareEventDefinition(ItemAwareEventDefinition):
def __init__(self, name, expression=None, variable=None, **kwargs):
super().__init__(name, **kwargs)
self.expression = expression
self.variable = variable
def throw(self, my_task):
if self.expression is not None:
payload = my_task.workflow.script_engine.evaluate(my_task, self.expression)
else:
payload = None
event = BpmnEvent(self, payload=payload)
my_task.workflow.top_workflow.catch(event)
def update_task_data(self, my_task):
if self.variable is not None:
my_task.data[self.variable] = my_task.internal_data.pop(self.name, None)
def reset(self, my_task):
my_task.internal_data.pop(self.name, None)
super().reset(my_task)
class SignalEventDefinition(SpiffItemAwareEventDefinition, SignalEventDefinition):
pass
class ErrorEventDefinition(SpiffItemAwareEventDefinition, ErrorEventDefinition):
pass
class EscalationEventDefinition(SpiffItemAwareEventDefinition, EscalationEventDefinition):
pass
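
# Payload flow sketch (illustrative, names assumed): a throwing event built
# with an expression evaluates it against the task via the script engine and
# ships the result inside a BpmnEvent; the catching side, built with a
# variable name, copies the caught payload into task data:
#
#     throw_def = SignalEventDefinition('order_ready', expression='order_id')
#     catch_def = SignalEventDefinition('order_ready', variable='received_id')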

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/specs/mixins/service_task.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import json
from copy import deepcopy
from SpiffWorkflow.bpmn.specs.mixins.service_task import ServiceTask
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException
class ServiceTask(ServiceTask):
def __init__(self, wf_spec, name, operation_name, operation_params, result_variable, **kwargs):
super().__init__(wf_spec, name, **kwargs)
self.operation_name = operation_name
self.operation_params = operation_params
self.result_variable = result_variable
def _result_variable(self, task):
if self.result_variable is not None and len(self.result_variable) > 0:
return self.result_variable
escaped_spec_name = task.task_spec.name.replace('-', '_')
return f'spiff__{escaped_spec_name}_result'
def _execute(self, task):
def evaluate(param):
param['value'] = task.workflow.script_engine.evaluate(task, param['value'])
return param
operation_params_copy = deepcopy(self.operation_params)
evaluated_params = {k: evaluate(v) for k, v in operation_params_copy.items()}
try:
result = task.workflow.script_engine.call_service(self.operation_name,
evaluated_params, task.data)
except Exception as e:
wte = WorkflowTaskException("Error executing Service Task",
task=task, exception=e)
wte.add_note(str(e))
raise wte
parsed_result = json.loads(result)
task.data[self._result_variable(task)] = parsed_result
return True
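
# Contract sketch for the engine this task relies on (illustrative): the
# configured script engine must expose call_service(operation_name,
# evaluated_params, task_data) and return a JSON string, since the result is
# json.loads()'d into task data. The subclass below is an assumption for
# demonstration, not part of SpiffWorkflow (the import path may vary by
# version):
#
#     from SpiffWorkflow.bpmn.script_engine import PythonScriptEngine
#
#     class ServiceAwareScriptEngine(PythonScriptEngine):
#         def call_service(self, operation_name, operation_params, task_data):
#             # dispatch to a real connector here; echo the name for demo
#             return json.dumps({'operation': operation_name})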

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/serializer/event_definition.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.serializer.helpers.spec import EventDefinitionConverter
class MessageEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties)
dct['expression'] = event_definition.expression
dct['message_var'] = event_definition.message_var
dct['process_correlations'] = self.correlation_properties_to_dict(event_definition.process_correlations)
return dct
def from_dict(self, dct):
dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties'])
dct['process_correlations'] = self.correlation_properties_from_dict(dct.get('process_correlations', []))
event_definition = super().from_dict(dct)
return event_definition
class ItemAwareEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['expression'] = event_definition.expression
dct['variable'] = event_definition.variable
return dct
class ErrorEscalationEventDefinitionConverter(ItemAwareEventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['code'] = event_definition.code
return dct

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/serializer/config.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from copy import deepcopy
from SpiffWorkflow.bpmn.serializer import DEFAULT_CONFIG
from SpiffWorkflow.bpmn.serializer.config import (
NoneTask as DefaultNoneTask,
ManualTask as DefaultManualTask,
UserTask as DefaultUserTask,
SendTask as DefaultSendTask,
ReceiveTask as DefaultReceiveTask,
ScriptTask as DefaultScriptTask,
SubWorkflowTask as DefaultSubWorkflowTask,
TransactionSubprocess as DefaultTransactionSubprocess,
CallActivity as DefaultCallActivity,
StandardLoopTask as DefaultStandardLoopTask,
ParallelMultiInstanceTask as DefaultParallelMultiInstanceTask,
SequentialMultiInstanceTask as DefaultSequentialMultiInstanceTask,
MessageEventDefinition as DefaultMessageEventDefinition,
SignalEventDefinition as DefaultSignalEventDefinition,
ErrorEventDefinition as DefaultErrorEventDefinition,
EscalationEventDefinition as DefaultEscalationEventDefinition,
)
from SpiffWorkflow.spiff.specs.defaults import (
BusinessRuleTask,
NoneTask,
ManualTask,
UserTask,
SendTask,
ReceiveTask,
ScriptTask,
ServiceTask,
SubWorkflowTask,
TransactionSubprocess,
CallActivity,
StandardLoopTask,
ParallelMultiInstanceTask,
SequentialMultiInstanceTask,
)
from SpiffWorkflow.spiff.specs.event_definitions import (
MessageEventDefinition,
SignalEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
)
from .task_spec import (
SpiffBpmnTaskConverter,
SendReceiveTaskConverter,
ScriptTaskConverter,
ServiceTaskConverter,
SubWorkflowTaskConverter,
StandardLoopTaskConverter,
SpiffMultiInstanceConverter,
BusinessRuleTaskConverter,
)
from .event_definition import (
MessageEventDefinitionConverter,
ItemAwareEventDefinitionConverter,
ErrorEscalationEventDefinitionConverter,
)
from SpiffWorkflow.bpmn.specs.data_spec import DataObject as DefaultDataObject
from SpiffWorkflow.spiff.specs.data_object import DataObject
from SpiffWorkflow.spiff.serializer.data_spec import DataObjectConverter
SPIFF_CONFIG = deepcopy(DEFAULT_CONFIG)
SPIFF_CONFIG.pop(DefaultNoneTask)
SPIFF_CONFIG.pop(DefaultManualTask)
SPIFF_CONFIG.pop(DefaultUserTask)
SPIFF_CONFIG.pop(DefaultScriptTask)
SPIFF_CONFIG.pop(DefaultSendTask)
SPIFF_CONFIG.pop(DefaultReceiveTask)
SPIFF_CONFIG.pop(DefaultSubWorkflowTask)
SPIFF_CONFIG.pop(DefaultTransactionSubprocess)
SPIFF_CONFIG.pop(DefaultCallActivity)
SPIFF_CONFIG.pop(DefaultStandardLoopTask)
SPIFF_CONFIG.pop(DefaultParallelMultiInstanceTask)
SPIFF_CONFIG.pop(DefaultSequentialMultiInstanceTask)
SPIFF_CONFIG.pop(DefaultDataObject)
SPIFF_CONFIG[NoneTask] = SpiffBpmnTaskConverter
SPIFF_CONFIG[ManualTask] = SpiffBpmnTaskConverter
SPIFF_CONFIG[UserTask] = SpiffBpmnTaskConverter
SPIFF_CONFIG[ScriptTask] = ScriptTaskConverter
SPIFF_CONFIG[ServiceTask] = ServiceTaskConverter
SPIFF_CONFIG[SendTask] = SendReceiveTaskConverter
SPIFF_CONFIG[ReceiveTask] = SendReceiveTaskConverter
SPIFF_CONFIG[SubWorkflowTask] = SubWorkflowTaskConverter
SPIFF_CONFIG[CallActivity] = SubWorkflowTaskConverter
SPIFF_CONFIG[TransactionSubprocess] = SubWorkflowTaskConverter
SPIFF_CONFIG[ParallelMultiInstanceTask] = SpiffMultiInstanceConverter
SPIFF_CONFIG[SequentialMultiInstanceTask] = SpiffMultiInstanceConverter
SPIFF_CONFIG[StandardLoopTask] = StandardLoopTaskConverter
SPIFF_CONFIG[MessageEventDefinition] = MessageEventDefinitionConverter
SPIFF_CONFIG[SignalEventDefinition] = ItemAwareEventDefinitionConverter
SPIFF_CONFIG[ErrorEventDefinition] = ErrorEscalationEventDefinitionConverter
SPIFF_CONFIG[EscalationEventDefinition] = ErrorEscalationEventDefinitionConverter
SPIFF_CONFIG[BusinessRuleTask] = BusinessRuleTaskConverter
SPIFF_CONFIG[DataObject] = DataObjectConverter
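
# Typical consumption of this map (illustrative; the serializer API has
# changed between SpiffWorkflow versions, so treat this as a sketch):
#
#     from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
#
#     registry = BpmnWorkflowSerializer.configure(SPIFF_CONFIG)
#     serializer = BpmnWorkflowSerializer(registry)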

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/serializer/__init__.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .config import SPIFF_CONFIG as DEFAULT_CONFIG

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/serializer/task_spec.py
# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.serializer.helpers.spec import TaskSpecConverter
from SpiffWorkflow.bpmn.serializer.default.task_spec import MultiInstanceTaskConverter
from SpiffWorkflow.dmn.serializer.task_spec import BaseBusinessRuleTaskConverter
from SpiffWorkflow.spiff.specs.defaults import (
NoneTask,
ManualTask,
UserTask,
ScriptTask,
SendTask,
ReceiveTask,
StandardLoopTask,
ParallelMultiInstanceTask,
SequentialMultiInstanceTask,
BusinessRuleTask,
SubWorkflowTask,
CallActivity,
TransactionSubprocess,
ServiceTask
)
class SpiffBpmnTaskConverter(TaskSpecConverter):
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct['prescript'] = spec.prescript
dct['postscript'] = spec.postscript
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class BusinessRuleTaskConverter(BaseBusinessRuleTaskConverter, SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = BaseBusinessRuleTaskConverter.to_dict(self, spec)
dct.update(SpiffBpmnTaskConverter.to_dict(self, spec))
return dct
class SendReceiveTaskConverter(SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['event_definition'] = self.registry.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.registry.restore(dct['event_definition'])
return super().from_dict(dct)
class ScriptTaskConverter(SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['script'] = spec.script
return dct
class ServiceTaskConverter(SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['operation_name'] = spec.operation_name
dct['operation_params'] = spec.operation_params
dct['result_variable'] = spec.result_variable
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class SubWorkflowTaskConverter(SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_subworkflow_attributes(spec))
return dct
def from_dict(self, dct):
dct['subworkflow_spec'] = dct.pop('spec')
return super().task_spec_from_dict(dct)
class StandardLoopTaskConverter(SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_standard_loop_attributes(spec))
return dct
class SpiffMultiInstanceConverter(MultiInstanceTaskConverter, SpiffBpmnTaskConverter):
def to_dict(self, spec):
dct = MultiInstanceTaskConverter.to_dict(self, spec)
dct.update(SpiffBpmnTaskConverter.to_dict(self, spec))
return dct

# File: sartography_SpiffWorkflow/SpiffWorkflow/spiff/serializer/data_spec.py
from SpiffWorkflow.bpmn.serializer.helpers.spec import BpmnDataSpecificationConverter, EventDefinitionConverter
class DataObjectConverter(BpmnDataSpecificationConverter):
def to_dict(self, data_spec):
dct = super().to_dict(data_spec)
dct['category'] = data_spec.category
return dct

# File: sartography_SpiffWorkflow/SpiffWorkflow/serializer/dict.py
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import json
import pickle
import sys
import warnings
from base64 import b64encode, b64decode
from ..workflow import Workflow
from ..task import Task
from ..operators import Attrib, PathAttrib, Equal, NotEqual, Operator, GreaterThan, LessThan, Match
from ..specs.base import TaskSpec
from ..specs.AcquireMutex import AcquireMutex
from ..specs.Cancel import Cancel
from ..specs.CancelTask import CancelTask
from ..specs.Choose import Choose
from ..specs.ExclusiveChoice import ExclusiveChoice
from ..specs.Execute import Execute
from ..specs.Gate import Gate
from ..specs.Join import Join
from ..specs.Merge import Merge
from ..specs.MultiChoice import MultiChoice
from ..specs.MultiInstance import MultiInstance
from ..specs.ReleaseMutex import ReleaseMutex
from ..specs.Simple import Simple
from ..specs.StartTask import StartTask
from ..specs.SubWorkflow import SubWorkflow
from ..specs.ThreadStart import ThreadStart
from ..specs.ThreadMerge import ThreadMerge
from ..specs.ThreadSplit import ThreadSplit
from ..specs.Trigger import Trigger
from ..specs.WorkflowSpec import WorkflowSpec
from .base import Serializer
from .exceptions import TaskNotSupportedError, MissingSpecError
def get_class(full_class_name):
parts = full_class_name.rsplit('.', 1)
module_name = parts[0]
class_name = parts[1]
__import__(module_name)
return getattr(sys.modules[module_name], class_name)
class DictionarySerializer(Serializer):
def serialize_dict(self, thedict):
return dict(
(str(k), b64encode(pickle.dumps(v, protocol=pickle.HIGHEST_PROTOCOL)))
for k, v in list(thedict.items())
)
def deserialize_dict(self, s_state):
return dict((k, pickle.loads(b64decode(v))) for k, v in list(s_state.items()))
def serialize_list(self, thelist):
return [b64encode(pickle.dumps(v, protocol=pickle.HIGHEST_PROTOCOL)) for v in thelist]
def deserialize_list(self, s_state):
return [pickle.loads(b64decode(v)) for v in s_state]
def serialize_attrib(self, attrib):
return attrib.name
def deserialize_attrib(self, s_state):
return Attrib(s_state)
def serialize_pathattrib(self, pathattrib):
return pathattrib.path
def deserialize_pathattrib(self, s_state):
return PathAttrib(s_state)
def serialize_operator(self, op):
return [self.serialize_arg(a) for a in op.args]
def deserialize_operator(self, s_state):
return [self.deserialize_arg(c) for c in s_state]
def serialize_operator_equal(self, op):
return self.serialize_operator(op)
def deserialize_operator_equal(self, s_state):
return Equal(*[self.deserialize_arg(c) for c in s_state])
def serialize_operator_not_equal(self, op):
return self.serialize_operator(op)
def deserialize_operator_not_equal(self, s_state):
return NotEqual(*[self.deserialize_arg(c) for c in s_state])
def serialize_operator_greater_than(self, op):
return self.serialize_operator(op)
def deserialize_operator_greater_than(self, s_state):
return GreaterThan(*[self.deserialize_arg(c) for c in s_state])
def serialize_operator_less_than(self, op):
return self.serialize_operator(op)
def deserialize_operator_less_than(self, s_state):
return LessThan(*[self.deserialize_arg(c) for c in s_state])
def serialize_operator_match(self, op):
return self.serialize_operator(op)
def deserialize_operator_match(self, s_state):
return Match(*[self.deserialize_arg(c) for c in s_state])
def serialize_arg(self, arg):
if isinstance(arg, Attrib):
return 'Attrib', self.serialize_attrib(arg)
elif isinstance(arg, PathAttrib):
return 'PathAttrib', self.serialize_pathattrib(arg)
elif isinstance(arg, Operator):
module = arg.__class__.__module__
arg_type = module + '.' + arg.__class__.__name__
return arg_type, arg.serialize(self)
return 'value', arg
def deserialize_arg(self, s_state):
arg_type, arg = s_state
if arg_type == 'Attrib':
return self.deserialize_attrib(arg)
elif arg_type == 'PathAttrib':
return self.deserialize_pathattrib(arg)
elif arg_type == 'value':
return arg
arg_cls = get_class(arg_type)
ret = arg_cls.deserialize(self, arg)
        if isinstance(ret, list):
return arg_cls(*ret)
else:
return ret
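
    # Round-trip sketch for serialize_arg/deserialize_arg (illustrative):
    #   serialize_arg(Attrib('x'))   -> ('Attrib', 'x')
    #   serialize_arg(5)             -> ('value', 5)
    #   serialize_arg(Equal(Attrib('x'), 5))
    #       -> ('SpiffWorkflow.operators.Equal', [('Attrib', 'x'), ('value', 5)])
    # deserialize_arg() inverts each of these forms.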
def serialize_task_spec(self, spec):
s_state = dict(name=spec.name,
description=spec.description,
manual=spec.manual,
lookahead=spec.lookahead)
module_name = spec.__class__.__module__
s_state['class'] = module_name + '.' + spec.__class__.__name__
s_state['inputs'] = spec._inputs
s_state['outputs'] = spec._outputs
s_state['data'] = self.serialize_dict(spec.data)
s_state['defines'] = self.serialize_dict(spec.defines)
s_state['pre_assign'] = self.serialize_list(spec.pre_assign)
s_state['post_assign'] = self.serialize_list(spec.post_assign)
# Note: Events are not serialized; this is documented in
# the TaskSpec API docs.
return s_state
def deserialize_task_spec(self, wf_spec, s_state, spec):
spec.description = s_state.get('description', '')
spec.manual = s_state.get('manual', False)
spec.lookahead = s_state.get('lookahead', 2)
spec.data = self.deserialize_dict(s_state.get('data', {}))
spec.defines = self.deserialize_dict(s_state.get('defines', {}))
spec.pre_assign = self.deserialize_list(s_state.get('pre_assign', []))
spec.post_assign = self.deserialize_list(s_state.get('post_assign', []))
spec._inputs = s_state.get('inputs', [])
spec._outputs = s_state.get('outputs', [])
return spec
def serialize_acquire_mutex(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['mutex'] = spec.mutex
return s_state
def deserialize_acquire_mutex(self, wf_spec, s_state):
spec = AcquireMutex(wf_spec, s_state['name'], s_state['mutex'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
spec.mutex = s_state['mutex']
return spec
def serialize_cancel(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['cancel_successfully'] = spec.cancel_successfully
return s_state
def deserialize_cancel(self, wf_spec, s_state):
spec = Cancel(wf_spec, s_state['name'], success=s_state.get('cancel_successfully', False))
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_cancel_task(self, spec):
return self.serialize_trigger(spec)
def deserialize_cancel_task(self, wf_spec, s_state):
spec = CancelTask(wf_spec,
s_state['name'],
s_state['context'],
times=self.deserialize_arg(s_state['times']))
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_choose(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['context'] = spec.context
# despite the various documentation suggesting that choice ought to be
# a collection of objects, here it is a collection of strings. The
# handler in MultiChoice.py converts it to TaskSpecs. So instead of:
# s_state['choice'] = [c.name for c in spec.choice]
# we have:
s_state['choice'] = spec.choice
return s_state
def deserialize_choose(self, wf_spec, s_state):
spec = Choose(wf_spec,
s_state['name'],
s_state['context'],
s_state['choice'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_exclusive_choice(self, spec):
s_state = self.serialize_multi_choice(spec)
s_state['default_task_spec'] = spec.default_task_spec
return s_state
def deserialize_exclusive_choice(self, wf_spec, s_state):
spec = ExclusiveChoice(wf_spec, s_state['name'])
self.deserialize_multi_choice(wf_spec, s_state, spec=spec)
spec.default_task_spec = s_state['default_task_spec']
return spec
def serialize_execute(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['args'] = spec.args
return s_state
def deserialize_execute(self, wf_spec, s_state):
spec = Execute(wf_spec, s_state['name'], s_state['args'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_gate(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['context'] = spec.context
return s_state
def deserialize_gate(self, wf_spec, s_state):
spec = Gate(wf_spec, s_state['name'], s_state['context'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_join(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['split_task'] = spec.split_task
s_state['threshold'] = b64encode(
pickle.dumps(spec.threshold, protocol=pickle.HIGHEST_PROTOCOL))
s_state['cancel_remaining'] = spec.cancel_remaining
return s_state
def deserialize_join(self, wf_spec, s_state):
        if isinstance(s_state['threshold'], dict):
byte_payload = s_state['threshold']['__bytes__']
else:
byte_payload = s_state['threshold']
spec = Join(wf_spec,
s_state['name'],
split_task=s_state['split_task'],
threshold=pickle.loads(b64decode(byte_payload)),
cancel=s_state['cancel_remaining'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_multi_choice(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['cond_task_specs'] = thestate = []
for condition, spec_name in spec.cond_task_specs:
cond = self.serialize_arg(condition)
thestate.append((cond, spec_name))
# spec.choice is actually a list of strings in MultiChoice: see
# _predict_hook. So, instead of
# s_state['choice'] = spec.choice and spec.choice.name or None
s_state['choice'] = spec.choice or None
return s_state
def deserialize_multi_choice(self, wf_spec, s_state, spec=None):
if spec is None:
spec = MultiChoice(wf_spec, s_state['name'])
if s_state.get('choice') is not None:
# this is done in _predict_hook: it's kept as a string for now.
# spec.choice = wf_spec.get_task_spec_from_name(s_state['choice'])
spec.choice = s_state['choice']
for cond, spec_name in s_state['cond_task_specs']:
condition = self.deserialize_arg(cond)
spec.cond_task_specs.append((condition, spec_name))
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_multi_instance(self, spec):
s_state = self.serialize_task_spec(spec)
# here we need to add in all of the things that would get serialized
# for other classes that the MultiInstance could be -
if isinstance(spec, SubWorkflow):
br_state = self.serialize_sub_workflow(spec)
s_state['file'] = br_state['file']
s_state['in_assign'] = br_state['in_assign']
s_state['out_assign'] = br_state['out_assign']
s_state['times'] = self.serialize_arg(spec.times)
return s_state
def deserialize_multi_instance(self, wf_spec, s_state):
spec = MultiInstance(wf_spec, s_state['name'], times=self.deserialize_arg(s_state['times']))
if isinstance(spec, SubWorkflow):
if s_state.get('file'):
spec.file = self.deserialize_arg(s_state['file'])
else:
spec.file = None
spec.in_assign = self.deserialize_list(s_state['in_assign'])
spec.out_assign = self.deserialize_list(s_state['out_assign'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_release_mutex(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['mutex'] = spec.mutex
return s_state
def deserialize_release_mutex(self, wf_spec, s_state):
spec = ReleaseMutex(wf_spec, s_state['name'], s_state['mutex'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_simple(self, spec):
assert isinstance(spec, TaskSpec)
return self.serialize_task_spec(spec)
def deserialize_simple(self, wf_spec, s_state):
assert isinstance(wf_spec, WorkflowSpec)
spec = Simple(wf_spec, s_state['name'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_start_task(self, spec):
return self.serialize_task_spec(spec)
def deserialize_start_task(self, wf_spec, s_state):
spec = StartTask(wf_spec)
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_sub_workflow(self, spec):
warnings.warn("SubWorkflows cannot be safely serialized as they only" +
" store a reference to the subworkflow specification " +
" as a path to an external XML file.")
s_state = self.serialize_task_spec(spec)
s_state['file'] = spec.file
s_state['in_assign'] = self.serialize_list(spec.in_assign)
s_state['out_assign'] = self.serialize_list(spec.out_assign)
return s_state
def deserialize_sub_workflow(self, wf_spec, s_state):
warnings.warn("SubWorkflows cannot be safely deserialized as they " +
"only store a reference to the subworkflow " +
"specification as a path to an external XML file.")
spec = SubWorkflow(wf_spec, s_state['name'], s_state['file'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
spec.in_assign = self.deserialize_list(s_state['in_assign'])
spec.out_assign = self.deserialize_list(s_state['out_assign'])
return spec
def serialize_thread_merge(self, spec):
return self.serialize_join(spec)
def deserialize_thread_merge(self, wf_spec, s_state):
spec = ThreadMerge(wf_spec, s_state['name'], s_state['split_task'])
        # While ThreadMerge is a Join, deserialize_join isn't what we want
        # here: it builds a Join from scratch, which we don't need (the
        # ThreadMerge constructor does all of that). Just fill in the generic
        # task spec fields.
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_thread_split(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['times'] = self.serialize_arg(spec.times)
return s_state
def deserialize_thread_split(self, wf_spec, s_state):
spec = ThreadSplit(wf_spec,
s_state['name'],
times=self.deserialize_arg(s_state['times']),
suppress_threadstart_creation=True)
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_thread_start(self, spec):
return self.serialize_task_spec(spec)
def deserialize_thread_start(self, wf_spec, s_state):
spec = ThreadStart(wf_spec)
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def deserialize_merge(self, wf_spec, s_state):
spec = Merge(wf_spec, s_state['name'], s_state['split_task'])
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_trigger(self, spec):
s_state = self.serialize_task_spec(spec)
s_state['context'] = spec.context
s_state['times'] = self.serialize_arg(spec.times)
s_state['queued'] = spec.queued
return s_state
def deserialize_trigger(self, wf_spec, s_state):
spec = Trigger(wf_spec,
s_state['name'],
s_state['context'],
self.deserialize_arg(s_state['times']))
self.deserialize_task_spec(wf_spec, s_state, spec=spec)
return spec
def serialize_workflow_spec(self, spec, **kwargs):
s_state = dict(name=spec.name, description=spec.description, file=spec.file)
s_state['task_specs'] = dict(
(k, v.serialize(self))
for k, v in list(spec.task_specs.items())
)
return s_state
def deserialize_workflow_spec(self, s_state, **kwargs):
spec = WorkflowSpec(s_state['name'], filename=s_state['file'])
spec.description = s_state['description']
# Handle Start Task
start_task_spec_state = s_state['task_specs']['Start']
start_task_spec = StartTask.deserialize(self, spec, start_task_spec_state)
spec.start = start_task_spec
spec.task_specs['Start'] = start_task_spec
for name, task_spec_state in list(s_state['task_specs'].items()):
if name == 'Start':
continue
task_spec_cls = get_class(task_spec_state['class'])
task_spec = task_spec_cls.deserialize(self, spec, task_spec_state)
spec.task_specs[name] = task_spec
if s_state.get('end', None):
spec.end = spec.get_task_spec_from_name(s_state['end'])
assert spec.start is spec.get_task_spec_from_name('Start')
return spec
def serialize_workflow(self, workflow, include_spec=True, **kwargs):
assert isinstance(workflow, Workflow)
s_state = dict()
if include_spec:
s_state['wf_spec'] = self.serialize_workflow_spec(workflow.spec, **kwargs)
s_state['data'] = self.serialize_dict(workflow.data)
value = workflow.last_task
s_state['last_task'] = value.id if value is not None else None
s_state['success'] = workflow.success
s_state['task_tree'] = self.serialize_task(workflow.task_tree)
return s_state
def deserialize_workflow(self, s_state, wf_class=Workflow, **kwargs):
"""It is possible to override the workflow class, and specify a
workflow_spec, otherwise the spec is assumed to be serialized in the
s_state['wf_spec']"""
if isinstance(s_state['wf_spec'], str):
spec_dct = json.loads(s_state['wf_spec'])
else:
spec_dct = s_state['wf_spec']
        reset_specs = [spec['name'] for spec in spec_dct['task_specs'].values() if spec['class'].endswith('LoopResetTask')]
        for name in reset_specs:
            spec_dct['task_specs'].pop(name)
        wf_spec = self.deserialize_workflow_spec(spec_dct, **kwargs)
workflow = wf_class(wf_spec)
workflow.data = self.deserialize_dict(s_state['data'])
workflow.success = s_state['success']
workflow.spec = wf_spec
workflow.task_tree = self.deserialize_task(workflow, s_state['task_tree'], reset_specs)
if workflow.last_task is not None:
workflow.last_task = workflow.get_task_from_id(s_state['last_task'])
return workflow
def serialize_task(self, task, skip_children=False):
assert isinstance(task, Task)
if isinstance(task.task_spec, SubWorkflow):
raise TaskNotSupportedError(
"Subworkflow tasks cannot be serialized (due to their use of" +
" internal_data to store the subworkflow).")
s_state = dict()
s_state['id'] = task.id
s_state['parent'] = task.parent.id if task.parent is not None else None
if not skip_children:
s_state['children'] = [self.serialize_task(child) for child in task.children]
s_state['state'] = task.state
s_state['triggered'] = task.triggered
s_state['task_spec'] = task.task_spec.name
s_state['last_state_change'] = task.last_state_change
s_state['data'] = self.serialize_dict(task.data)
s_state['internal_data'] = task.internal_data
return s_state
def deserialize_task(self, workflow, s_state, ignored_specs=None):
assert isinstance(workflow, Workflow)
old_spec_name = s_state['task_spec']
        if ignored_specs is not None and old_spec_name in ignored_specs:
return None
task_spec = workflow.spec.get_task_spec_from_name(old_spec_name)
if task_spec is None:
raise MissingSpecError("Unknown task spec: " + old_spec_name)
task_id = s_state['id']
parent_id = s_state['parent']
parent = workflow.get_task_from_id(parent_id) if parent_id is not None else None
task = Task(workflow, task_spec, parent, id=task_id)
task.children = self._deserialize_task_children(task, s_state, ignored_specs)
task._state = s_state['state']
task.triggered = s_state['triggered']
task.last_state_change = s_state['last_state_change']
task.data = self.deserialize_dict(s_state['data'])
task.internal_data = s_state['internal_data']
return task
def _deserialize_task_children(self, task, s_state, ignored_specs):
"""This may need to be overridden if you need to support
deserialization of sub-workflows"""
children = [self.deserialize_task(task.workflow, c, ignored_specs) for c in s_state['children']]
return [c for c in children if c is not None]
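
# End-to-end usage sketch (illustrative; `workflow` is any live Workflow):
#
#     serializer = DictionarySerializer()
#     s_state = serializer.serialize_workflow(workflow)
#     restored = serializer.deserialize_workflow(s_state)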

# File: sartography_SpiffWorkflow/SpiffWorkflow/serializer/prettyxml.py
# Copyright (C) 2007-2012 Samuel Abels, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .. import operators
from ..specs.Simple import Simple
from ..specs.WorkflowSpec import WorkflowSpec
from ..exceptions import SpiffWorkflowException
from .base import Serializer, spec_map, op_map
# Build the mappings from XML tag names to spec classes and operator classes.
_spec_map = spec_map()
_op_map = op_map()
class XMLParserException(SpiffWorkflowException):
pass
class XmlSerializer(Serializer):
"""Parses XML into a WorkflowSpec object."""
# Note: This is not a serializer. It is a parser for Spiff's XML format
# However, it is too disruptive to rename everything that uses it.
def raise_parser_exception(self, message):
        raise XMLParserException(message)
def deserialize_assign(self, workflow, start_node):
"""
Reads the "pre-assign" or "post-assign" tag from the given node.
        start_node -- the xml node (lxml element)
"""
name = start_node.attrib.get('name')
attrib = start_node.attrib.get('field')
value = start_node.attrib.get('value')
kwargs = {}
        if not name:
self.raise_parser_exception('name attribute required')
if attrib is not None and value is not None:
self.raise_parser_exception('Both, field and right-value attributes found')
elif attrib is None and value is None:
self.raise_parser_exception('field or value attribute required')
elif value is not None:
kwargs['right'] = value
else:
kwargs['right_attribute'] = attrib
return operators.Assign(name, **kwargs)
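
    # Illustrative markup handled by deserialize_assign (assumed example):
    #   <assign name="status" value="open"/>       -> Assign('status', right='open')
    #   <assign name="status" field="old_status"/> -> Assign('status', right_attribute='old_status')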
def deserialize_data(self, workflow, start_node):
"""
Reads a "data" or "define" tag from the given node.
        start_node -- the xml node (lxml element)
"""
name = start_node.attrib.get('name')
value = start_node.attrib.get('value')
return name, value
def deserialize_assign_list(self, workflow, start_node):
"""
Reads a list of assignments from the given node.
workflow -- the workflow
        start_node -- the xml structure (lxml element)
"""
# Collect all information.
assignments = []
        for node in start_node:
            if not isinstance(node.tag, str):
pass
elif node.tag.lower() == 'assign':
assignments.append(self.deserialize_assign(workflow, node))
else:
self.raise_parser_exception('Unknown node: %s' % node.tag)
return assignments
def deserialize_logical(self, node):
"""
Reads the logical tag from the given node, returns a Condition object.
        node -- the xml node (lxml.etree element)
"""
term1_attrib = node.attrib.get('left-field')
term1_value = node.attrib.get('left-value')
op = node.tag.lower()
term2_attrib = node.attrib.get('right-field')
term2_value = node.attrib.get('right-value')
if op not in _op_map:
self.raise_parser_exception('Invalid operator')
if term1_attrib is not None and term1_value is not None:
            self.raise_parser_exception('Both left-field and left-value attributes found')
elif term1_attrib is None and term1_value is None:
self.raise_parser_exception('left-field or left-value attribute required')
elif term1_value is not None:
left = term1_value
else:
left = operators.Attrib(term1_attrib)
if term2_attrib is not None and term2_value is not None:
            self.raise_parser_exception('Both right-field and right-value attributes found')
elif term2_attrib is None and term2_value is None:
self.raise_parser_exception('right-field or right-value attribute required')
elif term2_value is not None:
right = term2_value
else:
right = operators.Attrib(term2_attrib)
return _op_map[op](left, right)
def deserialize_condition(self, workflow, start_node):
"""
Reads the conditional statement from the given node.
workflow -- the workflow with which the concurrence is associated
        start_node -- the xml structure (lxml.etree element)
"""
# Collect all information.
condition = None
spec_name = None
        for node in start_node:
if not isinstance(node.tag, str):
pass
elif node.tag.lower() == 'successor':
if spec_name is not None:
self.raise_parser_exception('Duplicate task name %s' % spec_name)
if node.text is None:
self.raise_parser_exception('Successor tag without a task name')
spec_name = node.text
elif node.tag.lower() in _op_map:
if condition is not None:
self.raise_parser_exception('Multiple conditions are not yet supported')
condition = self.deserialize_logical(node)
else:
self.raise_parser_exception('Unknown node: %s' % node.tag)
if condition is None:
self.raise_parser_exception('Missing condition in conditional statement')
if spec_name is None:
self.raise_parser_exception('A %s has no task specified' % start_node.tag)
return condition, spec_name
def deserialize_task_spec(self, workflow, start_node, read_specs):
"""
        Reads the task spec from the given node and stores it, together with
        its successors, in the read_specs dictionary.
workflow -- the workflow with which the task is associated
        start_node -- the xml structure (lxml.etree element)
"""
# Extract attributes from the node.
nodetype = start_node.tag.lower()
name = start_node.attrib.get('name', '').lower()
context = start_node.attrib.get('context', '').lower()
mutex = start_node.attrib.get('mutex', '').lower()
cancel = start_node.attrib.get('cancel', '').lower()
success = start_node.attrib.get('success', '').lower()
times = start_node.attrib.get('times', '').lower()
times_field = start_node.attrib.get('times-field', '').lower()
threshold = start_node.attrib.get('threshold', '').lower()
threshold_field = start_node.attrib.get('threshold-field', '').lower()
file_name = start_node.attrib.get('file', '').lower()
file_field = start_node.attrib.get('file-field', '').lower()
kwargs = {'data': {},
'defines': {},
'pre_assign': [],
'post_assign': []}
if nodetype not in _spec_map:
self.raise_parser_exception('Invalid task type "%s"' % nodetype)
if nodetype == 'start-task':
name = 'start'
if name == '':
self.raise_parser_exception('Invalid task name "%s"' % name)
if name in read_specs:
self.raise_parser_exception('Duplicate task name "%s"' % name)
if cancel != '' and cancel != '0':
kwargs['cancel'] = True
if success != '' and success != '0':
kwargs['success'] = True
if times != '':
kwargs['times'] = int(times)
if times_field != '':
kwargs['times'] = operators.Attrib(times_field)
if threshold != '':
kwargs['threshold'] = int(threshold)
if threshold_field != '':
kwargs['threshold'] = operators.Attrib(threshold_field)
if file_name != '':
kwargs['file'] = file_name
if file_field != '':
kwargs['file'] = operators.Attrib(file_field)
if nodetype == 'choose':
kwargs['choice'] = []
if nodetype == 'trigger':
context = [context]
if mutex != '':
context = mutex
# Walk through the children of the node.
successors = []
        for node in start_node:
if not isinstance(node.tag, str):
pass
elif node.tag == 'description':
kwargs['description'] = node.text
elif node.tag == 'successor' or node.tag == 'default-successor':
if not node.text:
self.raise_parser_exception('Empty %s tag' % node.tag)
successors.append((None, node.text))
elif node.tag == 'conditional-successor':
successors.append(self.deserialize_condition(workflow, node))
elif node.tag == 'define':
key, value = self.deserialize_data(workflow, node)
kwargs['defines'][key] = value
# "property" tag exists for backward compatibility.
elif node.tag == 'data' or node.tag == 'property':
key, value = self.deserialize_data(workflow, node)
kwargs['data'][key] = value
elif node.tag == 'pre-assign':
kwargs['pre_assign'].append(
self.deserialize_assign(workflow, node))
elif node.tag == 'post-assign':
kwargs['post_assign'].append(
self.deserialize_assign(workflow, node))
elif node.tag == 'in':
kwargs['in_assign'] = self.deserialize_assign_list(
workflow, node)
elif node.tag == 'out':
kwargs['out_assign'] = self.deserialize_assign_list(
workflow, node)
elif node.tag == 'cancel':
if not node.text:
self.raise_parser_exception('Empty %s tag' % node.tag)
if context == '':
context = []
elif not isinstance(context, list):
context = [context]
context.append(node.text)
elif node.tag == 'pick':
if not node.text:
self.raise_parser_exception('Empty %s tag' % node.tag)
kwargs['choice'].append(node.text)
else:
self.raise_parser_exception('Unknown node: %s' % node.tag)
# Create a new instance of the task spec.
module = _spec_map[nodetype]
if nodetype == 'start-task':
spec = module(workflow, **kwargs)
elif nodetype == 'multi-instance' or nodetype == 'thread-split':
if times == '' and times_field == '':
self.raise_parser_exception('Missing "times" or "times-field" in "%s"' % name)
elif times != '' and times_field != '':
                self.raise_parser_exception('Both "times" and "times-field" given in "%s"' % name)
spec = module(workflow, name, **kwargs)
elif context == '':
spec = module(workflow, name, **kwargs)
else:
spec = module(workflow, name, context, **kwargs)
read_specs[name] = spec, successors
def deserialize_workflow_spec(self, root_node, filename=None):
"""
Reads the workflow from the given XML structure and returns a
WorkflowSpec instance.
"""
name = root_node.attrib.get('name')
        if not name:
self.raise_parser_exception('%s without a name attribute' % root_node.tag)
# Read all task specs and create a list of successors.
workflow_spec = WorkflowSpec(name, filename)
end = Simple(workflow_spec, 'End'), []
read_specs = dict(end=end)
        for child_node in root_node:
if not isinstance(child_node.tag, str):
pass
elif child_node.tag == 'name':
workflow_spec.name = child_node.text
elif child_node.tag == 'description':
workflow_spec.description = child_node.text
elif child_node.tag.lower() in _spec_map:
self.deserialize_task_spec(workflow_spec, child_node, read_specs)
else:
self.raise_parser_exception('Unknown node: %s' % child_node.tag)
        # Replace the default start task with the one read from the XML.
workflow_spec.start = read_specs['start'][0]
# Connect all task specs.
for name in read_specs:
spec, successors = read_specs[name]
for condition, successor_name in successors:
if successor_name not in read_specs:
self.raise_parser_exception('Unknown successor: "%s"' % successor_name)
                successor, _ = read_specs[successor_name]
if condition is None:
spec.connect(successor)
else:
spec.connect_if(condition, successor)
return workflow_spec
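# Usage sketch (illustrative, not part of the original module): parsing a
# workflow spec from Spiff's XML format. The file name 'workflow.xml' is a
# hypothetical placeholder, and error handling is omitted.
#
#     from lxml import etree
#     from SpiffWorkflow.serializer.prettyxml import XmlSerializer
#
#     root = etree.parse('workflow.xml').getroot()
#     spec = XmlSerializer().deserialize_workflow_spec(root, filename='workflow.xml')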
| 13,644 | Python | .py | 289 | 36.276817 | 94 | 0.592451 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,035 | xml.py | sartography_SpiffWorkflow/SpiffWorkflow/serializer/xml.py | # This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import warnings
from uuid import UUID
from lxml import etree
from lxml.etree import SubElement
from ..workflow import Workflow
from ..task import Task, TaskState
from ..operators import (Attrib, Assign, PathAttrib, Equal, NotEqual, GreaterThan, LessThan, Match)
from ..specs.AcquireMutex import AcquireMutex
from ..specs.Cancel import Cancel
from ..specs.CancelTask import CancelTask
from ..specs.Choose import Choose
from ..specs.ExclusiveChoice import ExclusiveChoice
from ..specs.Execute import Execute
from ..specs.Gate import Gate
from ..specs.Join import Join
from ..specs.Merge import Merge
from ..specs.MultiChoice import MultiChoice
from ..specs.MultiInstance import MultiInstance
from ..specs.ReleaseMutex import ReleaseMutex
from ..specs.Simple import Simple
from ..specs.StartTask import StartTask
from ..specs.SubWorkflow import SubWorkflow
from ..specs.ThreadStart import ThreadStart
from ..specs.ThreadMerge import ThreadMerge
from ..specs.ThreadSplit import ThreadSplit
from ..specs.Trigger import Trigger
from ..specs.WorkflowSpec import WorkflowSpec
from .base import Serializer, spec_map, op_map
from .exceptions import TaskNotSupportedError
# Map tag names to the spec and operator classes they represent.
_spec_map = spec_map()
_op_map = op_map()
class XmlSerializer(Serializer):
def serialize_attrib(self, op):
"""
Serializer for :meth:`SpiffWorkflow.operators.Attrib`.
Example::
<attribute>foobar</attribute>
"""
elem = etree.Element('attribute')
elem.text = op.name
return elem
def deserialize_attrib(self, elem):
return Attrib(str(elem.text))
def serialize_pathattrib(self, op):
"""
Serializer for :meth:`SpiffWorkflow.operators.PathAttrib`.
Example::
<path>foobar</path>
"""
elem = etree.Element('path')
elem.text = op.path
return elem
def deserialize_pathattrib(self, elem):
return PathAttrib(str(elem.text))
def serialize_assign(self, op):
"""
Serializer for :meth:`SpiffWorkflow.operators.Assign`.
Example::
<assign>
<name>foobar</name>
<value>doodle</value>
</assign>
"""
elem = etree.Element('assign')
self.serialize_value(SubElement(elem, 'name'), op.left_attribute)
if op.right:
self.serialize_value(SubElement(elem, 'value'), op.right)
if op.right_attribute:
self.serialize_value(
SubElement(elem, 'value-attribute'), op.right_attribute)
return elem
def deserialize_assign(self, elem):
name = elem.findtext('name')
value = elem.findtext('value')
value_attribute = elem.findtext('value-attribute')
return Assign(left_attribute=name,
right_attribute=value_attribute,
right=value)
def serialize_value(self, parent_elem, value):
"""
Serializes str, Attrib, or PathAttrib objects.
Example::
<attribute>foobar</attribute>
"""
        if isinstance(value, (str, int)):
parent_elem.text = str(value)
elif value is None:
parent_elem.text = None
else:
parent_elem.append(value.serialize(self))
def deserialize_value(self, value_elem):
value = value_elem.text
if value is not None:
return str(value)
value = value_elem[0]
if value.tag == 'attribute':
return Attrib.deserialize(self, value)
elif value.tag == 'path':
return PathAttrib.deserialize(self, value)
elif value.tag == 'assign':
return Assign.deserialize(self, value)
else:
raise ValueError('unsupported tag:', value.tag)
def serialize_value_map(self, map_elem, thedict):
"""
Serializes a dictionary of key/value pairs, where the values are
either strings, or Attrib, or PathAttrib objects.
Example::
<variable>
<name>foo</name>
<value>text</value>
</variable>
<variable>
<name>foo2</name>
<value><attribute>foobar</attribute></value>
</variable>
"""
for key, value in sorted((str(k), v) for (k, v) in thedict.items()):
var_elem = SubElement(map_elem, 'variable')
SubElement(var_elem, 'name').text = str(key)
value_elem = SubElement(var_elem, 'value')
self.serialize_value(value_elem, value)
return map_elem
def deserialize_value_map(self, map_elem):
themap = {}
for var_elem in map_elem:
name = str(var_elem.find('name').text)
value_elem = var_elem.find('value')
themap[name] = self.deserialize_value(value_elem)
return themap
def serialize_value_list(self, list_elem, thelist):
"""
Serializes a list, where the values are objects of type
str, Attrib, or PathAttrib.
Example::
<value>text</value>
<value><attribute>foobar</attribute></value>
<value><path>foobar</path></value>
"""
for value in thelist:
value_elem = SubElement(list_elem, 'value')
self.serialize_value(value_elem, value)
return list_elem
def deserialize_value_list(self, elem):
thelist = []
for value_elem in elem:
thelist.append(self.deserialize_value(value_elem))
return thelist
def serialize_operator_equal(self, op):
"""
Serializer for :meth:`SpiffWorkflow.operators.Equal`.
Example::
<equals>
<value>text</value>
<value><attribute>foobar</attribute></value>
<value><path>foobar</path></value>
</equals>
"""
elem = etree.Element('equals')
return self.serialize_value_list(elem, op.args)
def deserialize_operator_equal(self, elem):
return Equal(*self.deserialize_value_list(elem))
def serialize_operator_not_equal(self, op):
"""
Serializer for :meth:`SpiffWorkflow.operators.NotEqual`.
Example::
<not-equals>
<value>text</value>
<value><attribute>foobar</attribute></value>
<value><path>foobar</path></value>
</not-equals>
"""
elem = etree.Element('not-equals')
return self.serialize_value_list(elem, op.args)
def deserialize_operator_not_equal(self, elem):
return NotEqual(*self.deserialize_value_list(elem))
def serialize_operator_greater_than(self, op):
"""
        Serializer for :meth:`SpiffWorkflow.operators.GreaterThan`.
Example::
<greater-than>
<value>text</value>
<value><attribute>foobar</attribute></value>
</greater-than>
"""
elem = etree.Element('greater-than')
return self.serialize_value_list(elem, op.args)
def deserialize_operator_greater_than(self, elem):
return GreaterThan(*self.deserialize_value_list(elem))
def serialize_operator_less_than(self, op):
"""
        Serializer for :meth:`SpiffWorkflow.operators.LessThan`.
Example::
<less-than>
<value>text</value>
<value><attribute>foobar</attribute></value>
</less-than>
"""
elem = etree.Element('less-than')
return self.serialize_value_list(elem, op.args)
def deserialize_operator_less_than(self, elem):
return LessThan(*self.deserialize_value_list(elem))
def serialize_operator_match(self, op):
"""
        Serializer for :meth:`SpiffWorkflow.operators.Match`.
Example::
<matches>
<value>text</value>
<value><attribute>foobar</attribute></value>
</matches>
"""
elem = etree.Element('matches')
return self.serialize_value_list(elem, op.args)
def deserialize_operator_match(self, elem):
return Match(*self.deserialize_value_list(elem))
def deserialize_operator(self, elem):
cls = _op_map[elem.tag]
return cls.deserialize(self, elem)
def serialize_task_spec(self, spec, elem):
"""
Serializes common attributes of :meth:`SpiffWorkflow.specs.TaskSpec`.
"""
SubElement(elem, 'name').text = spec.name
if spec.description:
SubElement(elem, 'description').text = spec.description
if spec.manual:
SubElement(elem, 'manual')
SubElement(elem, 'lookahead').text = str(spec.lookahead)
self.serialize_value_list(SubElement(elem, 'inputs'), spec._inputs)
self.serialize_value_list(SubElement(elem, 'outputs'), spec._outputs)
self.serialize_value_map(SubElement(elem, 'data'), spec.data)
self.serialize_value_map(SubElement(elem, 'defines'), spec.defines)
self.serialize_value_list(SubElement(elem, 'pre-assign'), spec.pre_assign)
self.serialize_value_list(SubElement(elem, 'post-assign'), spec.post_assign)
# Note: Events are not serialized; this is documented in
# the TaskSpec API docs.
return elem
def deserialize_task_spec(self, wf_spec, elem, spec_cls, **kwargs):
name = elem.findtext('name')
spec = spec_cls(wf_spec, name, **kwargs)
spec.description = elem.findtext('description', spec.description)
        # 'manual' is serialized above as a bare presence element, so test for
        # the element itself rather than reading its (empty) text.
        if elem.find('manual') is not None:
            spec.manual = True
spec.lookahead = int(elem.findtext('lookahead', spec.lookahead))
data_elem = elem.find('data')
if data_elem is not None:
spec.data = self.deserialize_value_map(data_elem)
defines_elem = elem.find('defines')
if defines_elem is not None:
spec.defines = self.deserialize_value_map(defines_elem)
pre_assign_elem = elem.find('pre-assign')
if pre_assign_elem is not None:
spec.pre_assign = self.deserialize_value_list(pre_assign_elem)
post_assign_elem = elem.find('post-assign')
if post_assign_elem is not None:
spec.post_assign = self.deserialize_value_list(post_assign_elem)
spec._inputs = self.deserialize_value_list(elem.find('inputs'))
spec._outputs = self.deserialize_value_list(elem.find('outputs'))
return spec
def serialize_acquire_mutex(self, spec):
"""
Serializer for :meth:`SpiffWorkflow.specs.AcquireMutex`.
"""
elem = etree.Element('acquire-mutex')
self.serialize_task_spec(spec, elem)
SubElement(elem, 'mutex').text = spec.mutex
return elem
def deserialize_acquire_mutex(self, wf_spec, elem, cls=AcquireMutex,
**kwargs):
mutex = elem.findtext('mutex')
return self.deserialize_task_spec(wf_spec,
elem,
cls,
mutex=mutex,
**kwargs)
def serialize_cancel(self, spec):
elem = etree.Element('cancel')
self.serialize_task_spec(spec, elem)
SubElement(elem, 'cancel-successfully')
return elem
def deserialize_cancel(self, wf_spec, elem, cls=Cancel,
**kwargs):
success = elem.find('cancel-successfully') is not None
return self.deserialize_task_spec(wf_spec,
elem,
cls,
success=success,
**kwargs)
def serialize_cancel_task(self, spec):
elem = etree.Element('cancel-task')
return self.serialize_trigger(spec, elem)
def deserialize_cancel_task(self, wf_spec, elem, cls=CancelTask, **kwargs):
return self.deserialize_trigger(wf_spec, elem, cls, **kwargs)
def serialize_choose(self, spec, elem=None):
if elem is None:
elem = etree.Element('choose')
elem = self.serialize_task_spec(spec, elem)
SubElement(elem, 'context').text = spec.context
choice_elem = SubElement(elem, 'choice')
self.serialize_value_list(choice_elem, spec.choice)
return elem
def deserialize_choose(self, wf_spec, elem, cls=Choose, **kwargs):
choice = self.deserialize_value_list(elem.find('choice'))
context = elem.findtext('context')
return self.deserialize_task_spec(wf_spec, elem, cls, choice=choice,
context=context, **kwargs)
def serialize_exclusive_choice(self, spec, elem=None):
if elem is None:
elem = etree.Element('exclusive-choice')
self.serialize_multi_choice(spec, elem)
SubElement(elem, 'default_task_spec').text = spec.default_task_spec
return elem
def deserialize_exclusive_choice(self, wf_spec, elem, cls=ExclusiveChoice,
**kwargs):
spec = self.deserialize_multi_choice(wf_spec, elem, cls, **kwargs)
spec.default_task_spec = elem.findtext('default_task_spec')
return spec
def serialize_execute(self, spec, elem=None):
if elem is None:
elem = etree.Element('execute')
self.serialize_value_list(SubElement(elem, 'args'), spec.args)
return self.serialize_task_spec(spec, elem)
def deserialize_execute(self, wf_spec, elem, cls=Execute, **kwargs):
args = self.deserialize_value_list(elem.find('args'))
return self.deserialize_task_spec(wf_spec, elem, cls, args=args,
**kwargs)
def serialize_gate(self, spec, elem=None):
if elem is None:
elem = etree.Element('gate')
SubElement(elem, 'context').text = spec.context
return self.serialize_task_spec(spec, elem)
def deserialize_gate(self, wf_spec, elem, cls=Gate, **kwargs):
context = elem.findtext('context')
return self.deserialize_task_spec(wf_spec, elem, cls, context=context,
**kwargs)
def serialize_join(self, spec, elem=None):
if elem is None:
elem = etree.Element('join')
if spec.split_task:
SubElement(elem, 'split-task').text = spec.split_task
if spec.threshold is not None:
self.serialize_value(SubElement(elem, 'threshold'), spec.threshold)
if spec.cancel_remaining:
SubElement(elem, 'cancel-remaining')
return self.serialize_task_spec(spec, elem)
def deserialize_join(self, wf_spec, elem, cls=Join, **kwargs):
split_task = elem.findtext('split-task')
if elem.find('threshold') is None:
threshold = None
else:
threshold = self.deserialize_value(elem.find('threshold'))
cancel = elem.find('cancel-remaining') is not None
return self.deserialize_task_spec(wf_spec, elem, cls,
split_task=split_task,
threshold=threshold,
cancel=cancel,
**kwargs)
def serialize_multi_choice(self, spec, elem=None):
if elem is None:
elem = etree.Element('multi-choice')
if spec.choice:
self.serialize_value_list(SubElement(elem, 'choice'), spec.choice)
options = SubElement(elem, 'options')
for condition, spec_name in spec.cond_task_specs:
option_elem = SubElement(options, 'option')
if condition is not None:
cond_elem = SubElement(option_elem, 'condition')
cond_elem.append(condition.serialize(self))
SubElement(option_elem, 'output').text = spec_name
return self.serialize_task_spec(spec, elem)
def deserialize_multi_choice(self, wf_spec, elem, cls=MultiChoice,
**kwargs):
spec = self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)
if elem.find('choice') is not None:
spec.choice = self.deserialize_value_list(elem.find('choice'))
if elem.find('options') is not None:
for option_elem in elem.find('options'):
condition_elem = option_elem.find('condition')
if condition_elem is not None:
condition = self.deserialize_operator(condition_elem[0])
else:
condition = None
spec_name = option_elem.findtext('output')
spec.cond_task_specs.append((condition, spec_name))
return spec
def serialize_multi_instance(self, spec):
elem = etree.Element('multi-instance')
self.serialize_value(SubElement(elem, 'times'), spec.times)
return self.serialize_task_spec(spec, elem)
def deserialize_multi_instance(self, wf_spec, elem, cls=None,
**kwargs):
if cls is None:
cls = MultiInstance
times = self.deserialize_value(elem.find('times'))
return self.deserialize_task_spec(wf_spec, elem, cls, times=times,
**kwargs)
def serialize_release_mutex(self, spec):
elem = etree.Element('release-mutex')
SubElement(elem, 'mutex').text = spec.mutex
return self.serialize_task_spec(spec, elem)
def deserialize_release_mutex(self, wf_spec, elem, cls=ReleaseMutex,
**kwargs):
mutex = elem.findtext('mutex')
return self.deserialize_task_spec(wf_spec, elem, cls, mutex=mutex,
**kwargs)
def serialize_simple(self, spec):
elem = etree.Element('simple')
return self.serialize_task_spec(spec, elem)
def deserialize_simple(self, wf_spec, elem, cls=Simple, **kwargs):
return self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)
def serialize_start_task(self, spec):
elem = etree.Element('start-task')
return self.serialize_task_spec(spec, elem)
def deserialize_start_task(self, wf_spec, elem, cls=StartTask, **kwargs):
return self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)
def serialize_sub_workflow(self, spec):
warnings.warn("SubWorkflows cannot be safely serialized as they only" +
" store a reference to the subworkflow specification " +
" as a path to an external XML file.")
elem = etree.Element('sub-workflow')
SubElement(elem, 'filename').text = spec.file
in_elem = SubElement(elem, 'in-assign')
self.serialize_value_list(in_elem, spec.in_assign)
out_elem = SubElement(elem, 'out-assign')
self.serialize_value_list(out_elem, spec.out_assign)
return self.serialize_task_spec(spec, elem)
def deserialize_sub_workflow(self, wf_spec, elem, cls=SubWorkflow,
**kwargs):
warnings.warn("SubWorkflows cannot be safely deserialized as they " +
"only store a reference to the subworkflow " +
"specification as a path to an external XML file.")
filename = elem.findtext('filename')
in_elem = elem.find('in-assign')
in_assign = self.deserialize_value_list(in_elem)
out_elem = elem.find('out-assign')
out_assign = self.deserialize_value_list(out_elem)
return self.deserialize_task_spec(wf_spec, elem, cls, file=filename,
in_assign=in_assign,
out_assign=out_assign, **kwargs)
def serialize_thread_merge(self, spec, elem=None):
if elem is None:
elem = etree.Element('thread-merge')
return self.serialize_join(spec, elem)
def deserialize_thread_merge(self, wf_spec, elem, cls=ThreadMerge,
**kwargs):
return self.deserialize_join(wf_spec, elem, cls, **kwargs)
def serialize_thread_split(self, spec, elem=None):
if elem is None:
elem = etree.Element('thread-split')
self.serialize_value(SubElement(elem, 'times'), spec.times)
return self.serialize_task_spec(spec, elem)
def deserialize_thread_split(self, wf_spec, elem, cls=ThreadSplit,
**kwargs):
times_elem = elem.find('times')
if times_elem is not None:
times = self.deserialize_value(times_elem)
else:
times = 1
return self.deserialize_task_spec(wf_spec, elem, cls, times=times,
suppress_threadstart_creation=True,
**kwargs)
def serialize_thread_start(self, spec, elem=None):
if elem is None:
elem = etree.Element('thread-start')
return self.serialize_task_spec(spec, elem)
def deserialize_thread_start(self, wf_spec, elem, cls=ThreadStart,
**kwargs):
return self.deserialize_task_spec(wf_spec, elem, cls, **kwargs)
def serialize_merge(self, spec, elem=None):
if elem is None:
elem = etree.Element('merge')
SubElement(elem, 'split-task').text = spec.split_task
return self.serialize_task_spec(spec, elem)
def deserialize_merge(self, wf_spec, elem, cls=Merge, **kwargs):
split_task = elem.findtext('split-task')
return self.deserialize_task_spec(wf_spec, elem, cls,
split_task=split_task, **kwargs)
def serialize_trigger(self, spec, elem=None):
if elem is None:
elem = etree.Element('trigger')
self.serialize_value_list(SubElement(elem, 'context'), spec.context)
self.serialize_value(SubElement(elem, 'times'), spec.times)
SubElement(elem, 'queued').text = str(spec.queued)
return self.serialize_task_spec(spec, elem)
def deserialize_trigger(self, wf_spec, elem, cls=Trigger, **kwargs):
context = self.deserialize_value_list(elem.find('context'))
times = self.deserialize_value(elem.find('times'))
spec = self.deserialize_task_spec(wf_spec, elem, cls, context=context,
times=times, **kwargs)
        try:
            spec.queued = int(elem.findtext('queued'))
        except (TypeError, ValueError):
            # 'queued' may be missing or non-numeric; keep the default.
            pass
return spec
def serialize_workflow_spec(self, spec, **kwargs):
wf_elem = etree.Element('workflow')
SubElement(wf_elem, 'name').text = spec.name
SubElement(wf_elem, 'description').text = spec.description
if spec.file:
SubElement(wf_elem, 'filename').text = spec.file
tasks_elem = SubElement(wf_elem, 'tasks')
for task_name, task_spec in sorted(spec.task_specs.items()):
tasks_elem.append(task_spec.serialize(self))
return wf_elem
def deserialize_workflow_spec(self, elem, **kwargs):
name = elem.findtext('name')
filename = elem.findtext('filename')
spec = WorkflowSpec(name, filename=filename)
spec.description = elem.findtext('description')
# Add all tasks.
tasks_elem = elem.find('tasks')
for task_elem in tasks_elem:
cls = _spec_map[task_elem.tag]
task_spec = cls.deserialize(self, spec, task_elem)
spec.task_specs[task_spec.name] = task_spec
spec.start = spec.task_specs['Start']
return spec
def serialize_workflow(self, workflow, **kwargs):
assert isinstance(workflow, Workflow)
elem = etree.Element('workflow')
wf_spec_elem = self.serialize_workflow_spec(workflow.spec)
wf_spec_elem.tag = 'spec'
elem.append(wf_spec_elem)
data_elem = SubElement(elem, 'data')
self.serialize_value_map(data_elem, workflow.data)
if workflow.last_task is not None:
SubElement(elem, 'last-task').text = str(workflow.last_task.id)
if workflow.success:
SubElement(elem, 'success')
task_tree_elem = SubElement(elem, 'task-tree')
task_tree_elem.append(self.serialize_task(workflow.task_tree))
return elem
def deserialize_workflow(self, elem, **kwargs):
wf_spec_elem = elem.find('spec')
wf_spec = self.deserialize_workflow_spec(wf_spec_elem, **kwargs)
workflow = Workflow(wf_spec)
workflow.data = self.deserialize_value_map(elem.find('data'))
workflow.success = elem.find('success') is not None
task_tree_elem = elem.find('task-tree')
workflow.task_tree = self.deserialize_task(workflow, task_tree_elem[0])
# last_task
last_task = elem.findtext('last-task')
if last_task is not None:
workflow.last_task = workflow.get_task_from_id(last_task)
return workflow
def serialize_task(self, task, skip_children=False):
assert isinstance(task, Task)
if isinstance(task.task_spec, SubWorkflow):
raise TaskNotSupportedError(
"Subworkflow tasks cannot be serialized (due to their use of" +
" internal_data to store the subworkflow).")
# We are not serializing task.workflow; the deserializer accepts
# an already-deserialized Workflow instead.
elem = etree.Element('task')
if task.id is not None:
SubElement(elem, 'id').text = str(task.id)
if task.parent is not None:
SubElement(elem, 'parent').text = str(task.parent.id)
if not skip_children:
children_elem = SubElement(elem, 'children')
for child in task.children:
child_elem = self.serialize_task(child)
children_elem.append(child_elem)
SubElement(elem, 'state').text = TaskState.get_name(task._state)
if task.triggered:
SubElement(elem, 'triggered')
SubElement(elem, 'spec').text = task.task_spec.name
SubElement(elem, 'last-state-change').text = str(
task.last_state_change)
self.serialize_value_map(SubElement(elem, 'data'), task.data)
internal_data_elem = SubElement(elem, 'internal-data')
self.serialize_value_map(internal_data_elem, task.internal_data)
return elem
def deserialize_task(self, workflow, elem):
assert isinstance(workflow, Workflow)
task_spec_name = elem.findtext('spec')
task_spec = workflow.spec.get_task_spec_from_name(task_spec_name)
task_id = elem.findtext('id')
if task_id is not None:
task_id = UUID(task_id)
        # Deserialization is done by traversing the tree, so the parent should
        # already exist by the time its children are deserialized.
parent_id = elem.findtext('parent')
parent = workflow.tasks[UUID(parent_id)] if parent_id is not None else None
task = Task(workflow, task_spec, parent, id=task_id)
for child_elem in elem.find('children'):
child_task = self.deserialize_task(workflow, child_elem)
task.children.append(child_task)
state_name = elem.findtext('state')
state_value = TaskState.get_value(state_name)
assert state_value is not None
task._state = state_value
task.triggered = elem.find('triggered') is not None
task.last_state_change = float(elem.findtext('last-state-change'))
task.data = self.deserialize_value_map(elem.find('data'))
internal_data_elem = elem.find('internal-data')
task.internal_data = self.deserialize_value_map(internal_data_elem)
return task
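# Round-trip sketch (illustrative, not part of the original module):
# serializing a workflow to XML text and back. Assumes an existing 'workflow'
# instance whose spec contains no SubWorkflow tasks, since those raise
# TaskNotSupportedError above.
#
#     serializer = XmlSerializer()
#     elem = serializer.serialize_workflow(workflow)
#     text = etree.tostring(elem, pretty_print=True)
#     restored = serializer.deserialize_workflow(etree.fromstring(text))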
| 28,931 | Python | .py | 615 | 36.078049 | 99 | 0.612774 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,036 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/serializer/__init__.py | # This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA | 770 | Python | .py | 16 | 47.1875 | 69 | 0.788079 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,037 | base.py | sartography_SpiffWorkflow/SpiffWorkflow/serializer/base.py | # This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .. import operators
from ..specs.AcquireMutex import AcquireMutex
from ..specs.Cancel import Cancel
from ..specs.CancelTask import CancelTask
from ..specs.Choose import Choose
from ..specs.ExclusiveChoice import ExclusiveChoice
from ..specs.Execute import Execute
from ..specs.Gate import Gate
from ..specs.Join import Join
from ..specs.Merge import Merge
from ..specs.MultiChoice import MultiChoice
from ..specs.MultiInstance import MultiInstance
from ..specs.ReleaseMutex import ReleaseMutex
from ..specs.Simple import Simple
from ..specs.StartTask import StartTask
from ..specs.SubWorkflow import SubWorkflow
from ..specs.ThreadStart import ThreadStart
from ..specs.ThreadMerge import ThreadMerge
from ..specs.ThreadSplit import ThreadSplit
from ..specs.Transform import Transform
from ..specs.Trigger import Trigger
from ..specs.WorkflowSpec import WorkflowSpec
# Map tag names to the spec and operator classes they represent.
def spec_map():
return {
'acquire-mutex': AcquireMutex,
'cancel': Cancel,
'cancel-task': CancelTask,
'choose': Choose,
'exclusive-choice': ExclusiveChoice,
'execute': Execute,
'gate': Gate,
'join': Join,
'merge': Merge,
'multi-choice': MultiChoice,
'multi-instance': MultiInstance,
'release-mutex': ReleaseMutex,
'simple': Simple,
'start-task': StartTask,
'sub-workflow': SubWorkflow,
'thread-start': ThreadStart,
'thread-merge': ThreadMerge,
'thread-split': ThreadSplit,
'transform': Transform,
'trigger': Trigger,
'workflow-spec': WorkflowSpec,
'task': Simple,
}
def op_map():
return {
'equals': operators.Equal,
'not-equals': operators.NotEqual,
'less-than': operators.LessThan,
'greater-than': operators.GreaterThan,
'matches': operators.Match
}
class Serializer(object):
def serialize_workflow_spec(self, wf_spec, **kwargs):
raise NotImplementedError(
"You must implement the serialize_workflow_spec method.")
def deserialize_workflow_spec(self, s_state, **kwargs):
raise NotImplementedError(
"You must implement the deserialize_workflow_spec method.")
def serialize_workflow(self, workflow, **kwargs):
raise NotImplementedError(
"You must implement the serialize_workflow method.")
def deserialize_workflow(self, s_state, **kwargs):
raise NotImplementedError(
"You must implement the deserialize_workflow method.")
| 3,368 | Python | .py | 85 | 34.658824 | 71 | 0.718693 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,038 | dotv.py | sartography_SpiffWorkflow/SpiffWorkflow/serializer/dotv.py | # This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
# requires: https://github.com/stricaud/gvgen
import gvgen
from .base import Serializer
class dotVisualizer(Serializer):
def serialize_workflow_spec(self, wf_spec):
nodes = set()
linked = set()
graph = gvgen.GvGen()
parent = graph.newItem("Workflow")
# these built in shapes are available:
# http://www.graphviz.org/doc/info/shapes.html
graph.styleAppend("Cancel", "shape", "oval")
graph.styleAppend("CancelTask", "shape", "oval")
graph.styleAppend("Choose", "shape", "diamond")
graph.styleAppend("ExclusiveChoice", "shape", "diamond")
graph.styleAppend("Execute", "shape", "rect")
graph.styleAppend("Gate", "shape", "trapezium")
graph.styleAppend("Join", "shape", "invtriangle")
graph.styleAppend("Merge", "shape", "invtriangle")
graph.styleAppend("MultiChoice", "shape", "diamond")
graph.styleAppend("MultiInstance", "shape", "box")
graph.styleAppend("ReleaseMutex", "shape", "diamond")
graph.styleAppend("Simple", "shape", "rect")
graph.styleAppend("StartTask", "shape", "oval")
graph.styleAppend("SubWorkflow", "shape", "invhouse")
graph.styleAppend("ThreadMerge", "shape", "invtriangle")
graph.styleAppend("ThreadSplit", "shape", "triangle")
graph.styleAppend("ThreadStart", "shape", "oval")
graph.styleAppend("Transform", "shape", "rect")
graph.styleAppend("Trigger", "shape", "oval")
# build graph with all the nodes first
        def recursively_add_nodes(task_spec):
if task_spec in nodes:
return
task_spec.gv = graph.newItem(task_spec.name, parent)
# add a default style for this class so that if we don't have one
# when we apply it doesn't break the GvGen library
graph.styleAppend(task_spec.__class__.__name__, "ignore", "this")
graph.styleApply(task_spec.__class__.__name__, task_spec.gv)
nodes.add(task_spec)
sub_specs = ([task_spec.spec.start] if hasattr(
task_spec, 'spec') else []) + task_spec.outputs
for t in sub_specs:
                recursively_add_nodes(t)
# then link all the nodes together
def recursive_linking(task_spec):
if task_spec in linked:
return
linked.add(task_spec)
sub_specs = ([task_spec.spec.start] if hasattr(
task_spec, 'spec') else []) + task_spec.outputs
            for t in sub_specs:
graph.newLink(task_spec.gv, t.gv)
recursive_linking(t)
        recursively_add_nodes(wf_spec.start)
recursive_linking(wf_spec.start)
        dot = graph.dot()
        return dot if dot else ''
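# Usage sketch (illustrative): rendering a workflow spec to Graphviz dot text;
# 'workflow.dot' is a hypothetical output path.
#
#     dot_text = dotVisualizer().serialize_workflow_spec(wf_spec)
#     with open('workflow.dot', 'w') as fp:
#         fp.write(dot_text)
#     # render with, e.g.: dot -Tpng workflow.dot -o workflow.png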
| 3,696 | Python | .py | 73 | 42.287671 | 77 | 0.625173 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,039 | exceptions.py | sartography_SpiffWorkflow/SpiffWorkflow/serializer/exceptions.py | # This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
class TaskSpecNotSupportedError(ValueError):
pass
class TaskNotSupportedError(ValueError):
pass
class MissingSpecError(ValueError):
pass
| 925 | Python | .py | 22 | 40.272727 | 69 | 0.7951 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,040 | json.py | sartography_SpiffWorkflow/SpiffWorkflow/serializer/json.py | # This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import json
import uuid
from ..operators import Attrib
from .dict import DictionarySerializer
class JSONSerializer(DictionarySerializer):
def serialize_workflow_spec(self, wf_spec, **kwargs):
thedict = super(JSONSerializer, self).serialize_workflow_spec(wf_spec, **kwargs)
return self._dumps(thedict)
def deserialize_workflow_spec(self, s_state, **kwargs):
thedict = self._loads(s_state)
return super(JSONSerializer, self).deserialize_workflow_spec(thedict, **kwargs)
def serialize_workflow(self, workflow, **kwargs):
thedict = super(JSONSerializer, self).serialize_workflow(workflow, **kwargs)
return self._dumps(thedict)
def deserialize_workflow(self, s_state, **kwargs):
thedict = self._loads(s_state)
return super(JSONSerializer, self).deserialize_workflow(thedict, **kwargs)
def _object_hook(self, dct):
if '__uuid__' in dct:
return uuid.UUID(dct['__uuid__'])
if '__bytes__' in dct:
return dct['__bytes__'].encode('ascii')
if '__attrib__' in dct:
return Attrib(dct['__attrib__'])
return dct
def _default(self, obj):
if isinstance(obj, uuid.UUID):
return {'__uuid__': obj.hex}
if isinstance(obj, bytes):
return {'__bytes__': obj.decode('ascii')}
if isinstance(obj, Attrib):
return {'__attrib__': obj.name}
raise TypeError('%r is not JSON serializable' % obj)
def _loads(self, text):
        return json.loads(text, object_hook=self._object_hook)
def _dumps(self, dct):
        return json.dumps(dct, sort_keys=True, default=self._default)
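# Usage sketch (illustrative): a JSON round trip. The hooks above extend plain
# JSON with the UUID, bytes, and Attrib values that json.dumps would otherwise
# reject. Assumes an existing 'workflow' instance.
#
#     serializer = JSONSerializer()
#     text = serializer.serialize_workflow(workflow)    # a JSON string
#     restored = serializer.deserialize_workflow(text)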
| 2,504 | Python | .py | 54 | 40.185185 | 88 | 0.681594 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,041 | workflow.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/workflow.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.task import Task
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.bpmn.specs.mixins.events.event_types import CatchingEvent
from SpiffWorkflow.bpmn.specs.mixins.events.start_event import StartEvent
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import CallActivity
from SpiffWorkflow.bpmn.specs.event_definitions.item_aware_event import CodeEventDefinition
from SpiffWorkflow.bpmn.specs.control import BoundaryEventSplit
from SpiffWorkflow.bpmn.util.subworkflow import BpmnBaseWorkflow, BpmnSubWorkflow
from .script_engine.python_engine import PythonScriptEngine
class BpmnWorkflow(BpmnBaseWorkflow):
"""
The engine that executes a BPMN workflow. This specialises the standard
Spiff Workflow class with a few extra methods and attributes.
"""
def __init__(self, spec, subprocess_specs=None, script_engine=None, **kwargs):
"""
Constructor.
:param script_engine: set to an extension of PythonScriptEngine if you
        need a specialised version. Defaults to the script engine of the
        topmost workflow, or to the PythonScriptEngine if none is provided.
"""
self.subprocess_specs = subprocess_specs or {}
self.subprocesses = {}
self.bpmn_events = []
self.correlations = {}
super(BpmnWorkflow, self).__init__(spec, **kwargs)
self.__script_engine = script_engine or PythonScriptEngine()
@property
def script_engine(self):
return self.__script_engine
@script_engine.setter
def script_engine(self, engine):
self.__script_engine = engine
@property
def top_workflow(self):
return self
@property
def parent_task_id(self):
return None
@property
def parent_workflow(self):
return None
@property
def depth(self):
return 0
def create_subprocess(self, my_task, spec_name):
# This creates a subprocess for an existing task
subprocess = BpmnSubWorkflow(
self.subprocess_specs[spec_name],
parent_task_id=my_task.id,
top_workflow=self)
self.subprocesses[my_task.id] = subprocess
return subprocess
def get_subprocess(self, my_task):
return self.subprocesses.get(my_task.id)
def delete_subprocess(self, my_task):
subprocess = self.subprocesses.get(my_task.id)
tasks = subprocess.get_tasks()
for sp in [c for c in self.subprocesses.values() if c.parent_workflow == subprocess]:
tasks.extend(self.delete_subprocess(self.get_task_from_id(sp.parent_task_id)))
del self.subprocesses[my_task.id]
return tasks
def get_active_subprocesses(self):
return [sp for sp in self.subprocesses.values() if not sp.completed]
def catch(self, event):
"""
Tasks can always catch events, regardless of their state. The event information is stored in the task's
internal data and processed when the task is reached in the workflow. If a task should only receive messages
        while it is running (e.g. a boundary event), the task should call the
executing to clear out a stale message.
:param event: the thrown event
"""
if event.target is not None:
# This limits results to tasks in the specified workflow
tasks = event.target.get_tasks(skip_subprocesses=True, state=TaskState.NOT_FINISHED_MASK, catches_event=event)
if isinstance(event.event_definition, CodeEventDefinition) and len(tasks) == 0:
event.target = event.target.parent_workflow
self.catch(event)
else:
self.update_collaboration(event)
tasks = self.get_tasks(state=TaskState.NOT_FINISHED_MASK, catches_event=event)
# Figure out if we need to create an external event
if len(tasks) == 0:
self.bpmn_events.append(event)
for task in tasks:
task.task_spec.catch(task, event)
if len(tasks) > 0:
self.refresh_waiting_tasks()
def send_event(self, event):
"""Allows this workflow to catch an externally generated event."""
if event.target is not None:
self.catch(event)
else:
tasks = self.get_tasks(state=TaskState.NOT_FINISHED_MASK, catches_event=event)
if len(tasks) == 0:
raise WorkflowException(f"This process is not waiting for {event.event_definition.name}")
for task in tasks:
task.task_spec.catch(task, event)
self.refresh_waiting_tasks()
def get_events(self):
"""Returns the list of events that cannot be handled from within this workflow."""
events = self.bpmn_events
self.bpmn_events = []
return events
def waiting_events(self):
iter = self.get_tasks_iterator(state=TaskState.WAITING, spec_class=CatchingEvent)
return [t.task_spec.event_definition.details(t) for t in iter]
def do_engine_steps(self, will_complete_task=None, did_complete_task=None):
"""
Execute any READY tasks that are engine specific (for example, gateways
or script tasks). This is done in a loop, so it will keep completing
those tasks until there are only READY User tasks, or WAITING tasks
left.
:param will_complete_task: Callback that will be called prior to completing a task
:param did_complete_task: Callback that will be called after completing a task
"""
def update_workflow(wf):
count = 0
            # An iterator would be preferable here, but at least the list of READY tasks is short.
for task in wf.get_tasks(state=TaskState.READY):
if not task.task_spec.manual:
if will_complete_task is not None:
will_complete_task(task)
task.run()
count += 1
if did_complete_task is not None:
did_complete_task(task)
return count
active_subprocesses = self.get_active_subprocesses()
for subprocess in sorted(active_subprocesses, key=lambda v: v.depth, reverse=True):
count = None
while count is None or count > 0:
count = update_workflow(subprocess)
if subprocess.parent_task_id is not None:
task = self.get_task_from_id(subprocess.parent_task_id)
task.task_spec._update(task)
count = update_workflow(self)
if count > 0 or len(self.get_active_subprocesses()) > len(active_subprocesses):
self.do_engine_steps(will_complete_task, did_complete_task)
def refresh_waiting_tasks(self, will_refresh_task=None, did_refresh_task=None):
"""
Refresh the state of all WAITING tasks. This will, for example, update
Catching Timer Events whose waiting time has passed.
:param will_refresh_task: Callback that will be called prior to refreshing a task
:param did_refresh_task: Callback that will be called after refreshing a task
"""
def update_task(task):
if will_refresh_task is not None:
will_refresh_task(task)
task.task_spec._update(task)
if did_refresh_task is not None:
did_refresh_task(task)
for subprocess in sorted(self.get_active_subprocesses(), key=lambda v: v.depth, reverse=True):
for task in subprocess.get_tasks_iterator(skip_subprocesses=True, state=TaskState.WAITING):
update_task(task)
for task in self.get_tasks_iterator(skip_subprocesses=True, state=TaskState.WAITING):
update_task(task)
def get_task_from_id(self, task_id):
if task_id not in self.tasks:
for subprocess in self.subprocesses.values():
task = subprocess.get_task_from_id(task_id)
if task is not None:
return task
return super().get_task_from_id(task_id)
def reset_from_task_id(self, task_id, data=None, remove_subprocess=True):
task = self.get_task_from_id(task_id)
# Since recursive deletion of subprocesses requires access to the tasks, we have to delete any subprocesses first
        # We also need different behavior for the case where we explicitly reset to a subprocess (in which case we delete it)
# vs resetting inside (where we leave it and reset the tasks that descend from it)
descendants = []
# If we're resetting to a boundary event, we also have to delete subprocesses underneath the attached events
top = task if not isinstance(task.parent.task_spec, BoundaryEventSplit) else task.parent
for desc in filter(lambda t: t.id in self.subprocesses, top):
if desc != task or remove_subprocess:
descendants.extend(self.delete_subprocess(desc))
# This resets the boundary event branches
if isinstance(task.parent.task_spec, BoundaryEventSplit):
for child in task.parent.children:
descendants.extend(super().reset_from_task_id(child.id, data if child == task else None))
else:
descendants.extend(super().reset_from_task_id(task.id, data))
if task.workflow.parent_task_id is not None:
sp_task = self.get_task_from_id(task.workflow.parent_task_id)
descendants.extend(self.reset_from_task_id(sp_task.id, remove_subprocess=False))
sp_task._set_state(TaskState.STARTED)
return descendants
def cancel(self, workflow=None):
wf = workflow or self
cancelled = BpmnBaseWorkflow.cancel(wf)
cancelled_ids = [t.id for t in cancelled]
to_cancel = []
for sp_id, sp in self.subprocesses.items():
if sp_id in cancelled_ids:
to_cancel.append(sp)
for sp in to_cancel:
cancelled.extend(self.cancel(sp))
return cancelled
def update_collaboration(self, event):
def get_or_create_subprocess(task_spec, wf_spec):
for sp in self.subprocesses.values():
if sp.get_next_task(state=TaskState.WAITING, spec_name=task_spec.name) is not None:
return sp
            # This creates a new task associated with a process when an event that kicks off a process is received
# I need to know what class is being used to create new processes in this case, and this seems slightly
# less bad than adding yet another argument. Still sucks though.
# TODO: Make collaborations a class rather than trying to shoehorn them into a process.
for spec in self.spec.task_specs.values():
if isinstance(spec, CallActivity):
spec_class = spec.__class__
break
else:
# Default to the mixin class, which will probably fail in many cases.
spec_class = CallActivity
new = spec_class(self.spec, f'{wf_spec.name}_{len(self.subprocesses)}', wf_spec.name)
self.spec.start.connect(new)
task = Task(self, new, parent=self.task_tree)
# This (indirectly) calls create_subprocess
task.task_spec._update(task)
return self.subprocesses[task.id]
# Start a subprocess for known specs with start events that catch this
for spec in self.subprocess_specs.values():
for task_spec in spec.task_specs.values():
if isinstance(task_spec, StartEvent) and task_spec.event_definition == event.event_definition:
subprocess = get_or_create_subprocess(task_spec, spec)
subprocess.correlations.update(event.correlations)
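# Usage sketch (illustrative): the typical engine loop around this class.
# Obtaining 'spec' (e.g. from a BPMN parser) is outside this module, and
# 'answer' is a hypothetical data field for a manual task.
#
#     workflow = BpmnWorkflow(spec)
#     workflow.do_engine_steps()
#     for task in workflow.get_tasks(state=TaskState.READY):
#         task.set_data(answer=42)
#         task.run()
#     workflow.refresh_waiting_tasks()
#     workflow.do_engine_steps()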
| 12,826 | Python | .py | 244 | 42.5 | 123 | 0.662008 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,042 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/__init__.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .workflow import BpmnWorkflow
from .util import BpmnEvent | 891 | Python | .py | 20 | 43.55 | 69 | 0.793341 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,043 | exceptions.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/exceptions.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import re
from SpiffWorkflow.util import levenshtein
from SpiffWorkflow.exceptions import WorkflowException
class WorkflowTaskException(WorkflowException):
"""WorkflowException that provides task_trace information."""
def __init__(self, error_msg, task=None, exception=None, line_number=None, offset=None, error_line=None):
"""
Exception initialization.
:param task: the task that threw the exception
:type task: Task
:param error_msg: a human readable error message
:type error_msg: str
:param exception: an exception to wrap, if any
:type exception: Exception
"""
self.task = task
self.line_number = line_number
self.offset = offset
self.error_line = error_line
if exception:
self.error_type = exception.__class__.__name__
else:
self.error_type = "unknown"
super().__init__(error_msg, task_spec=task.task_spec)
if isinstance(exception, SyntaxError) and not line_number:
# Line number and offset can be recovered directly from syntax errors,
# otherwise they must be passed in.
self.line_number = exception.lineno
self.offset = exception.offset
elif isinstance(exception, NameError):
self.add_note(self.did_you_mean_from_name_error(exception, list(task.data.keys())))
# If encountered in a sub-workflow, this traces back up the stack,
# so we can tell how we got to this particular task, no matter how
# deeply nested in sub-workflows it is. Takes the form of:
# task-description (file-name)
self.task_trace = self.get_task_trace(task)
@staticmethod
def get_task_trace(task):
task_trace = [f"{task.task_spec.bpmn_name} ({task.workflow.spec.file})"]
top = task.workflow.top_workflow
parent = None if task.workflow is top else task.workflow.parent_workflow
        # cap the iterations to ensure we do not infinitely loop and tie up all CPUs
max_iterations = 1000
iteration = 0
caller = task
while parent is not None:
if iteration > max_iterations:
raise WorkflowException(
f"Could not find full task trace after {max_iterations} iterations.",
task_spec=task.task_spec,
)
caller = parent.get_task_from_id(caller.workflow.parent_task_id)
task_trace.append(f"{caller.task_spec.bpmn_name} ({parent.spec.file})")
parent = None if caller.workflow is top else caller.workflow.parent_workflow
iteration += 1
return task_trace
@staticmethod
def did_you_mean_from_name_error(name_exception, options):
"""Returns a string along the lines of 'did you mean 'dog'? Given
a name_error, and a set of possible things that could have been called,
or an empty string if no valid suggestions come up. """
        def_match = re.match("name '(.+)' is not defined", str(name_exception))
        error_msg = ""
        if def_match:
            bad_variable = def_match.group(1)
            most_similar = levenshtein.most_similar(bad_variable, options, 3)
            if len(most_similar) == 1:
                error_msg += f" Did you mean '{most_similar[0]}'?"
            elif len(most_similar) > 1:
                error_msg += f" Did you mean one of '{most_similar}'?"
        return error_msg
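    # A minimal illustration (hypothetical names): given
    # NameError("name 'produt' is not defined") and options ['product', 'order'],
    # this would return something like " Did you mean 'product'?".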
class WorkflowDataException(WorkflowTaskException):
def __init__(self, message, task, data_input=None, data_output=None):
"""
:param task: the task that generated the error
:param data_input: the spec of the input variable (if a data input)
:param data_output: the spec of the output variable (if a data output)
"""
super().__init__(message, task)
self.data_input = data_input
self.data_output = data_output
| 4,845 | Python | .py | 99 | 40.656566 | 109 | 0.658145 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,044 | util.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/util.py | # Copyright (C) 2012 Matthew Hampton
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
BPMN_MODEL_NS = 'http://www.omg.org/spec/BPMN/20100524/MODEL'
DIAG_INTERCHANGE_NS = "http://www.omg.org/spec/BPMN/20100524/DI"
DIAG_COMMON_NS = "http://www.omg.org/spec/DD/20100524/DC"
DEFAULT_NSMAP = {
'bpmn': BPMN_MODEL_NS,
'dc': DIAG_COMMON_NS,
'bpmndi': DIAG_INTERCHANGE_NS,
}
def one(nodes, or_none=False):
"""
    Assert that there is exactly one node in the given list, and return it.
    If `or_none` is True and the list is empty, return None instead.
"""
if not nodes and or_none:
return None
assert len(nodes) == 1, 'Expected 1 result. Received %d results.' % (len(nodes))
return nodes[0]
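# Usage sketch (hypothetical variable): script_node = one(parser.xpath('.//bpmn:script'))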
def first(nodes):
"""
Return the first node in the given list, or None, if the list is empty.
"""
if len(nodes) >= 1:
return nodes[0]
else:
return None
def xpath_eval(node, extra_ns=None):
"""
    Returns a function that evaluates an XPath expression against the given
    node, with the namespace prefix 'bpmn' bound to
    http://www.omg.org/spec/BPMN/20100524/MODEL, plus any extra ones specified.
"""
namespaces = DEFAULT_NSMAP.copy()
if extra_ns:
namespaces.update(extra_ns)
return lambda path: node.xpath(path, namespaces=namespaces)
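# Usage sketch (hypothetical variable): xp = xpath_eval(process_node); start_events = xp('.//bpmn:startEvent')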
def full_tag(tag):
"""
    Return the fully qualified tag name for the given BPMN tag, i.e. the name
    prefixed with the namespace
    http://www.omg.org/spec/BPMN/20100524/MODEL
"""
return '{%s}%s' % (BPMN_MODEL_NS, tag)
| 2,178 | Python | .py | 58 | 33.896552 | 84 | 0.711776 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,045 | spec_description.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/spec_description.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .util import full_tag
# Having this configurable via the parser makes a lot more sense than requiring a subclass
# This can be further streamlined if we ever replace our parser
SPEC_DESCRIPTIONS = {
full_tag('startEvent'): 'Start Event',
full_tag('endEvent'): 'End Event',
full_tag('userTask'): 'User Task',
full_tag('task'): 'Task',
full_tag('subProcess'): 'Subprocess',
full_tag('manualTask'): 'Manual Task',
full_tag('exclusiveGateway'): 'Exclusive Gateway',
full_tag('parallelGateway'): 'Parallel Gateway',
full_tag('inclusiveGateway'): 'Inclusive Gateway',
full_tag('callActivity'): 'Call Activity',
full_tag('transaction'): 'Transaction',
full_tag('scriptTask'): 'Script Task',
full_tag('serviceTask'): 'Service Task',
full_tag('intermediateCatchEvent'): 'Intermediate Catch Event',
full_tag('intermediateThrowEvent'): 'Intermediate Throw Event',
full_tag('boundaryEvent'): 'Boundary Event',
full_tag('receiveTask'): 'Receive Task',
full_tag('sendTask'): 'Send Task',
full_tag('eventBasedGateway'): 'Event Based Gateway',
full_tag('cancelEventDefinition'): 'Cancel',
full_tag('errorEventDefinition'): 'Error',
full_tag('escalationEventDefinition'): 'Escalation',
full_tag('terminateEventDefinition'): 'Terminate',
full_tag('messageEventDefinition'): 'Message',
full_tag('signalEventDefinition'): 'Signal',
full_tag('timerEventDefinition'): 'Timer',
full_tag('conditionalEventDefinition'): 'Conditional',
}
| 2,334 | Python | .py | 50 | 43.46 | 90 | 0.735204 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,046 | task_parsers.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/task_parsers.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .ValidationException import ValidationException
from .TaskParser import TaskParser
from .util import one
class GatewayParser(TaskParser):
def handles_multiple_outgoing(self):
return True
class ConditionalGatewayParser(GatewayParser):
"""
    Parses an Exclusive (or Inclusive) Gateway, setting up the outgoing
    conditions appropriately.
"""
def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default):
if is_default:
super().connect_outgoing(outgoing_task, sequence_flow_node, is_default)
else:
cond = self.parse_condition(sequence_flow_node)
if cond is None:
raise ValidationException(
'Non-default exclusive outgoing sequence flow without condition',
sequence_flow_node,
self.filename)
self.task.connect_outgoing_if(cond, outgoing_task)
class SubprocessParser:
# Not really a fan of this, but I need a way of calling these methods from a task
# parser that extends the base parser to override extension parsing. I can't inherit
# from my extended task parser AND the original subworkflow parsers because they
# both inherit from the same base.
@staticmethod
def get_subprocess_spec(task_parser):
workflow_start_event = task_parser.xpath('./bpmn:startEvent')
workflow_end_event = task_parser.xpath('./bpmn:endEvent')
if len(workflow_start_event) != 1:
raise ValidationException(
f'Exactly one start event is required in a SubWorkflow Task; found {len(workflow_start_event)}.',
node=task_parser.node,
file_name=task_parser.filename)
if len(workflow_end_event) == 0:
            raise ValidationException('A SubWorkflow must contain an End Event',
node=task_parser.node,
file_name=task_parser.filename)
task_parser.process_parser.parser.create_parser(
task_parser.node,
filename=task_parser.filename,
lane=task_parser.lane
)
spec_id = task_parser.node.get('id')
# This parser makes me want to cry
spec_parser = task_parser.process_parser.parser.process_parsers[spec_id]
spec_parser.parent = task_parser.process_parser
return spec_id
@staticmethod
def get_call_activity_spec(task_parser):
called_element = task_parser.node.get('calledElement', None)
if not called_element:
raise ValidationException(
'No "calledElement" attribute for Call Activity.',
node=task_parser.node,
file_name=task_parser.filename)
return called_element
class SubWorkflowParser(TaskParser):
def create_task(self):
subworkflow_spec = SubprocessParser.get_subprocess_spec(self)
return self.spec_class(self.spec, self.bpmn_id, subworkflow_spec=subworkflow_spec, **self.bpmn_attributes)
class CallActivityParser(TaskParser):
"""Parses a CallActivity node."""
def create_task(self):
subworkflow_spec = SubprocessParser.get_call_activity_spec(self)
return self.spec_class(self.spec, self.bpmn_id, subworkflow_spec=subworkflow_spec, **self.bpmn_attributes)
class ScriptTaskParser(TaskParser):
"""Parses a script task"""
def create_task(self):
return self.spec_class(self.spec, self.bpmn_id, script=self.get_script(), **self.bpmn_attributes)
def get_script(self):
"""
Gets the script content from the node. A subclass can override this
method, if the script needs to be pre-parsed. The result of this call
will be passed to the Script Engine for execution.
"""
try:
return one(self.xpath('.//bpmn:script')).text
except AssertionError as ae:
raise ValidationException(
"Invalid Script Task. No Script Provided. " + str(ae),
node=self.node, file_name=self.filename)
| 4,861 | Python | .py | 102 | 39.637255 | 114 | 0.687434 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,047 | event_parsers.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/event_parsers.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from lxml import etree
from .ValidationException import ValidationException
from .TaskParser import TaskParser
from .util import first, one
from SpiffWorkflow.bpmn.specs.event_definitions.simple import (
NoneEventDefinition,
CancelEventDefinition,
TerminateEventDefinition
)
from SpiffWorkflow.bpmn.specs.event_definitions.timer import (
TimeDateEventDefinition,
DurationTimerEventDefinition,
CycleTimerEventDefinition
)
from SpiffWorkflow.bpmn.specs.event_definitions.item_aware_event import (
SignalEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition
)
from SpiffWorkflow.bpmn.specs.event_definitions.message import (
MessageEventDefinition,
CorrelationProperty
)
from SpiffWorkflow.bpmn.specs.event_definitions.multiple import MultipleEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.conditional import ConditionalEventDefinition
CANCEL_EVENT_XPATH = './/bpmn:cancelEventDefinition'
CONDITIONAL_EVENT_XPATH = './/bpmn:conditionalEventDefinition'
ERROR_EVENT_XPATH = './/bpmn:errorEventDefinition'
ESCALATION_EVENT_XPATH = './/bpmn:escalationEventDefinition'
TERMINATION_EVENT_XPATH = './/bpmn:terminateEventDefinition'
MESSAGE_EVENT_XPATH = './/bpmn:messageEventDefinition'
SIGNAL_EVENT_XPATH = './/bpmn:signalEventDefinition'
TIMER_EVENT_XPATH = './/bpmn:timerEventDefinition'
class EventDefinitionParser(TaskParser):
"""This class provvides methods for parsing different event definitions."""
def __init__(self, process_parser, spec_class, node, nsmap=None, lane=None):
super().__init__(process_parser, spec_class, node, nsmap, lane)
self.event_nodes = []
def get_description(self):
spec_description = super().get_description()
if spec_description is not None:
if len(self.event_nodes) == 0:
event_description = 'Default'
elif len(self.event_nodes) > 1:
event_description = 'Multiple'
elif len(self.event_nodes) == 1:
event_description = self.process_parser.parser.spec_descriptions.get(self.event_nodes[0].tag)
return f'{event_description} {spec_description}'
def get_event_description(self, event):
return self.process_parser.parser.spec_descriptions.get(event.tag)
def parse_cancel_event(self, event):
return CancelEventDefinition(description=self.get_event_description(event))
def parse_conditional_event(self, event):
expression = self.xpath('.//bpmn:condition')
if len(expression) == 0:
raise ValidationException('Conditional event definition with missing condition', node=self.node, file_name=self.filename)
return ConditionalEventDefinition(expression[0].text, description=self.get_event_description(event))
def parse_error_event(self, error_event):
"""Parse the errorEventDefinition node and return an instance of ErrorEventDefinition."""
error_ref = error_event.get('errorRef')
if error_ref:
try:
error = one(self.doc_xpath('.//bpmn:error[@id="%s"]' % error_ref))
except Exception:
self.raise_validation_exception('Expected an error node', node=error_event)
error_code = error.get('errorCode')
name = error.get('name')
else:
name, error_code = 'None Error Event', None
return ErrorEventDefinition(name, error_code, description=self.get_event_description(error_event))
def parse_escalation_event(self, escalation_event):
"""Parse the escalationEventDefinition node and return an instance of EscalationEventDefinition."""
escalation_ref = escalation_event.get('escalationRef')
if escalation_ref:
try:
escalation = one(self.doc_xpath('.//bpmn:escalation[@id="%s"]' % escalation_ref))
except Exception:
self.raise_validation_exception('Expected an Escalation node', node=escalation_event)
escalation_code = escalation.get('escalationCode')
name = escalation.get('name')
else:
name, escalation_code = 'None Escalation Event', None
return EscalationEventDefinition(name, escalation_code, description=self.get_event_description(escalation_event))
def parse_message_event(self, message_event):
message_ref = message_event.get('messageRef')
if message_ref is not None:
try:
message = one(self.doc_xpath('.//bpmn:message[@id="%s"]' % message_ref))
except Exception:
self.raise_validation_exception('Expected a Message node', node=message_event)
name = message.get('name')
description = self.get_event_description(message_event)
correlations = self.get_message_correlations(message_ref)
else:
name = message_event.getparent().get('name')
description = 'Message'
correlations = {}
return MessageEventDefinition(name, correlations, description=description)
def parse_signal_event(self, signal_event):
"""Parse the signalEventDefinition node and return an instance of SignalEventDefinition."""
signal_ref = signal_event.get('signalRef')
if signal_ref:
try:
signal = one(self.doc_xpath('.//bpmn:signal[@id="%s"]' % signal_ref))
except Exception:
self.raise_validation_exception('Expected a Signal node', node=signal_event)
name = signal.get('name')
else:
name = signal_event.getparent().get('name')
return SignalEventDefinition(name, description=self.get_event_description(signal_event))
def parse_terminate_event(self, event):
"""Parse the terminateEventDefinition node and return an instance of TerminateEventDefinition."""
return TerminateEventDefinition(description=self.get_event_description(event))
def parse_timer_event(self, event):
"""Parse the timerEventDefinition node and return an instance of TimerEventDefinition."""
try:
description = self.get_event_description(event)
name = self.node.get('name', self.node.get('id'))
time_date = first(self.xpath('.//bpmn:timeDate'))
if time_date is not None:
return TimeDateEventDefinition(name, time_date.text, description=description)
time_duration = first(self.xpath('.//bpmn:timeDuration'))
if time_duration is not None:
return DurationTimerEventDefinition(name, time_duration.text, description=description)
time_cycle = first(self.xpath('.//bpmn:timeCycle'))
if time_cycle is not None:
return CycleTimerEventDefinition(name, time_cycle.text, description=description)
raise ValidationException("Unknown Time Specification", node=self.node, file_name=self.filename)
except Exception as e:
raise ValidationException("Time Specification Error. " + str(e), node=self.node, file_name=self.filename)
def get_message_correlations(self, message_ref):
correlations = []
for correlation in self.doc_xpath(f".//bpmn:correlationPropertyRetrievalExpression[@messageRef='{message_ref}']"):
key = correlation.getparent().get('id')
children = correlation.getchildren()
expression = children[0].text if len(children) > 0 else None
used_by = [ e.getparent().get('name') for e in
self.doc_xpath(f".//bpmn:correlationKey/bpmn:correlationPropertyRef[text()='{key}']") ]
if key is not None and expression is not None:
correlations.append(CorrelationProperty(key, expression, used_by))
return correlations
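    # Illustrative XML shape this consumes (a sketch; ids and paths are hypothetical):
    #   <bpmn:correlationProperty id="order_id">
    #     <bpmn:correlationPropertyRetrievalExpression messageRef="order_message">
    #       <bpmn:messagePath>order.id</bpmn:messagePath>
    #     </bpmn:correlationPropertyRetrievalExpression>
    #   </bpmn:correlationProperty>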
def _create_task(self, event_definition, cancel_activity=None, parallel=None):
if isinstance(event_definition, MessageEventDefinition):
for prop in event_definition.correlation_properties:
for key in prop.correlation_keys:
if key not in self.spec.correlation_keys:
self.spec.correlation_keys[key] = []
if prop.name not in self.spec.correlation_keys[key]:
self.spec.correlation_keys[key].append(prop.name)
kwargs = self.bpmn_attributes
if cancel_activity is not None:
kwargs['cancel_activity'] = cancel_activity
interrupt = 'Interrupting' if cancel_activity else 'Non-Interrupting'
kwargs['description'] = interrupt + ' ' + kwargs['description']
if parallel is not None:
kwargs['parallel'] = parallel
return self.spec_class(self.spec, self.bpmn_id, event_definition=event_definition, **kwargs)
def get_event_definition(self, xpaths):
"""Returns all event definitions it can find in given list of xpaths"""
event_definitions = []
for path in xpaths:
for event in self.xpath(path):
if event is not None:
self.event_nodes.append(event)
if path == MESSAGE_EVENT_XPATH:
event_definitions.append(self.parse_message_event(event))
elif path == SIGNAL_EVENT_XPATH:
event_definitions.append(self.parse_signal_event(event))
elif path == TIMER_EVENT_XPATH:
event_definitions.append(self.parse_timer_event(event))
elif path == CANCEL_EVENT_XPATH:
event_definitions.append(self.parse_cancel_event(event))
elif path == ERROR_EVENT_XPATH:
event_definitions.append(self.parse_error_event(event))
elif path == ESCALATION_EVENT_XPATH:
event_definitions.append(self.parse_escalation_event(event))
elif path == TERMINATION_EVENT_XPATH:
event_definitions.append(self.parse_terminate_event(event))
elif path == CONDITIONAL_EVENT_XPATH:
event_definitions.append(self.parse_conditional_event(event))
parallel = self.node.get('parallelMultiple') == 'true'
if len(event_definitions) == 0:
return NoneEventDefinition(description='Default')
elif len(event_definitions) == 1:
return event_definitions[0]
else:
return MultipleEventDefinition(event_definitions, parallel, description='Multiple')
class StartEventParser(EventDefinitionParser):
"""Parses a Start Event, and connects it to the internal spec.start task.
    Supports Message, Signal, Timer, and Conditional events."""
def create_task(self):
event_definition = self.get_event_definition(
[MESSAGE_EVENT_XPATH, SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH, CONDITIONAL_EVENT_XPATH])
task = self._create_task(event_definition)
self.spec.start.connect(task)
return task
def handles_multiple_outgoing(self):
return True
class EndEventParser(EventDefinitionParser):
"""Parses an End Event. Handles Termination, Escalation, Cancel, and Error End Events."""
def create_task(self):
event_definition = self.get_event_definition(
[MESSAGE_EVENT_XPATH, CANCEL_EVENT_XPATH, ERROR_EVENT_XPATH, ESCALATION_EVENT_XPATH, TERMINATION_EVENT_XPATH])
task = self._create_task(event_definition)
task.connect(self.spec.end)
return task
class IntermediateCatchEventParser(EventDefinitionParser):
"""Parses an Intermediate Catch Event. Currently supports Message, Signal, and Timer definitions."""
def create_task(self):
event_definition = self.get_event_definition(
[MESSAGE_EVENT_XPATH, SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH, CONDITIONAL_EVENT_XPATH])
return super()._create_task(event_definition)
class IntermediateThrowEventParser(EventDefinitionParser):
"""Parses an Intermediate Catch Event. Currently supports Message, Signal and Timer event definitions."""
def create_task(self):
event_definition = self.get_event_definition(
[ESCALATION_EVENT_XPATH, MESSAGE_EVENT_XPATH, SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH])
return self._create_task(event_definition)
class SendTaskParser(IntermediateThrowEventParser):
def create_task(self):
if self.node.get('messageRef') is not None:
event_definition = self.parse_message_event(self.node)
else:
message_event = first(self.xpath(MESSAGE_EVENT_XPATH))
if message_event is not None:
event_definition = self.parse_message_event(message_event)
else:
event_definition = NoneEventDefinition()
return self._create_task(event_definition)
class ReceiveTaskParser(SendTaskParser):
"""Identical to the SendTaskParser - check for a message event definition"""
pass
class BoundaryEventParser(EventDefinitionParser):
"""
    Parse a Catching Boundary Event. This extends the EventDefinitionParser
    in order to parse the event definition.
"""
def create_task(self):
cancel_activity = self.node.get('cancelActivity', default='true').lower() == 'true'
event_definition = self.get_event_definition(
[CANCEL_EVENT_XPATH, ERROR_EVENT_XPATH, ESCALATION_EVENT_XPATH,
MESSAGE_EVENT_XPATH, SIGNAL_EVENT_XPATH, TIMER_EVENT_XPATH, CONDITIONAL_EVENT_XPATH])
if isinstance(event_definition, NoneEventDefinition):
            raise NotImplementedError('Unsupported Catch Event: %r' % etree.tostring(self.node))
return self._create_task(event_definition, cancel_activity)
class EventBasedGatewayParser(EventDefinitionParser):
def create_task(self):
return self._create_task(MultipleEventDefinition())
def handles_multiple_outgoing(self):
return True
def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default):
self.task.event_definition.event_definitions.append(outgoing_task.event_definition)
self.task.connect(outgoing_task)
| 14,982 | Python | .py | 271 | 45.867159 | 133 | 0.685205 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,048 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/__init__.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .BpmnParser import BpmnParser, BpmnValidator
from .TaskParser import TaskParser
from .event_parsers import EventDefinitionParser | 962 | Python | .py | 21 | 44.809524 | 69 | 0.801275 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,049 | node_parser.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/node_parser.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.parser.ValidationException import ValidationException
from SpiffWorkflow.bpmn.specs.bpmn_task_spec import BpmnIoSpecification
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference
from .util import first
DEFAULT_NSMAP = {
'bpmn': 'http://www.omg.org/spec/BPMN/20100524/MODEL',
'bpmndi': 'http://www.omg.org/spec/BPMN/20100524/DI',
'dc': 'http://www.omg.org/spec/DD/20100524/DC',
}
class NodeParser:
def __init__(self, node, nsmap=None, filename=None, lane=None):
self.node = node
self.nsmap = nsmap or DEFAULT_NSMAP
self.filename = filename
self.lane = self._get_lane() or lane
@property
def bpmn_id(self):
return self.node.get('id')
@property
def bpmn_attributes(self):
return {
'description': self.get_description(),
'lane': self.lane,
'bpmn_name': self.node.get('name'),
'documentation': self.parse_documentation(),
'data_input_associations': self.parse_incoming_data_references(),
'data_output_associations': self.parse_outgoing_data_references(),
}
def get_description(self):
return self.process_parser.parser.spec_descriptions.get(self.node.tag)
def xpath(self, xpath):
return self._xpath(self.node, xpath)
def doc_xpath(self, xpath):
root = self.node.getroottree().getroot()
return self._xpath(root, xpath)
def attribute(self, attribute, namespace=None, node=None):
if node is None:
node = self.node
prefix = '{' + self.nsmap.get(namespace or 'bpmn') + '}'
return node.attrib.get(f'{prefix}{attribute}')
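    # e.g. attribute('foo', 'dc') looks up '{http://www.omg.org/spec/DD/20100524/DC}foo'
    # on self.node ('foo' being a hypothetical attribute name).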
def parse_condition(self, sequence_flow):
expression = first(self._xpath(sequence_flow, './/bpmn:conditionExpression'))
return expression.text if expression is not None else None
def parse_documentation(self, sequence_flow=None):
node = sequence_flow if sequence_flow is not None else self.node
documentation_node = first(self._xpath(node, './/bpmn:documentation'))
return None if documentation_node is None else documentation_node.text
def parse_incoming_data_references(self):
specs = []
for name in self.xpath('./bpmn:dataInputAssociation/bpmn:sourceRef'):
ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
data_obj = self._resolve_data_object_ref(ref)
if data_obj is not None:
specs.append(data_obj)
else:
ref = first(self.doc_xpath(f".//bpmn:dataStoreReference[@id='{name.text}']"))
if ref is not None and ref.get('dataStoreRef') in self.process_parser.data_stores:
specs.append(self.process_parser.data_stores[ref.get('dataStoreRef')])
else:
raise ValidationException(f'Cannot resolve dataInputAssociation {name}', self.node, self.filename)
return specs
def parse_outgoing_data_references(self):
specs = []
for name in self.xpath('./bpmn:dataOutputAssociation/bpmn:targetRef'):
ref = first(self.doc_xpath(f".//bpmn:dataObjectReference[@id='{name.text}']"))
data_obj = self._resolve_data_object_ref(ref)
if data_obj is not None:
specs.append(data_obj)
else:
ref = first(self.doc_xpath(f".//bpmn:dataStoreReference[@id='{name.text}']"))
if ref is not None and ref.get('dataStoreRef') in self.process_parser.data_stores:
specs.append(self.process_parser.data_stores[ref.get('dataStoreRef')])
else:
raise ValidationException(f'Cannot resolve dataOutputAssociation {name}', self.node, self.filename)
return specs
def parse_io_spec(self):
data_refs = {}
for elem in self.xpath('./bpmn:ioSpecification/bpmn:dataInput'):
ref = self.create_data_spec(elem, TaskDataReference)
data_refs[ref.bpmn_id] = ref
for elem in self.xpath('./bpmn:ioSpecification/bpmn:dataOutput'):
ref = self.create_data_spec(elem, TaskDataReference)
data_refs[ref.bpmn_id] = ref
inputs, outputs = [], []
for ref in self.xpath('./bpmn:ioSpecification/bpmn:inputSet/bpmn:dataInputRefs'):
if ref.text in data_refs:
inputs.append(data_refs[ref.text])
for ref in self.xpath('./bpmn:ioSpecification/bpmn:outputSet/bpmn:dataOutputRefs'):
if ref.text in data_refs:
outputs.append(data_refs[ref.text])
return BpmnIoSpecification(inputs, outputs)
def _resolve_data_object_ref(self, ref):
if ref is not None:
current = self.process_parser
while current is not None:
data_obj = current.spec.data_objects.get(ref.get('dataObjectRef'))
                if data_obj is None:
                    # walk up the parser chain rather than resetting to the same
                    # parent, so nested references resolve and the loop terminates
                    current = current.parent
                else:
                    return data_obj
def create_data_spec(self, item, cls):
return cls(item.attrib.get('id'), item.attrib.get('name'))
def parse_extensions(self, node=None):
return {}
def get_position(self, node=None):
node = node if node is not None else self.node
nodeid = node.get('id')
if nodeid is not None:
bounds = first(self.doc_xpath(f".//bpmndi:BPMNShape[@bpmnElement='{nodeid}']//dc:Bounds"))
if bounds is not None:
return {'x': float(bounds.get('x', 0)), 'y': float(bounds.get('y', 0))}
return {'x': 0.0, 'y': 0.0}
def _get_lane(self):
noderef = first(self.doc_xpath(f".//bpmn:flowNodeRef[text()='{self.bpmn_id}']"))
if noderef is not None:
return noderef.getparent().get('name')
def _xpath(self, node, xpath):
return node.xpath(xpath, namespaces=self.nsmap)
    def raise_validation_exception(self, message, node=None, file_name=None):
        raise ValidationException(message, node if node is not None else self.node, file_name or self.filename)
| 6,920 | Python | .py | 138 | 40.992754 | 119 | 0.646083 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,050 | ProcessParser.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/ProcessParser.py | # Copyright (C) 2012 Matthew Hampton
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .ValidationException import ValidationException
from ..specs.bpmn_process_spec import BpmnProcessSpec
from ..specs.data_spec import DataObject
from ..specs.control import StartEventJoin, StartEventSplit
from .node_parser import NodeParser
from .util import first
class ProcessParser(NodeParser):
"""
Parses a single BPMN process, including all of the tasks within that
process.
"""
def __init__(self, p, node, nsmap, data_stores, filename=None, lane=None):
"""
Constructor.
:param p: the owning BpmnParser instance
:param node: the XML node for the process
:param data_stores: map of ids to data store implementations
:param filename: the source BPMN filename (optional)
:param lane: the lane of a subprocess (optional)
"""
super().__init__(node, nsmap, filename=filename, lane=lane)
self.parser = p
self.lane = lane
self.spec = None
self.process_executable = node.get('isExecutable', 'true') == 'true'
self.data_stores = data_stores
self.parent = None
def get_name(self):
"""
Returns the process name (or ID, if no name is included in the file)
"""
return self.node.get('name', default=self.bpmn_id)
def has_lanes(self) -> bool:
"""Returns true if this process has one or more named lanes """
elements = self.xpath("//bpmn:lane")
for el in elements:
if el.get("name"):
return True
return False
def start_messages(self):
""" This returns a list of message names that would cause this
process to start. """
message_names = []
messages = self.xpath("//bpmn:message")
message_event_definitions = self.xpath(
"//bpmn:startEvent/bpmn:messageEventDefinition")
for message_event_definition in message_event_definitions:
message_model_identifier = message_event_definition.attrib.get(
"messageRef"
)
if message_model_identifier is None:
raise ValidationException(
"Could not find messageRef from message event definition: {message_event_definition}"
)
# Convert the id into a Message Name
message_name = next((m for m in messages if m.attrib.get('id') == message_model_identifier), None)
message_names.append(message_name.attrib.get('name'))
return message_names
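    # e.g. a process whose start event references <bpmn:message id="m1" name="order_received"/>
    # would yield ['order_received'] (illustrative ids).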
def called_element_ids(self):
"""
Returns a list of ids referenced by `bpmn:callActivity` nodes.
"""
return self.xpath(".//bpmn:callActivity/@calledElement")
def parse_node(self, node):
"""
        Parses the specified child task node, and returns the task spec. This
        can be called by a TaskParser instance that is owned by this
        ProcessParser.
"""
if node.get('id') in self.spec.task_specs:
return self.spec.task_specs[node.get('id')]
(node_parser, spec_class) = self.parser._get_parser_class(node.tag)
if not node_parser or not spec_class:
raise ValidationException("There is no support implemented for this task type.",
node=node, file_name=self.filename)
np = node_parser(self, spec_class, node, self.nsmap, lane=self.lane)
task_spec = np.parse_node()
return task_spec
def _parse(self):
        # Here we only look at the top level; a subworkflow task will have
        # its own bpmn:startEvent, parsed separately.
start_node_list = self.xpath('./bpmn:startEvent')
if not start_node_list and self.process_executable:
raise ValidationException("No start event found", node=self.node, file_name=self.filename)
if not self.process_executable:
raise ValidationException(f"Process {self.bpmn_id} is not executable.", node=self.node, file_name=self.filename)
self.spec = BpmnProcessSpec(name=self.bpmn_id, description=self.get_name(), filename=self.filename)
# Get the data objects
for obj in self.xpath('./bpmn:dataObject'):
data_object = self.parse_data_object(obj)
self.spec.data_objects[data_object.bpmn_id] = data_object
# Check for an IO Specification.
io_spec = first(self.xpath('./bpmn:ioSpecification'))
if io_spec is not None:
self.spec.io_specification = self.parse_io_spec()
# set the data stores on the process spec so they can survive
# serialization
self.spec.data_stores = self.data_stores
for node in start_node_list:
self.parse_node(node)
if len(start_node_list) > 1:
split_task = StartEventSplit(self.spec, 'StartEventSplit')
join_task = StartEventJoin(self.spec, 'StartEventJoin', split_task='StartEventSplit', threshold=1, cancel=True)
for spec in self.spec.start.outputs:
spec.inputs = [split_task]
spec.connect(join_task)
split_task.outputs = self.spec.start.outputs
self.spec.start.outputs = [split_task]
split_task.inputs = [self.spec.start]
def parse_data_object(self, obj):
return self.create_data_spec(obj, DataObject)
def get_spec(self):
"""
Parse this process (if it has not already been parsed), and return the
workflow spec.
"""
if self.spec is None:
self._parse()
return self.spec
| 6,402 | Python | .py | 137 | 38.072993 | 124 | 0.651937 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,051 | ValidationException.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/ValidationException.py | # Copyright (C) 2012 Matthew Hampton, 2023 Dan Funk
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .util import BPMN_MODEL_NS
from ...exceptions import SpiffWorkflowException
class ValidationException(SpiffWorkflowException):
"""
A ValidationException should be thrown with enough information for the user
to diagnose the problem and sort it out.
If available, please provide the offending XML node and filename.
"""
def __init__(self, msg, node=None, file_name=None, *args, **kwargs):
if node is not None:
self.tag = self._shorten_tag(node.tag)
self.id = node.get('id', '')
self.name = node.get('name', '')
self.line_number = getattr(node, 'line_number', '')
else:
self.tag = kwargs.get('tag', '')
self.id = kwargs.get('id', '')
self.name = kwargs.get('name', '')
self.line_number = kwargs.get('line_number', '')
self.file_name = file_name or ''
super(ValidationException, self).__init__(msg, *args)
@classmethod
def _shorten_tag(cls, tag):
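        # e.g. '{http://www.omg.org/spec/BPMN/20100524/MODEL}userTask' -> 'bpmn:userTask'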
prefix = '{%s}' % BPMN_MODEL_NS
if tag.startswith(prefix):
return 'bpmn:' + tag[len(prefix):]
return tag
| 1,981 | Python | .py | 45 | 38.511111 | 79 | 0.67859 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,052 | BpmnParser.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/BpmnParser.py | # Copyright (C) 2012 Matthew Hampton
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import glob
import os
from lxml import etree
from lxml.etree import LxmlError
from SpiffWorkflow.bpmn.specs.bpmn_process_spec import BpmnProcessSpec
from SpiffWorkflow.bpmn.specs.defaults import (
UserTask,
ManualTask,
NoneTask,
ScriptTask,
ServiceTask,
CallActivity,
SubWorkflowTask,
TransactionSubprocess,
InclusiveGateway,
ExclusiveGateway,
ParallelGateway,
StartEvent,
EndEvent,
IntermediateCatchEvent,
IntermediateThrowEvent,
SendTask,
ReceiveTask,
BoundaryEvent,
EventBasedGateway
)
from SpiffWorkflow.bpmn.specs.event_definitions.simple import NoneEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.timer import TimerEventDefinition
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import SubWorkflowTask as SubWorkflowTaskMixin
from SpiffWorkflow.bpmn.specs.mixins.events.start_event import StartEvent as StartEventMixin
from .ValidationException import ValidationException
from .ProcessParser import ProcessParser
from .node_parser import DEFAULT_NSMAP
from .spec_description import SPEC_DESCRIPTIONS
from .util import full_tag, xpath_eval, first
from .TaskParser import TaskParser
from .task_parsers import (
GatewayParser,
ConditionalGatewayParser,
CallActivityParser,
ScriptTaskParser,
SubWorkflowParser,
)
from .event_parsers import (
EventBasedGatewayParser,
StartEventParser, EndEventParser,
BoundaryEventParser,
IntermediateCatchEventParser,
IntermediateThrowEventParser,
SendTaskParser,
ReceiveTaskParser
)
XSD_PATH = os.path.join(os.path.dirname(__file__), 'schema', 'BPMN20.xsd')
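# A typical usage sketch (file and process names are hypothetical):
#   parser = BpmnParser(validator=BpmnValidator())
#   parser.add_bpmn_file('order_process.bpmn')
#   spec = parser.get_spec('order_process')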
class BpmnValidator:
def __init__(self, xsd_path=XSD_PATH, imports=None):
with open(xsd_path) as xsd:
schema = etree.parse(xsd)
if imports is not None:
for ns, fn in imports.items():
elem = etree.Element(
'{http://www.w3.org/2001/XMLSchema}import',
namespace=ns,
schemaLocation=fn
)
schema.getroot().insert(0, elem)
self.validator = etree.XMLSchema(schema)
def validate(self, bpmn, filename=None):
self.preprocess(bpmn)
try:
self.validator.assertValid(bpmn)
except ValidationException as ve:
ve.file_name = filename
ve.line_number = self.validator.error_log.last_error.line
except LxmlError:
last_error = self.validator.error_log.last_error
raise ValidationException(last_error.message, file_name=filename,
line_number=last_error.line)
def preprocess(self, bpmn):
        # bpmn-js creates invalid XML for message correlation properties, and it is apparently too difficult to change;
        # therefore, I'll just preprocess the XML and replace the tag so that validation can continue.
for expr in bpmn.xpath('.//bpmn:correlationPropertyRetrievalExpression/bpmn:formalExpression', namespaces=DEFAULT_NSMAP):
expr.tag = '{' + DEFAULT_NSMAP['bpmn'] + '}messagePath'
class BpmnParser(object):
"""
The BpmnParser class is a pluggable base class that manages the parsing of
    a set of BPMN files. It is intended that this class will be overridden by an
application that implements a BPMN engine.
Extension points: OVERRIDE_PARSER_CLASSES provides a map from full BPMN tag
name to a TaskParser and Task class. PROCESS_PARSER_CLASS provides a
subclass of ProcessParser. DATA_STORE_CLASSES provides a mapping of names to
subclasses of BpmnDataStoreSpecification that provide a data store
implementation.
"""
PARSER_CLASSES = {
full_tag('startEvent'): (StartEventParser, StartEvent),
full_tag('endEvent'): (EndEventParser, EndEvent),
full_tag('userTask'): (TaskParser, UserTask),
full_tag('task'): (TaskParser, NoneTask),
full_tag('subProcess'): (SubWorkflowParser, SubWorkflowTask),
full_tag('manualTask'): (TaskParser, ManualTask),
full_tag('exclusiveGateway'): (ConditionalGatewayParser, ExclusiveGateway),
full_tag('parallelGateway'): (GatewayParser, ParallelGateway),
full_tag('inclusiveGateway'): (ConditionalGatewayParser, InclusiveGateway),
full_tag('callActivity'): (CallActivityParser, CallActivity),
full_tag('transaction'): (SubWorkflowParser, TransactionSubprocess),
full_tag('scriptTask'): (ScriptTaskParser, ScriptTask),
full_tag('serviceTask'): (TaskParser, ServiceTask),
full_tag('intermediateCatchEvent'): (IntermediateCatchEventParser, IntermediateCatchEvent),
full_tag('intermediateThrowEvent'): (IntermediateThrowEventParser, IntermediateThrowEvent),
full_tag('boundaryEvent'): (BoundaryEventParser, BoundaryEvent),
full_tag('receiveTask'): (ReceiveTaskParser, ReceiveTask),
full_tag('sendTask'): (SendTaskParser, SendTask),
full_tag('eventBasedGateway'): (EventBasedGatewayParser, EventBasedGateway),
}
OVERRIDE_PARSER_CLASSES = {}
PROCESS_PARSER_CLASS = ProcessParser
DATA_STORE_CLASSES = {}
def __init__(self, namespaces=None, validator=None, spec_descriptions=SPEC_DESCRIPTIONS):
"""
Constructor.
"""
self.namespaces = namespaces or DEFAULT_NSMAP
self.validator = validator
self.spec_descriptions = spec_descriptions
self.process_parsers = {}
self.collaborations = {}
self.process_dependencies = set()
self.messages = {}
self.correlations = {}
self.data_stores = {}
def _get_parser_class(self, tag):
if tag in self.OVERRIDE_PARSER_CLASSES:
return self.OVERRIDE_PARSER_CLASSES[tag]
elif tag in self.PARSER_CLASSES:
return self.PARSER_CLASSES[tag]
return None, None
def get_process_parser(self, process_id):
"""
        Returns the ProcessParser for the given process ID, or None if no
        process with that ID has been parsed.
"""
if process_id in self.process_parsers:
return self.process_parsers[process_id]
def get_process_ids(self):
"""Returns a list of process IDs"""
return list(proc_id for proc_id, parser in self.process_parsers.items() if parser.process_executable)
def add_bpmn_file(self, filename):
"""
Add the given BPMN filename to the parser's set.
"""
self.add_bpmn_files([filename])
def add_bpmn_files_by_glob(self, g):
"""
Add all filenames matching the provided pattern (e.g. *.bpmn) to the
parser's set.
"""
self.add_bpmn_files(glob.glob(g))
def add_bpmn_files(self, filenames):
"""
Add all filenames in the given list to the parser's set.
"""
for filename in filenames:
with open(filename, 'r') as f:
self.add_bpmn_io(f, filename)
def add_bpmn_io(self, file_like_object, filename=None):
"""
Add the given BPMN file like object to the parser's set.
"""
self.add_bpmn_xml(etree.parse(file_like_object), filename)
def add_bpmn_str(self, bpmn_str, filename=None):
"""
Add the given BPMN string to the parser's set.
"""
self.add_bpmn_xml(etree.fromstring(bpmn_str), filename)
def add_bpmn_xml(self, bpmn, filename=None):
"""
Add the given lxml representation of the BPMN file to the parser's set.
        :param bpmn: the lxml element tree of the BPMN file
        :param filename: Optionally, provide the source filename.
"""
if self.validator:
self.validator.validate(bpmn, filename)
# we need to parse the data stores before _add_process since it creates
# the parser instances, which need to know about the data stores to
# resolve data references.
self._add_data_stores(bpmn)
self._add_processes(bpmn, filename)
self._add_collaborations(bpmn)
self._add_messages(bpmn)
self._add_correlations(bpmn)
def _add_processes(self, bpmn, filename=None):
for process in bpmn.xpath('.//bpmn:process', namespaces=self.namespaces):
self._find_dependencies(process)
self.create_parser(process, filename)
def _add_collaborations(self, bpmn):
collaboration = first(bpmn.xpath('.//bpmn:collaboration', namespaces=self.namespaces))
if collaboration is not None:
collaboration_xpath = xpath_eval(collaboration)
name = collaboration.get('id')
self.collaborations[name] = [ participant.get('processRef') for participant in collaboration_xpath('.//bpmn:participant') ]
def _add_messages(self, bpmn):
for message in bpmn.xpath('.//bpmn:message', namespaces=self.namespaces):
if message.attrib.get("id") is None:
raise ValidationException(
"Message identifier is missing from bpmn xml"
)
self.messages[message.attrib.get("id")] = message.attrib.get("name")
def _add_correlations(self, bpmn):
for correlation in bpmn.xpath('.//bpmn:correlationProperty', namespaces=self.namespaces):
correlation_identifier = correlation.attrib.get("id")
if correlation_identifier is None:
raise ValidationException("Correlation identifier is missing from bpmn xml")
correlation_property_retrieval_expressions = correlation.xpath(
".//bpmn:correlationPropertyRetrievalExpression", namespaces = self.namespaces)
if not correlation_property_retrieval_expressions:
raise ValidationException(
f"Correlation is missing correlation property retrieval expressions: {correlation_identifier}"
)
retrieval_expressions = []
for cpre in correlation_property_retrieval_expressions:
message_model_identifier = cpre.attrib.get("messageRef")
if message_model_identifier is None:
raise ValidationException(
f"Message identifier is missing from correlation property: {correlation_identifier}"
)
children = cpre.getchildren()
expression = children[0].text if len(children) > 0 else None
retrieval_expressions.append({"messageRef": message_model_identifier,
"expression": expression})
self.correlations[correlation_identifier] = {
"name": correlation.attrib.get("name"),
"retrieval_expressions": retrieval_expressions
}
def _add_data_stores(self, bpmn):
for data_store in bpmn.xpath('.//bpmn:dataStore', namespaces=self.namespaces):
data_store_id = data_store.attrib.get("id")
if data_store_id is None:
raise ValidationException(
"Data Store identifier is missing from bpmn xml"
)
data_store_name = data_store.attrib.get("name")
if data_store_name is None:
raise ValidationException(
"Data Store name is missing from bpmn xml"
)
if data_store_name not in self.DATA_STORE_CLASSES:
raise ValidationException(
f"Data Store with name {data_store_name} has no implementation"
)
data_store_spec = self.DATA_STORE_CLASSES[data_store_name](
data_store_id,
data_store_name,
data_store.attrib.get('capacity'),
data_store.attrib.get('isUnlimited'))
self.data_stores[data_store_id] = data_store_spec
def _find_dependencies(self, process):
"""Locate all calls to external BPMN, and store their ids in our list of dependencies"""
for call_activity in process.xpath('.//bpmn:callActivity', namespaces=self.namespaces):
self.process_dependencies.add(call_activity.get('calledElement'))
def create_parser(self, node, filename=None, lane=None):
parser = self.PROCESS_PARSER_CLASS(self, node, self.namespaces, self.data_stores, filename=filename, lane=lane)
if parser.bpmn_id in self.process_parsers:
raise ValidationException(f'Duplicate process ID: {parser.bpmn_id}', node=node, file_name=filename)
self.process_parsers[parser.bpmn_id] = parser
def get_process_dependencies(self):
return self.process_dependencies
def get_spec(self, process_id, required=True):
"""
        Parses the required subset of the BPMN files, in order to provide an
        instance of BpmnProcessSpec (i.e. WorkflowSpec) for the given process
        ID. If `required` is False, None is returned when the process is not found.
"""
parser = self.get_process_parser(process_id)
if required and parser is None:
raise ValidationException(
f"The process '{process_id}' was not found. "
f"Did you mean one of the following: "
f"{', '.join(self.get_process_ids())}?")
elif parser is not None:
return parser.get_spec()
def get_subprocess_specs(self, name, specs=None, require_call_activity_specs=True):
used = specs or {}
wf_spec = self.get_spec(name)
for task_spec in wf_spec.task_specs.values():
if isinstance(task_spec, SubWorkflowTaskMixin) and task_spec.spec not in used:
subprocess_spec = self.get_spec(task_spec.spec, required=require_call_activity_specs)
used[task_spec.spec] = subprocess_spec
if subprocess_spec is not None:
self.get_subprocess_specs(task_spec.spec, used)
return used
def find_all_specs(self):
# This is a little convoluted, but we might add more processes as we generate
# the dictionary if something refers to another subprocess that we haven't seen.
processes = dict((id, self.get_spec(id)) for id in self.get_process_ids())
while sorted(processes.keys()) != sorted(self.get_process_ids()):
for process_id in self.get_process_ids():
processes[process_id] = self.get_spec(process_id)
return processes
def get_collaboration(self, name):
self.find_all_specs()
spec = BpmnProcessSpec(name)
subprocesses = {}
participant_type = self._get_parser_class(full_tag('callActivity'))[1]
start_type = self._get_parser_class(full_tag('startEvent'))[1]
end_type = self._get_parser_class(full_tag('endEvent'))[1]
start = start_type(spec, 'Start Collaboration', NoneEventDefinition())
spec.start.connect(start)
end = end_type(spec, 'End Collaboration', NoneEventDefinition())
end.connect(spec.end)
for process in self.collaborations[name]:
process_parser = self.get_process_parser(process)
if process_parser and process_parser.process_executable:
sp_spec = self.get_spec(process)
subprocesses[process] = sp_spec
subprocesses.update(self.get_subprocess_specs(process))
if len([s for s in sp_spec.task_specs.values() if
isinstance(s, StartEventMixin) and
isinstance(s.event_definition, (NoneEventDefinition, TimerEventDefinition))
]):
participant = participant_type(spec, process, process)
start.connect(participant)
participant.connect(end)
return spec, subprocesses
| 16,657 | Python | .py | 342 | 39.149123 | 135 | 0.660809 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,053 | TaskParser.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/parser/TaskParser.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import TransactionSubprocess
from SpiffWorkflow.bpmn.specs.mixins.exclusive_gateway import ExclusiveGateway
from SpiffWorkflow.bpmn.specs.mixins.inclusive_gateway import InclusiveGateway
from SpiffWorkflow.bpmn.specs.defaults import (
StandardLoopTask,
SequentialMultiInstanceTask,
ParallelMultiInstanceTask
)
from SpiffWorkflow.bpmn.specs.control import BoundaryEventSplit, BoundaryEventJoin
from SpiffWorkflow.bpmn.specs.event_definitions.simple import CancelEventDefinition
from SpiffWorkflow.bpmn.specs.data_spec import TaskDataReference
from .util import one
from .node_parser import NodeParser
from .ValidationException import ValidationException
class TaskParser(NodeParser):
"""
This class parses a single BPMN task node, and returns the Task Spec for
that node.
It also results in the recursive parsing of connected tasks, connecting all
outgoing transitions, once the child tasks have all been parsed.
"""
# I hate myself for this. I wanted to at least relegate it to the top-level
# parser where the rest of the similar nonsense is, but it's inaccessible here.
STANDARD_LOOP_CLASS = StandardLoopTask
PARALLEL_MI_CLASS = ParallelMultiInstanceTask
SEQUENTIAL_MI_CLASS = SequentialMultiInstanceTask
def __init__(self, process_parser, spec_class, node, nsmap=None, lane=None):
"""
Constructor.
:param process_parser: the owning process parser instance
:param spec_class: the type of spec that should be created. This allows
a subclass of BpmnParser to provide a specialised spec class, without
extending the TaskParser.
:param node: the XML node for this task
"""
super().__init__(node, nsmap, filename=process_parser.filename, lane=lane)
self.process_parser = process_parser
self.spec_class = spec_class
self.spec = self.process_parser.spec
def _copy_task_attrs(self, original, loop_characteristics=None):
self.task.inputs = original.inputs
self.task.outputs = original.outputs
self.task.io_specification = original.io_specification
self.task.data_input_associations = original.data_input_associations
self.task.data_output_associations = original.data_output_associations
original.inputs = [self.task]
original.outputs = []
original.io_specification = None
original.data_input_associations = []
original.data_output_associations = []
original.name = f'{original.name} [child]'
self.task.task_spec = original.name
self.spec.task_specs[original.name] = original
def _add_loop_task(self, loop_characteristics):
maximum = loop_characteristics.attrib.get('loopMaximum')
if maximum is not None:
maximum = int(maximum)
condition = self.xpath('./bpmn:standardLoopCharacteristics/bpmn:loopCondition')
condition = condition[0].text if len(condition) > 0 else None
test_before = loop_characteristics.get('testBefore', 'false') == 'true'
if maximum is None and condition is None:
self.raise_validation_exception('A loopMaximum or loopCondition must be specified for Loop Tasks')
original = self.spec.task_specs.pop(self.task.name)
self.task = self.STANDARD_LOOP_CLASS(self.spec, original.name, '', maximum, condition, test_before, description='Loop Task')
self._copy_task_attrs(original, loop_characteristics)
def _add_multiinstance_task(self, loop_characteristics):
sequential = loop_characteristics.get('isSequential') == 'true'
prefix = 'bpmn:multiInstanceLoopCharacteristics'
cardinality = self.xpath(f'./{prefix}/bpmn:loopCardinality')
loop_input = self.xpath(f'./{prefix}/bpmn:loopDataInputRef')
if len(cardinality) == 0 and len(loop_input) == 0:
self.raise_validation_exception(
"A multiinstance task must specify a cardinality or a loop input data reference")
elif len(cardinality) > 0 and len(loop_input) > 0:
self.raise_validation_exception(
"A multiinstance task must specify exactly one of cardinality or loop input data reference")
cardinality = cardinality[0].text if len(cardinality) > 0 else None
loop_input = loop_input[0].text if len(loop_input) > 0 else None
if loop_input is not None:
if self.task.io_specification is not None:
try:
loop_input = [v for v in self.task.io_specification.data_inputs if v.name == loop_input][0]
except Exception:
self.raise_validation_exception('The loop input data reference is missing from the IO specification')
else:
loop_input = TaskDataReference(loop_input)
input_item = self.xpath(f'./{prefix}/bpmn:inputDataItem')
input_item = self.create_data_spec(input_item[0], TaskDataReference) if len(input_item) > 0 else None
loop_output = self.xpath(f'./{prefix}/bpmn:loopDataOutputRef')
loop_output = loop_output[0].text if len(loop_output) > 0 else None
if loop_output is not None:
if self.task.io_specification is not None:
try:
refs = set(self.task.io_specification.data_inputs + self.task.io_specification.data_outputs)
loop_output = [v for v in refs if v.name == loop_output][0]
except Exception:
self.raise_validation_exception('The loop output data reference is missing from the IO specification')
else:
loop_output = TaskDataReference(loop_output)
output_item = self.xpath(f'./{prefix}/bpmn:outputDataItem')
output_item = self.create_data_spec(output_item[0], TaskDataReference) if len(output_item) > 0 else None
condition = self.xpath(f'./{prefix}/bpmn:completionCondition')
condition = condition[0].text if len(condition) > 0 else None
original = self.spec.task_specs.pop(self.task.name)
params = {
'task_spec': '',
'cardinality': cardinality,
'data_input': loop_input,
            'data_output': loop_output,
'input_item': input_item,
'output_item': output_item,
'condition': condition,
}
if sequential:
self.task = self.SEQUENTIAL_MI_CLASS(self.spec, original.name, description='Sequential MultiInstance', **params)
else:
self.task = self.PARALLEL_MI_CLASS(self.spec, original.name, description='Parallel MultiInstance', **params)
self._copy_task_attrs(original, loop_characteristics)
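    # A sketch of the multi-instance XML handled here; exactly one of
    # loopCardinality or loopDataInputRef must be present:
    #
    #     <bpmn:userTask id="Task_2" name="Approve item">
    #       <bpmn:multiInstanceLoopCharacteristics isSequential="false">
    #         <bpmn:loopDataInputRef>items</bpmn:loopDataInputRef>
    #         <bpmn:inputDataItem id="item" name="item"/>
    #         <bpmn:completionCondition>done</bpmn:completionCondition>
    #       </bpmn:multiInstanceLoopCharacteristics>
    #     </bpmn:userTask>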
def _add_boundary_event(self, children):
split_task = BoundaryEventSplit(self.spec, f'{self.bpmn_id}.BoundaryEventSplit', lane=self.task.lane)
join_task = BoundaryEventJoin(
self.spec,
f'{self.bpmn_id}.BoundaryEventJoin',
lane=self.task.lane,
split_task=split_task.name,
cancel=True
)
split_task.connect(self.task)
self.task.connect(join_task)
for event in children:
child = self.process_parser.parse_node(event)
if isinstance(child.event_definition, CancelEventDefinition) and not isinstance(self.task, TransactionSubprocess):
self.raise_validation_exception('Cancel Events may only be used with transactions')
split_task.connect(child)
child.connect(join_task)
return split_task
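    # The resulting graph looks like this (the main task and each boundary
    # event share the synthetic split and join):
    #
    #     BoundaryEventSplit --> main task ------> BoundaryEventJoin
    #                      \---> boundary event --/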
def parse_node(self):
"""
Parse this node, and all children, returning the connected task spec.
"""
try:
self.task = self.create_task()
# Why do we just set random attributes willy nilly everywhere in the code????
# And we still pass around a gigantic kwargs dict whenever we create anything!
self.task.extensions = self.parse_extensions()
io_spec = self.xpath('./bpmn:ioSpecification')
if len(io_spec) > 0:
self.task.io_specification = self.parse_io_spec()
loop_characteristics = self.xpath('./bpmn:standardLoopCharacteristics')
if len(loop_characteristics) > 0:
self._add_loop_task(loop_characteristics[0])
mi_loop_characteristics = self.xpath('./bpmn:multiInstanceLoopCharacteristics')
if len(mi_loop_characteristics) > 0:
self._add_multiinstance_task(mi_loop_characteristics[0])
boundary_event_nodes = self.doc_xpath('.//bpmn:boundaryEvent[@attachedToRef="%s"]' % self.bpmn_id)
if boundary_event_nodes:
parent = self._add_boundary_event(boundary_event_nodes)
children = []
outgoing = self.doc_xpath('.//bpmn:sequenceFlow[@sourceRef="%s"]' % self.bpmn_id)
if len(outgoing) > 1 and not self.handles_multiple_outgoing():
                self.raise_validation_exception(f'Multiple outgoing flows are not supported for tasks of type {self.spec_class.__name__}')
for sequence_flow in outgoing:
target_ref = sequence_flow.get('targetRef')
try:
                    target_node = one(self.doc_xpath('.//bpmn:*[@id="%s"]' % target_ref))
except Exception:
                    self.raise_validation_exception('Expected to find exactly one node with ID "%s", '
                        'but found none or several; perhaps a form has the same ID?' % target_ref)
split_task = self.spec.task_specs.get(f'{target_ref}.BoundaryEventSplit')
c = self.process_parser.parse_node(target_node) if split_task is None else split_task
position = self.get_position(target_node)
children.append((position, c, sequence_flow))
if children:
# Sort children by their y coordinate.
# Why?? Isn't the point of parallel tasks that they can be executed in any order (or simultaneously)?
# And what if they're arranged horizontally?
children = sorted(children, key=lambda tup: float(tup[0]["y"]))
default_outgoing = self.node.get('default')
if len(children) == 1 and isinstance(self.task, (ExclusiveGateway, InclusiveGateway)):
(position, c, sequence_flow) = children[0]
if self.parse_condition(sequence_flow) is None:
default_outgoing = sequence_flow.get('id')
for (position, c, sequence_flow) in children:
self.connect_outgoing(c, sequence_flow, sequence_flow.get('id') == default_outgoing)
return parent if boundary_event_nodes else self.task
except ValidationException as ve:
raise ve
except Exception as ex:
raise ValidationException("%r" % (ex), node=self.node, file_name=self.filename)
def get_task_spec_name(self, target_ref=None):
"""
Returns a unique task spec name for this task (or the targeted one)
"""
return target_ref or self.bpmn_id
def create_task(self):
"""
Create an instance of the task appropriately. A subclass can override
this method to get extra information from the node.
"""
return self.spec_class(self.spec, self.bpmn_id, **self.bpmn_attributes)
def connect_outgoing(self, outgoing_task, sequence_flow_node, is_default):
"""
Connects this task to the indicating outgoing task, with the details in
the sequence flow. A subclass can override this method to get extra
information from the node.
"""
self.task.connect(outgoing_task)
def handles_multiple_outgoing(self):
"""
A subclass should override this method if the task supports multiple
outgoing sequence flows.
"""
return False
| 12,858 | Python | .py | 231 | 45.502165 | 132 | 0.662668 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,054 | bpmn_task_spec.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/bpmn_task_spec.py |
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
from SpiffWorkflow.operators import Operator
from SpiffWorkflow.specs.base import TaskSpec
class _BpmnCondition(Operator):
def __init__(self, *args):
if len(args) > 1:
raise TypeError("Too many arguments")
super(_BpmnCondition, self).__init__(*args)
def _matches(self, task):
return task.workflow.script_engine.evaluate(task, self.args[0], external_context=task.workflow.data_objects)
class BpmnIoSpecification:
def __init__(self, data_inputs, data_outputs):
self.data_inputs = data_inputs
self.data_outputs = data_outputs
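# A minimal sketch of assembling an IO specification by hand; the parser
# normally builds this from bpmn:ioSpecification (TaskDataReference lives in
# SpiffWorkflow.bpmn.specs.data_spec):
#
#     io_spec = BpmnIoSpecification(
#         data_inputs=[TaskDataReference('invoice_id')],
#         data_outputs=[TaskDataReference('total')],
#     )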
class BpmnTaskSpec(TaskSpec):
"""
This class provides BPMN-specific attributes.
It is intended to be used with all tasks in a BPMN workflow. Spiff internal tasks (such
as Root, EndJoin, etc) inherit directly from this.
Visible tasks inherit from `BpmnSpecMixin`, which will assign the `bpmn_id` and `bpmn_name`.
The intent is to (1) give all tasks in the workflow the same attributes and (2) provide an
    easy way of knowing whether a task appears on the diagram.
"""
def __init__(self, wf_spec, name, lane=None, documentation=None,
data_input_associations=None, data_output_associations=None,
io_specification=None, **kwargs):
"""
:param lane: Indicates the name of the lane that this task belongs to
:param documentation: the contents of the documentation element
:param data_input_associations: a list of data references to be used as inputs to the task
        :param data_output_associations: a list of data references to be used as outputs of the task
"""
super().__init__(wf_spec, name, **kwargs)
self.bpmn_id = None
self.bpmn_name = None
self.lane = lane
self.documentation = documentation
self.data_input_associations = data_input_associations or []
self.data_output_associations = data_output_associations or []
self.io_specification = io_specification
if self.description is None:
self.description = 'BPMN Task'
def connect_outgoing_if(self, condition, taskspec):
"""
Connect this task spec to the indicated child, if the condition
evaluates to true. This should only be called if the task has a
connect_if method (e.g. ExclusiveGateway).
"""
if condition is None:
self.connect(taskspec)
else:
self.connect_if(_BpmnCondition(condition), taskspec)
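    # Illustrative use when wiring a diverging gateway by hand (a sketch; the
    # parser normally performs this wiring from sequence flow conditions):
    #
    #     gateway.connect_outgoing_if('amount > 100', review_task)
    #     gateway.connect_outgoing_if(None, default_task)  # unconditional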
def _update_hook(self, my_task):
super()._update_hook(my_task)
# This copies data from data objects
for obj in self.data_input_associations:
obj.get(my_task)
# If an IO spec was given, require all inputs are present, and remove all other inputs.
if self.io_specification is not None and len(self.io_specification.data_inputs) > 0:
data = {}
for var in self.io_specification.data_inputs:
if not var.exists(my_task):
raise WorkflowDataException("Missing data input", task=my_task, data_input=var)
data[var.bpmn_id] = var.get(my_task)
my_task.data = data
return True
def _on_complete_hook(self, my_task):
if self.io_specification is not None and len(self.io_specification.data_outputs) > 0:
data = {}
for var in self.io_specification.data_outputs:
if not var.exists(my_task):
raise WorkflowDataException("Missing data ouput", task=my_task, data_output=var)
data[var.bpmn_id] = var.get(my_task)
my_task.data = data
for obj in self.data_output_associations:
obj.set(my_task)
for obj in self.data_input_associations:
            # Remove any copied input variables that have not already been removed
my_task.data.pop(obj.bpmn_id, None)
super()._on_complete_hook(my_task)
def task_info(self, my_task):
# This method can be extended to provide task specific info for different spec types
# Since almost all spec types can be MI, add instance info here if present
info = {}
if 'key_or_index' in my_task.internal_data:
info['instance'] = my_task.internal_data.get('key_or_index')
if 'item' in my_task.internal_data:
info['instance'] = my_task.internal_data.get('item')
if 'iteration' in my_task.internal_data:
info['iteration'] = my_task.internal_data.get('iteration')
return info
| 4,686 | Python | .py | 91 | 41.89011 | 116 | 0.653249 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,055 | defaults.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/defaults.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .mixins import (
BpmnSpecMixin,
ManualTaskMixin,
NoneTaskMixin,
UserTaskMixin,
ExclusiveGatewayMixin,
InclusiveGatewayMixin,
ParallelGatewayMixin,
ScriptTaskMixin,
ServiceTaskMixin,
StandardLoopTaskMixin,
ParallelMultiInstanceTaskMixin,
SequentialMultiInstanceTaskMixin,
SubWorkflowTaskMixin,
CallActivityMixin,
TransactionSubprocessMixin,
StartEventMixin,
EndEventMixin,
IntermediateCatchEventMixin,
IntermediateThrowEventMixin,
SendTaskMixin,
ReceiveTaskMixin,
EventBasedGatewayMixin,
BoundaryEventMixin,
)
# In the future, we could have the parser take a bpmn task spec and construct these classes automatically
# However, I am NOT going to try to do that with the parser we have now
class ManualTask(ManualTaskMixin, BpmnSpecMixin):
pass
class NoneTask(NoneTaskMixin, BpmnSpecMixin):
pass
class UserTask(UserTaskMixin, BpmnSpecMixin):
pass
class ExclusiveGateway(ExclusiveGatewayMixin, BpmnSpecMixin):
pass
class InclusiveGateway(InclusiveGatewayMixin, BpmnSpecMixin):
pass
class ParallelGateway(ParallelGatewayMixin, BpmnSpecMixin):
pass
class ScriptTask(ScriptTaskMixin, BpmnSpecMixin):
pass
class ServiceTask(ServiceTaskMixin, BpmnSpecMixin):
pass
class StandardLoopTask(StandardLoopTaskMixin, BpmnSpecMixin):
pass
class ParallelMultiInstanceTask(ParallelMultiInstanceTaskMixin, BpmnSpecMixin):
pass
class SequentialMultiInstanceTask(SequentialMultiInstanceTaskMixin, BpmnSpecMixin):
pass
class SubWorkflowTask(SubWorkflowTaskMixin, BpmnSpecMixin):
pass
class CallActivity(CallActivityMixin, BpmnSpecMixin):
pass
class TransactionSubprocess(TransactionSubprocessMixin, BpmnSpecMixin):
pass
class StartEvent(StartEventMixin, BpmnSpecMixin):
pass
class EndEvent(EndEventMixin, BpmnSpecMixin):
pass
class IntermediateCatchEvent(IntermediateCatchEventMixin, BpmnSpecMixin):
pass
class IntermediateThrowEvent(IntermediateThrowEventMixin, BpmnSpecMixin):
pass
class SendTask(SendTaskMixin, BpmnSpecMixin):
pass
class ReceiveTask(ReceiveTaskMixin, BpmnSpecMixin):
pass
class EventBasedGateway(EventBasedGatewayMixin, BpmnSpecMixin):
pass
class BoundaryEvent(BoundaryEventMixin, BpmnSpecMixin):
pass
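# Following the same pattern, an application can combine the mixins with its
# own behavior, e.g. (a sketch; AuditedScriptTask is not part of the library):
#
#     class AuditedScriptTask(ScriptTaskMixin, BpmnSpecMixin):
#         def _run_hook(self, task):
#             print(f'Running {self.bpmn_id}')  # custom side effect
#             return super()._run_hook(task)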
| 3,114 | Python | .py | 89 | 31.696629 | 105 | 0.817727 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,056 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/__init__.py | # Copyright (C) 2012 Matthew Hampton
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .bpmn_task_spec import BpmnTaskSpec
from .data_spec import BpmnDataSpecification, BpmnDataStoreSpecification
from .bpmn_process_spec import BpmnProcessSpec | 971 | Python | .py | 21 | 45.238095 | 72 | 0.802105 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,057 | control.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/control.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.exceptions import WorkflowException
from SpiffWorkflow.util.task import TaskState, TaskFilter, TaskIterator
from SpiffWorkflow.specs.StartTask import StartTask
from SpiffWorkflow.specs.Join import Join
from SpiffWorkflow.bpmn.specs.bpmn_task_spec import BpmnTaskSpec
from SpiffWorkflow.bpmn.specs.mixins.unstructured_join import UnstructuredJoin
from SpiffWorkflow.bpmn.specs.mixins.events.intermediate_event import BoundaryEvent
from SpiffWorkflow.bpmn.specs.mixins.events.start_event import StartEvent
class BpmnStartTask(BpmnTaskSpec, StartTask):
pass
class SimpleBpmnTask(BpmnTaskSpec):
pass
class EventSplit(SimpleBpmnTask):
def __init__(self, event_type, wf_spec, name, **kwargs):
super().__init__(wf_spec, name, **kwargs)
self.event_type = event_type
def _predict_hook(self, my_task):
# Events attached to the main task might occur
my_task._sync_children(self.outputs, state=TaskState.MAYBE)
# The main child's state is based on this task's state
state = TaskState.FUTURE if my_task.has_state(TaskState.DEFINITE_MASK) else my_task.state
for child in my_task.children:
if not isinstance(child.task_spec, self.event_type):
child._set_state(state)
def _run_hook(self, my_task):
for task in my_task.children:
if isinstance(task.task_spec, self.event_type) and task.has_state(TaskState.PREDICTED_MASK):
task._set_state(TaskState.WAITING)
return True
class BoundaryEventSplit(EventSplit):
def __init__(self, wf_spec, name, **kwargs):
super().__init__(BoundaryEvent, wf_spec, name, **kwargs)
class StartEventSplit(EventSplit):
def __init__(self, wf_spec, name, **kwargs):
super().__init__(StartEvent, wf_spec, name, **kwargs)
class BoundaryEventJoin(Join, BpmnTaskSpec):
"""This task is inserted before a task with boundary events."""
def __init__(self, wf_spec, name, **kwargs):
super().__init__(wf_spec, name, **kwargs)
def _check_threshold_structured(self, my_task):
split_task = my_task.find_ancestor(self.split_task)
if split_task is None:
raise WorkflowException(f'Split at {self.split_task} was not reached', task_spec=self)
main, interrupting, noninterrupting = None, [], []
for task in split_task.children:
if not isinstance(task.task_spec, BoundaryEvent):
main = task
elif task.task_spec.cancel_activity:
interrupting.append(task)
else:
noninterrupting.append(task)
if main is None:
            raise WorkflowException('No main task found', task_spec=self)
interrupt = any([t.has_state(TaskState.READY|TaskState.COMPLETED) for t in interrupting])
finished = main.has_state(TaskState.FINISHED_MASK) or interrupt
if finished:
cancel = [t for t in interrupting + noninterrupting if t.state == TaskState.WAITING]
if interrupt:
cancel += [main]
else:
cancel = []
return finished, cancel
class StartEventJoin(Join, BpmnTaskSpec):
def __init__(self, wf_spec, name, **kwargs):
super().__init__(wf_spec, name, **kwargs)
def _check_threshold_structured(self, my_task):
split_task = my_task.find_ancestor(self.split_task)
if split_task is None:
raise WorkflowException(f'Split at {self.split_task} was not reached', task_spec=self)
may_fire, waiting = False, []
for task in split_task.children:
if task.state == TaskState.COMPLETED:
may_fire = True
else:
waiting.append(task)
return may_fire, waiting
class _EndJoin(UnstructuredJoin, BpmnTaskSpec):
def _check_threshold_unstructured(self, my_task):
# Look at the tree to find all ready and waiting tasks (excluding ourself). The EndJoin waits for everyone!
for task in TaskIterator(my_task.workflow.task_tree, state=TaskState.NOT_FINISHED_MASK, end_at_spec=self.name):
if task == my_task:
continue
may_fire = False
break
else:
may_fire = True
return may_fire
def _run_hook(self, my_task):
result = super(_EndJoin, self)._run_hook(my_task)
my_task.workflow.data.update(my_task.data)
return result
| 5,282 | Python | .py | 109 | 40.825688 | 119 | 0.683677 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,058 | data_spec.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/data_spec.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import logging
from copy import deepcopy
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
logger = logging.getLogger('spiff.data')
class BpmnDataSpecification:
def __init__(self, bpmn_id, bpmn_name=None):
"""
        :param bpmn_id: the name of the variable (the BPMN ID)
        :param bpmn_name: a human readable name (the BPMN name)
"""
self.bpmn_id = bpmn_id
self.bpmn_name = bpmn_name
# In the future, we can add schemas defining the objects here.
def get(self, my_task, **kwargs):
raise NotImplementedError
def set(self, my_task, **kwargs):
raise NotImplementedError
class BpmnDataStoreSpecification(BpmnDataSpecification):
def __init__(self, bpmn_id, bpmn_name, capacity=None, is_unlimited=None):
"""
        :param bpmn_id: the name of the task data variable and data store key (the BPMN ID)
        :param bpmn_name: the task description (the BPMN name)
:param capacity: the capacity of the data store
:param is_unlimited: if true capacity is ignored
"""
self.capacity = capacity or 0
        self.is_unlimited = is_unlimited if is_unlimited is not None else True
# In the future, we can add schemas defining the objects here.
super().__init__(bpmn_id, bpmn_name)
class DataObject(BpmnDataSpecification):
"""Copy data between process variables and tasks"""
def get(self, my_task):
"""Copy a value form the workflow data to the task data."""
# Find the spec where the data object is defined and put it there
wf = my_task.workflow
while wf is not None and self.bpmn_id not in wf.spec.data_objects:
wf = wf.parent_workflow
if wf is None or self.bpmn_id not in wf.data_objects:
message = f"The data object could not be read; '{self.bpmn_id}' does not exist in the process."
raise WorkflowDataException(message, my_task, data_input=self)
my_task.data[self.bpmn_id] = deepcopy(wf.data_objects[self.bpmn_id])
        logger.info('Read workflow variable', extra=my_task.collect_log_extras({'bpmn_id': self.bpmn_id}))
def set(self, my_task):
"""Copy a value from the task data to the workflow data"""
if self.bpmn_id not in my_task.data:
message = f"A data object could not be set; '{self.bpmn_id}' not exist in the task."
raise WorkflowDataException(message, my_task, data_output=self)
wf = my_task.workflow
while wf is not None and self.bpmn_id not in wf.spec.data_objects:
wf = wf.parent_workflow
wf.data_objects[self.bpmn_id] = deepcopy(my_task.data[self.bpmn_id])
del my_task.data[self.bpmn_id]
        logger.info('Set workflow variable', extra=my_task.collect_log_extras({'bpmn_id': self.bpmn_id}))
def delete(self, my_task):
my_task.data.pop(self.bpmn_id, None)
class TaskDataReference(BpmnDataSpecification):
"""A representation of task data that can be used in a BPMN diagram"""
def get(self, my_task):
return my_task.data.get(self.bpmn_id)
def set(self, my_task, value):
my_task.data[self.bpmn_id] = value
def exists(self, my_task):
return self.bpmn_id in my_task.data | 4,044 | Python | .py | 81 | 43.481481 | 107 | 0.691976 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
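# Quick sketch of TaskDataReference in use, given some task with a data dict:
#
#     ref = TaskDataReference('quantity')
#     ref.set(task, 10)   # task.data['quantity'] = 10
#     ref.get(task)       # -> 10
#     ref.exists(task)    # -> True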
1,059 | bpmn_process_spec.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/bpmn_process_spec.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.specs.WorkflowSpec import WorkflowSpec
from SpiffWorkflow.bpmn.specs.control import _EndJoin, BpmnStartTask, SimpleBpmnTask
class BpmnProcessSpec(WorkflowSpec):
"""
This class represents the specification of a BPMN process workflow. This
specialises the standard Spiff WorkflowSpec class with a few extra methods
and attributes.
"""
def __init__(self, name=None, description=None, filename=None, svg=None):
"""
Constructor.
:param svg: This provides the SVG representation of the workflow as an
LXML node. (optional)
"""
super(BpmnProcessSpec, self).__init__(name=name, filename=filename)
self.start = BpmnStartTask(self, 'Start')
self.end = _EndJoin(self, '%s.EndJoin' % (self.name))
self.end.connect(SimpleBpmnTask(self, 'End'))
self.svg = svg
self.description = description
self.io_specification = None
self.data_objects = {}
self.data_stores = {}
self.correlation_keys = {}
| 1,874 | Python | .py | 42 | 40.071429 | 84 | 0.727422 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,060 | user_task.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/user_task.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.specs.base import TaskSpec
class UserTask(TaskSpec):
"""Task Spec for a bpmn:userTask node."""
def __init__(self, wf_spec, bpmn_id, **kwargs):
super().__init__(wf_spec, bpmn_id, **kwargs)
self.manual = True
| 1,081 | Python | .py | 24 | 42.916667 | 69 | 0.756167 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,061 | multiinstance_task.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/multiinstance_task.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from copy import deepcopy
from collections.abc import Iterable, Sequence, Mapping, MutableSequence, MutableMapping
from SpiffWorkflow.specs.base import TaskSpec
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.util.deep_merge import DeepMerge
from SpiffWorkflow.bpmn.specs.bpmn_task_spec import BpmnTaskSpec
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
class LoopTask(BpmnTaskSpec):
def _merged_children(self, my_task):
return my_task.internal_data.get('merged', [])
def _instances(self, my_task):
return filter(lambda c: c.task_spec.name == self.task_spec, my_task.children)
class StandardLoopTask(LoopTask):
def __init__(self, wf_spec, bpmn_id, task_spec, maximum, condition, test_before, **kwargs):
super().__init__(wf_spec, bpmn_id, **kwargs)
self.task_spec = task_spec
self.maximum = maximum
self.condition = condition
self.test_before = test_before
def task_info(self, my_task):
info = super().task_info(my_task)
info['iterations_completed'] = len(self._merged_children(my_task))
if self.maximum:
info['iterations_remaining'] = self.maximum - info['iterations_completed']
info['instance_map'] = dict((idx, str(t.id)) for idx, t in enumerate(self._instances(my_task)))
return info
def _update_hook(self, my_task):
super()._update_hook(my_task)
if self.test_before and self.loop_complete(my_task):
return True
else:
my_task._set_state(TaskState.STARTED)
my_task.internal_data['merged'] = []
self.create_child(my_task)
def create_child(self, my_task):
task_spec = my_task.workflow.spec.task_specs[self.task_spec]
if not task_spec.completed_event.is_connected(self.merge_child):
task_spec.completed_event.connect(self.merge_child)
child = my_task._add_child(task_spec, TaskState.WAITING)
child.triggered = True
child.internal_data['iteration'] = len(self._merged_children(my_task))
child.task_spec._update(child)
def merge_child(self, workflow, child):
my_task = child.parent
DeepMerge.merge(my_task.data, child.data)
my_task.internal_data['merged'].append(str(child.id))
if self.loop_complete(my_task):
my_task._set_state(TaskState.READY)
else:
self.create_child(my_task)
def loop_complete(self, my_task):
merged = my_task.internal_data.get('merged', [])
max_complete = self.maximum is not None and len(merged) >= self.maximum
cond_complete = self.condition is not None and my_task.workflow.script_engine.evaluate(my_task, self.condition)
return max_complete or cond_complete
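    # Configuration sketch: a loop task built as
    #
    #     StandardLoopTask(spec, 'Task_1', 'Task_1 [child]', maximum=3,
    #                      condition='counter >= 3', test_before=False)
    #
    # re-creates its child until loop_complete() is satisfied, evaluating the
    # condition against the merged task data via the workflow script engine.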
class MultiInstanceTask(LoopTask):
def __init__(self, wf_spec, bpmn_id, task_spec, cardinality=None, data_input=None,
data_output=None, input_item=None, output_item=None, condition=None,
**kwargs):
super().__init__(wf_spec, bpmn_id, **kwargs)
self.task_spec = task_spec
self.cardinality = cardinality
self.data_input = data_input
self.data_output = data_output
self.input_item = input_item
self.output_item = output_item
self.condition = condition
def task_info(self, my_task):
info = super().task_info(my_task)
info.update({
'completed': [],
'running': [],
'future': my_task.internal_data.get('remaining', []),
'instance_map': {},
})
for task in self._instances(my_task):
key_or_index = task.internal_data.get('key_or_index')
value = task.internal_data.get('item') if key_or_index is None else key_or_index
if task.has_state(TaskState.FINISHED_MASK):
info['completed'].append(value)
else:
info['running'].append(value)
try:
info['instance_map'][value] = str(task.id)
except TypeError:
info['instance_map'][str(value)] = str(task.id)
return info
def merge_child(self, workflow, child):
"""This merges child data into this task's data."""
my_task = child.parent
if self.data_output is not None and self.output_item is not None:
if not self.output_item.exists(child):
self.raise_data_exception("Expected an output item", child)
item = self.output_item.get(child)
key_or_index = child.internal_data.get('key_or_index')
data_output = self.data_output.get(my_task)
data_input = self.data_input.get(my_task) if self.data_input is not None else None
if key_or_index is not None and (isinstance(data_output, Mapping) or data_input is data_output):
data_output[key_or_index] = item
else:
data_output.append(item)
else:
DeepMerge.merge(my_task.data, child.data)
my_task.internal_data['merged'].append(str(child.id))
def create_child(self, my_task, item, key_or_index=None):
task_spec = my_task.workflow.spec.task_specs[self.task_spec]
if not task_spec.completed_event.is_connected(self.merge_child):
task_spec.completed_event.connect(self.merge_child)
child = my_task._add_child(task_spec, TaskState.WAITING)
child.triggered = True
if self.input_item is not None and self.input_item.exists(my_task):
raise WorkflowDataException(f'Multiinstance input item {self.input_item.bpmn_id} already exists.', my_task)
if self.output_item is not None and self.output_item.exists(my_task):
raise WorkflowDataException(f'Multiinstance output item {self.output_item.bpmn_id} already exists.', my_task)
if self.input_item is not None:
self.input_item.set(child, deepcopy(item))
if key_or_index is not None:
child.internal_data['key_or_index'] = key_or_index
else:
child.internal_data['item'] = item
child.task_spec._update(child)
def check_completion_condition(self, my_task):
merged = my_task.internal_data.get('merged', [])
if len(merged) > 0 and self.condition is not None:
last_child = [c for c in my_task.children if str(c.id) == merged[-1]][0]
return my_task.workflow.script_engine.evaluate(last_child, self.condition)
def init_data_output_with_input_data(self, my_task, input_data):
if not self.data_output.exists(my_task):
if isinstance(input_data, (MutableMapping, MutableSequence)):
# We can use the same class if it implements __setitem__
self.data_output.set(my_task, input_data.__class__())
elif isinstance(input_data, Mapping):
# If we have a map without __setitem__, use a dict
self.data_output.set(my_task, dict())
else:
# For all other types, we'll append to a list
self.data_output.set(my_task, list())
else:
output_data = self.data_output.get(my_task)
if not isinstance(output_data, (MutableSequence, MutableMapping)):
self.raise_data_exception("Only a mutable map (dict) or sequence (list) can be used for output", my_task)
if input_data is not output_data and not isinstance(output_data, Mapping) and len(output_data) > 0:
self.raise_data_exception(
"If the input is not being updated in place, the output must be empty or it must be a map (dict)", my_task)
def init_data_output_with_cardinality(self, my_task):
if not self.data_output.exists(my_task):
self.data_output.set(my_task, list())
else:
data_output = self.data_output.get(my_task)
            if not isinstance(data_output, MutableMapping) and len(data_output) > 0:
self.raise_data_exception(
"If loop cardinality is specificied, the output must be a map (dict) or empty sequence (list)",
my_task
)
def raise_data_exception(self, message, my_task):
raise WorkflowDataException(message, my_task, data_input=self.data_input, data_output=self.data_output)
class SequentialMultiInstanceTask(MultiInstanceTask):
def _update_hook(self, my_task):
super()._update_hook(my_task)
my_task.internal_data['merged'] = []
if self.data_input is not None:
input_data = self.data_input.get(my_task)
my_task.internal_data['remaining'] = self.init_remaining_items(my_task)
if self.data_output is not None:
self.init_data_output_with_input_data(my_task, input_data)
else:
my_task.internal_data['cardinality'] = my_task.workflow.script_engine.evaluate(my_task, self.cardinality)
my_task.internal_data['current'] = 0
if self.data_output is not None:
self.init_data_output_with_cardinality(my_task)
self.add_next_child(my_task)
if not self.children_complete(my_task):
my_task._set_state(TaskState.STARTED)
else:
return True
def task_info(self, my_task):
info = super().task_info(my_task)
cardinality = my_task.internal_data.get('cardinality')
if cardinality is not None:
info['future'] = [v for v in range(len(info['completed']) + len(info['running']), cardinality)]
return info
def add_next_child(self, my_task):
if self.data_input is not None:
key_or_index, item = self.get_next_input_item(my_task)
else:
key_or_index, item = self.get_next_index(my_task)
if item is not None:
self.create_child(my_task, item, key_or_index)
def get_next_input_item(self, my_task):
input_data = self.data_input.get(my_task)
remaining = my_task.internal_data.get('remaining')
if len(remaining) > 0:
if isinstance(input_data, (Mapping, Sequence)):
# In this case, we want to preserve a key or index
                # We definitely need it if the output is a map, or if we're updating a sequence in place
key_or_index, item = remaining[0], input_data[remaining[0]]
else:
key_or_index, item = None, remaining[0]
my_task.internal_data['remaining'] = remaining[1:]
return key_or_index, item
else:
return None, None
def get_next_index(self, my_task):
current = my_task.internal_data.get('current')
cardinality = my_task.internal_data.get('cardinality')
if current < cardinality:
# If using loop cardinality, use the index as the "item"
my_task.internal_data['current'] = current + 1
return None, current
else:
return None, None
def merge_child(self, workflow, child):
super().merge_child(workflow, child)
my_task = child.parent
if self.children_complete(my_task) or self.check_completion_condition(my_task):
my_task._set_state(TaskState.READY)
else:
self.add_next_child(my_task)
def init_remaining_items(self, my_task):
if not self.data_input.exists(my_task):
self.raise_data_exception("Missing data input for multiinstance task", my_task)
input_data = self.data_input.get(my_task)
# This is internal bookkeeping, so we know where we are; we get the actual items when we create the task
if isinstance(input_data, Sequence):
# For lists, keep track of the index
remaining = [idx for idx in range(len(input_data))]
elif isinstance(input_data, Mapping):
# For dicts, use the keys
remaining = [key for key in input_data]
elif isinstance(input_data, Iterable):
# Otherwise, just copy the objects as a last resort
remaining = [val for val in input_data]
else:
self.raise_data_exception("Multiinstance data input must be iterable", my_task)
return remaining
def children_complete(self, my_task):
if self.data_input is not None:
return len(my_task.internal_data.get('remaining', [])) == 0
else:
return my_task.internal_data.get('current', 0) == my_task.internal_data.get('cardinality', 0)
class ParallelMultiInstanceTask(MultiInstanceTask):
def _update_hook(self, my_task):
super()._update_hook(my_task)
my_task.internal_data['merged'] = []
self.create_children(my_task)
        # If the input collection or cardinality is 0, there won't be any children to cause the task to become ready
if not self.children_complete(my_task):
my_task._set_state(TaskState.STARTED)
else:
return True
def merge_child(self, workflow, child):
super().merge_child(workflow, child)
my_task = child.parent
if self.check_completion_condition(my_task):
for child in self._instances(my_task):
child.cancel()
my_task._set_state(TaskState.READY)
elif self.children_complete(my_task):
my_task._set_state(TaskState.READY)
def create_children(self, my_task):
if self.data_input is not None:
data_input = self.data_input.get(my_task)
# We have to preserve the key or index for maps/sequences, in case we're updating in place, or the output is a mapping
if isinstance(data_input, Mapping):
children = data_input.items()
elif isinstance(data_input, Sequence):
children = enumerate(data_input)
else:
# We can use other iterables as inputs, but key or index isn't meaningful
children = ((None, item) for item in data_input)
else:
# For tasks specifying the cardinality, use the index as the "item"
cardinality = my_task.workflow.script_engine.evaluate(my_task, self.cardinality)
children = ((idx, idx) for idx in range(cardinality))
if self.data_output is not None:
if self.data_input is not None:
self.init_data_output_with_input_data(my_task, self.data_input.get(my_task))
else:
self.init_data_output_with_cardinality(my_task)
for key_or_index, item in children:
self.create_child(my_task, item, key_or_index)
def children_complete(self, my_task):
return all(c.state == TaskState.COMPLETED for c in self._instances(my_task)) | 15,654 | Python | .py | 301 | 41.770764 | 130 | 0.635443 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,062 | none_task.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/none_task.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.specs.base import TaskSpec
class NoneTask(TaskSpec):
"""Task Spec for a bpmn:task node."""
def __init__(self, wf_spec, bpmn_id, **kwargs):
super().__init__(wf_spec, bpmn_id, **kwargs)
self.manual = True
| 1,078 | Python | .py | 24 | 42.75 | 69 | 0.755238 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,063 | bpmn_spec_mixin.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/bpmn_spec_mixin.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from ..bpmn_task_spec import BpmnTaskSpec
class BpmnSpecMixin(BpmnTaskSpec):
def __init__(self, wf_spec, bpmn_id, **kwargs):
super().__init__(wf_spec, bpmn_id, **kwargs)
self.bpmn_id = bpmn_id
self.bpmn_name = kwargs.get('bpmn_name')
| 1,094 | Python | .py | 24 | 43.25 | 69 | 0.753283 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,064 | inclusive_gateway.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/inclusive_gateway.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.exceptions import WorkflowTaskException
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.specs.MultiChoice import MultiChoice
from .unstructured_join import UnstructuredJoin
class InclusiveGateway(MultiChoice, UnstructuredJoin):
"""
Task Spec for a bpmn:parallelGateway node. From the specification of BPMN
(http://www.omg.org/spec/BPMN/2.0/PDF - document number:formal/2011-01-03):
The Inclusive Gateway is activated if
* At least one incoming Sequence Flow has at least one token and
* For every directed path formed by sequence flow that
* starts with a Sequence Flow f of the diagram that has a token,
* ends with an incoming Sequence Flow of the inclusive gateway that has
no token, and
* does not visit the Inclusive Gateway.
* There is also a directed path formed by Sequence Flow that
* starts with f,
* ends with an incoming Sequence Flow of the inclusive gateway that has
a token, and
* does not visit the Inclusive Gateway.
Upon execution, a token is consumed from each incoming Sequence Flow that
has a token. A token will be produced on some of the outgoing Sequence
Flows.
TODO: Not implemented: At the moment, we can't handle having more than one
token at a single incoming sequence
TODO: At the moment only converging Inclusive Gateways are supported.
In order to determine the outgoing Sequence Flows that receive a token, all
conditions on the outgoing Sequence Flows are evaluated. The evaluation
does not have to respect a certain order.
For every condition which evaluates to true, a token MUST be passed on the
respective Sequence Flow.
If and only if none of the conditions evaluates to true, the token is
passed on the default Sequence Flow.
In case all conditions evaluate to false and a default flow has not been
specified, the Inclusive Gateway throws an exception.
"""
def test(self):
MultiChoice.test(self)
UnstructuredJoin.test(self)
def _check_threshold_unstructured(self, my_task):
# Look at the tree to find all places where this task is used and unfinished tasks that may be ancestors
# If there are any, we may have to check whether this gateway is reachable from any of them.
tasks, sources = [], []
for task in my_task.workflow.get_tasks(end_at_spec=self.name):
if task.task_spec == self:
tasks.append(task)
elif task.has_state(TaskState.READY|TaskState.WAITING):
sources.append(task.task_spec)
# Look up which tasks have parents completed.
completed_inputs = set([ task.parent.task_spec for task in tasks if task.parent.state == TaskState.COMPLETED ])
# If any parents of this join have not been finished, this task must wait.
# A parent spec only has to be completed once, even it is on multiple paths
tasks_waiting = False
for task in tasks:
if task.parent.has_state(TaskState.DEFINITE_MASK) and task.parent.task_spec not in completed_inputs:
tasks_waiting = True
break
if tasks_waiting:
complete = False
else:
# Handle the case where there are paths from active tasks that must go through waiting inputs
waiting_inputs = [i for i in self.inputs if i not in completed_inputs]
checked = []
# This will go back through a task spec's ancestors and return the source, if applicable
def check(spec):
checked.append(spec)
for parent in spec.inputs:
if parent not in checked:
return parent if parent in sources else check(parent)
# Start with the completed inputs and recurse back through its ancestors, removing any waiting tasks that
# could reach one of them.
for spec in completed_inputs:
source = check(spec)
if source is not None:
sources.remove(source)
# Now check the rest of the waiting inputs and see if they can be reached from any of the remaining tasks
unfinished_paths = []
for spec in waiting_inputs:
if check(spec) is not None:
unfinished_paths.append(spec)
break
complete = len(unfinished_paths) == 0
return complete
def _run_hook(self, my_task):
matches, defaults = self._get_matching_outputs(my_task)
if len(matches + defaults) == 0:
raise WorkflowTaskException('No conditions satisfied on gateway', task=my_task)
my_task._sync_children(matches or defaults, TaskState.FUTURE)
for child in my_task.children:
child.task_spec._predict(child, mask=TaskState.FUTURE|TaskState.PREDICTED_MASK)
return True
| 5,840 | Python | .py | 109 | 45.045872 | 119 | 0.690489 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,065 | script_task.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/script_task.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.specs.base import TaskSpec
class ScriptEngineTask(TaskSpec):
"""Task Spec for a bpmn:scriptTask node"""
def _execute(self, task):
"""Please override for specific Implementations, see ScriptTask below for an example"""
pass
def _run_hook(self, task):
return self._execute(task)
class ScriptTask(ScriptEngineTask):
def __init__(self, wf_spec, bpmn_id, script, **kwargs):
"""
Constructor.
:param script: the script that must be executed by the script engine.
"""
super(ScriptTask, self).__init__(wf_spec, bpmn_id, **kwargs)
self.script = script
def _execute(self, task):
return task.workflow.script_engine.execute(task, self.script)
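# Usage sketch (assumes a workflow spec and the default Python script engine):
#
#     spec = ScriptTask(wf_spec, 'Script_1', script='total = price * quantity')
#
# When the task runs, the script executes against the task's data, so
# task.data['total'] is computed from task.data['price'] and
# task.data['quantity'].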
| 1,587 | Python | .py | 36 | 40.027778 | 95 | 0.730694 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,066 | service_task.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/service_task.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .script_task import ScriptEngineTask
class ServiceTask(ScriptEngineTask):
"""
Task Spec for a bpmn:serviceTask node.
"""
def __init__(self, wf_spec, bpmn_id, **kwargs):
super(ServiceTask, self).__init__(wf_spec, bpmn_id, **kwargs)
| 1,071 | Python | .py | 25 | 40.68 | 69 | 0.758886 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,067 | subworkflow_task.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/subworkflow_task.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from copy import deepcopy
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.specs.base import TaskSpec
from SpiffWorkflow.bpmn.exceptions import WorkflowDataException
class SubWorkflowTask(TaskSpec):
"""
Task Spec for a bpmn node containing a subworkflow.
"""
def __init__(self, wf_spec, bpmn_id, subworkflow_spec, transaction=False, **kwargs):
"""
Constructor.
        :param subworkflow_spec: the name of the workflow spec to run as the subprocess
        :param transaction: True if the subprocess should run as a transaction
"""
super(SubWorkflowTask, self).__init__(wf_spec, bpmn_id, **kwargs)
self.spec = subworkflow_spec
self.transaction = transaction
def _on_subworkflow_completed(self, subworkflow, my_task):
self.update_data(my_task, subworkflow)
# I don't like manually moving back to ready, but don't want to run it
# Ideally, update hook would create the subprocess and return True, _run would start the subprocess and
# return None (so that the state would transition to started), and the completed event for this task
# could be used to run post-completed actions automatically.
# However, until I align the events with state transitions, I don't want to encourage external use of
# callback methods (though completed event is not going to change).
my_task._set_state(TaskState.READY)
def _update_hook(self, my_task):
subprocess = my_task.workflow.top_workflow.subprocesses.get(my_task.id)
if subprocess is None:
super()._update_hook(my_task)
self.start_workflow(my_task)
my_task._set_state(TaskState.STARTED)
else:
return subprocess.completed
def _on_cancel(self, my_task):
subworkflow = my_task.workflow.top_workflow.get_subprocess(my_task)
if subworkflow is not None:
subworkflow.cancel()
def copy_data(self, my_task, subworkflow):
start = subworkflow.get_next_task(spec_name='Start')
start.set_data(**deepcopy(my_task.data))
def update_data(self, my_task, subworkflow):
my_task.data = deepcopy(subworkflow.last_task.data)
def start_workflow(self, my_task):
subworkflow = my_task.workflow.top_workflow.create_subprocess(my_task, self.spec)
subworkflow.completed_event.connect(self._on_subworkflow_completed, my_task)
self.copy_data(my_task, subworkflow)
start = subworkflow.get_next_task(spec_name='Start')
start.run()
class CallActivity(SubWorkflowTask):
def __init__(self, wf_spec, bpmn_id, subworkflow_spec, **kwargs):
super(CallActivity, self).__init__(wf_spec, bpmn_id, subworkflow_spec, False, **kwargs)
def copy_data(self, my_task, subworkflow):
start = subworkflow.get_next_task(subworkflow.task_tree, skip_subprocesses=True, spec_name='Start')
if subworkflow.spec.io_specification is None or len(subworkflow.spec.io_specification.data_inputs) == 0:
# Copy all task data into start task if no inputs specified
start.set_data(**my_task.data)
else:
# Otherwise copy only task data with the specified names
for var in subworkflow.spec.io_specification.data_inputs:
if not var.exists(my_task):
raise WorkflowDataException(
"You are missing a required Data Input for a call activity.",
task=my_task,
data_input=var,
)
var.set(start, var.get(my_task))
def update_data(self, my_task, subworkflow):
if subworkflow.spec.io_specification is None or len(subworkflow.spec.io_specification.data_outputs) == 0:
# Copy all workflow data if no outputs are specified
my_task.data = deepcopy(subworkflow.last_task.data)
else:
end = subworkflow.get_next_task(subworkflow.task_tree, skip_subprocesses=True, spec_name='End')
# Otherwise only copy data with the specified names
for var in subworkflow.spec.io_specification.data_outputs:
if var.bpmn_id not in end.data:
raise WorkflowDataException(
"The Data Output was not available in the subprocess output.",
task=my_task,
data_output=var,
)
var.set(my_task, var.get(end))
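# Data-mapping sketch for CallActivity: when the called process declares an IO
# specification, only the named references cross the boundary, e.g.
#
#     io_specification.data_inputs  = [TaskDataReference('order_id')]
#     io_specification.data_outputs = [TaskDataReference('status')]
#
# copies order_id into the subprocess start task and status back out; with no
# IO specification, the whole data dictionary is copied in both directions.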
class TransactionSubprocess(SubWorkflowTask):
def __init__(self, wf_spec, bpmn_id, subworkflow_spec, **kwargs):
super(TransactionSubprocess, self).__init__(wf_spec, bpmn_id, subworkflow_spec, True, **kwargs)
| 5,535 | Python | .py | 103 | 44.757282 | 113 | 0.674737 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,068 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/__init__.py | # Copyright (C) 2012 Matthew Hampton, 2024 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .bpmn_spec_mixin import BpmnSpecMixin
from .user_task import UserTask as UserTaskMixin
from .manual_task import ManualTask as ManualTaskMixin
from .none_task import NoneTask as NoneTaskMixin
from .script_task import ScriptTask as ScriptTaskMixin
from .service_task import ServiceTask as ServiceTaskMixin
from .parallel_gateway import ParallelGateway as ParallelGatewayMixin
from .exclusive_gateway import ExclusiveGateway as ExclusiveGatewayMixin
from .inclusive_gateway import InclusiveGateway as InclusiveGatewayMixin
from .multiinstance_task import (
StandardLoopTask as StandardLoopTaskMixin,
ParallelMultiInstanceTask as ParallelMultiInstanceTaskMixin,
SequentialMultiInstanceTask as SequentialMultiInstanceTaskMixin,
)
from .subworkflow_task import (
SubWorkflowTask as SubWorkflowTaskMixin,
CallActivity as CallActivityMixin,
TransactionSubprocess as TransactionSubprocessMixin,
)
from .events.start_event import StartEvent as StartEventMixin
from .events.end_event import EndEvent as EndEventMixin
from .events.intermediate_event import (
IntermediateCatchEvent as IntermediateCatchEventMixin,
IntermediateThrowEvent as IntermediateThrowEventMixin,
BoundaryEvent as BoundaryEventMixin,
EventBasedGateway as EventBasedGatewayMixin,
SendTask as SendTaskMixin,
ReceiveTask as ReceiveTaskMixin,
) | 2,183 | Python | .py | 47 | 44.404255 | 72 | 0.833255 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,069 | unstructured_join.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/unstructured_join.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from copy import deepcopy
from SpiffWorkflow.util.task import TaskState, TaskIterator
from SpiffWorkflow.specs.Join import Join
class UnstructuredJoin(Join):
"""
A helper subclass of Join that makes it work in a slightly friendlier way
for the BPMN style threading
"""
def _update_hook(self, my_task):
may_fire = self._check_threshold_unstructured(my_task)
other_tasks = [t for t in my_task.workflow.tasks.values()
if t.task_spec == self and t != my_task and t.state is TaskState.WAITING]
for task in other_tasks:
            # By cancelling other waiting tasks immediately, we can prevent them from being updated repeatedly and pointlessly
task.cancel()
if not may_fire:
# Only the most recent instance of the spec needs to wait.
my_task._set_state(TaskState.WAITING)
else:
# Only copy the data to the task that will proceed
my_task._inherit_data()
return may_fire
def _run_hook(self, my_task):
other_tasks = filter(
lambda t: t.task_spec == self and t.has_state(TaskState.FINISHED_MASK) and not my_task.is_descendant_of(t),
my_task.workflow.tasks.values()
)
for task in sorted(other_tasks, key=lambda t: t.last_state_change):
            # By inheriting directly from parent tasks, we can avoid copying previously merged data
my_task.set_data(**deepcopy(task.parent.data))
# This condition only applies when a workflow is reset inside a parallel branch.
# If reset to a branch that was originally cancelled, all the descendants of the previously completed branch will still
# appear in the tree, potentially corrupting the structure and data.
if task.has_state(TaskState.COMPLETED):
task._drop_children(force=True)
# My task is not finished, so won't be included above.
my_task._inherit_data()
return True
| 2,838 | Python | .py | 56 | 43.857143 | 131 | 0.699928 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,070 | exclusive_gateway.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/exclusive_gateway.py | # Copyright (C) 2012 Matthew Hampton
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.specs.ExclusiveChoice import ExclusiveChoice
from SpiffWorkflow.specs.MultiChoice import MultiChoice
class ExclusiveGateway(ExclusiveChoice):
"""
Task Spec for a bpmn:exclusiveGateway node.
"""
def test(self):
# Bypass the check for no default output -- this is not required in BPMN
MultiChoice.test(self)
| 1,172 | Python | .py | 27 | 41.037037 | 80 | 0.778947 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,071 | parallel_gateway.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/parallel_gateway.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.util.task import TaskState
from .unstructured_join import UnstructuredJoin
class ParallelGateway(UnstructuredJoin):
"""
Task Spec for a bpmn:parallelGateway node. From the specification of BPMN
(http://www.omg.org/spec/BPMN/2.0/PDF - document number:formal/2011-01-03):
The Parallel Gateway is activated if there is at least one token on
each incoming Sequence Flow.
The Parallel Gateway consumes exactly one token from each incoming
Sequence Flow and produces exactly one token at each outgoing
Sequence Flow.
TODO: Not implemented:
If there are excess tokens at an incoming Sequence Flow, these tokens
remain at this Sequence Flow after execution of the Gateway.
Essentially, this means that we must wait until we have a completed parent
task on each incoming sequence.
"""
def _check_threshold_unstructured(self, my_task):
tasks = my_task.workflow.get_tasks(spec_name=self.name)
waiting_inputs = set(self.inputs)
def remove_ancestor(task):
# This traces a tasks parents until it finds a spec in the list of sources
if task.task_spec in waiting_inputs:
waiting_inputs.remove(task.task_spec)
elif task.parent is not None:
remove_ancestor(task.parent)
for task in tasks:
# Handle the case where the parallel gateway is part of a loop.
if task.is_descendant_of(my_task):
# This is the first iteration; we should not wait on this task, because it will not be reached
# until after this join completes
remove_ancestor(task)
elif my_task.is_descendant_of(task):
                # This is a subsequent iteration; we need to ignore the parents of previous iterations
continue
elif task.parent.state == TaskState.COMPLETED and task.parent.task_spec in waiting_inputs:
waiting_inputs.remove(task.parent.task_spec)
return len(waiting_inputs) == 0
| 2,926 | Python | .py | 56 | 44.892857 | 110 | 0.708188 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,072 | manual_task.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/manual_task.py | # Copyright (C) 2012 Matthew Hampton
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.specs.base import TaskSpec
class ManualTask(TaskSpec):
"""Task Spec for a bpmn:manualTask node."""
def __init__(self, wf_spec, bpmn_id, **kwargs):
super().__init__(wf_spec, bpmn_id, **kwargs)
self.manual = True
| 1,068 | Python | .py | 24 | 42.333333 | 69 | 0.755769 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,073 | intermediate_event.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/events/intermediate_event.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.util.task import TaskState
from .event_types import ThrowingEvent, CatchingEvent
class SendTask(ThrowingEvent):
pass
class ReceiveTask(CatchingEvent):
pass
class IntermediateCatchEvent(CatchingEvent):
pass
class IntermediateThrowEvent(ThrowingEvent):
pass
class BoundaryEvent(CatchingEvent):
"""Task Spec for a bpmn:boundaryEvent node."""
def __init__(self, wf_spec, bpmn_id, event_definition, cancel_activity, **kwargs):
"""
Constructor.
:param cancel_activity: True if this is a Cancelling boundary event.
"""
super(BoundaryEvent, self).__init__(wf_spec, bpmn_id, event_definition, **kwargs)
self.cancel_activity = cancel_activity
def catches(self, my_task, event):
# Boundary events should only be caught while waiting
return my_task.state == TaskState.WAITING and super().catches(my_task, event)
class EventBasedGateway(CatchingEvent):
def _predict_hook(self, my_task):
my_task._sync_children(self.outputs, state=TaskState.MAYBE)
def _on_ready_hook(self, my_task):
for child in my_task.children:
if not child.internal_data.get('event_fired'):
child.cancel()
| 2,068 | Python | .py | 47 | 39.617021 | 89 | 0.739282 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,074 | start_event.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/events/start_event.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.util.task import TaskState
from .event_types import CatchingEvent
class StartEvent(CatchingEvent):
"""Task Spec for a bpmn:startEvent node with an optional event definition."""
def catch(self, my_task, event):
# We might need to revisit a start event after it completes or
        # if it got cancelled, so we'll still catch messages even if we're finished
if my_task.state == TaskState.COMPLETED or my_task.state == TaskState.CANCELLED:
my_task.workflow.reset_from_task_id(my_task.id)
super(StartEvent, self).catch(my_task, event)
| 1,426 | Python | .py | 28 | 47.928571 | 88 | 0.761836 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,075 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/events/__init__.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA | 805 | Python | .py | 18 | 43.777778 | 69 | 0.786802 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,076 | event_types.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/events/event_types.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import time
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.specs.base import TaskSpec
from SpiffWorkflow.bpmn.specs.event_definitions.simple import NoneEventDefinition
from SpiffWorkflow.bpmn.specs.event_definitions.timer import CycleTimerEventDefinition
class CatchingEvent(TaskSpec):
"""Base Task Spec for Catching Event nodes."""
def __init__(self, wf_spec, bpmn_id, event_definition, **kwargs):
"""
Constructor.
:param event_definition: the EventDefinition that we must wait for.
"""
super().__init__(wf_spec, bpmn_id, **kwargs)
self.event_definition = event_definition
def catches(self, my_task, event):
return my_task.task_spec.event_definition.catches(my_task, event)
def catch(self, my_task, event):
"""
Catch is called by the workflow when the task has matched an event
definition, at which point we can update our task's state.
"""
self.event_definition.catch(my_task, event)
my_task._set_state(TaskState.WAITING)
def _update_hook(self, my_task):
super()._update_hook(my_task)
        # None events don't propagate, so as soon as we're ready, we fire our event
if isinstance(self.event_definition, NoneEventDefinition):
my_task._set_internal_data(event_fired=True)
if self.event_definition.has_fired(my_task):
return True
elif my_task.state != TaskState.WAITING:
my_task._set_state(TaskState.WAITING)
self.event_definition.update_task(my_task)
def _run_hook(self, my_task):
self.event_definition.update_task_data(my_task)
self.event_definition.reset(my_task)
return super()._run_hook(my_task)
def _predict_hook(self, my_task):
if not isinstance(self.event_definition, CycleTimerEventDefinition):
super()._predict_hook(my_task)
class ThrowingEvent(TaskSpec):
"""Base Task Spec for Throwing Event nodes."""
def __init__(self, wf_spec, bpmn_id, event_definition, **kwargs):
"""
Constructor.
:param event_definition: the EventDefinition to be thrown.
"""
super().__init__(wf_spec, bpmn_id, **kwargs)
self.event_definition = event_definition
def _run_hook(self, my_task):
super()._run_hook(my_task)
self.event_definition.throw(my_task)
return True
| 3,254 | Python | .py | 71 | 39.71831 | 86 | 0.701643 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,077 | end_event.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/mixins/events/end_event.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.bpmn.specs.event_definitions.simple import TerminateEventDefinition, CancelEventDefinition
from .event_types import ThrowingEvent
class EndEvent(ThrowingEvent):
"""
Task Spec for a bpmn:endEvent node.
From the specification of BPMN (http://www.omg.org/spec/BPMN/2.0/PDF -
document number:formal/2011-01-03): For a "terminate" End Event, the
Process is abnormally terminated - no other ongoing Process instances are
affected.
For all other End Events, the behavior associated with the Event type is
performed, e.g., the associated Message is sent for a Message End Event,
the associated signal is sent for a Signal End Event, and so on. The
Process instance is then completed, if and only if the following two
conditions hold:
* All start nodes of the Process have been visited. More precisely, all
Start Events have been triggered, and for all starting Event-Based
Gateways, one of the associated Events has been triggered.
* There is no token remaining within the Process instance.
"""
def _on_complete_hook(self, my_task):
super(EndEvent, self)._on_complete_hook(my_task)
if isinstance(self.event_definition, TerminateEventDefinition):
# We are finished. Set the workflow data and cancel all tasks
for task in my_task.workflow.get_tasks(state=TaskState.NOT_FINISHED_MASK):
task.cancel()
my_task.workflow._mark_complete(my_task)
elif isinstance(self.event_definition, CancelEventDefinition):
my_task.workflow.cancel()
| 2,485 | Python | .py | 47 | 48.212766 | 109 | 0.751029 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,078 | message.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/message.py | from copy import deepcopy
from SpiffWorkflow.bpmn.util import BpmnEvent, PendingBpmnEvent
from SpiffWorkflow.exceptions import WorkflowException
from .base import EventDefinition
class CorrelationProperty:
"""Rules for generating a correlation key when a message is sent or received."""
def __init__(self, name, retrieval_expression, correlation_keys):
self.name = name # This is the property name
self.retrieval_expression = retrieval_expression # This is how it's generated
self.correlation_keys = correlation_keys # These are the keys it's used by
class MessageEventDefinition(EventDefinition):
"""The default message event."""
def __init__(self, name, correlation_properties=None, **kwargs):
super().__init__(name, **kwargs)
self.correlation_properties = correlation_properties or []
def catches(self, my_task, event):
correlations = my_task.workflow.correlations
if len(self.correlation_properties) == 0 or not correlations:
            # If we are not checking correlations (e.g. in many older workflows), or this is the first message, this is True
correlated = True
else:
# Otherwise we have to check to make sure any existing keys match
            correlated = all([event.correlations.get(key) == correlations.get(key) for key in event.correlations])
return self == event.event_definition and correlated
def catch(self, my_task, event=None):
self.update_internal_data(my_task, event)
super().catch(my_task, event)
def throw(self, my_task):
payload = deepcopy(my_task.data)
correlations = self.get_correlations(my_task, payload)
my_task.workflow.correlations.update(correlations)
event = BpmnEvent(self, payload=payload, correlations=correlations)
my_task.workflow.top_workflow.catch(event)
def update_internal_data(self, my_task, event):
my_task.internal_data[event.event_definition.name] = event.payload
def update_task_data(self, my_task):
# I've added this method so that different message implementations can handle
# copying their message data into the task
payload = my_task.internal_data.get(self.name)
if payload is not None:
my_task.set_data(**payload)
def get_correlations(self, my_task, payload):
return self.calculate_correlations(
my_task.workflow.script_engine,
self.correlation_properties,
payload
)
def calculate_correlations(self, script_engine, cp, ctx):
correlations = {}
for prop in cp:
value = script_engine.environment.evaluate(prop.retrieval_expression, ctx)
for key in prop.correlation_keys:
if key not in correlations:
correlations[key] = {}
try:
correlations[key][prop.name] = value
except WorkflowException:
# Just ignore missing keys. The dictionaries have to match exactly
pass
if len(prop.correlation_keys) == 0:
if self.name not in correlations:
correlations[self.name] = {}
correlations[self.name][prop.name] = value
return correlations
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, self.correlation_properties)
def __eq__(self, other):
return super().__eq__(other) and self.name == other.name
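# --- Editor's note: an illustrative sketch, not part of the original module. ---
# It shows how calculate_correlations derives correlation keys from a message payload.
# The stub classes below stand in for SpiffWorkflow's real script engine (only the
# environment.evaluate call used above is emulated) so the example stays self-contained.
if __name__ == '__main__':  # pragma: no cover
    class _StubEnvironment:
        def evaluate(self, expression, context):
            # The real environment evaluates the retrieval expression against the payload
            return eval(expression, {}, context)
    class _StubEngine:
        environment = _StubEnvironment()
    order_id = CorrelationProperty('order_id', 'order["id"]', ['order_key'])
    message = MessageEventDefinition('order_placed', [order_id])
    payload = {'order': {'id': 42}}
    correlations = message.calculate_correlations(_StubEngine(), message.correlation_properties, payload)
    print(correlations)  # {'order_key': {'order_id': 42}}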
| 3,607 | Python | .py | 68 | 43.161765 | 123 | 0.652001 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,079 | item_aware_event.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/item_aware_event.py | from copy import deepcopy
from SpiffWorkflow.bpmn.util import BpmnEvent, PendingBpmnEvent
from .base import EventDefinition
class ItemAwareEventDefinition(EventDefinition):
def __init__(self, name, description=None):
super().__init__(name, description)
def catch(self, my_task, event=None):
my_task.internal_data[self.name] = event.payload
super().catch(my_task, event)
def throw(self, my_task):
payload = deepcopy(my_task.data)
event = BpmnEvent(self, payload=payload)
my_task.workflow.top_workflow.catch(event)
def update_task_data(self, my_task):
payload = my_task.internal_data.get(self.name)
if payload is not None:
my_task.set_data(**payload)
def reset(self, my_task):
my_task.internal_data.pop(self.name, None)
super().reset(my_task)
class CodeEventDefinition(ItemAwareEventDefinition):
def __init__(self, name, code=None, **kwargs):
super().__init__(name, **kwargs)
self.code = code
def throw(self, my_task):
payload = deepcopy(my_task.data)
event = BpmnEvent(self, payload=payload, target=my_task.workflow)
my_task.workflow.top_workflow.catch(event)
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, self.code)
def __eq__(self, other):
return super().__eq__(other) and self.code in [None, other.code]
class ErrorEventDefinition(CodeEventDefinition):
"""
Error events can occur only in subprocesses and as subprocess boundary events. They're
matched by code rather than name.
"""
pass
class EscalationEventDefinition(CodeEventDefinition):
"""
Escalation events have names, though they don't seem to be used for anything. Instead
the spec says that the escalation code should be matched.
"""
pass
class SignalEventDefinition(ItemAwareEventDefinition):
"""The SignalEventDefinition is the implementation of event definition used for Signal Events."""
def __init__(self, name, **kwargs):
super().__init__(name, **kwargs)
def __eq__(self, other):
return super().__eq__(other) and self.name == other.name
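# --- Editor's note: an illustrative sketch, not part of the original module. ---
# It demonstrates the wildcard matching implemented by CodeEventDefinition.__eq__:
# a catching definition without a code matches any thrown code, while two coded
# definitions must agree. Note the comparison is directional (catcher == thrown).
if __name__ == '__main__':  # pragma: no cover
    catch_all = ErrorEventDefinition('error')
    not_found = ErrorEventDefinition('error', code='404')
    assert catch_all == not_found  # code=None acts as a wildcard on the catching side
    assert ErrorEventDefinition('error', code='500') != not_found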
| 2,220 | Python | .py | 50 | 37.96 | 101 | 0.685116 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,080 | conditional.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/conditional.py | from .base import EventDefinition
class ConditionalEventDefinition(EventDefinition):
"""Conditional events can be used to trigger flows based on the state of the workflow"""
def __init__(self, expression, **kwargs):
super().__init__(**kwargs)
self.expression = expression
def has_fired(self, my_task):
my_task._set_internal_data(
has_fired=my_task.workflow.script_engine.evaluate(my_task, self.expression, external_context=my_task.workflow.data_objects)
)
return my_task._get_internal_data('has_fired', False)
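# --- Editor's note: an illustrative sketch, not part of the original module. ---
# It shows the expression evaluation performed by has_fired. The stub task, engine,
# and workflow below are assumptions made only to keep the example self-contained;
# in real use the task comes from a running BpmnWorkflow.
if __name__ == '__main__':  # pragma: no cover
    class _StubEngine:
        def evaluate(self, task, expression, external_context=None):
            return eval(expression, {}, dict(external_context or {}))
    class _StubWorkflow:
        script_engine = _StubEngine()
        data_objects = {'threshold': 10}
    class _StubTask:
        workflow = _StubWorkflow()
        def __init__(self):
            self.internal_data = {}
        def _set_internal_data(self, **kwargs):
            self.internal_data.update(kwargs)
        def _get_internal_data(self, name, default=None):
            return self.internal_data.get(name, default)
    event = ConditionalEventDefinition('threshold > 5', name='check_threshold')
    print(event.has_fired(_StubTask()))  # True, since threshold (10) > 5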
| 577 | Python | .py | 11 | 45.363636 | 135 | 0.698046 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,081 | simple.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/simple.py | from SpiffWorkflow.bpmn.util import BpmnEvent
from .base import EventDefinition
class NoneEventDefinition(EventDefinition):
"""This class defines behavior for NoneEvents. We override throw to do nothing."""
def __init__(self, **kwargs):
super().__init__(**kwargs)
def throw(self, my_task):
"""It's a 'none' event, so nothing to throw."""
pass
def reset(self, my_task):
"""It's a 'none' event, so nothing to reset."""
pass
class CancelEventDefinition(EventDefinition):
"""Cancel events are only handled by the outerworkflow, as they can only be used inside of transaction subprocesses."""
def __init__(self, **kwargs):
super(CancelEventDefinition, self).__init__(**kwargs)
def throw(self, my_task, **kwargs):
event = BpmnEvent(self, target=my_task.workflow.parent_workflow)
my_task.workflow.top_workflow.catch(event)
class TerminateEventDefinition(EventDefinition):
"""The TerminateEventDefinition is the implementation of event definition used for Termination Events."""
def __init__(self, **kwargs):
super(TerminateEventDefinition, self).__init__(**kwargs)
def throw(self, my_task):
event = BpmnEvent(my_task.task_spec.event_definition, target=my_task.workflow)
my_task.workflow.top_workflow.catch(event)
| 1,348 | Python | .py | 26 | 45.423077 | 123 | 0.699007 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,082 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/__init__.py | # Copyright (C) 2012 Matthew Hampton
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .simple import NoneEventDefinition, CancelEventDefinition, TerminateEventDefinition
from .timer import TimeDateEventDefinition, DurationTimerEventDefinition, CycleTimerEventDefinition
from .item_aware_event import SignalEventDefinition, EscalationEventDefinition, ErrorEventDefinition
from .message import MessageEventDefinition
from .multiple import MultipleEventDefinition
from .conditional import ConditionalEventDefinition | 1,242 | Python | .py | 24 | 50.75 | 100 | 0.830049 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,083 | multiple.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/multiple.py | from .timer import TimerEventDefinition, EventDefinition
class MultipleEventDefinition(EventDefinition):
def __init__(self, event_definitions=None, parallel=False, **kwargs):
super().__init__(**kwargs)
self.event_definitions = event_definitions or []
self.parallel = parallel
def has_fired(self, my_task):
event_definitions = list(self.event_definitions)
seen_events = my_task.internal_data.get('seen_events', [])
for event_definition in self.event_definitions:
if isinstance(event_definition, TimerEventDefinition):
child = [c for c in my_task.children if c.task_spec.event_definition == event_definition]
child[0].task_spec._update_hook(child[0])
if event_definition.has_fired(child[0]) and event_definition in event_definitions:
event_definitions.remove(event_definition)
else:
for event in seen_events:
if event_definition.catches(my_task, event) and event_definition in event_definitions:
event_definitions.remove(event_definition)
if self.parallel:
# Parallel multiple need to match all events
return len(event_definitions) == 0
else:
return len(seen_events) > 0
def catch(self, my_task, event=None):
event.event_definition.catch(my_task, event)
seen_events = my_task.internal_data.get('seen_events', []) + [event]
my_task._set_internal_data(seen_events=seen_events)
def reset(self, my_task):
my_task.internal_data.pop('seen_events', None)
super().reset(my_task)
def __eq__(self, other):
# This event can catch any of the events associated with it
for event in self.event_definitions:
if event == other:
return True
return False
def throw(self, my_task):
        # Multiple events throw all associated events when they fire
for event_definition in self.event_definitions:
event_definition.throw(my_task) | 2,105 | Python | .py | 41 | 40.487805 | 106 | 0.639105 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,084 | base.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/base.py | from SpiffWorkflow.bpmn.util import BpmnEvent, PendingBpmnEvent
class EventDefinition(object):
"""
This is the base class for Event Definitions. It implements the default throw/catch
behavior for events.
    Default throw behavior is to hand the event to the top-level workflow's catch
    method; default catch behavior is to mark the event as fired on the task.
"""
def __init__(self, name=None, description=None):
self.name = name
self.description = description
def has_fired(self, my_task):
return my_task._get_internal_data('event_fired', False)
def catches(self, my_task, event):
return self == event.event_definition
def catch(self, my_task, event=None):
my_task._set_internal_data(event_fired=True)
def throw(self, my_task):
event = BpmnEvent(self)
my_task.workflow.top_workflow.catch(event)
def update_task(self, my_task):
"""This method allows events to implement update behavior for the task"""
pass
def update_task_data(self, my_task):
"""This method allows events with payloads to merge them into the task"""
pass
def reset(self, my_task):
my_task._set_internal_data(event_fired=False)
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__)
def __eq__(self, other):
return self.__class__ is other.__class__
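# --- Editor's note: an illustrative sketch, not part of the original module. ---
# Default equality is class-based, which is what the generic catches() above relies
# on: any two instances of the same event definition class match each other.
if __name__ == '__main__':  # pragma: no cover
    class _DemoEventDefinition(EventDefinition):
        pass
    assert _DemoEventDefinition() == _DemoEventDefinition()
    assert _DemoEventDefinition() != EventDefinition()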
| 1,590 | Python | .py | 35 | 38.714286 | 88 | 0.686325 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,085 | timer.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/specs/event_definitions/timer.py | import re
from datetime import datetime, timedelta, timezone
from calendar import monthrange
from time import timezone as tzoffset, altzone as dstoffset, struct_time, localtime
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.bpmn.util import PendingBpmnEvent
from .base import EventDefinition
seconds_from_utc = dstoffset if struct_time(localtime()).tm_isdst else tzoffset
LOCALTZ = timezone(timedelta(seconds=-1 * seconds_from_utc))
class TimerEventDefinition(EventDefinition):
def __init__(self, name, expression, **kwargs):
"""
Constructor.
:param name: The description of the timer.
:param expression: An ISO 8601 datetime or interval expression.
"""
super().__init__(**kwargs)
self.name = name
self.expression = expression
@staticmethod
def get_datetime(expression):
dt = datetime.fromisoformat(expression)
if dt.tzinfo is None:
dt = datetime.combine(dt.date(), dt.time(), LOCALTZ)
return dt.astimezone(timezone.utc)
@staticmethod
def get_timedelta_from_start(parsed_duration, start=None):
start = start or datetime.now(timezone.utc)
years, months, days = parsed_duration.pop('years', 0), parsed_duration.pop('months', 0), parsed_duration.pop('days', 0)
months += years * 12
for idx in range(int(months)):
year = start.year + (start.month + idx - 1) // 12
month = (start.month + idx) % 12 or 12
days += monthrange(year, month)[1]
year = start.year + (start.month + months - 1) // 12
month = (start.month + months) % 12 or 12
days += (months - int(months)) * monthrange(year, month)[1]
parsed_duration['days'] = days
return timedelta(**parsed_duration)
@staticmethod
def get_timedelta_from_end(parsed_duration, end):
years, months, days = parsed_duration.pop('years', 0), parsed_duration.pop('months', 0), parsed_duration.pop('days', 0)
months += years * 12
for idx in range(1, int(months) + 1):
year = end.year - (1 + (idx - end.month) // 12)
month = (end.month - idx) % 12 or 12
days += monthrange(year, month)[1]
year = end.year - (1 + (months - end.month) // 12)
month = (end.month - months) % 12 or 12
days += (months - int(months)) * monthrange(year, month)[1]
parsed_duration['days'] = days
return timedelta(**parsed_duration)
@staticmethod
def parse_iso_duration(expression):
# Based on https://en.wikipedia.org/wiki/ISO_8601#Time_intervals
parsed, expr_t, current = {}, False, expression.lower().strip('p').replace(',', '.')
for designator in ['years', 'months', 'weeks', 'days', 't', 'hours', 'minutes', 'seconds']:
value = current.split(designator[0], 1)
if len(value) == 2:
duration, remainder = value
if duration.isdigit():
parsed[designator] = int(duration)
elif duration.replace('.', '').isdigit() and not remainder:
parsed[designator] = float(duration)
if designator in parsed or designator == 't':
current = remainder
if designator == 't':
expr_t = True
date_specs, time_specs = ['years', 'months', 'days'], ['hours', 'minutes', 'seconds']
parsed_t = len([d for d in parsed if d in time_specs]) > 0
if len(current) or parsed_t != expr_t or ('weeks' in parsed and any(v for v in parsed if v in date_specs)):
raise Exception('Invalid duration')
# The actual timedelta will have to be computed based on a start or end date, to account for
# months lengths, leap days, etc. This returns a dict of the parsed elements
return parsed
@staticmethod
def parse_iso_week(expression):
# https://en.wikipedia.org/wiki/ISO_8601#Week_dates
m = re.match(r'(\d{4})W(\d{2})(\d)(T.+)?', expression.upper().replace('-', ''))
        year, week, day, ts = m.groups()
        ds = datetime.fromisocalendar(int(year), int(week), int(day)).strftime('%Y-%m-%d')
return TimerEventDefinition.get_datetime(ds + (ts or ''))
@staticmethod
def parse_time_or_duration(expression):
if expression.upper().startswith('P'):
return TimerEventDefinition.parse_iso_duration(expression)
elif 'W' in expression.upper():
return TimerEventDefinition.parse_iso_week(expression)
else:
return TimerEventDefinition.get_datetime(expression)
@staticmethod
def parse_iso_recurring_interval(expression):
components = expression.upper().replace('--', '/').strip('R').split('/')
cycles = int(components[0]) if components[0] else -1
start_or_duration = TimerEventDefinition.parse_time_or_duration(components[1])
if len(components) == 3:
end_or_duration = TimerEventDefinition.parse_time_or_duration(components[2])
else:
end_or_duration = None
if isinstance(start_or_duration, datetime):
# Start time + interval duration
start = start_or_duration
duration = TimerEventDefinition.get_timedelta_from_start(end_or_duration, start_or_duration)
elif isinstance(end_or_duration, datetime):
# End time + interval duration
duration = TimerEventDefinition.get_timedelta_from_end(start_or_duration, end_or_duration)
start = end_or_duration - duration
elif end_or_duration is None:
# Just an interval duration, assume a start time of now + duration
            duration = TimerEventDefinition.get_timedelta_from_start(start_or_duration)
start = datetime.now(timezone.utc) + duration
else:
raise Exception("Invalid recurring interval")
return cycles, start, duration
def __eq__(self, other):
return super().__eq__(other) and self.name == other.name
class TimeDateEventDefinition(TimerEventDefinition):
"""A Timer event represented by a specific date/time."""
def has_fired(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is None:
event_value = my_task.workflow.script_engine.evaluate(my_task, self.expression)
my_task._set_internal_data(event_value=event_value)
if TimerEventDefinition.parse_time_or_duration(event_value) < datetime.now(timezone.utc):
my_task._set_internal_data(event_fired=True)
return my_task._get_internal_data('event_fired', False)
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, my_task._get_internal_data('event_value'))
class DurationTimerEventDefinition(TimerEventDefinition):
"""A timer event represented by a duration"""
def has_fired(self, my_task):
event_value = my_task._get_internal_data("event_value")
if event_value is None:
expression = my_task.workflow.script_engine.evaluate(my_task, self.expression)
parsed_duration = TimerEventDefinition.parse_iso_duration(expression)
event_value = (datetime.now(timezone.utc) + TimerEventDefinition.get_timedelta_from_start(parsed_duration)).isoformat()
my_task._set_internal_data(event_value=event_value)
if TimerEventDefinition.get_datetime(event_value) < datetime.now(timezone.utc):
my_task._set_internal_data(event_fired=True)
return my_task._get_internal_data('event_fired', False)
def details(self, my_task):
return PendingBpmnEvent(self.name, self.__class__.__name__, my_task._get_internal_data('event_value'))
class CycleTimerEventDefinition(TimerEventDefinition):
def cycle_complete(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is None:
expression = my_task.workflow.script_engine.evaluate(my_task, self.expression)
cycles, start, duration = TimerEventDefinition.parse_iso_recurring_interval(expression)
event_value = {'cycles': cycles, 'next': start.isoformat(), 'duration': duration.total_seconds()}
# When the next timer event passes, return True to allow the parent task to generate another child
# Use event fired to indicate that this timer has completed all cycles and the task can be completed
ready = False
if event_value['cycles'] != 0:
next_event = datetime.fromisoformat(event_value['next'])
if next_event < datetime.now(timezone.utc):
event_value['next'] = (next_event + timedelta(seconds=event_value['duration'])).isoformat()
event_value['cycles'] -= 1
ready = True
else:
my_task.internal_data.pop('event_value', None)
my_task.internal_data['event_fired'] = True
my_task._set_internal_data(event_value=event_value)
return ready
def update_task(self, my_task):
if self.cycle_complete(my_task):
for output in my_task.task_spec.outputs:
child = my_task._add_child(output, TaskState.FUTURE)
child.task_spec._predict(child, mask=TaskState.NOT_FINISHED_MASK)
child.task_spec._update(child)
def details(self, my_task):
event_value = my_task._get_internal_data('event_value')
if event_value is not None and event_value['cycles'] != 0:
event_value = event_value['next']
return PendingBpmnEvent(self.name, self.__class__.__name__, event_value)
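# --- Editor's note: an illustrative usage sketch, not part of the original module. ---
# It exercises the static ISO 8601 helpers above with arbitrary example inputs.
# parse_iso_duration returns a dict of components because months and years can only
# be resolved to days relative to a concrete start (or end) date.
if __name__ == '__main__':  # pragma: no cover
    parsed = TimerEventDefinition.parse_iso_duration('P1DT12H')
    print(parsed)  # {'days': 1, 'hours': 12}
    # get_timedelta_from_start consumes the dict, so pass a copy if you need it again
    print(TimerEventDefinition.get_timedelta_from_start(dict(parsed)))  # 1 day, 12:00:00
    print(TimerEventDefinition.parse_iso_week('2024-W01-5'))  # Friday of ISO week 1, 2024 (local midnight, shown in UTC)
    cycles, start, duration = TimerEventDefinition.parse_iso_recurring_interval('R3/2024-01-01T00:00:00+00:00/PT1H')
    print(cycles, start.isoformat(), duration)  # 3 2024-01-01T00:00:00+00:00 1:00:00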
| 9,733 | Python | .py | 174 | 46.126437 | 131 | 0.642047 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,086 | workflow.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/serializer/workflow.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import json, gzip
from .migration.version_migration import MIGRATIONS
from .helpers import DefaultRegistry
from .config import DEFAULT_CONFIG
# This is the default version set on the workflow, it can be overridden in init
VERSION = "1.4"
class BpmnWorkflowSerializer:
"""This class implements a customizable BPMN Workflow serializer, based on the `DefaultRegistry`.
Workflows contain two types of objects: workflows/tasks/standard specs (objects that Spiff provides
serialization for automatically) and arbitrary data (associated with tasks and workflows). The goal
of this serializer is to provide a mechanism that allows for handling both, as well as the ability
to replace one of the default internal conversion mechanisms with your own if you've extended any of
the classes.
See `configure` for more details on customization.
Serialization occurs in two phases: the first is to convert everything in the workflow to a
dictionary containing only JSON-serializable objects and the second is dumping to JSON, which happens
only at the very end.
Attributes:
registry (`DictionaryConverter`): a registry that keeps track of all objects the serializer knows
        json_encoder_cls: passed into `convert` to provide additional JSON encoding capabilities (optional)
        json_decoder_cls: passed into `restore` to provide additional JSON decoding capabilities (optional)
version (str): the serializer version
"""
VERSION_KEY = "serializer_version" # Why is this customizable?
@staticmethod
def configure(config=None, registry=None):
"""Can be used to create a with custom Spiff classes.
If you have replaced any of the default classes that Spiff uses with your own, Spiff will not know
how to serialize them and you'll have to provide conversion mechanisms.
        The `config` is a dictionary that maps each (Spiff) class that needs to be handled to a
        converter for that class. There are some basic converters, which provide methods for handling
        essential Spiff attributes, in the `helpers` package of this module; the default converters,
        found in the `defaults` package of this module, extend these. The default configuration is in `config`.
The `registry` contains optional custom data conversions and the items in `config` will be added to
it, to create one repository of information about serialization. See `DictionaryConverter` for more
information about customized data. This parameter is optional and if not provided, `DefaultRegistry`
will be used.
Objects that are unknown to the `registry` will be passed on as-is and serialization can be handled
through custom JSON encoding/decoding as an alternative.
Arguments:
            config (dict): a mapping of class -> `BpmnConverter` subclass used to handle it
registry (`DictionaryConverter`): with conversions for custom data (if applicable)
"""
config = config or DEFAULT_CONFIG
if registry is None:
registry = DefaultRegistry()
for target_class, converter_class in config.items():
converter_class(target_class, registry)
return registry
def __init__(self, registry=None, version=VERSION, json_encoder_cls=None, json_decoder_cls=None):
"""Intializes a Workflow Serializer.
Arguments:
registry (`DictionaryConverter`): a registry that keeps track of all objects the serializer knows
version (str): the serializer version
            json_encoder_cls: passed into `convert` to provide additional JSON encoding capabilities (optional)
            json_decoder_cls: passed into `restore` to provide additional JSON decoding capabilities (optional)
"""
super().__init__()
self.registry = registry or self.configure()
self.json_encoder_cls = json_encoder_cls
self.json_decoder_cls = json_decoder_cls
self.VERSION = version
def serialize_json(self, workflow, use_gzip=False):
"""Serialize the dictionary representation of the workflow to JSON.
Arguments:
workflow: the workflow to serialize
use_gzip (bool): optionally gzip the resulting string
Returns:
a JSON dump of the dictionary representation or a gzipped version of it
"""
dct = self.to_dict(workflow)
dct[self.VERSION_KEY] = self.VERSION
json_str = json.dumps(dct, cls=self.json_encoder_cls)
return gzip.compress(json_str.encode('utf-8')) if use_gzip else json_str
def deserialize_json(self, serialization, use_gzip=False):
"""Deserialize a workflow from an optionally zipped JSON-dumped workflow.
Arguments:
serialization: the serialization to restore
use_gzip (bool): optionally gunzip the input
Returns:
the restored workflow
"""
json_str = gzip.decompress(serialization) if use_gzip else serialization
dct = json.loads(json_str, cls=self.json_decoder_cls)
self.migrate(dct)
return self.from_dict(dct)
def get_version(self, serialization):
"""Get the version specified in the serialization
Arguments:
serialization: a string or dictionary representation of a workflow
Returns:
            the version of the serializer the serialization was done with, if present
"""
if isinstance(serialization, dict):
            return serialization.get(self.VERSION_KEY)
elif isinstance(serialization, str):
dct = json.loads(serialization, cls=self.json_decoder_cls)
return dct.get(self.VERSION_KEY)
def migrate(self, dct):
"""Update the serialization format, if necessaary."""
version = dct.pop(self.VERSION_KEY)
if version in MIGRATIONS:
MIGRATIONS[version](dct)
def to_dict(self, obj, **kwargs):
"""Apply any know conversions to an object.
Arguments:
obj: the object
Keyword arguments:
optional keyword args that will be passed to `self.registry.convert`
Returns:
a dictionary representation of the object
"""
return self.registry.convert(obj, **kwargs)
def from_dict(self, dct, **kwargs):
"""Restore an known object from a dict.
Arguments:
dct: the dictionary representation of the object
Keyword arguments:
optional keyword args that will be passed to `self.registry.restore`
Returns:
a restored object
"""
return self.registry.restore(dct, **kwargs) | 7,603 | Python | .py | 141 | 45.836879 | 111 | 0.704098 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,087 | config.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/serializer/config.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.task import Task
from SpiffWorkflow.bpmn import BpmnWorkflow, BpmnEvent
from SpiffWorkflow.bpmn.util.subworkflow import BpmnSubWorkflow
from SpiffWorkflow.bpmn.specs import BpmnProcessSpec
from SpiffWorkflow.bpmn.specs.defaults import (
ManualTask,
NoneTask,
UserTask,
ExclusiveGateway,
InclusiveGateway,
ParallelGateway,
EventBasedGateway,
ScriptTask,
ServiceTask,
StandardLoopTask,
ParallelMultiInstanceTask,
SequentialMultiInstanceTask,
SubWorkflowTask,
CallActivity,
TransactionSubprocess,
StartEvent,
EndEvent,
IntermediateCatchEvent,
IntermediateThrowEvent,
BoundaryEvent,
SendTask,
ReceiveTask,
)
from SpiffWorkflow.bpmn.specs.event_definitions import (
NoneEventDefinition,
CancelEventDefinition,
TerminateEventDefinition,
SignalEventDefinition,
ErrorEventDefinition,
EscalationEventDefinition,
TimeDateEventDefinition,
DurationTimerEventDefinition,
CycleTimerEventDefinition,
MessageEventDefinition,
MultipleEventDefinition,
ConditionalEventDefinition,
)
from SpiffWorkflow.bpmn.specs.control import (
BpmnStartTask,
SimpleBpmnTask,
BoundaryEventSplit,
BoundaryEventJoin,
StartEventSplit,
StartEventJoin,
_EndJoin,
)
from SpiffWorkflow.bpmn.specs.data_spec import (
DataObject,
TaskDataReference,
)
from SpiffWorkflow.bpmn.specs.bpmn_task_spec import BpmnIoSpecification
from .default.workflow import (
BpmnWorkflowConverter,
BpmnSubWorkflowConverter,
TaskConverter,
BpmnEventConverter,
)
from .helpers import BpmnDataSpecificationConverter, EventDefinitionConverter
from .default import BpmnProcessSpecConverter
from .default.task_spec import (
BpmnTaskSpecConverter,
ScriptTaskConverter,
StandardLoopTaskConverter,
MultiInstanceTaskConverter,
SubWorkflowConverter,
EventJoinConverter,
ConditionalGatewayConverter,
ExclusiveGatewayConverter,
ParallelGatewayConverter,
EventConverter,
BoundaryEventConverter,
IOSpecificationConverter,
)
from .default.event_definition import (
TimerConditionalEventDefinitionConverter,
ErrorEscalationEventDefinitionConverter,
MessageEventDefinitionConverter,
MultipleEventDefinitionConverter,
)
DEFAULT_CONFIG = {
BpmnWorkflow: BpmnWorkflowConverter,
BpmnSubWorkflow: BpmnSubWorkflowConverter,
Task: TaskConverter,
BpmnEvent: BpmnEventConverter,
DataObject: BpmnDataSpecificationConverter,
TaskDataReference: BpmnDataSpecificationConverter,
BpmnIoSpecification: IOSpecificationConverter,
BpmnProcessSpec: BpmnProcessSpecConverter,
SimpleBpmnTask: BpmnTaskSpecConverter,
BpmnStartTask: BpmnTaskSpecConverter,
_EndJoin: BpmnTaskSpecConverter,
NoneTask: BpmnTaskSpecConverter,
ManualTask: BpmnTaskSpecConverter,
UserTask: BpmnTaskSpecConverter,
ScriptTask: ScriptTaskConverter,
StandardLoopTask: StandardLoopTaskConverter,
ParallelMultiInstanceTask: MultiInstanceTaskConverter,
SequentialMultiInstanceTask: MultiInstanceTaskConverter,
SubWorkflowTask: SubWorkflowConverter,
CallActivity: SubWorkflowConverter,
TransactionSubprocess: SubWorkflowConverter,
BoundaryEventSplit: BpmnTaskSpecConverter,
BoundaryEventJoin: EventJoinConverter,
ExclusiveGateway: ExclusiveGatewayConverter,
InclusiveGateway: ConditionalGatewayConverter,
ParallelGateway: ParallelGatewayConverter,
StartEvent: EventConverter,
EndEvent: EventConverter,
IntermediateCatchEvent: EventConverter,
IntermediateThrowEvent: EventConverter,
BoundaryEvent: BoundaryEventConverter,
SendTask: EventConverter,
ReceiveTask: EventConverter,
EventBasedGateway: EventConverter,
CancelEventDefinition: EventDefinitionConverter,
ErrorEventDefinition: ErrorEscalationEventDefinitionConverter,
EscalationEventDefinition: ErrorEscalationEventDefinitionConverter,
MessageEventDefinition: MessageEventDefinitionConverter,
NoneEventDefinition: EventDefinitionConverter,
SignalEventDefinition: EventDefinitionConverter,
TerminateEventDefinition: EventDefinitionConverter,
TimeDateEventDefinition: TimerConditionalEventDefinitionConverter,
DurationTimerEventDefinition: TimerConditionalEventDefinitionConverter,
CycleTimerEventDefinition: TimerConditionalEventDefinitionConverter,
ConditionalEventDefinition: TimerConditionalEventDefinitionConverter,
MultipleEventDefinition: MultipleEventDefinitionConverter,
StartEventSplit: BpmnTaskSpecConverter,
StartEventJoin: EventJoinConverter,
}
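# --- Editor's note: a minimal usage sketch, not part of the original module. ---
# DEFAULT_CONFIG is consumed by BpmnWorkflowSerializer.configure, which instantiates
# each converter class against a shared registry. The round-trip lines are indicative
# only; they assume you already have a BpmnWorkflow instance to serialize.
if __name__ == '__main__':  # pragma: no cover
    from SpiffWorkflow.bpmn.serializer import BpmnWorkflowSerializer
    registry = BpmnWorkflowSerializer.configure(DEFAULT_CONFIG)
    serializer = BpmnWorkflowSerializer(registry)
    # json_str = serializer.serialize_json(workflow)
    # workflow = serializer.deserialize_json(json_str)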
| 5,450 | Python | .py | 152 | 31.894737 | 77 | 0.825586 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,088 | __init__.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/serializer/__init__.py | # Copyright (C) 2012 Matthew Hampton, 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .workflow import BpmnWorkflowSerializer
from .config import DEFAULT_CONFIG
from .helpers import DefaultRegistry | 945 | Python | .py | 21 | 44 | 69 | 0.799784 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,089 | exceptions.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/serializer/exceptions.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.exceptions import WorkflowException
class VersionMigrationError(WorkflowException):
pass | 919 | Python | .py | 21 | 42.52381 | 69 | 0.799331 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
1,090 | event_definition.py | sartography_SpiffWorkflow/SpiffWorkflow/bpmn/serializer/default/event_definition.py | # Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from ..helpers.spec import EventDefinitionConverter
class TimerConditionalEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['expression'] = event_definition.expression
return dct
class ErrorEscalationEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['code'] = event_definition.code
return dct
class MessageEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['correlation_properties'] = self.correlation_properties_to_dict(event_definition.correlation_properties)
return dct
def from_dict(self, dct):
dct['correlation_properties'] = self.correlation_properties_from_dict(dct['correlation_properties'])
event_definition = super().from_dict(dct)
return event_definition
class MultipleEventDefinitionConverter(EventDefinitionConverter):
def to_dict(self, event_definition):
dct = super().to_dict(event_definition)
dct['parallel'] = event_definition.parallel
dct['event_definitions'] = [self.registry.convert(e) for e in event_definition.event_definitions]
return dct
def from_dict(self, dct):
events = dct.pop('event_definitions')
event_definition = super().from_dict(dct)
event_definition.event_definitions = [self.registry.restore(d) for d in events]
return event_definition
# File: SpiffWorkflow/bpmn/serializer/default/workflow.py

# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from uuid import UUID
from SpiffWorkflow.bpmn.specs.mixins.subworkflow_task import SubWorkflowTask
from ..helpers.bpmn_converter import BpmnConverter
class TaskConverter(BpmnConverter):
def to_dict(self, task):
return {
'id': str(task.id),
'parent': str(task._parent) if task.parent is not None else None,
'children': [ str(child) for child in task._children ],
'last_state_change': task.last_state_change,
'state': task.state,
'task_spec': task.task_spec.name,
'triggered': task.triggered,
'internal_data': self.registry.convert(task.internal_data),
'data': self.registry.convert(self.registry.clean(task.data)),
}
def from_dict(self, dct, workflow):
task_spec = workflow.spec.task_specs.get(dct['task_spec'])
task = self.target_class(workflow, task_spec, state=dct['state'], id=UUID(dct['id']))
task._parent = UUID(dct['parent']) if dct['parent'] is not None else None
task._children = [UUID(child) for child in dct['children']]
task.last_state_change = dct['last_state_change']
task.triggered = dct['triggered']
task.internal_data = self.registry.restore(dct['internal_data'])
task.data = self.registry.restore(dct['data'])
return task
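# A serialized task is a plain JSON-compatible dict mirroring `to_dict` above,
# e.g. (values are illustrative):
#
#     {'id': '557d8c4b-...', 'parent': 'a3f1c2d0-...', 'children': [],
#      'last_state_change': 1698243300.0, 'state': 64, 'task_spec': 'my_task',
#      'triggered': False, 'internal_data': {}, 'data': {}}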
class BpmnEventConverter(BpmnConverter):
def to_dict(self, event):
return {
'event_definition': self.registry.convert(event.event_definition),
'payload': self.registry.convert(event.payload),
'correlations': self.mapping_to_dict(event.correlations),
}
def from_dict(self, dct):
return self.target_class(
self.registry.restore(dct['event_definition']),
self.registry.restore(dct['payload']),
self.mapping_from_dict(dct['correlations'])
)
class WorkflowConverter(BpmnConverter):
def to_dict(self, workflow):
"""Get a dictionary of attributes associated with both top level and subprocesses"""
return {
'data': self.registry.convert(self.registry.clean(workflow.data)),
'correlations': workflow.correlations,
'last_task': str(workflow.last_task.id) if workflow.last_task is not None else None,
'success': workflow.success,
'completed': workflow.completed,
'tasks': self.mapping_to_dict(workflow.tasks),
'root': str(workflow.task_tree.id),
}
def set_default_attributes(self, workflow, dct):
workflow.success = dct['success']
workflow.completed = dct.get('completed', False)
workflow.correlations = dct.pop('correlations', {})
if isinstance(dct['last_task'], str):
workflow.last_task = workflow.tasks.get(UUID(dct['last_task']))
workflow.data = self.registry.restore(dct.pop('data', {}))
class BpmnSubWorkflowConverter(WorkflowConverter):
def to_dict(self, workflow):
dct = super().to_dict(workflow)
dct['parent_task_id'] = str(workflow.parent_task_id)
dct['spec'] = workflow.spec.name
return dct
def from_dict(self, dct, task, top_workflow):
spec = top_workflow.subprocess_specs.get(task.task_spec.spec)
subprocess = self.target_class(spec, task.id, top_workflow, deserializing=True)
subprocess.tasks = self.mapping_from_dict(dct['tasks'], UUID, workflow=subprocess)
subprocess.task_tree = subprocess.tasks.get(UUID(dct['root']))
self.set_default_attributes(subprocess, dct)
return subprocess
class BpmnWorkflowConverter(WorkflowConverter):
def to_dict(self, workflow):
"""Return a JSON-serializable dictionary representation of the workflow.
:param workflow: the workflow
Returns:
a dictionary representation of the workflow
"""
dct = super().to_dict(workflow)
dct['spec'] = self.registry.convert(workflow.spec)
dct['subprocess_specs'] = self.mapping_to_dict(workflow.subprocess_specs)
dct['subprocesses'] = self.mapping_to_dict(workflow.subprocesses)
dct['bpmn_events'] = self.registry.convert(workflow.bpmn_events)
return dct
def from_dict(self, dct):
"""Create a workflow based on a dictionary representation.
:param dct: the dictionary representation
Returns:
a BPMN Workflow object
"""
# Restore the specs
spec = self.registry.restore(dct.pop('spec'))
subprocess_specs = self.mapping_from_dict(dct.pop('subprocess_specs', {}))
# Create the top-level workflow
workflow = self.target_class(spec, subprocess_specs, deserializing=True)
# Restore the task tree
workflow.tasks = self.mapping_from_dict(dct['tasks'], UUID, workflow=workflow)
workflow.task_tree = workflow.tasks.get(UUID(dct['root']))
# Restore other default attributes
self.set_default_attributes(workflow, dct)
# Handle the remaining top workflow attributes
self.subprocesses_from_dict(dct['subprocesses'], workflow)
workflow.bpmn_events = self.registry.restore(dct.pop('bpmn_events', []))
return workflow
def subprocesses_from_dict(self, dct, workflow, top_workflow=None):
# This ensures we create parent workflows before their children; we need the tasks they're associated with
top_workflow = top_workflow or workflow
for task in workflow.tasks.values():
if isinstance(task.task_spec, SubWorkflowTask) and str(task.id) in dct:
sp = self.registry.restore(dct.pop(str(task.id)), task=task, top_workflow=top_workflow)
top_workflow.subprocesses[task.id] = sp
sp.completed_event.connect(task.task_spec._on_subworkflow_completed, task)
self.subprocesses_from_dict(dct, sp, top_workflow)
# File: SpiffWorkflow/bpmn/serializer/default/__init__.py

# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .workflow import BpmnWorkflowConverter, BpmnSubWorkflowConverter, BpmnEventConverter
from .process_spec import BpmnProcessSpecConverter
from .task_spec import (
BpmnTaskSpecConverter,
ScriptTaskConverter,
StandardLoopTaskConverter,
MultiInstanceTaskConverter,
SubWorkflowConverter,
ConditionalGatewayConverter,
ExclusiveGatewayConverter,
ParallelGatewayConverter,
EventConverter,
BoundaryEventConverter,
)
from .event_definition import (
TimerConditionalEventDefinitionConverter,
ErrorEscalationEventDefinitionConverter,
MessageEventDefinitionConverter,
MultipleEventDefinitionConverter,
)

# File: SpiffWorkflow/bpmn/serializer/default/task_spec.py

# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from SpiffWorkflow.bpmn.specs.bpmn_task_spec import _BpmnCondition
from ..helpers.bpmn_converter import BpmnConverter
from ..helpers.spec import TaskSpecConverter
class IOSpecificationConverter(BpmnConverter):
"""The converter for an IOSpecification"""
def to_dict(self, spec):
"""Converts an IO spec to a dictionary representation
Arguments:
spec (BpmnIOSpecification): the `BpmnIOSpecification` of a `BpmnTaskSpec`
Returns:
dict: a dictionary representation of the IO spec
"""
return {
'data_inputs': [self.registry.convert(item) for item in spec.data_inputs],
'data_outputs': [self.registry.convert(item) for item in spec.data_outputs],
}
def from_dict(self, dct):
"""Restore a `BpmnIOSpecification` from a dictionary representation
Arguments:
dct (dict): the dictionary representation
Returns:
`BpmnIOSpecification`: a `BpmnTaskSpec` IO spec
"""
return self.target_class(
data_inputs=[self.registry.restore(item) for item in dct['data_inputs']],
data_outputs=[self.registry.restore(item) for item in dct['data_outputs']],
)
class BpmnTaskSpecConverter(TaskSpecConverter):
"""The base converter for a `BpmnTaskSpec`
This converter can be extended for customized task specs with additional attributes (e.g. the
ones defined in this module, which can serve as examples for anyone who has created a custom
    BPMN task spec).
"""
def to_dict(self, spec):
"""Create a dictionary representation of the shared `BpmnTaskSpec` attributes
Arguments:
spec: the spec to be converter to a dictionary
Returns:
dict: a dictionary representation of shared attributes
"""
return self.get_default_attributes(spec)
def from_dict(self, dct):
"""Restore a `BpmnTaskSpec` from a dictionary of attributes
If you have added only custom attributes that can be passed to `__init__`, you won't need
to extend this.
Arguments:
dct (dict): the task spec's dictionary representation
Returns:
an instance of the target class
"""
return self.task_spec_from_dict(dct)
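# A minimal extension sketch; `MyTaskSpec` and `my_attribute` are hypothetical:
#
#     class MyTaskSpecConverter(BpmnTaskSpecConverter):
#         def to_dict(self, spec):
#             dct = self.get_default_attributes(spec)
#             dct['my_attribute'] = spec.my_attribute
#             return dct
#
# If `my_attribute` can be passed to `MyTaskSpec.__init__`, the inherited
# `from_dict` restores it without further changes.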
class ScriptTaskConverter(BpmnTaskSpecConverter):
"""The default converter for `ScriptTask`"""
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct['script'] = spec.script
return dct
class StandardLoopTaskConverter(BpmnTaskSpecConverter):
"""The default converter for `StandardLoopTask`"""
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct.update(self.get_standard_loop_attributes(spec))
return dct
class MultiInstanceTaskConverter(BpmnTaskSpecConverter):
"""The default converter for Parallel and Sequential MultiInstance Tasks"""
def to_dict(self, spec):
dct = self.get_default_attributes(spec)
dct['task_spec'] = spec.task_spec
dct['cardinality'] = spec.cardinality
dct['data_input'] = self.registry.convert(spec.data_input)
dct['data_output'] = self.registry.convert(spec.data_output)
dct['input_item'] = self.registry.convert(spec.input_item)
dct['output_item'] = self.registry.convert(spec.output_item)
dct['condition'] = spec.condition
return dct
def from_dict(self, dct):
dct['data_input'] = self.registry.restore(dct['data_input'])
dct['data_output'] = self.registry.restore(dct['data_output'])
dct['input_item'] = self.registry.restore(dct['input_item'])
dct['output_item'] = self.registry.restore(dct['output_item'])
return self.task_spec_from_dict(dct)
class EventJoinConverter(BpmnTaskSpecConverter):
"""The default converter for event join gateways"""
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_join_attributes(spec))
return dct
class SubWorkflowConverter(BpmnTaskSpecConverter):
"""The default converter for subworkflows (`SubWOrkflowTask`, `CallActivity`, `TransactionSubprocess`)"""
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_subworkflow_attributes(spec))
return dct
def from_dict(self, dct):
dct['subworkflow_spec'] = dct.pop('spec')
return self.task_spec_from_dict(dct)
class ConditionalGatewayConverter(BpmnTaskSpecConverter):
"""A converter class that adds attributes for a `TaskSpec` with conditional outputs"""
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['cond_task_specs'] = [ self.bpmn_condition_to_dict(cond) for cond in spec.cond_task_specs ]
dct['choice'] = spec.choice
return dct
def from_dict(self, dct):
conditions = dct.pop('cond_task_specs')
spec = self.task_spec_from_dict(dct)
spec.cond_task_specs = [ self.bpmn_condition_from_dict(cond) for cond in conditions ]
return spec
def bpmn_condition_from_dict(self, dct):
return (_BpmnCondition(dct['condition']) if dct['condition'] is not None else None, dct['task_spec'])
def bpmn_condition_to_dict(self, condition):
expr, task_spec = condition
return {
'condition': expr.args[0] if expr is not None else None,
'task_spec': task_spec
}
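# Each serialized condition is a dict like {'condition': 'x > 0', 'task_spec': 'path_a'};
# a `None` condition marks the default output (see `ExclusiveGatewayConverter` below).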
class ExclusiveGatewayConverter(ConditionalGatewayConverter):
"""THe default converterfor `ExclusiveGateway`task specs"""
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['default_task_spec'] = spec.default_task_spec
return dct
def from_dict(self, dct):
default_task_spec = dct.pop('default_task_spec')
spec = super().from_dict(dct)
spec.default_task_spec = default_task_spec
return spec
class ParallelGatewayConverter(BpmnTaskSpecConverter):
"""The default converter for `ParallelGateway` task specs """
def to_dict(self, spec):
dct = super().to_dict(spec)
dct.update(self.get_join_attributes(spec))
return dct
def from_dict(self, dct):
return self.task_spec_from_dict(dct)
class EventConverter(BpmnTaskSpecConverter):
"""The default converter for BPMN events"""
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['event_definition'] = self.registry.convert(spec.event_definition)
return dct
def from_dict(self, dct):
dct['event_definition'] = self.registry.restore(dct['event_definition'])
return self.task_spec_from_dict(dct)
class BoundaryEventConverter(EventConverter):
"""The default converter for `BoundaryEvent` task specs"""
def to_dict(self, spec):
dct = super().to_dict(spec)
dct['cancel_activity'] = spec.cancel_activity
return dct
# File: SpiffWorkflow/bpmn/serializer/default/process_spec.py

# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from ..helpers.bpmn_converter import BpmnConverter
from SpiffWorkflow.bpmn.specs.mixins.multiinstance_task import LoopTask
class BpmnProcessSpecConverter(BpmnConverter):
def restore_task_spec_extensions(self, dct, task_spec):
if 'extensions' in dct:
task_spec.extensions = dct.pop('extensions')
def to_dict(self, spec):
dct = {
'name': spec.name,
'description': spec.description,
'file': spec.file,
'task_specs': {},
'io_specification': self.registry.convert(spec.io_specification),
'data_objects': dict([ (name, self.registry.convert(obj)) for name, obj in spec.data_objects.items() ]),
'correlation_keys': spec.correlation_keys,
}
for name, task_spec in spec.task_specs.items():
task_dict = self.registry.convert(task_spec)
dct['task_specs'][name] = task_dict
return dct
def from_dict(self, dct):
spec = self.target_class(name=dct['name'], description=dct['description'], filename=dct['file'])
# These are automatically created with a workflow and should be replaced
del spec.task_specs['Start']
spec.start = None
del spec.task_specs['End']
del spec.task_specs[f'{spec.name}.EndJoin']
# Add the data specs
spec.io_specification = self.registry.restore(dct.pop('io_specification', None))
        # fixme: This conditional can be removed in the next release; it just avoids a potential
        # serialization issue for some users caught between official releases.
if isinstance(dct.get('data_objects', {}), dict):
spec.data_objects = dict([ (name, self.registry.restore(obj_dct)) for name, obj_dct in dct.pop('data_objects', {}).items() ])
else:
spec.data_objects = {}
# Add messaging related stuff
spec.correlation_keys = dct.pop('correlation_keys', {})
loop_tasks = []
dct['task_specs'].pop('Root', None)
for name, task_dict in dct['task_specs'].items():
# I hate this, but I need to pass in the workflow spec when I create the task.
# IMO storing the workflow spec on the task spec is a TERRIBLE idea, but that's
# how this thing works.
task_dict['wf_spec'] = spec
task_spec = self.registry.restore(task_dict)
if name == 'Start':
spec.start = task_spec
if isinstance(task_spec, LoopTask):
loop_tasks.append(task_spec)
self.restore_task_spec_extensions(task_dict, task_spec)
for task_spec in loop_tasks:
child_spec = spec.task_specs.get(task_spec.task_spec)
child_spec.completed_event.connect(task_spec.merge_child)
return spec
# File: SpiffWorkflow/bpmn/serializer/migration/version_1_1.py

# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
def move_subprocesses_to_top(dct):
subprocesses = dict((sp, { 'tasks': {}, 'root': None, 'data': {}, 'success': True }) for sp in dct['subprocesses'])
# Move the tasks out of the top-level
for sp, task_ids in dct['subprocesses'].items():
for task_id in task_ids:
if task_id in dct['tasks']:
subprocesses[sp]['tasks'][task_id] = dct['tasks'].pop(task_id)
if subprocesses[sp]['root'] is None:
subprocesses[sp]['root'] = task_id
subprocesses[sp]['tasks'][task_id]['parent'] = None
    # Fix up the task and workflow states
waiting = []
for sp in subprocesses:
completed = sorted(
[t for t in subprocesses[sp]['tasks'].values() if t['state'] in [32, 64] ],
key=lambda t: t['last_state_change']
)
if len(completed) > 0:
subprocesses[sp]['last_task'] = completed[-1]
# If there are uncompleted tasks, set the subworkflow task state to waiting
if len(completed) < len(subprocesses[sp]['tasks']):
waiting.append(sp)
# Check the top level and all subprocesses for waiting tasks
# Also remove any children that are no longer in the tree
for sp in [dct] + list(subprocesses.values()):
for task_id, task in sp['tasks'].items():
if task_id in waiting:
task['state'] = 8
task['children'] = [ c for c in task['children'] if c in sp['tasks'] ]
dct['subprocesses'] = subprocesses
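# Shape change, sketched with illustrative values: v1.0 kept every task in the
# top-level 'tasks' dict and stored only id lists per subprocess,
#
#     {'subprocesses': {'sp1': ['task-1', 'task-2']}, 'tasks': {...all tasks...}}
#
# while v1.1 stores a self-contained workflow dict per subprocess:
#
#     {'subprocesses': {'sp1': {'tasks': {...}, 'root': 'task-1', 'data': {}, 'success': True}}}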
# File: SpiffWorkflow/bpmn/serializer/migration/version_1_4.py
def update_mi_states(dct):
typenames = ['StandardLoopTask', 'SequentialMultiInstanceTask', 'ParallelMultiInstanceTask']
def update(tasks, task_specs):
for task in tasks:
task_spec = task_specs.get(task['task_spec'], {})
            if task['state'] == 8 and task_spec.get('typename') in typenames:
task['state'] = 32
    for sp in dct['subprocesses'].values():
update(sp['tasks'].values(), sp['spec']['task_specs'])
update(dct['tasks'].values(), dct['spec']['task_specs'])
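# Note: these are the raw TaskState integers used throughout the migration
# scripts -- 8 is WAITING and 32 is STARTED in the current encoding -- so this
# moves reached multiinstance tasks from WAITING to STARTED.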
# File: SpiffWorkflow/bpmn/serializer/migration/version_migration.py

# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from .version_1_1 import move_subprocesses_to_top
from .version_1_2 import (
convert_timer_expressions,
add_default_condition_to_cond_task_specs,
create_data_objects_and_io_specs,
check_multiinstance,
remove_loop_reset,
update_task_states,
convert_simple_tasks,
update_bpmn_attributes,
)
from .version_1_3 import (
update_event_definition_attributes,
remove_boundary_event_parent,
remove_root_task,
add_new_typenames,
update_data_objects,
)
from .version_1_4 import update_mi_states
def from_version_1_3(dct):
"""Upgrade serialization from v1.3 to v1.4
Multiinstance tasks now rely on events rather than polling to merge children, so once
they are reached, they should be STARTED rather than WAITING.
"""
dct['VERSION'] = "1.3"
update_mi_states(dct)
def from_version_1_2(dct):
"""Upgrade serialization from v.1.2 to v.1.3
The internal/external distinction on event definitions was replaced with the ability to
target a specific workflow.
    Boundary event parent gateway tasks have been replaced with a gateway structure.
The creation of an unnecessary root task was removed; the workflow spec's start task is
used as the root instead.
    BpmnWorkflows and BpmnSubworkflows were split into two classes.
Data objects are now stored on the topmost workflow where they are defined.
"""
dct['VERSION'] = "1.3"
update_event_definition_attributes(dct)
remove_boundary_event_parent(dct)
remove_root_task(dct)
add_new_typenames(dct)
update_data_objects(dct)
def from_version_1_1(dct):
"""
Upgrade v1.1 serialization to v1.2.
Expressions in timer event definitions have been converted from python expressions to
ISO 8601 expressions.
Cycle timers no longer connect back to themselves. New children are created from a single
    task rather than reusing previously executed tasks.
All conditions (including the default) are included in the conditions for gateways.
Data inputs and outputs on process specs were moved inside a BPMNIOSpecification, and
are now TaskDataReferences; BpmnDataSpecifications that referred to Data Objects are
now DataObjects.
Multiinstance tasks were completely refactored, in a way that is simply too difficult to
migrate.
Loop reset tasks were removed.
"""
dct['VERSION'] = "1.2"
convert_timer_expressions(dct)
add_default_condition_to_cond_task_specs(dct)
create_data_objects_and_io_specs(dct)
check_multiinstance(dct)
remove_loop_reset(dct)
update_task_states(dct)
convert_simple_tasks(dct)
update_bpmn_attributes(dct)
from_version_1_2(dct)
def from_version_1_0(dct):
"""
Upgrade v1.0 serializations to v1.1.
    Starting with Spiff 1.1.8, subworkflows are no longer integrated into the main task tree. When
    a subworkflow (a subprocess, transaction, or call activity) is reached, a subprocess is
    added to the top level workflow and the task enters a waiting state until the workflow
completes.
To make the serialization backwards compatible, we delete the tasks from the main workflow
    task list, add them to the appropriate subprocess, and recreate the remaining subprocess
attributes based on the task states.
"""
dct['VERSION'] = "1.1"
move_subprocesses_to_top(dct)
from_version_1_1(dct)
MIGRATIONS = {
'1.0': from_version_1_0,
'1.1': from_version_1_1,
'1.2': from_version_1_2,
'1.3': from_version_1_3,
}
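# Dispatch sketch (the serializer performs something along these lines): look up
# the serialized version and run its migration, which chains forward to the
# latest format:
#
#     version = dct.get('VERSION')
#     if version in MIGRATIONS:
#         MIGRATIONS[version](dct)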
# File: SpiffWorkflow/bpmn/serializer/migration/version_1_3.py

from uuid import uuid4
def update_event_definition_attributes(dct):
def update_specs(wf_spec):
for spec in wf_spec['task_specs'].values():
if 'event_definition' in spec:
spec['event_definition'].pop('internal', None)
spec['event_definition'].pop('external', None)
if 'escalation_code' in spec['event_definition']:
spec['event_definition']['code'] = spec['event_definition'].pop('escalation_code')
if 'error_code' in spec['event_definition']:
spec['event_definition']['code'] = spec['event_definition'].pop('error_code')
update_specs(dct['spec'])
for sp_spec in dct['subprocess_specs'].values():
update_specs(sp_spec)
def remove_boundary_event_parent(dct):
def update_specs(wf_spec):
new_specs, delete_specs = {}, []
for spec in wf_spec['task_specs'].values():
if spec['typename'] == '_BoundaryEventParent':
delete_specs.append(spec['name'])
spec.pop('main_child_task_spec')
spec['typename'] = 'BoundaryEventSplit'
spec['name'] = spec['name'].replace('BoundaryEventParent', 'BoundaryEventSplit')
new_specs[spec['name']] = spec
join = {
"name": spec['name'].replace('BoundaryEventSplit', 'BoundaryEventJoin'),
"manual": False,
"bpmn_id": None,
"lookahead": 2,
"inputs": spec['outputs'],
"outputs": [],
"split_task": spec['name'],
"threshold": None,
"cancel": True,
"typename": "BoundaryEventJoin"
}
new_specs[join['name']] = join
for parent in spec['inputs']:
parent_spec = wf_spec['task_specs'][parent]
parent_spec['outputs'] = [name.replace('BoundaryEventParent', 'BoundaryEventSplit') for name in parent_spec['outputs']]
if "default_task_spec" in parent_spec:
parent_spec["default_task_spec"] = parent_spec["default_task_spec"].replace('BoundaryEventParent', 'BoundaryEventSplit')
for condition in parent_spec.get("cond_task_specs", []):
condition["task_spec"] = condition["task_spec"].replace('BoundaryEventParent', 'BoundaryEventSplit')
for child in spec['outputs']:
child_spec = wf_spec['task_specs'][child]
child_spec['outputs'].append(join['name'])
child_spec['inputs'] = [name.replace('BoundaryEventParent', 'BoundaryEventSplit') for name in child_spec['inputs']]
wf_spec['task_specs'].update(new_specs)
for name in delete_specs:
del wf_spec['task_specs'][name]
def update_tasks(wf):
new_tasks = {}
for task in wf['tasks'].values():
if task['task_spec'].endswith('BoundaryEventParent'):
task['task_spec'] = task['task_spec'].replace('BoundaryEventParent', 'BoundaryEventSplit')
completed = all([ wf['tasks'][child]['state'] in [64, 256] for child in task['children'] ])
for child in task['children']:
child_task = wf['tasks'][child]
if child_task['state'] < 8:
# MAYBE, LIKELY, FUTURE: use parent state
state = child_task['state']
elif child_task['state'] < 64:
# WAITING, READY, STARTED (definite): join is FUTURE
state = 4
elif child_task['state'] == 64:
# COMPLETED: if the join is not finished, WAITING, otherwise COMPLETED
state = 64 if completed else 8
elif child_task['state'] == 128:
# ERROR: we don't know what the original state was, but we can't proceed through the gateway
state = 8
else:
# Cancelled tasks don't have children
continue
new_task = {
'id': str(uuid4()),
'parent': child_task['id'],
'children': [],
'state': state,
'task_spec': task['task_spec'].replace('BoundaryEventSplit', 'BoundaryEventJoin'),
'last_state_change': child_task['last_state_change'],
'triggered': False,
'internal_data': {},
'data': {},
}
child_task['children'].append(new_task['id'])
new_tasks[new_task['id']] = new_task
wf['tasks'].update(new_tasks)
update_specs(dct['spec'])
for sp_spec in dct['subprocess_specs'].values():
update_specs(sp_spec)
update_tasks(dct)
for sp in dct['subprocesses'].values():
update_tasks(sp)
def remove_root_task(dct):
def update(wf):
root = wf['tasks'].get(wf['root'])
if root['task_spec'] == 'Root':
wf['tasks'].pop(root['id'])
start = wf['tasks'].get(root['children'][0])
start['parent'] = None
wf['root'] = start['id']
update(dct)
for sp in dct['subprocesses'].values():
update(sp)
dct['spec']['task_specs'].pop('Root', None)
for spec in dct['subprocess_specs'].values():
spec['task_specs'].pop('Root', None)
def add_new_typenames(dct):
dct['typename'] = 'BpmnWorkflow'
for task in dct['tasks'].values():
task['typename'] = 'Task'
for sp in dct['subprocesses'].values():
sp['typename'] = 'BpmnSubWorkflow'
for task in sp['tasks'].values():
task['typename'] = 'Task'
def update_data_objects(dct):
def update_spec(parent):
children = []
for ts in [ts for ts in parent['task_specs'].values() if 'spec' in ts]:
child = dct['subprocess_specs'].get(ts['spec'])
children.append((child, ts['typename']))
update_spec(child)
for child in [c for c, spec_type in children if spec_type != 'CallActivity']:
for name in parent['data_objects']:
child['data_objects'].pop(name, None)
data_objects = []
def update_wf(wf, spec):
data_objects.extend([v for v in spec.get('data_objects', {}) if v not in data_objects])
for task in [t for t in wf['tasks'].values() if t['id'] in dct['subprocesses']]:
ts = spec['task_specs'][task['task_spec']]
sp_spec = dct['subprocess_specs'].get(ts['spec'])
sp = dct['subprocesses'].get(task['id'])
update_wf(sp, sp_spec)
if len(spec.get('data_objects', {})) > 0:
wf['data']['data_objects'] = {}
for key in list(wf['data']):
if key in spec.get('data_objects', {}):
wf['data']['data_objects'][key] = wf['data'].pop(key)
elif key in data_objects:
del wf['data'][key]
update_spec(dct['spec'])
update_wf(dct, dct['spec'])
# File: SpiffWorkflow/bpmn/serializer/migration/version_1_2.py

# Copyright (C) 2023 Sartography
#
# This file is part of SpiffWorkflow.
#
# SpiffWorkflow is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3.0 of the License, or (at your option) any later version.
#
# SpiffWorkflow is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from datetime import datetime, timedelta
from SpiffWorkflow.util.task import TaskState
from SpiffWorkflow.bpmn.specs.event_definitions.timer import LOCALTZ
from ..exceptions import VersionMigrationError
def td_to_iso(td):
total = td.total_seconds()
v1, seconds = total // 60, total % 60
v2, minutes = v1 // 60, v1 % 60
    days, hours = v2 // 24, v2 % 24
return f"P{days:.0f}DT{hours:.0f}H{minutes:.0f}M{seconds}S"
def convert_timer_expressions(dct):
message = "Unable to convert time specifications for {spec}. This most likely because the values are set during workflow execution."
    # This code was moved into helper functions to quiet SonarCloud's complexity
    # warnings about this file.
def convert_timedate(spec):
expr = spec['event_definition'].pop('dateTime')
try:
dt = eval(expr)
if isinstance(dt, datetime):
spec['event_definition']['expression'] = f"'{dt.isoformat()}'"
spec['event_definition']['typename'] = 'TimeDateEventDefinition'
elif isinstance(dt, timedelta):
spec['event_definition']['expression'] = f"'{td_to_iso(dt)}'"
spec['event_definition']['typename'] = 'DurationTimerEventDefinition'
except Exception:
raise VersionMigrationError(message.format(spec=spec['name']))
def convert_cycle(spec, task):
expr = spec['event_definition'].pop('cycle_definition')
try:
repeat, duration = eval(expr)
spec['event_definition']['expression'] = f"'R{repeat}/{td_to_iso(duration)}'"
if task is not None:
cycles_complete = task['data'].pop('repeat_count', 0)
start_time = task['internal_data'].pop('start_time', None)
if start_time is not None:
dt = datetime.fromisoformat(start_time)
task['internal_data']['event_value'] = {
'cycles': repeat - cycles_complete,
'next': datetime.combine(dt.date(), dt.time(), LOCALTZ).isoformat(),
'duration': duration.total_seconds(),
}
except Exception:
raise VersionMigrationError(message.format(spec=spec['name']))
if spec['typename'] == 'StartEvent':
spec['outputs'].remove(spec['name'])
if task is not None:
children = [ dct['tasks'][c] for c in task['children'] ]
# Formerly cycles were handled by looping back and reusing the tasks so this removes the extra tasks
remove = [ c for c in children if c['task_spec'] == task['task_spec']][0]
for task_id in remove['children']:
child = dct['tasks'][task_id]
if child['task_spec'].startswith('return') or child['state'] != TaskState.COMPLETED:
dct['tasks'].pop(task_id)
else:
task['children'].append(task_id)
task['children'].remove(remove['id'])
dct['tasks'].pop(remove['id'])
def has_timer(ts):
return "event_definition" in ts and ts["event_definition"]["typename"] in ["CycleTimerEventDefinition", "TimerEventDefinition"]
for spec in [ ts for ts in dct['spec']['task_specs'].values() if has_timer(ts) ]:
spec['event_definition']['name'] = spec['event_definition'].pop('label')
if spec['event_definition']['typename'] == 'TimerEventDefinition':
convert_timedate(spec)
if spec['event_definition']['typename'] == 'CycleTimerEventDefinition':
tasks = [ t for t in dct['tasks'].values() if t['task_spec'] == spec['name'] ]
task = tasks[0] if len(tasks) > 0 else None
convert_cycle(spec, task)
def add_default_condition_to_cond_task_specs(dct):
for spec in [ts for ts in dct['spec']['task_specs'].values() if ts['typename'] == 'ExclusiveGateway']:
if spec['default_task_spec'] is not None and (None, spec['default_task_spec']) not in spec['cond_task_specs']:
spec['cond_task_specs'].append({'condition': None, 'task_spec': spec['default_task_spec']})
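# After this migration, a gateway's conditions always include its default path, e.g.:
#
#     [{'condition': 'x > 0', 'task_spec': 'path_a'},
#      {'condition': None, 'task_spec': 'default_path'}]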
def create_data_objects_and_io_specs(dct):
def update_data_specs(spec):
for obj in spec.get('data_objects', {}).values():
obj['typename'] = 'DataObject'
data_inputs = spec.pop('data_inputs', [])
data_outputs = spec.pop('data_outputs', [])
        if len(data_inputs) > 0 or len(data_outputs) > 0:
for item in data_inputs:
item['typename'] = 'TaskDataReference'
for item in data_outputs:
item['typename'] = 'TaskDataReference'
io_spec = {
'typename': 'BpmnIoSpecification',
'data_inputs': data_inputs,
'data_outputs': data_outputs,
}
spec['io_specification'] = io_spec
else:
spec['io_specification'] = None
update_data_specs(dct['spec'])
for sp in dct['subprocess_specs'].values():
update_data_specs(sp)
for spec in dct['spec']['task_specs'].values():
for item in spec.get('data_input_associations', {}):
item['typename'] = 'DataObject'
for item in spec.get('data_output_associations', {}):
item['typename'] = 'DataObject'
def check_multiinstance(dct):
specs = [ spec for spec in dct['spec']['task_specs'].values() if 'prevtaskclass' in spec ]
if len(specs) > 0:
raise VersionMigrationError("This workflow cannot be migrated because it contains MultiInstance Tasks")
def remove_loop_reset(dct):
task_specs = [spec for spec in dct['spec']['task_specs'].values() if spec['typename'] == 'LoopResetTask']
for spec in task_specs:
if spec['typename'] == 'LoopResetTask':
tasks = [t for t in dct['tasks'].values() if t['task_spec'] == spec['name']]
for task in tasks:
dct['tasks'].pop(task['id'])
parent = dct['tasks'].get(task['parent'])
parent['children'] = [c for c in parent['children'] if c != task['id']]
dct['spec']['task_specs'].pop(spec['name'])
def update_task_states(dct):
def update(process):
for task in process['tasks'].values():
if task['state'] == 32:
task['state'] = TaskState.COMPLETED
elif task['state'] == 64:
task['state'] = TaskState.CANCELLED
root = dct['tasks'].get(dct['root'])
if root['state'] == 32:
update(dct)
for sp in dct['subprocesses'].values():
update(sp)
def convert_simple_tasks(dct):
def update_specs(task_specs):
for name, spec in task_specs.items():
if spec['typename'] == 'StartTask':
spec['typename'] = 'BpmnStartTask'
elif spec['typename'] == 'Simple':
spec['typename'] = 'SimpleBpmnTask'
update_specs(dct['spec']['task_specs'])
for subprocess_spec in dct['subprocess_specs'].values():
update_specs(subprocess_spec['task_specs'])
def update_bpmn_attributes(dct):
descriptions = {
'StartEvent': 'Start Event',
'EndEvent': 'End Event',
'UserTask': 'User Task',
'Task': 'Task',
'SubProcess': 'Subprocess',
'ManualTask': 'Manual Task',
'ExclusiveGateway': 'Exclusive Gateway',
'ParallelGateway': 'Parallel Gateway',
'InclusiveGateway': 'Inclusive Gateway',
'CallActivity': 'Call Activity',
'TransactionSubprocess': 'Transaction',
'ScriptTask': 'Script Task',
'ServiceTask': 'Service Task',
'IntermediateCatchEvent': 'Intermediate Catch Event',
'IntermediateThrowEvent': 'Intermediate Throw Event',
'BoundaryEvent': 'Boundary Event',
'ReceiveTask': 'Receive Task',
'SendTask': 'Send Task',
'EventBasedGateway': 'Event Based Gateway',
'CancelEventDefinition': 'Cancel',
'ErrorEventDefinition': 'Error',
'EscalationEventDefinition': 'Escalation',
'TerminateEventDefinition': 'Terminate',
'MessageEventDefinition': 'Message',
'SignalEventDefinition': 'Signal',
'TimerEventDefinition': 'Timer',
'NoneEventDefinition': 'Default',
'MultipleEventDefinition': 'Multiple'
}
def update_data_spec(obj):
obj['bpmn_id'] = obj.pop('name')
obj['bpmn_name'] = obj.pop('description', None)
def update_io_spec(io_spec):
for obj in io_spec['data_inputs']:
update_data_spec(obj)
for obj in io_spec['data_outputs']:
update_data_spec(obj)
def update_task_specs(spec):
for spec in spec['task_specs'].values():
spec['bpmn_id'] = None
if spec['typename'] not in ['BpmnStartTask', 'SimpleBpmnTask', '_EndJoin', '_BoundaryEventParent']:
spec['bpmn_id'] = spec['name']
spec['bpmn_name'] = spec['description'] or None
if 'event_definition' in spec and spec['event_definition']['typename'] in descriptions:
spec_desc = descriptions.get(spec['typename'])
event_desc = descriptions.get(spec['event_definition']['typename'])
cancelling = spec.get('cancel_activity')
                    interrupt = 'Interrupting ' if cancelling else 'Non-Interrupting ' if cancelling is False else ''
desc = f'{interrupt}{event_desc} {spec_desc}'
elif spec['typename'] in descriptions:
desc = descriptions.get(spec['typename'])
else:
desc = None
spec['description'] = desc
else:
spec['bpmn_name'] = None
spec['description'] = None
if spec.get('io_specification') is not None:
update_io_spec(spec['io_specification'])
for obj in spec.get('data_input_associations', []):
update_data_spec(obj)
for obj in spec.get('data_output_associations', []):
update_data_spec(obj)
update_task_specs(dct['spec'])
for obj in dct['spec'].get('data_objects', {}).values():
update_data_spec(obj)
for subprocess_spec in dct['subprocess_specs'].values():
update_task_specs(subprocess_spec)
for obj in subprocess_spec.get('data_objects', {}).values():
update_data_spec(obj)
if subprocess_spec.get('io_specification') is not None:
update_io_spec(subprocess_spec['io_specification'])
| 11,513 | Python | .py | 226 | 40.469027 | 136 | 0.60112 | sartography/SpiffWorkflow | 1,663 | 310 | 6 | LGPL-3.0 | 9/5/2024, 5:08:37 PM (Europe/Amsterdam) |
Subsets and Splits