Dataset schema (column, value type, and the min/max shown by the dataset viewer):

    column                  type           min    max
    body_hash               stringlengths  64     64
    body                    stringlengths  23     109k
    docstring               stringlengths  1      57k
    path                    stringlengths  4      198
    name                    stringlengths  1      115
    repository_name         stringlengths  7      111
    repository_stars        float64        0      191k
    lang                    stringclasses  1 value ('python' in every record shown)
    body_without_docstring  stringlengths  14     108k
    unified                 stringlengths  45     133k

The records below show each row's fields in this order; body_without_docstring and unified recombine body and docstring, as the consistency sketch below illustrates.

body_hash: 107335678ab6ba24ab3ad1e18690cd983ad3d465ccadd1bfcd428da2529a9a0b
body:
    def create(self, name, instance_dir=None, config=None, schema=None, data_dir=None):
        'http://wiki.apache.org/solr/CoreAdmin#head-7ca1b98a9df8b8ca0dcfbfc49940ed5ac98c4a08'
        params = {'action': 'CREATE', 'name': name}
        if instance_dir is None:
            params.update(instanceDir=name)
        else:
            params.update(instanceDir=instance_dir)
        if data_dir:
            params.update(dataDir=data_dir)
        if config:
            params.update(config=config)
        if schema:
            params.update(schema=schema)
        url = self._get_url(params=params)
        return url
docstring: http://wiki.apache.org/solr/CoreAdmin#head-7ca1b98a9df8b8ca0dcfbfc49940ed5ac98c4a08
path: pysolr.py
name: create
repository_name: sxalexander/pysolr
repository_stars: 0
lang: python

body_hash: 8ce87056b6839575cb3f0fe2cf266b9979663c8e6f4060054a4e4db284ec68ef
body:
    def reload(self, core):
        'http://wiki.apache.org/solr/CoreAdmin#head-3f125034c6a64611779442539812067b8b430930'
        params = {'action': 'RELOAD', 'core': core}
        return self._get_url(params=params)
docstring: http://wiki.apache.org/solr/CoreAdmin#head-3f125034c6a64611779442539812067b8b430930
path: pysolr.py
name: reload
repository_name: sxalexander/pysolr
repository_stars: 0
lang: python

body_hash: 9a2e91e4c8f72db5c66fcba37a169a4b9930c9223b44ec33734c053cd7b4b357
body:
    def rename(self, core, other):
        'http://wiki.apache.org/solr/CoreAdmin#head-9473bee1abed39e8583ba45ef993bebb468e3afe'
        params = {'action': 'RENAME', 'core': core, 'other': other}
        return self._get_url(params=params)
docstring: http://wiki.apache.org/solr/CoreAdmin#head-9473bee1abed39e8583ba45ef993bebb468e3afe
path: pysolr.py
name: rename
repository_name: sxalexander/pysolr
repository_stars: 0
lang: python

body_hash: 24506070f8a9dca9f21fb521b80f83e267d7e819df96d96e31da5b7fb342943f
body:
    def swap(self, core, other):
        'http://wiki.apache.org/solr/CoreAdmin#head-928b872300f1b66748c85cebb12a59bb574e501b'
        params = {'action': 'SWAP', 'core': core, 'other': other}
        return self._get_url(params=params)
docstring: http://wiki.apache.org/solr/CoreAdmin#head-928b872300f1b66748c85cebb12a59bb574e501b
path: pysolr.py
name: swap
repository_name: sxalexander/pysolr
repository_stars: 0
lang: python

body_hash: f945e11b9ac8a7fc1c1bef82efb475ba8268fbe2bcea4a7d42be31160ffd1b5c
body:
    def unload(self, core, delete_instance_dir=False, delete_index=False, delete_data_dir=False):
        'http://wiki.apache.org/solr/CoreAdmin#head-f5055a885932e2c25096a8856de840b06764d143'
        params = {'action': 'UNLOAD', 'core': core,
                  'deleteDataDir': delete_data_dir,
                  'deleteInstanceDir': delete_instance_dir,
                  'deleteIndex': delete_index}
        return self._get_url(params=params)
docstring: http://wiki.apache.org/solr/CoreAdmin#head-f5055a885932e2c25096a8856de840b06764d143
path: pysolr.py
name: unload
repository_name: sxalexander/pysolr
repository_stars: 0
lang: python
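
The five pysolr records above all reduce a CoreAdmin action to a query-parameter dictionary handed to self._get_url. That helper is not part of these records, so the following is only a hedged standalone sketch of the resulting URL shape (/admin/cores is the standard Solr Core Admin path; the exact construction in this fork is an assumption):

    from urllib.parse import urlencode

    def core_admin_url(base_url, **params):
        # Hypothetical stand-in for the _get_url helper, which is not included
        # in these records; it only illustrates the URL the params map to.
        return '%s/admin/cores?%s' % (base_url.rstrip('/'), urlencode(params))

    # The equivalent of create('books') from the first record:
    print(core_admin_url('http://localhost:8983/solr',
                         action='CREATE', name='books', instanceDir='books'))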

body_hash: 0420756c068706cf0b7daeca9060a24d50708cef62e21194b8984e04779cc267
body:
    @api_view(['GET', 'POST', 'DELETE'])
    def person(request):
        '''

        :param request:
        :return:
        '''
        if request.method == 'GET':
            persons = Person.objects.all()
            document_id = request.query_params.get('document_id', None)
            document_type = request.query_params.get('document_type', None)
            if document_id:
                persons = persons.filter(document_type__exact=document_type,
                                         document_id__exact=document_id)
            person_serializer = PersonSerializer(persons, many=True)
            return JsonResponse(person_serializer.data, safe=False)
        elif request.method == 'POST':
            person_data = JSONParser().parse(request)
            person_serializer = PersonSerializer(data=person_data)
            if person_serializer.is_valid():
                person_serializer.save()
                return JsonResponse(person_serializer.data, status=status.HTTP_201_CREATED)
            return JsonResponse(person_serializer.errors, status=status.HTTP_400_BAD_REQUEST)
        elif request.method == 'DELETE':
            person_id = request.query_params.get('person_id', None)
            if person_id:
                Person.objects.filter(person_id=person_id).delete()
                return JsonResponse(status=status.HTTP_200_OK)
            return JsonResponse(status=status.HTTP_400_BAD_REQUEST)
docstring: :param request: :return:
path: registration/views/person.py
name: person
repository_name: Eddyjim/registrations-backend
repository_stars: 0
lang: python
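
One runtime caveat in the DELETE branch above: django.http.JsonResponse takes a required positional data argument, so JsonResponse(status=...) raises TypeError when that branch executes. A corrected sketch (not from the original repository; the empty dict is a placeholder payload):

    from django.http import JsonResponse
    from rest_framework import status

    def delete_response(deleted):
        # JsonResponse requires a data argument; pass an explicit (here empty)
        # payload so the call does not raise TypeError at runtime.
        if deleted:
            return JsonResponse({}, status=status.HTTP_200_OK)
        return JsonResponse({}, status=status.HTTP_400_BAD_REQUEST)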

body_hash: 7ed7c98c57ab29d8f49bf4648b8f78363d965369cc96323c341c3f10e55dc004
body:
    def get_controller_object(controller):
        """
        Get the object associated with a controller, or None if there's no object,
        which is usually a group.
        """
        controller_objects = controller.controllerObject.listConnections(s=True, d=False)
        return controller_objects[0] if controller_objects else None
docstring: Get the object associated with a controller, or None if there's no object, which is usually a group.
path: scripts/zMayaTools/controller_editor.py
name: get_controller_object
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python

body_hash: 74abedc74b5e377c81f7633ff1c8828a0504d5c9957dcc7bc0fa17725b47d797
body:
    def get_controller_children(parent, disconnect_from_parent=False):
        '''
        Return a list of all children of the given controller.

        If disconnect_from_parent is true, disconnect the children list so edits can be
        made. Reassign the resulting list using assign_controller_children.
        '''
        all_children = []
        for children_plug in parent.children:
            children = children_plug.listConnections(s=True, d=False, t='controller', p=True)
            if not children:
                continue
            child = children[0]
            all_children.append(child.node())
            if disconnect_from_parent:
                child.disconnect(children_plug)
        return all_children
docstring: Return a list of all children of the given controller. If disconnect_from_parent is true, disconnect the children list so edits can be made. Reassign the resulting list using assign_controller_children.
path: scripts/zMayaTools/controller_editor.py
name: get_controller_children
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python

body_hash: 1a9e42a199b880925c62797c5b0f1fa17d9e7463a75714b24fea99d910a40760
body:
    def assign_controller_children(parent, children):
        """
        Assign a controller's child list.

        This is always called after disconnecting the list using
        get_controller_children(disconnect_from_parent=True). This doesn't connect
        the prepopulate attribute.
        """
        for idx, child in enumerate(children):
            child.parent.connect(parent.children[idx])
docstring: Assign a controller's child list. This is always called after disconnecting the list using get_controller_children(disconnect_from_parent=True). This doesn't connect the prepopulate attribute.
path: scripts/zMayaTools/controller_editor.py
name: assign_controller_children
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python
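
get_controller_children and assign_controller_children above form a disconnect-edit-reconnect pair. A usage sketch, runnable only inside a Maya session with pymel and the two functions from this module (the parent_ctrl node name is hypothetical):

    import pymel.core as pm

    parent_ctrl = pm.PyNode('parent_ctrl')  # hypothetical existing controller node

    # Detach the child list so it can be edited, reorder it, then reassign it
    # in the new order; this is the round trip the two functions are built for.
    children = get_controller_children(parent_ctrl, disconnect_from_parent=True)
    children.reverse()
    assign_controller_children(parent_ctrl, children)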

body_hash: 371c4c47e31ecb2e0be9c4983bd40d79cac7b60bf1f95b74a15ded8eae0bd4d0
body:
    def _remove_gaps_in_child_list(controller):
        """
        If there are gaps in the children array of a controller, pick walking doesn't
        work. Remove any gaps in the given controller's child list, preserving the
        order of connections.
        """
        connections = []
        any_gaps_seen = False
        for entry in controller.children:
            if not entry.isConnected():
                any_gaps_seen = True
                continue
            connection = pm.listConnections(entry, s=True, d=False, p=True)[0]
            connections.append(connection)
        if not any_gaps_seen:
            return
        for entry in controller.children:
            for connection in pm.listConnections(entry, s=True, d=False, p=True):
                connection.disconnect(entry)
        for idx, connection in enumerate(connections):
            connection.connect(controller.children[idx])
docstring: If there are gaps in the children array of a controller, pick walking doesn't work. Remove any gaps in the given controller's child list, preserving the order of connections.
path: scripts/zMayaTools/controller_editor.py
name: _remove_gaps_in_child_list
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python

body_hash: 299634820ce8cd7df517f8932e1668b76403a924451fb6ae842eb893d8f15723
body:
    def remove_gaps_in_all_controllers():
        '''
        Call remove_gaps_in_child_list in all non-referenced controllers to correct
        any existing gaps.

        This is only needed to correct any bad controller connections created by earlier
        versions.
        '''
        for controller in pm.ls(type='controller'):
            if pm.referenceQuery(controller, isNodeReferenced=True):
                continue
            _remove_gaps_in_child_list(controller)
docstring: Call remove_gaps_in_child_list in all non-referenced controllers to correct any existing gaps. This is only needed to correct any bad controller connections created by earlier versions.
path: scripts/zMayaTools/controller_editor.py
name: remove_gaps_in_all_controllers
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python

body_hash: 08f29c594b8417eb06ca608e78e4e3d5f9a73e679462dbd475429fb6466a2d62
body:
    def set_controller_parent(controller, parent, after=None):
        """
        Set controller's parent to parent.

        If after is None, the node becomes the first child. Otherwise, the node is placed
        after the given node. If after is at_end, the node is placed at the end.

        controller -p should do this, but the docs don't really describe how it works and
        there doesn't seem to be any way to change the parent of a controller to be a root.
        """
        assert controller != parent
        if after is at_end:
            if parent is None:
                raise Exception('parent must be specified if after is at_end')
            children = get_controller_children(parent)
            if children:
                after = children[-1]
            else:
                after = None
        with maya_helpers.undo():
            old_parent_plug = get_controller_parent(controller, get_plug=True)
            if old_parent_plug is None and parent is None:
                return
            if old_parent_plug is not None:
                if old_parent_plug.node().prepopulate.isConnectedTo(controller.prepopulate):
                    old_parent_plug.node().prepopulate.disconnect(controller.prepopulate)
                all_children = get_controller_children(old_parent_plug.node(),
                                                       disconnect_from_parent=True)
                all_children.remove(controller.node())
                assign_controller_children(old_parent_plug.node(), all_children)
            if parent is None:
                return
            parent.prepopulate.connect(controller.prepopulate)
            all_children = get_controller_children(parent, disconnect_from_parent=True)
            if after is None or after not in all_children:
                all_children = [controller] + all_children
            else:
                idx = all_children.index(after) + 1
                all_children[idx:idx] = [controller]
            assign_controller_children(parent, all_children)
docstring: Set controller's parent to parent. If after is None, the node becomes the first child. Otherwise, the node is placed after the given node. If after is at_end, the node is placed at the end. controller -p should do this, but the docs don't really describe how it works and there doesn't seem to be any way to change the parent of a controller to be a root.
path: scripts/zMayaTools/controller_editor.py
name: set_controller_parent
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python
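
The after argument above gives three placements. A short usage sketch (ctrl, group_ctrl and sibling_ctrl are hypothetical controller nodes; at_end is the sentinel from the same module, and a live Maya session is assumed):

    # Make ctrl the first child of group_ctrl:
    set_controller_parent(ctrl, group_ctrl)

    # Place ctrl directly after a specific sibling:
    set_controller_parent(ctrl, group_ctrl, after=sibling_ctrl)

    # Append ctrl as the last child, or detach it to become a root:
    set_controller_parent(ctrl, group_ctrl, after=at_end)
    set_controller_parent(ctrl, None)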

body_hash: 517a0745ad9c2411a76b91a64bf98f87bcdf5726ef40f8011853c65a01b06044
body:
    def find_stale_controllers():
        """
        Stale controller nodes often accumulate in scenes, since they aren't automatically deleted
        when their transforms are deleted, and they're a pain to clean up.

        Return all dead controller nodes: nodes which have no transform, and which don't have any
        children with a transform (not a group).
        """
        alive_controllers = set()
        all_controllers = set(pm.ls(type='controller'))
        for node in all_controllers:
            if get_controller_object(node) is None:
                continue
            parent = node
            seen_parents = set()
            while parent is not None:
                alive_controllers.add(parent)
                parent = get_controller_parent(parent)
                if parent in alive_controllers:
                    break
                assert parent not in seen_parents
                seen_parents.add(parent)
        return list(all_controllers - alive_controllers)
docstring: Stale controller nodes often accumulate in scenes, since they aren't automatically deleted when their transforms are deleted, and they're a pain to clean up. Return all dead controller nodes: nodes which have no transform, and which don't have any children with a transform (not a group).
path: scripts/zMayaTools/controller_editor.py
name: find_stale_controllers
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python

body_hash: 7e7c637390fb329a66396bb03766a84bfd896805ca3667a886eff2846144adb8
body:
    def delete_stale_controllers():
        '''
        Delete all stale controllers in the scene.
        '''
        stale_controllers = find_stale_controllers()
        if not stale_controllers:
            log.info('No stale controllers')
            return
        pm.delete(stale_controllers)
        log.info('Deleted %i stale %s', len(stale_controllers),
                 'controller' if len(stale_controllers) == 1 else 'controllers')
docstring: Delete all stale controllers in the scene.
path: scripts/zMayaTools/controller_editor.py
name: delete_stale_controllers
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python

body_hash: f4f5ce05477c56d4fedc59a59630eb2438424113a09a136a7c21727953068391
body:
    @property
    def current_selection(self):
        '''
        If a controller is selected in the UI, return its node.
        '''
        items = self.ui.controllerTree.selectedItems()
        if not items:
            return None
        return items[0].controller_node
docstring: If a controller is selected in the UI, return its node.
path: scripts/zMayaTools/controller_editor.py
name: current_selection
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python

body_hash: 9036b9d94f27c9783a0c02cb9f4a659e6fb10f9c340b1ef4808c41121b1e28ea
body:
    def create_controller_for_objects(self, nodes):
        """
        Create controllers. nodes is a list of controller objects, which must be transforms.

        If an entry in nodes is None, create a controller without any object, which can be
        used as a group. This is what you get from pm.controller(g=True).

        Note that we don't use pm.controller, since it doesn't return the new node like most
        object creation functions do.
        """
        old_selection = pm.ls(sl=True)
        nodes = list(nodes)
        nodes.sort(key=lambda node: len(pm.listRelatives(node, allParents=True)))
        selected_controller = self.current_selection
        first_node = None
        for node in nodes:
            if node is not None:
                existing_controller = get_controller_for_object(node)
                if existing_controller:
                    first_node = existing_controller
                    continue
            name = 'controller#'
            if node is not None:
                name = '%s_Tag' % node.nodeName()
            controller = pm.createNode('controller', n=name)
            controller.prepopulate.set(False)
            controller.parentprepopulate.set(False)
            if node is not None:
                node.message.connect(controller.controllerObject)
            if first_node is None:
                first_node = controller
            for parent in pm.listRelatives(node, allParents=True):
                if parent in nodes:
                    parent_controller = get_controller_for_object(parent)
                    assert parent_controller is not None
                    set_controller_parent(controller, parent_controller, after=at_end)
                    break
            else:
                if selected_controller:
                    set_controller_parent(controller, selected_controller, after=at_end)
        self.populate_tree()
        item = self.controllers_to_items.get(first_node)
        self.ui.controllerTree.setCurrentItem(item)
        pm.select(old_selection, ne=True)
docstring: Create controllers. nodes is a list of controller objects, which must be transforms. If an entry in nodes is None, create a controller without any object, which can be used as a group. This is what you get from pm.controller(g=True). Note that we don't use pm.controller, since it doesn't return the new node like most object creation functions do.
path: scripts/zMayaTools/controller_editor.py
name: create_controller_for_objects
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python
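
The sort key above, the number of ancestors reported by pm.listRelatives(node, allParents=True), guarantees that a parent is always processed before its descendants, so a child's parent controller already exists when the child looks it up. The same ordering argument in plain Python:

    def depth(node, parent_of):
        # Count ancestors by walking a parent mapping; this stands in for
        # len(pm.listRelatives(node, allParents=True)) in the record above.
        count = 0
        while parent_of[node] is not None:
            node = parent_of[node]
            count += 1
        return count

    parent_of = {'root': None, 'arm': 'root', 'hand': 'arm'}
    nodes = ['hand', 'root', 'arm']
    nodes.sort(key=lambda n: depth(n, parent_of))
    assert nodes == ['root', 'arm', 'hand']  # parents sort before their children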

body_hash: 6c56b76b6949ebd5158dcc2bbf41931c6f44e59c6182d05262b3748e150b8cf1
body:
    def _async_refresh(self):
        """
        Queue a refresh. If this is called multiple times before we do the refresh, we'll only
        refresh once.
        """
        qt_helpers.run_async_once(self.refresh)
docstring: Queue a refresh. If this is called multiple times before we do the refresh, we'll only refresh once.
path: scripts/zMayaTools/controller_editor.py
name: _async_refresh
repository_name: zewt/zMayaTools
repository_stars: 73
lang: python
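
qt_helpers.run_async_once, which _async_refresh relies on to coalesce repeated requests into one deferred call, is not included in these records. One plausible shape for such a helper, offered purely as an assumption about its implementation, is a pending set drained through Maya's deferred executor:

    import maya.utils

    _pending = set()

    def run_async_once(func):
        # Hypothetical sketch: schedule func to run once at idle time, no matter
        # how many times it is requested before it actually runs.
        if func in _pending:
            return
        _pending.add(func)

        def invoke():
            _pending.discard(func)
            func()

        maya.utils.executeDeferred(invoke)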

body_hash: 3322062548dc4151f4362fbf39c2303202fe6180ce60e70bda8d260714417f41
body:
    def get_workflows():
        '''
        :return: all registered workflows
        '''
        return get_flattened_subclasses(Workflow)
docstring: :return: all registered workflows
path: leapp/workflows/__init__.py
name: get_workflows
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: dd2d93099789c38ce31021a4026d526a7ccc26be9aa9833f223681be21020fb0
body:
    @property
    def errors(self):
        '''
        :return: All reported errors
        '''
        return self._errors
docstring: :return: All reported errors
path: leapp/workflows/__init__.py
name: errors
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 75f5e5dedc14c68f4471c06fa1a112236cae586c77a73660830d5974430f1696
body:
    @property
    def answer_store(self):
        '''
        : return: AnswerStore instance used for messaging
        '''
        return self._answer_store
docstring: : return: AnswerStore instance used for messaging
path: leapp/workflows/__init__.py
name: answer_store
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 47de5af6e9c14ab21fdb04ee4e4c8c3326ef7a59d3d0cf03d5575897fa693f08
body:
    def save_answers(self, answerfile_path, userchoices_path):
        '''
        Generates an answer file for the dialogs of the workflow and saves it to `answerfile_path`.
        Updates a .userchoices file at `userchoices_path` with new answers encountered in answerfile.

        :param answerfile_path: The path where to store the answer file.
        :param userchoices_path: The path where to store the .userchoices file.
        :return: None
        '''
        self._answer_store.generate(self._dialogs, answerfile_path)
        self._answer_store.update(userchoices_path, allow_missing=True)
docstring: Generates an answer file for the dialogs of the workflow and saves it to `answerfile_path`. Updates a .userchoices file at `userchoices_path` with new answers encountered in answerfile. :param answerfile_path: The path where to store the answer file. :param userchoices_path: The path where to store the .userchoices file. :return: None
path: leapp/workflows/__init__.py
name: save_answers
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 1281063758130c730878da9456994a5dc1970a4e04cb6eb443e815ee4a880630
body:
    def __init__(self, logger=None, auto_reboot=False):
        '''
        :param logger: Optional logger to be used instead of leapp.workflow
        :type logger: Instance of :py:class:`logging.Logger`
        '''
        self.log = (logger or logging.getLogger('leapp')).getChild('workflow')
        self._errors = []
        self._all_consumed = set()
        self._all_produced = set()
        self._initial = set()
        self._phase_actors = []
        self._experimental_whitelist = set()
        self._auto_reboot = auto_reboot
        self._unhandled_exception = False
        self._answer_store = AnswerStore()
        self._dialogs = []
        self._stop_after_phase_requested = False
        if self.configuration:
            config_actors = [actor for actor in self.tag.actors
                             if self.configuration in actor.produces]
            if config_actors:
                if len(config_actors) == 1:
                    self._phase_actors.append((
                        _ConfigPhase,
                        PhaseActors((), 'Before'),
                        PhaseActors(tuple(config_actors), 'Main'),
                        PhaseActors((), 'After')))
                else:
                    config_actor_names = [a.name for a in config_actors]
                    raise MultipleConfigActorsError(config_actor_names)
        self.description = self.description or type(self).__doc__
        for phase in self.phases:
            phase.filter.tags += (self.tag,)
            self._phase_actors.append((
                phase,
                self._apply_phase(phase.filter.get_before(), 'Before'),
                self._apply_phase(phase.filter.get(), 'Main'),
                self._apply_phase(phase.filter.get_after(), 'After')))
docstring: :param logger: Optional logger to be used instead of leapp.workflow :type logger: Instance of :py:class:`logging.Logger`
path: leapp/workflows/__init__.py
name: __init__
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 09b63743cf16811ebef62d82dc31a1d09e57afbd51390e489c671d64f14ed6c9
body:
    @property
    def experimental_whitelist(self):
        ' Whitelist of actors that may be executed even that they are marked experimental '
        return self._experimental_whitelist
docstring: Whitelist of actors that may be executed even that they are marked experimental
path: leapp/workflows/__init__.py
name: experimental_whitelist
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: c555953f5698fed83d4cff3fd0cf069196f4f28a2b0fc8c1329bad4359df61cb
body:
    def whitelist_experimental_actor(self, actor):
        '''
        Adds an actor to the experimental whitelist and allows them to be executed.

        :param actor: Actor to be whitelisted
        :type actor: class derived from py:class:`leapp.actors.Actor`
        :return: None
        '''
        if actor:
            self._experimental_whitelist.add(actor)
docstring: Adds an actor to the experimental whitelist and allows them to be executed. :param actor: Actor to be whitelisted :type actor: class derived from py:class:`leapp.actors.Actor` :return: None
path: leapp/workflows/__init__.py
name: whitelist_experimental_actor
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 5f9863b204a771499aef75af4075139b74d6c4de50baceb6425abe45da8c135e
body:
    @property
    def phase_actors(self):
        ' Return all actors for the phase '
        return self._phase_actors
docstring: Return all actors for the phase
path: leapp/workflows/__init__.py
name: phase_actors
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: c1ade9f6f3782fa28863bc503be4ce3c7c55ae5a036a9ddfad5032190f534d95
body:
    @property
    def initial(self):
        ' Initial messages required '
        return self._initial
docstring: Initial messages required
path: leapp/workflows/__init__.py
name: initial
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: ca91c56e12430dbace882b35d965d88c047f760bd19ba9162d8e3803b1b0173e
body:
    @property
    def consumes(self):
        ' All consumed messages '
        return self._all_consumed
docstring: All consumed messages
path: leapp/workflows/__init__.py
name: consumes
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 585c1a6fabfd895919d3101af0911dbe91ab0c622dd1295548fbd5d4ef3d70d1
body:
    @property
    def produces(self):
        ' All produced messages '
        return self._all_produced
docstring: All produced messages
path: leapp/workflows/__init__.py
name: produces
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 7acdc7d49ceddc12846750e0060b3a19f4ba3c5fb06586b3e4888e8d957b71c3
body:
    @property
    def dialogs(self):
        ' All encountered dialogs '
        return self._dialogs
docstring: All encountered dialogs
path: leapp/workflows/__init__.py
name: dialogs
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 440645e8a05ec27cd125d13e978caeb326e706a4a8ef1c68885c0747406c59d5
body:
    @classmethod
    def serialize(cls):
        '''
        :return: Serialized form of the workflow
        '''
        return {
            'name': cls.name,
            'short_name': cls.short_name,
            'tag': cls.tag.__name__,
            'description': cls.description,
            'phases': [phase.serialize() for phase in cls.phases],
        }
docstring: :return: Serialized form of the workflow
path: leapp/workflows/__init__.py
name: serialize
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python

body_hash: 6ced3092e52fee6add8f207f2f447d9d5cb67c392c7c08cc876037e362c83c00
body:
    def run(self, context=None, until_phase=None, until_actor=None, skip_phases_until=None,
            skip_dialogs=False, only_with_tags=None):
        """
        Executes the workflow

        :param context: Custom execution ID to be used instead of a randomly generated UUIDv4
        :type context: str
        :param until_phase: Specify until including which phase the execution should run - phase.stage can be used to
                            control it even more granularly. `phase` is any phase name where `stage` refers to `main`,
                            `before` or `after`. If no stage is defined, `after` is assumed to be the default value.
                            The execution ends when this phase (and stage, if specified) has been executed.
        :type until_phase: str
        :param until_actor: The execution finishes when this actor has been executed.
        :type until_actor: str
        :param skip_phases_until: Skips all phases until including the phase specified, and then continues the
                                  execution.
        :type skip_phases_until: str or None
        :param skip_dialogs: Inform actors about the mode of dialogs processing. If skip_dialogs is set to True it
                             means that dialogs can't be processed in the current workflow run interactively and
                             every attempted call of get_answers api method will be non-blocking, returning an empty
                             dict if no user choice was found in answerfile or a selected option otherwise.
                             If skip_dialogs is set to False then in case of absent recorded answer the dialog will
                             be rendered in a blocking user input requiring way.
                             The value of skip_dialogs will be passed to the actors that can theoretically use it for
                             their purposes.
        :type skip_dialogs: bool
        :param only_with_tags: Executes only actors with the given tag, any other actor is going to get skipped.
        :type only_with_tags: List[str]
        """
        context = context or str(uuid.uuid4())
        os.environ['LEAPP_EXECUTION_ID'] = context
        if not os.environ.get('LEAPP_HOSTNAME', None):
            os.environ['LEAPP_HOSTNAME'] = socket.getfqdn()
        self.log.info('Starting workflow execution: {name} - ID: {id}'.format(
            name=self.name, id=os.environ['LEAPP_EXECUTION_ID']))
        skip_phases_until = (skip_phases_until or '').lower()
        needle_phase = until_phase or ''
        needle_stage = None
        if '.' in needle_phase:
            needle_phase, needle_stage = needle_phase.split('.', 1)
        needle_phase = needle_phase.lower()
        needle_stage = (needle_stage or '').lower()
        needle_actor = (until_actor or '').lower()
        self._errors = get_errors(context)
        config_model = type(self).configuration
        for phase in (skip_phases_until, needle_phase):
            if phase and not self.is_valid_phase(phase):
                raise CommandError('Phase {phase} does not exist in the workflow'.format(phase=phase))
        self._stop_after_phase_requested = False
        for phase in self._phase_actors:
            os.environ['LEAPP_CURRENT_PHASE'] = phase[0].name
            if skip_phases_until:
                if skip_phases_until in phase_names(phase):
                    skip_phases_until = ''
                self.log.info('Skipping phase {name}'.format(name=phase[0].name))
                continue
            display_status_current_phase(phase)
            self.log.info('Starting phase {name}'.format(name=phase[0].name))
            current_logger = self.log.getChild(phase[0].name)
            early_finish = False
            for stage in phase[1:]:
                if early_finish:
                    return
                current_logger.info('Starting stage {stage} of phase {phase}'.format(
                    phase=phase[0].name, stage=stage.stage))
                for actor in stage.actors:
                    if early_finish:
                        return
                    designation = ''
                    if ExperimentalTag in actor.tags:
                        designation = '[EXPERIMENTAL]'
                        if actor not in self.experimental_whitelist:
                            current_logger.info('Skipping experimental actor {actor}'.format(actor=actor.name))
                            continue
                    if only_with_tags and not contains_tag(only_with_tags, actor.tags):
                        current_logger.info('Actor {actor} does not contain any required tag. Skipping.'.format(
                            actor=actor.name))
                        continue
                    display_status_current_actor(actor, designation=designation)
                    current_logger.info('Executing actor {actor} {designation}'.format(
                        designation=designation, actor=actor.name))
                    messaging = InProcessMessaging(config_model=config_model,
                                                   answer_store=self._answer_store)
                    messaging.load(actor.consumes)
                    instance = actor(logger=current_logger, messaging=messaging,
                                     config_model=config_model, skip_dialogs=skip_dialogs)
                    try:
                        instance.run()
                    except BaseException:
                        self._unhandled_exception = True
                        raise
                    self._stop_after_phase_requested = (messaging.stop_after_phase
                                                        or self._stop_after_phase_requested)
                    self._dialogs.extend(messaging.dialogs())
                    if messaging.errors():
                        self._errors.extend(messaging.errors())
                        if phase[0].policies.error is Policies.Errors.FailImmediately:
                            self.log.info('Workflow interrupted due to FailImmediately error policy')
                            early_finish = True
                            break
                    for command in messaging.commands:
                        if command['command'] == SkipPhasesUntilCommand.COMMAND:
                            skip_phases_until = command['arguments']['until_phase']
                            self.log.info('SkipPhasesUntilCommand received. '
                                          'Skipping phases until {}'.format(skip_phases_until))
                    checkpoint(actor=actor.name, phase=phase[0].name, context=context,
                               hostname=os.environ['LEAPP_HOSTNAME'])
                    if needle_actor in actor_names(actor):
                        self.log.info('Workflow finished due to the until-actor flag')
                        early_finish = True
                        break
                if not stage.actors:
                    checkpoint(actor='', phase=phase[0].name + '.' + stage.stage, context=context,
                               hostname=os.environ['LEAPP_HOSTNAME'])
                if needle_phase in phase_names(phase) and needle_stage == stage.stage.lower():
                    self.log.info('Workflow finished due to the until-phase flag')
                    early_finish = True
                    break
            checkpoint(actor='', phase=phase[0].name, context=context,
                       hostname=os.environ['LEAPP_HOSTNAME'])
            if self._errors and phase[0].policies.error is Policies.Errors.FailPhase:
                self.log.info('Workflow interrupted due to the FailPhase error policy')
                early_finish = True
            elif needle_phase in phase_names(phase):
                self.log.info('Workflow finished due to the until-phase flag')
                early_finish = True
            elif self._stop_after_phase_requested:
                self.log.info('Workflow received request to stop after phase.')
                early_finish = True
            elif phase[0].flags.request_restart_after_phase or phase[0].flags.restart_after_phase:
                reboot = True
                if phase[0].flags.request_restart_after_phase and not self._auto_reboot:
                    reboot = False
                    messaging.request_answers(RawMessageDialog(
                        message='A reboot is required to continue. Please reboot your system.'))
                if reboot:
                    self.log.info('Initiating system reboot due to the restart_after_reboot flag')
                    reboot_system()
                early_finish = True
            elif phase[0].flags.is_checkpoint:
                self.log.info('Stopping the workflow execution due to the is_checkpoint flag')
                early_finish = True
            if early_finish:
                return
docstring: Executes the workflow :param context: Custom execution ID to be used instead of a randomly generated UUIDv4 :type context: str :param until_phase: Specify until including which phase the execution should run - phase.stage can be used to control it even more granularly. `phase` is any phase name where `stage` refers to `main`, `before` or `after`. If no stage is defined, `after` is assumed to be the default value. The execution ends when this phase (and stage, if specified) has been executed. :type until_phase: str :param until_actor: The execution finishes when this actor has been executed. :type until_actor: str :param skip_phases_until: Skips all phases until including the phase specified, and then continues the execution. :type skip_phases_until: str or None :param skip_dialogs: Inform actors about the mode of dialogs processing. If skip_dialogs is set to True it means that dialogs can't be processed in the current workflow run interactively and every attempted call of get_answers api method will be non-blocking, returning an empty dict if no user choice was found in answerfile or a selected option otherwise. If skip_dialogs is set to False then in case of absent recorded answer the dialog will be rendered in a blocking user input requiring way. The value of skip_dialogs will be passed to the actors that can theoretically use it for their purposes. :type skip_dialogs: bool :param only_with_tags: Executes only actors with the given tag, any other actor is going to get skipped. :type only_with_tags: List[str]
path: leapp/workflows/__init__.py
name: run
repository_name: dhodovsk/leapp
repository_stars: 29
lang: python
def run(self, context=None, until_phase=None, until_actor=None, skip_phases_until=None, skip_dialogs=False, only_with_tags=None): "\n        Executes the workflow\n\n        :param context: Custom execution ID to be used instead of a randomly generated UUIDv4\n        :type context: str\n        :param until_phase: Specify until including which phase the execution should run - phase.stage can be used to\n                            control it even more granularly. `phase` is any phase name where `stage` refers to `main`,\n                            `before` or `after`. If no stage is defined, `after` is assumed to be the default value.\n                            The execution ends when this phase (and stage, if specified) has been executed.\n        :type until_phase: str\n        :param until_actor: The execution finishes when this actor has been executed.\n        :type until_actor: str\n        :param skip_phases_until: Skips all phases until including the phase specified, and then continues the\n                                  execution.\n        :type skip_phases_until: str or None\n        :param skip_dialogs: Inform actors about the mode of dialogs processing. If skip_dialogs is set to True it\n                             means that dialogs can't be processed in the current workflow run interactively and\n                             every attempted call of get_answers api method will be non-blocking, returning an empty\n                             dict if no user choice was found in answerfile or a selected option otherwise.\n                             If skip_dialogs is set to False then in case of absent recorded answer the dialog will\n                             be rendered in a blocking user input requiring way.\n                             The value of skip_dialogs will be passed to the actors that can theoretically use it for\n                             their purposes.\n        :type skip_dialogs: bool\n        :param only_with_tags: Executes only actors with the given tag, any other actor is going to get skipped.\n        :type only_with_tags: List[str]\n\n        " context = (context or str(uuid.uuid4())) os.environ['LEAPP_EXECUTION_ID'] = context if (not os.environ.get('LEAPP_HOSTNAME', None)): os.environ['LEAPP_HOSTNAME'] = socket.getfqdn() self.log.info('Starting workflow execution: {name} - ID: {id}'.format(name=self.name, id=os.environ['LEAPP_EXECUTION_ID'])) skip_phases_until = (skip_phases_until or '').lower() needle_phase = (until_phase or '') needle_stage = None if ('.' in needle_phase): (needle_phase, needle_stage) = needle_phase.split('.', 1) needle_phase = needle_phase.lower() needle_stage = (needle_stage or '').lower() needle_actor = (until_actor or '').lower() self._errors = get_errors(context) config_model = type(self).configuration for phase in (skip_phases_until, needle_phase): if (phase and (not self.is_valid_phase(phase))): raise CommandError('Phase {phase} does not exist in the workflow'.format(phase=phase)) self._stop_after_phase_requested = False for phase in self._phase_actors: os.environ['LEAPP_CURRENT_PHASE'] = phase[0].name if skip_phases_until: if (skip_phases_until in phase_names(phase)): skip_phases_until = '' self.log.info('Skipping phase {name}'.format(name=phase[0].name)) continue display_status_current_phase(phase) self.log.info('Starting phase {name}'.format(name=phase[0].name)) current_logger = self.log.getChild(phase[0].name) early_finish = False for stage in phase[1:]: if early_finish: return current_logger.info('Starting stage {stage} of phase {phase}'.format(phase=phase[0].name, stage=stage.stage)) for actor in stage.actors: if early_finish: return designation = '' if (ExperimentalTag in actor.tags): designation = '[EXPERIMENTAL]' if (actor not in self.experimental_whitelist): current_logger.info('Skipping experimental actor {actor}'.format(actor=actor.name)) continue if (only_with_tags and (not contains_tag(only_with_tags, actor.tags))): current_logger.info('Actor {actor} does not contain any required tag. Skipping.'.format(actor=actor.name)) continue display_status_current_actor(actor, designation=designation) current_logger.info('Executing actor {actor} {designation}'.format(designation=designation, actor=actor.name)) messaging = InProcessMessaging(config_model=config_model, answer_store=self._answer_store) messaging.load(actor.consumes) instance = actor(logger=current_logger, messaging=messaging, config_model=config_model, skip_dialogs=skip_dialogs) try: instance.run() except BaseException: self._unhandled_exception = True raise self._stop_after_phase_requested = (messaging.stop_after_phase or self._stop_after_phase_requested) self._dialogs.extend(messaging.dialogs()) if messaging.errors(): self._errors.extend(messaging.errors()) if (phase[0].policies.error is Policies.Errors.FailImmediately): self.log.info('Workflow interrupted due to FailImmediately error policy') early_finish = True break for command in messaging.commands: if (command['command'] == SkipPhasesUntilCommand.COMMAND): skip_phases_until = command['arguments']['until_phase'] self.log.info('SkipPhasesUntilCommand received. Skipping phases until {}'.format(skip_phases_until)) checkpoint(actor=actor.name, phase=phase[0].name, context=context, hostname=os.environ['LEAPP_HOSTNAME']) if (needle_actor in actor_names(actor)): self.log.info('Workflow finished due to the until-actor flag') early_finish = True break if (not stage.actors): checkpoint(actor='', phase=((phase[0].name + '.') + stage.stage), context=context, hostname=os.environ['LEAPP_HOSTNAME']) if ((needle_phase in phase_names(phase)) and (needle_stage == stage.stage.lower())): self.log.info('Workflow finished due to the until-phase flag') early_finish = True break checkpoint(actor='', phase=phase[0].name, context=context, hostname=os.environ['LEAPP_HOSTNAME']) if (self._errors and (phase[0].policies.error is Policies.Errors.FailPhase)): self.log.info('Workflow interrupted due to the FailPhase error policy') early_finish = True elif (needle_phase in phase_names(phase)): self.log.info('Workflow finished due to the until-phase flag') early_finish = True elif self._stop_after_phase_requested: self.log.info('Workflow received request to stop after phase.') early_finish = True elif (phase[0].flags.request_restart_after_phase or phase[0].flags.restart_after_phase): reboot = True if (phase[0].flags.request_restart_after_phase and (not self._auto_reboot)): reboot = False messaging.request_answers(RawMessageDialog(message='A reboot is required to continue. Please reboot your system.')) if reboot: self.log.info('Initiating system reboot due to the restart_after_reboot flag') reboot_system() early_finish = True elif phase[0].flags.is_checkpoint: self.log.info('Stopping the workflow execution due to the is_checkpoint flag') early_finish = True if early_finish: return
def run(self, context=None, until_phase=None, until_actor=None, skip_phases_until=None, skip_dialogs=False, only_with_tags=None): "\n        Executes the workflow\n\n        :param context: Custom execution ID to be used instead of a randomly generated UUIDv4\n        :type context: str\n        :param until_phase: Specify until including which phase the execution should run - phase.stage can be used to\n                            control it even more granularly. `phase` is any phase name where `stage` refers to `main`,\n                            `before` or `after`. If no stage is defined, `after` is assumed to be the default value.\n                            The execution ends when this phase (and stage, if specified) has been executed.\n        :type until_phase: str\n        :param until_actor: The execution finishes when this actor has been executed.\n        :type until_actor: str\n        :param skip_phases_until: Skips all phases until including the phase specified, and then continues the\n                                  execution.\n        :type skip_phases_until: str or None\n        :param skip_dialogs: Inform actors about the mode of dialogs processing. If skip_dialogs is set to True it\n                             means that dialogs can't be processed in the current workflow run interactively and\n                             every attempted call of get_answers api method will be non-blocking, returning an empty\n                             dict if no user choice was found in answerfile or a selected option otherwise.\n                             If skip_dialogs is set to False then in case of absent recorded answer the dialog will\n                             be rendered in a blocking user input requiring way.\n                             The value of skip_dialogs will be passed to the actors that can theoretically use it for\n                             their purposes.\n        :type skip_dialogs: bool\n        :param only_with_tags: Executes only actors with the given tag, any other actor is going to get skipped.\n        :type only_with_tags: List[str]\n\n        " context = (context or str(uuid.uuid4())) os.environ['LEAPP_EXECUTION_ID'] = context if (not os.environ.get('LEAPP_HOSTNAME', None)): os.environ['LEAPP_HOSTNAME'] = socket.getfqdn() self.log.info('Starting workflow execution: {name} - ID: {id}'.format(name=self.name, id=os.environ['LEAPP_EXECUTION_ID'])) skip_phases_until = (skip_phases_until or '').lower() needle_phase = (until_phase or '') needle_stage = None if ('.' in needle_phase): (needle_phase, needle_stage) = needle_phase.split('.', 1) needle_phase = needle_phase.lower() needle_stage = (needle_stage or '').lower() needle_actor = (until_actor or '').lower() self._errors = get_errors(context) config_model = type(self).configuration for phase in (skip_phases_until, needle_phase): if (phase and (not self.is_valid_phase(phase))): raise CommandError('Phase {phase} does not exist in the workflow'.format(phase=phase)) self._stop_after_phase_requested = False for phase in self._phase_actors: os.environ['LEAPP_CURRENT_PHASE'] = phase[0].name if skip_phases_until: if (skip_phases_until in phase_names(phase)): skip_phases_until = '' self.log.info('Skipping phase {name}'.format(name=phase[0].name)) continue display_status_current_phase(phase) self.log.info('Starting phase {name}'.format(name=phase[0].name)) current_logger = self.log.getChild(phase[0].name) early_finish = False for stage in phase[1:]: if early_finish: return current_logger.info('Starting stage {stage} of phase {phase}'.format(phase=phase[0].name, stage=stage.stage)) for actor in stage.actors: if early_finish: return designation = '' if (ExperimentalTag in actor.tags): designation = '[EXPERIMENTAL]' if (actor not in self.experimental_whitelist): current_logger.info('Skipping experimental actor {actor}'.format(actor=actor.name)) continue if (only_with_tags and (not contains_tag(only_with_tags, actor.tags))): current_logger.info('Actor {actor} does not contain any required tag. Skipping.'.format(actor=actor.name)) continue display_status_current_actor(actor, designation=designation) current_logger.info('Executing actor {actor} {designation}'.format(designation=designation, actor=actor.name)) messaging = InProcessMessaging(config_model=config_model, answer_store=self._answer_store) messaging.load(actor.consumes) instance = actor(logger=current_logger, messaging=messaging, config_model=config_model, skip_dialogs=skip_dialogs) try: instance.run() except BaseException: self._unhandled_exception = True raise self._stop_after_phase_requested = (messaging.stop_after_phase or self._stop_after_phase_requested) self._dialogs.extend(messaging.dialogs()) if messaging.errors(): self._errors.extend(messaging.errors()) if (phase[0].policies.error is Policies.Errors.FailImmediately): self.log.info('Workflow interrupted due to FailImmediately error policy') early_finish = True break for command in messaging.commands: if (command['command'] == SkipPhasesUntilCommand.COMMAND): skip_phases_until = command['arguments']['until_phase'] self.log.info('SkipPhasesUntilCommand received. Skipping phases until {}'.format(skip_phases_until)) checkpoint(actor=actor.name, phase=phase[0].name, context=context, hostname=os.environ['LEAPP_HOSTNAME']) if (needle_actor in actor_names(actor)): self.log.info('Workflow finished due to the until-actor flag') early_finish = True break if (not stage.actors): checkpoint(actor='', phase=((phase[0].name + '.') + stage.stage), context=context, hostname=os.environ['LEAPP_HOSTNAME']) if ((needle_phase in phase_names(phase)) and (needle_stage == stage.stage.lower())): self.log.info('Workflow finished due to the until-phase flag') early_finish = True break checkpoint(actor='', phase=phase[0].name, context=context, hostname=os.environ['LEAPP_HOSTNAME']) if (self._errors and (phase[0].policies.error is Policies.Errors.FailPhase)): self.log.info('Workflow interrupted due to the FailPhase error policy') early_finish = True elif (needle_phase in phase_names(phase)): self.log.info('Workflow finished due to the until-phase flag') early_finish = True elif self._stop_after_phase_requested: self.log.info('Workflow received request to stop after phase.') early_finish = True elif (phase[0].flags.request_restart_after_phase or phase[0].flags.restart_after_phase): reboot = True if (phase[0].flags.request_restart_after_phase and (not self._auto_reboot)): reboot = False messaging.request_answers(RawMessageDialog(message='A reboot is required to continue. Please reboot your system.')) if reboot: self.log.info('Initiating system reboot due to the restart_after_reboot flag') reboot_system() early_finish = True elif phase[0].flags.is_checkpoint: self.log.info('Stopping the workflow execution due to the is_checkpoint flag') early_finish = True if early_finish: return<|docstring|>Executes the workflow :param context: Custom execution ID to be used instead of a randomly generated UUIDv4 :type context: str :param until_phase: Specify until including which phase the execution should run - phase.stage can be used to control it even more granularly. `phase` is any phase name where `stage` refers to `main`, `before` or `after`. If no stage is defined, `after` is assumed to be the default value. The execution ends when this phase (and stage, if specified) has been executed. :type until_phase: str :param until_actor: The execution finishes when this actor has been executed. :type until_actor: str :param skip_phases_until: Skips all phases until including the phase specified, and then continues the execution. :type skip_phases_until: str or None :param skip_dialogs: Inform actors about the mode of dialogs processing. If skip_dialogs is set to True it means that dialogs can't be processed in the current workflow run interactively and every attempted call of get_answers api method will be non-blocking, returning an empty dict if no user choice was found in answerfile or a selected option otherwise. If skip_dialogs is set to False then in case of absent recorded answer the dialog will be rendered in a blocking user input requiring way. The value of skip_dialogs will be passed to the actors that can theoretically use it for their purposes. :type skip_dialogs: bool :param only_with_tags: Executes only actors with the given tag, any other actor is going to get skipped. :type only_with_tags: List[str]<|endoftext|>
46640ae2ca4d427f48e8ceed212991615c2912433fd94ab72c9c45c15e8a297f
def build_time_range(from_str=None, to_str=None, ago=1): ' This function expects human-formatted user input with a combination of\n - end date, e.g.: 2018-09-21 (default is today)\n - start date, e.g.: 2018-09-01 (default is today - nb of days ago)\n - number of days ago, e.g.: 30 (default is DEFAULT_NB_DAYS_AGO)\n ' end_date = parse_date(to_str) if (from_str is None): start_date = (end_date - timedelta(ago)) else: start_date = parse_date(from_str) if (start_date > end_date): raise ValueError('Please provide a starting date before the end date') start_time = datetime.combine(start_date, datetime.min.time()) end_time = datetime.combine(end_date, datetime.max.time()) return (start_time, end_time)
This function expects human-formatted user input with a combination of - end date, e.g.: 2018-09-21 (default is today) - start date, e.g.: 2018-09-01 (default is today - nb of days ago) - number of days ago, e.g.: 30 (default is DEFAULT_NB_DAYS_AGO)
src/utils/dates.py
build_time_range
ebreton/pybootstrap
2
python
def build_time_range(from_str=None, to_str=None, ago=1): ' This function expects human-formatted user input with a combination of\n - end date, e.g.: 2018-09-21 (default is today)\n - start date, e.g.: 2018-09-01 (default is today - nb of days ago)\n - number of days ago, e.g.: 30 (default is DEFAULT_NB_DAYS_AGO)\n ' end_date = parse_date(to_str) if (from_str is None): start_date = (end_date - timedelta(ago)) else: start_date = parse_date(from_str) if (start_date > end_date): raise ValueError('Please provide a starting date before the end date') start_time = datetime.combine(start_date, datetime.min.time()) end_time = datetime.combine(end_date, datetime.max.time()) return (start_time, end_time)
def build_time_range(from_str=None, to_str=None, ago=1): ' This function expects human-formatted user input with a combination of\n - end date, e.g.: 2018-09-21 (default is today)\n - start date, e.g.: 2018-09-01 (default is today - nb of days ago)\n - number of days ago, e.g.: 30 (default is DEFAULT_NB_DAYS_AGO)\n ' end_date = parse_date(to_str) if (from_str is None): start_date = (end_date - timedelta(ago)) else: start_date = parse_date(from_str) if (start_date > end_date): raise ValueError('Please provide a starting date before the end date') start_time = datetime.combine(start_date, datetime.min.time()) end_time = datetime.combine(end_date, datetime.max.time()) return (start_time, end_time)<|docstring|>This function expects human-formatted user input with a combination of - end date, e.g.: 2018-09-21 (default is today) - start date, e.g.: 2018-09-01 (default is today - nb of days ago) - number of days ago, e.g.: 30 (default is DEFAULT_NB_DAYS_AGO)<|endoftext|>
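A usage sketch for build_time_range(), assuming parse_date() accepts the ISO-style strings shown in the docstring and defaults to today when given None:

    # Explicit range: midnight on the start date through end-of-day on the end date.
    start, end = build_time_range(from_str='2018-09-01', to_str='2018-09-21')

    # Relative range: the 30 days ending today (to_str omitted).
    start, end = build_time_range(ago=30)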
05d9f669edade7ae94fbf431f273c7d3b46574d41521c540103d40418531a452
def test_retry_doesnt_work_for_other_exceptions(self): '\n db_retry() handles only the OperationalError exception.\n Other exceptions are re-raised immediately.\n ' hit = [0] @ddr.db_retry(tries=100, delay=1) def decorated_func(): hit[0] += 1 (1 / 0) with self.assertRaises(ZeroDivisionError): decorated_func() self.assertEqual(hit[0], 1)
db_retry() handles only the OperationalError exception. Other exceptions are re-raised immediately.
django_dbconn_retry/tests/test_decorator.py
test_retry_doesnt_work_for_other_exceptions
baffolobill/django-dbconn-retry
0
python
def test_retry_doesnt_work_for_other_exceptions(self): '\n db_retry() handles only the OperationalError exception.\n Other exceptions are re-raised immediately.\n ' hit = [0] @ddr.db_retry(tries=100, delay=1) def decorated_func(): hit[0] += 1 (1 / 0) with self.assertRaises(ZeroDivisionError): decorated_func() self.assertEqual(hit[0], 1)
def test_retry_doesnt_work_for_other_exceptions(self): '\n db_retry() handles only the OperationalError exception.\n Other exceptions are re-raised immediately.\n ' hit = [0] @ddr.db_retry(tries=100, delay=1) def decorated_func(): hit[0] += 1 (1 / 0) with self.assertRaises(ZeroDivisionError): decorated_func() self.assertEqual(hit[0], 1)<|docstring|>db_retry() handles only the OperationalError exception. Other exceptions are re-raised immediately.<|endoftext|>
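The test above pins down the decorator's contract: retry on OperationalError only, and let any other exception escape on the first call. A self-contained sketch of a decorator honoring that contract (illustrative only, not the package's actual implementation; the import path for OperationalError is an assumption):

    import functools
    import time

    from django.db.utils import OperationalError  # assumed import path

    def db_retry(tries=3, delay=1):
        # Retry the wrapped callable on OperationalError only; any other
        # exception (like the ZeroDivisionError above) propagates immediately.
        def decorator(func):
            @functools.wraps(func)
            def wrapper(*args, **kwargs):
                for attempt in range(tries):
                    try:
                        return func(*args, **kwargs)
                    except OperationalError:
                        if attempt == tries - 1:
                            raise  # retries exhausted
                        time.sleep(delay)
            return wrapper
        return decorator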
368190b2c922c939aceab4769b441c3e2235fe181a489c8765f2428281508be6
def maxSubArray(self, nums): ' Dynamic programming. Write out the state transition equation:\n dp[i] = max(dp[i-1] + nums[i], nums[i])\n :type nums: List[int]\n :rtype: int\n ' if (not nums): return 0 size = len(nums) max_list = ([0] * size) for i in range(size): if (i == 0): max_list[i] = nums[i] else: max_list[i] = max((nums[i] + max_list[(i - 1)]), nums[i]) return max(max_list)
Dynamic programming. Write out the state transition equation: dp[i] = max(dp[i-1] + nums[i], nums[i]) :type nums: List[int] :rtype: int
剑指offer/31_GreatestSumOfSubarrays(连续子数组最大和).py
maxSubArray
PegasusWang/python_data_structures_and_algorithms
2468
python
def maxSubArray(self, nums): ' Dynamic programming. Write out the state transition equation:\n dp[i] = max(dp[i-1] + nums[i], nums[i])\n :type nums: List[int]\n :rtype: int\n ' if (not nums): return 0 size = len(nums) max_list = ([0] * size) for i in range(size): if (i == 0): max_list[i] = nums[i] else: max_list[i] = max((nums[i] + max_list[(i - 1)]), nums[i]) return max(max_list)
def maxSubArray(self, nums): ' Dynamic programming. Write out the state transition equation:\n dp[i] = max(dp[i-1] + nums[i], nums[i])\n :type nums: List[int]\n :rtype: int\n ' if (not nums): return 0 size = len(nums) max_list = ([0] * size) for i in range(size): if (i == 0): max_list[i] = nums[i] else: max_list[i] = max((nums[i] + max_list[(i - 1)]), nums[i]) return max(max_list)<|docstring|>Dynamic programming. Write out the state transition equation: dp[i] = max(dp[i-1] + nums[i], nums[i]) :type nums: List[int] :rtype: int<|endoftext|>
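The list-based version above keeps the whole dp array, but only dp[i-1] is ever read, so the recurrence collapses to two running variables (Kadane's algorithm), a standard O(1)-space refinement:

    def max_sub_array(nums):
        # O(1)-space form of dp[i] = max(dp[i-1] + nums[i], nums[i]).
        if not nums:
            return 0
        best = cur = nums[0]
        for x in nums[1:]:
            cur = max(cur + x, x)    # extend the previous run or start fresh at x
            best = max(best, cur)
        return best

    assert max_sub_array([-2, 1, -3, 4, -1, 2, 1, -5, 4]) == 6   # subarray [4, -1, 2, 1]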
b8823b69d4556f51b59af3d25d8e40fd959f509b8cb0180168885a52a41db1a9
def detectCycle(self, head: ListNode) -> ListNode: "\n Given a linked list, return the node where the cycle begins. If there is no cycle, return null.\n\n There is a cycle in a linked list if there is some node in the list that can be reached again\n by continuously following the next pointer. Internally, pos is used to denote the index of the\n node that tail's next pointer is connected to. Note that pos is not passed as a parameter.\n \n " fast = head slow = fast while (fast and fast.next): fast = fast.next.next slow = slow.next if (fast == slow): fast = head while (fast != slow): fast = fast.next slow = slow.next return fast else: return None
Given a linked list, return the node where the cycle begins. If there is no cycle, return null. There is a cycle in a linked list if there is some node in the list that can be reached again by continuously following the next pointer. Internally, pos is used to denote the index of the node that tail's next pointer is connected to. Note that pos is not passed as a parameter.
python/detectCycle.py
detectCycle
l0latgithub/codediary
0
python
def detectCycle(self, head: ListNode) -> ListNode: "\n Given a linked list, return the node where the cycle begins. If there is no cycle, return null.\n\n There is a cycle in a linked list if there is some node in the list that can be reached again\n by continuously following the next pointer. Internally, pos is used to denote the index of the\n node that tail's next pointer is connected to. Note that pos is not passed as a parameter.\n \n " fast = head slow = fast while (fast and fast.next): fast = fast.next.next slow = slow.next if (fast == slow): fast = head while (fast != slow): fast = fast.next slow = slow.next return fast else: return None
def detectCycle(self, head: ListNode) -> ListNode: "\n Given a linked list, return the node where the cycle begins. If there is no cycle, return null.\n\n There is a cycle in a linked list if there is some node in the list that can be reached again\n by continuously following the next pointer. Internally, pos is used to denote the index of the\n node that tail's next pointer is connected to. Note that pos is not passed as a parameter.\n \n " fast = head slow = fast while (fast and fast.next): fast = fast.next.next slow = slow.next if (fast == slow): fast = head while (fast != slow): fast = fast.next slow = slow.next return fast else: return None<|docstring|>Given a linked list, return the node where the cycle begins. If there is no cycle, return null. There is a cycle in a linked list if there is some node in the list that can be reached again by continuously following the next pointer. Internally, pos is used to denote the index of the node that tail's next pointer is connected to. Note that pos is not passed as a parameter.<|endoftext|>
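Why resetting fast to head finds the entry: with a the distance from head to the cycle entry, m the distance from the entry to the meeting point, and L the cycle length, fast having gone twice as far gives a + m + kL = 2(a + m), so a = kL - m, and a steps from the meeting point lands exactly on the entry. A self-contained check of the same two-pointer logic, written as a free function since the method above lives on a LeetCode-style Solution class:

    # Build 1 -> 2 -> 3 -> 4 -> 2 (the tail links back to node 2).
    class ListNode:
        def __init__(self, x):
            self.val = x
            self.next = None

    def detect_cycle(head):              # same logic as detectCycle above
        fast = slow = head
        while fast and fast.next:
            fast = fast.next.next
            slow = slow.next
            if fast is slow:             # pointers met inside the cycle
                fast = head
                while fast is not slow:
                    fast = fast.next
                    slow = slow.next
                return fast              # cycle entry
        return None

    a, b, c, d = ListNode(1), ListNode(2), ListNode(3), ListNode(4)
    a.next, b.next, c.next, d.next = b, c, d, b
    assert detect_cycle(a) is b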
5491972d511e9b161fbd9743b52eece43fc4225b039907ef6f5714d9ada8a4b2
def get_association(id, **kwargs): '\n Fetch an association object by ID\n ' results = search_associations(id=id, **kwargs) assoc = (results['associations'][0] if (len(results['associations']) > 0) else {}) return assoc
Fetch an association object by ID
ontobio/golr/golr_associations.py
get_association
alliance-genome/ontobio
101
python
def get_association(id, **kwargs): '\n \n ' results = search_associations(id=id, **kwargs) assoc = (results['associations'][0] if (len(results['associations']) > 0) else {}) return assoc
def get_association(id, **kwargs): '\n \n ' results = search_associations(id=id, **kwargs) assoc = (results['associations'][0] if (len(results['associations']) > 0) else {}) return assoc<|docstring|>Fetch an association object by ID<|endoftext|>
ba6aa9f2470ddc7ab5202952c9410e473399d2a05aa4529ada619ffdfd15b97b
def search_associations(**kwargs): '\n Fetch a set of association objects based on a query.\n :rtype:\n ' logger.info('CREATING_GOLR_QUERY {}'.format(kwargs)) q = GolrAssociationQuery(**kwargs) return q.exec()
Fetch a set of association objects based on a query. :rtype:
ontobio/golr/golr_associations.py
search_associations
alliance-genome/ontobio
101
python
def search_associations(**kwargs): '\n Fetch a set of association objects based on a query.\n :rtype:\n ' logger.info('CREATING_GOLR_QUERY {}'.format(kwargs)) q = GolrAssociationQuery(**kwargs) return q.exec()
def search_associations(**kwargs): '\n Fetch a set of association objects based on a query.\n :rtype:\n ' logger.info('CREATING_GOLR_QUERY {}'.format(kwargs)) q = GolrAssociationQuery(**kwargs) return q.exec()<|docstring|>Fetch a set of association objects based on a query. :rtype:<|endoftext|>
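A hedged usage sketch for search_associations(); the gene CURIE and category value are illustrative placeholders, while the 'associations' key and the nested id access mirror get_association() and map2slim() in this same module:

    results = search_associations(
        subject='NCBIGene:84570',      # hypothetical subject CURIE
        object_category='phenotype',   # illustrative category value
        rows=10,
    )
    for assoc in results['associations']:
        print(assoc['subject']['id'], '->', assoc['object']['id'])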
90c7e705078927e471b9514db875c9b6fb262e0891da49bc429e89dc13de745e
def get_objects_for_subject(subject=None, object_category=None, relation=None, **kwargs): '\n Convenience method: Given a subject (e.g. gene, disease, variant), return all associated objects (phenotypes, functions, interacting genes, etc)\n ' searchresult = search_associations(subject=subject, fetch_objects=True, rows=0, object_category=object_category, relation=relation, **kwargs) objs = searchresult['objects'] return objs
Convenience method: Given a subject (e.g. gene, disease, variant), return all associated objects (phenotypes, functions, interacting genes, etc)
ontobio/golr/golr_associations.py
get_objects_for_subject
alliance-genome/ontobio
101
python
def get_objects_for_subject(subject=None, object_category=None, relation=None, **kwargs): '\n \n ' searchresult = search_associations(subject=subject, fetch_objects=True, rows=0, object_category=object_category, relation=relation, **kwargs) objs = searchresult['objects'] return objs
def get_objects_for_subject(subject=None, object_category=None, relation=None, **kwargs): '\n \n ' searchresult = search_associations(subject=subject, fetch_objects=True, rows=0, object_category=object_category, relation=relation, **kwargs) objs = searchresult['objects'] return objs<|docstring|>Convenience method: Given a subject (e.g. gene, disease, variant), return all associated objects (phenotypes, functions, interacting genes, etc)<|endoftext|>
010e1d61ecf67c4e76c32aff3e59f940d3f42028a9b0ceee5354968cb61fddad
def get_subjects_for_object(object=None, subject_category=None, subject_taxon=None, relation=None, **kwargs): '\n Convenience method: Given an object (e.g. ontology term like phenotype or GO; interacting gene; disease; pathway etc), return all associated subjects (genes, variants, pubs, etc)\n ' searchresult = search_associations(object=object, fetch_subjects=True, rows=0, subject_category=subject_category, subject_taxon=subject_taxon, relation=relation, **kwargs) subjs = searchresult['subjects'] return subjs
Convenience method: Given an object (e.g. ontology term like phenotype or GO; interacting gene; disease; pathway etc), return all associated subjects (genes, variants, pubs, etc)
ontobio/golr/golr_associations.py
get_subjects_for_object
alliance-genome/ontobio
101
python
def get_subjects_for_object(object=None, subject_category=None, subject_taxon=None, relation=None, **kwargs): '\n \n ' searchresult = search_associations(object=object, fetch_subjects=True, rows=0, subject_category=subject_category, subject_taxon=subject_taxon, relation=relation, **kwargs) subjs = searchresult['subjects'] return subjs
def get_subjects_for_object(object=None, subject_category=None, subject_taxon=None, relation=None, **kwargs): '\n \n ' searchresult = search_associations(object=object, fetch_subjects=True, rows=0, subject_category=subject_category, subject_taxon=subject_taxon, relation=relation, **kwargs) subjs = searchresult['subjects'] return subjs<|docstring|>Convenience method: Given an object (e.g. ontology term like phenotype or GO; interacting gene; disease; pathway etc), return all associated subjects (genes, variants, pubs, etc)<|endoftext|>
14f023bcf8ed86c6a672309c32f03403d0dc6a64be5fb90d6006035f1d0c182c
def search_associations_compact(**kwargs): '\n Convenience method: as for search associations, use compact\n ' searchresult = search_associations(use_compact_associations=True, facet_fields=[], **kwargs) return searchresult['compact_associations']
Convenience method: as for search associations, use compact
ontobio/golr/golr_associations.py
search_associations_compact
alliance-genome/ontobio
101
python
def search_associations_compact(**kwargs): '\n \n ' searchresult = search_associations(use_compact_associations=True, facet_fields=[], **kwargs) return searchresult['compact_associations']
def search_associations_compact(**kwargs): '\n \n ' searchresult = search_associations(use_compact_associations=True, facet_fields=[], **kwargs) return searchresult['compact_associations']<|docstring|>Convenience method: as for search associations, use compact<|endoftext|>
ff59ce6c0338876c2fdc72cf52461bf03ebdb516a7b3025e4a68910cd8be3422
def map2slim(subjects, slim, **kwargs): '\n Maps a set of subjects (e.g. genes) to a set of slims\n\n Result is a list of unique subject-class pairs, with\n a list of source associations\n ' logger.info('SLIM SUBJECTS:{} SLIM:{} CAT:{}'.format(subjects, slim, kwargs.get('category'))) searchresult = search_associations(subjects=subjects, slim=slim, facet_fields=[], **kwargs) pmap = {} for a in searchresult['associations']: subj = a['subject']['id'] slimmed_terms = a['slim'] for t in slimmed_terms: k = (subj, t) if (k not in pmap): pmap[k] = [] pmap[k].append(a) results = [{'subject': subj, 'slim': t, 'assocs': assocs} for ((subj, t), assocs) in pmap.items()] return results
Maps a set of subjects (e.g. genes) to a set of slims Result is a list of unique subject-class pairs, with a list of source associations
ontobio/golr/golr_associations.py
map2slim
alliance-genome/ontobio
101
python
def map2slim(subjects, slim, **kwargs): '\n Maps a set of subjects (e.g. genes) to a set of slims\n\n Result is a list of unique subject-class pairs, with\n a list of source associations\n ' logger.info('SLIM SUBJECTS:{} SLIM:{} CAT:{}'.format(subjects, slim, kwargs.get('category'))) searchresult = search_associations(subjects=subjects, slim=slim, facet_fields=[], **kwargs) pmap = {} for a in searchresult['associations']: subj = a['subject']['id'] slimmed_terms = a['slim'] for t in slimmed_terms: k = (subj, t) if (k not in pmap): pmap[k] = [] pmap[k].append(a) results = [{'subject': subj, 'slim': t, 'assocs': assocs} for ((subj, t), assocs) in pmap.items()] return results
def map2slim(subjects, slim, **kwargs): '\n Maps a set of subjects (e.g. genes) to a set of slims\n\n Result is a list of unique subject-class pairs, with\n a list of source associations\n ' logger.info('SLIM SUBJECTS:{} SLIM:{} CAT:{}'.format(subjects, slim, kwargs.get('category'))) searchresult = search_associations(subjects=subjects, slim=slim, facet_fields=[], **kwargs) pmap = {} for a in searchresult['associations']: subj = a['subject']['id'] slimmed_terms = a['slim'] for t in slimmed_terms: k = (subj, t) if (k not in pmap): pmap[k] = [] pmap[k].append(a) results = [{'subject': subj, 'slim': t, 'assocs': assocs} for ((subj, t), assocs) in pmap.items()] return results<|docstring|>Maps a set of subjects (e.g. genes) to a set of slims Result is a list of unique subject-class pairs, with a list of source associations<|endoftext|>
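A usage sketch for map2slim(); the gene and GO slim identifiers are hypothetical placeholders, and the row keys ('subject', 'slim', 'assocs') come straight from the comprehension above:

    rows = map2slim(
        subjects=['ZFIN:ZDB-GENE-980526-166'],   # hypothetical gene CURIE
        slim=['GO:0008150', 'GO:0003674'],       # illustrative GO slim terms
        object_category='function',              # forwarded to search_associations via **kwargs
    )
    for row in rows:
        print(row['subject'], row['slim'], len(row['assocs']), 'source associations')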
b0f5a3301c514217e4825a487312fd721b10ba3cbd8a0d76365dda3f39e6f3a6
def bulk_fetch(subject_category, object_category, taxon, rows=MAX_ROWS, **kwargs): '\n Fetch associations for a species and pair of categories in bulk.\n\n Arguments:\n\n - subject_category: String (not None)\n - object_category: String (not None)\n - taxon: String\n - rows: int\n\n Additionally, any argument for search_associations can be passed\n ' assert (subject_category is not None) assert (object_category is not None) time.sleep(1) logger.info('Bulk query: {} {} {}'.format(subject_category, object_category, taxon)) assocs = search_associations_compact(subject_category=subject_category, object_category=object_category, subject_taxon=taxon, rows=rows, iterate=True, **kwargs) logger.info('Rows retrieved: {}'.format(len(assocs))) if (len(assocs) == 0): logger.error('No associations returned for query: {} {} {}'.format(subject_category, object_category, taxon)) return assocs
Fetch associations for a species and pair of categories in bulk. Arguments: - subject_category: String (not None) - object_category: String (not None) - taxon: String - rows: int Additionally, any argument for search_associations can be passed
ontobio/golr/golr_associations.py
bulk_fetch
alliance-genome/ontobio
101
python
def bulk_fetch(subject_category, object_category, taxon, rows=MAX_ROWS, **kwargs): '\n Fetch associations for a species and pair of categories in bulk.\n\n Arguments:\n\n - subject_category: String (not None)\n - object_category: String (not None)\n - taxon: String\n - rows: int\n\n Additionally, any argument for search_associations can be passed\n ' assert (subject_category is not None) assert (object_category is not None) time.sleep(1) logger.info('Bulk query: {} {} {}'.format(subject_category, object_category, taxon)) assocs = search_associations_compact(subject_category=subject_category, object_category=object_category, subject_taxon=taxon, rows=rows, iterate=True, **kwargs) logger.info('Rows retrieved: {}'.format(len(assocs))) if (len(assocs) == 0): logger.error('No associations returned for query: {} {} {}'.format(subject_category, object_category, taxon)) return assocs
def bulk_fetch(subject_category, object_category, taxon, rows=MAX_ROWS, **kwargs): '\n Fetch associations for a species and pair of categories in bulk.\n\n Arguments:\n\n - subject_category: String (not None)\n - object_category: String (not None)\n - taxon: String\n - rows: int\n\n Additionally, any argument for search_associations can be passed\n ' assert (subject_category is not None) assert (object_category is not None) time.sleep(1) logger.info('Bulk query: {} {} {}'.format(subject_category, object_category, taxon)) assocs = search_associations_compact(subject_category=subject_category, object_category=object_category, subject_taxon=taxon, rows=rows, iterate=True, **kwargs) logger.info('Rows retrieved: {}'.format(len(assocs))) if (len(assocs) == 0): logger.error('No associations returned for query: {} {} {}'.format(subject_category, object_category, taxon)) return assocs<|docstring|>Fetch associations for a species and pair of categories in bulk. Arguments: - subject_category: String (not None) - object_category: String (not None) - taxon: String - rows: int Additionally, any argument for search_associations can be passed<|endoftext|>
baa76c35e758059b002dbe226702d96b46dbdb990677baba616a5fd08957f2a2
def pivot_query(facet=None, facet_pivot_fields=None, **kwargs): '\n Pivot query\n ' if (facet_pivot_fields is None): facet_pivot_fields = [] results = search_associations(rows=0, facet_fields=[facet], facet_pivot_fields=facet_pivot_fields, **kwargs) return results
Pivot query
ontobio/golr/golr_associations.py
pivot_query
alliance-genome/ontobio
101
python
def pivot_query(facet=None, facet_pivot_fields=None, **kwargs): '\n \n ' if (facet_pivot_fields is None): facet_pivot_fields = [] results = search_associations(rows=0, facet_fields=[facet], facet_pivot_fields=facet_pivot_fields, **kwargs) return results
def pivot_query(facet=None, facet_pivot_fields=None, **kwargs): '\n \n ' if (facet_pivot_fields is None): facet_pivot_fields = [] results = search_associations(rows=0, facet_fields=[facet], facet_pivot_fields=facet_pivot_fields, **kwargs) return results<|docstring|>Pivot query<|endoftext|>
b5851636e8214d1b16942bc1f8a35ba116ce956c440ee1b687ac121c4340933d
def pivot_query_as_matrix(facet=None, facet_pivot_fields=None, **kwargs): '\n Pivot query\n ' if (facet_pivot_fields is None): facet_pivot_fields = [] logger.info('Additional args: {}'.format(kwargs)) fp = search_associations(rows=0, facet_fields=[facet], facet_pivot_fields=facet_pivot_fields, **kwargs)['facet_pivot'] results = list(fp.items())[0][1] tups = [] xtype = None ytype = None xlabels = set() ylabels = set() for r in results: logger.info('R={}'.format(r)) xtype = r['field'] rv = r['value'] xlabels.add(rv) for piv in r['pivot']: ytype = piv['field'] pv = piv['value'] ylabels.add(pv) tups.append((rv, pv, piv['count'])) z = [([0] * len(xlabels)) for i1 in range(len(ylabels))] xlabels = list(xlabels) ylabels = list(ylabels) xmap = dict([x[::(- 1)] for x in enumerate(xlabels)]) ymap = dict([x[::(- 1)] for x in enumerate(ylabels)]) for t in tups: z[ymap[t[1]]][xmap[t[0]]] = t[2] m = {'xtype': xtype, 'ytype': ytype, 'xaxis': xlabels, 'yaxis': ylabels, 'z': z} return m
Pivot query
ontobio/golr/golr_associations.py
pivot_query_as_matrix
alliance-genome/ontobio
101
python
def pivot_query_as_matrix(facet=None, facet_pivot_fields=None, **kwargs): '\n \n ' if (facet_pivot_fields is None): facet_pivot_fields = [] logger.info('Additional args: {}'.format(kwargs)) fp = search_associations(rows=0, facet_fields=[facet], facet_pivot_fields=facet_pivot_fields, **kwargs)['facet_pivot'] results = list(fp.items())[0][1] tups = [] xtype = None ytype = None xlabels = set() ylabels = set() for r in results: logger.info('R={}'.format(r)) xtype = r['field'] rv = r['value'] xlabels.add(rv) for piv in r['pivot']: ytype = piv['field'] pv = piv['value'] ylabels.add(pv) tups.append((rv, pv, piv['count'])) z = [([0] * len(xlabels)) for i1 in range(len(ylabels))] xlabels = list(xlabels) ylabels = list(ylabels) xmap = dict([x[::(- 1)] for x in enumerate(xlabels)]) ymap = dict([x[::(- 1)] for x in enumerate(ylabels)]) for t in tups: z[ymap[t[1]]][xmap[t[0]]] = t[2] m = {'xtype': xtype, 'ytype': ytype, 'xaxis': xlabels, 'yaxis': ylabels, 'z': z} return m
def pivot_query_as_matrix(facet=None, facet_pivot_fields=None, **kwargs): '\n \n ' if (facet_pivot_fields is None): facet_pivot_fields = [] logger.info('Additional args: {}'.format(kwargs)) fp = search_associations(rows=0, facet_fields=[facet], facet_pivot_fields=facet_pivot_fields, **kwargs)['facet_pivot'] results = list(fp.items())[0][1] tups = [] xtype = None ytype = None xlabels = set() ylabels = set() for r in results: logger.info('R={}'.format(r)) xtype = r['field'] rv = r['value'] xlabels.add(rv) for piv in r['pivot']: ytype = piv['field'] pv = piv['value'] ylabels.add(pv) tups.append((rv, pv, piv['count'])) z = [([0] * len(xlabels)) for i1 in range(len(ylabels))] xlabels = list(xlabels) ylabels = list(ylabels) xmap = dict([x[::(- 1)] for x in enumerate(xlabels)]) ymap = dict([x[::(- 1)] for x in enumerate(ylabels)]) for t in tups: z[ymap[t[1]]][xmap[t[0]]] = t[2] m = {'xtype': xtype, 'ytype': ytype, 'xaxis': xlabels, 'yaxis': ylabels, 'z': z} return m<|docstring|>Pivot query<|endoftext|>
bdd0a7da169c2d0fe2bba992111e2dc170f7539d97d86d1a07f783bc2f3e4e6e
def search_associations_go(subject_category=None, object_category=None, relation=None, subject=None, **kwargs): '\n Perform association search using the GO golr\n ' go_golr_url = 'http://golr.geneontology.org/solr/' go_solr = pysolr.Solr(go_golr_url, timeout=5) go_solr.get_session().headers['User-Agent'] = get_user_agent(caller_name=__name__) return search_associations(subject_category, object_category, relation, subject, solr=go_solr, field_mapping=goassoc_fieldmap(), **kwargs)
Perform association search using the GO golr
ontobio/golr/golr_associations.py
search_associations_go
alliance-genome/ontobio
101
python
def search_associations_go(subject_category=None, object_category=None, relation=None, subject=None, **kwargs): '\n \n ' go_golr_url = 'http://golr.geneontology.org/solr/' go_solr = pysolr.Solr(go_golr_url, timeout=5) go_solr.get_session().headers['User-Agent'] = get_user_agent(caller_name=__name__) return search_associations(subject_category, object_category, relation, subject, solr=go_solr, field_mapping=goassoc_fieldmap(), **kwargs)
def search_associations_go(subject_category=None, object_category=None, relation=None, subject=None, **kwargs): '\n \n ' go_golr_url = 'http://golr.geneontology.org/solr/' go_solr = pysolr.Solr(go_golr_url, timeout=5) go_solr.get_session().headers['User-Agent'] = get_user_agent(caller_name=__name__) return search_associations(subject_category, object_category, relation, subject, solr=go_solr, field_mapping=goassoc_fieldmap(), **kwargs)<|docstring|>Perform association search using the GO golr<|endoftext|>
378d2f4efc939d9cfe56f0bff7a94bf6a35129b5ac2c5a6a3ce0eacf7265fc6f
def select_distinct(distinct_field=None, **kwargs): '\n select distinct values for a given field for a given query\n ' results = search_associations(rows=0, select_fields=[], facet_field_limits={distinct_field: (- 1)}, facet_fields=[distinct_field], **kwargs) return list(results['facet_counts'][distinct_field].keys())
select distinct values for a given field for a given query
ontobio/golr/golr_associations.py
select_distinct
alliance-genome/ontobio
101
python
def select_distinct(distinct_field=None, **kwargs): '\n \n ' results = search_associations(rows=0, select_fields=[], facet_field_limits={distinct_field: (- 1)}, facet_fields=[distinct_field], **kwargs) return list(results['facet_counts'][distinct_field].keys())
def select_distinct(distinct_field=None, **kwargs): '\n \n ' results = search_associations(rows=0, select_fields=[], facet_field_limits={distinct_field: (- 1)}, facet_fields=[distinct_field], **kwargs) return list(results['facet_counts'][distinct_field].keys())<|docstring|>select distinct values for a given field for a given query<|endoftext|>
267ed07077dd8545d2968a2aaecf3273209cfa420066c41099ce78bb08c2bc9e
def select_distinct_subjects(**kwargs): '\n select distinct subject IDs given a query\n ' return select_distinct(M.SUBJECT, **kwargs)
select distinct subject IDs given a query
ontobio/golr/golr_associations.py
select_distinct_subjects
alliance-genome/ontobio
101
python
def select_distinct_subjects(**kwargs): '\n \n ' return select_distinct(M.SUBJECT, **kwargs)
def select_distinct_subjects(**kwargs): '\n \n ' return select_distinct(M.SUBJECT, **kwargs)<|docstring|>select distinct subject IDs given a query<|endoftext|>
dda62b40f65e6a562d5b581fc9ecf44e80a4422bed88671c0b943c947b458427
def calculate_information_content(**kwargs): '\n\n Arguments are as for search_associations, in particular:\n\n - subject_category\n - object_category\n - subject_taxon\n\n ' results = search_associations(rows=0, select_fields=[], facet_field_limits={M.OBJECT: (- 1)}, facet_fields=[M.OBJECT], **kwargs) pop_size = None icmap = {} for (f, fc) in results['facet_counts'][M.OBJECT].items(): if ((pop_size is None) or (pop_size < fc)): pop_size = fc for (f, fc) in results['facet_counts'][M.OBJECT].items(): freq = (fc / pop_size) icmap[f] = (- math.log(freq, 2)) return icmap
Arguments are as for search_associations, in particular: - subject_category - object_category - subject_taxon
ontobio/golr/golr_associations.py
calculate_information_content
alliance-genome/ontobio
101
python
def calculate_information_content(**kwargs): '\n\n Arguments are as for search_associations, in particular:\n\n - subject_category\n - object_category\n - subject_taxon\n\n ' results = search_associations(rows=0, select_fields=[], facet_field_limits={M.OBJECT: (- 1)}, facet_fields=[M.OBJECT], **kwargs) pop_size = None icmap = {} for (f, fc) in results['facet_counts'][M.OBJECT].items(): if ((pop_size is None) or (pop_size < fc)): pop_size = fc for (f, fc) in results['facet_counts'][M.OBJECT].items(): freq = (fc / pop_size) icmap[f] = (- math.log(freq, 2)) return icmap
def calculate_information_content(**kwargs): '\n\n Arguments are as for search_associations, in particular:\n\n - subject_category\n - object_category\n - subject_taxon\n\n ' results = search_associations(rows=0, select_fields=[], facet_field_limits={M.OBJECT: (- 1)}, facet_fields=[M.OBJECT], **kwargs) pop_size = None icmap = {} for (f, fc) in results['facet_counts'][M.OBJECT].items(): if ((pop_size is None) or (pop_size < fc)): pop_size = fc for (f, fc) in results['facet_counts'][M.OBJECT].items(): freq = (fc / pop_size) icmap[f] = (- math.log(freq, 2)) return icmap<|docstring|>Arguments are as for search_associations, in particular: - subject_category - object_category - subject_taxon<|endoftext|>
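The loop above computes the standard information content IC(t) = -log2(count(t) / pop_size), with pop_size taken as the largest facet count, so the most frequent term gets IC 0. A worked sketch with hypothetical counts:

    import math

    facet_counts = {'HP:0000118': 1000, 'HP:0001250': 125}   # hypothetical term counts
    pop_size = max(facet_counts.values())                    # 1000, as the loop above derives
    icmap = {t: -math.log(c / pop_size, 2) for t, c in facet_counts.items()}
    # icmap['HP:0000118'] -> 0.0 bits; icmap['HP:0001250'] -> 3.0 bits (freq 0.125)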
545d25f7371cbe3f0f1a82c39dbf0200aafce6533cc9362af1d695f3e2a5f2d9
def RoundCorner(image, radius): '\n\n Generate a rounded-corner version of the given image.\n\n ' image = image.convert('RGBA') mask = Image.new('RGBA', image.size, (0, 0, 0, 0)) draw = aggdraw.Draw(mask) brush = aggdraw.Brush('black') (width, height) = mask.size draw.rectangle((0, 0, mask.size[0], mask.size[1]), aggdraw.Brush('white')) draw.pieslice((0, 0, (radius * 2), (radius * 2)), 90, 180, None, brush) draw.pieslice(((width - (radius * 2)), 0, width, (radius * 2)), 0, 90, None, brush) draw.pieslice((0, (height - (radius * 2)), (radius * 2), height), 180, 270, None, brush) draw.pieslice(((width - (radius * 2)), (height - (radius * 2)), width, height), 270, 360, None, brush) draw.rectangle((radius, radius, (width - radius), (height - radius)), brush) draw.rectangle((radius, 0, (width - radius), radius), brush) draw.rectangle((0, radius, radius, (height - radius)), brush) draw.rectangle((radius, (height - radius), (width - radius), height), brush) draw.rectangle(((width - radius), radius, width, (height - radius)), brush) draw.flush() del draw return ImageChops.add(mask, image)
Generate a rounded-corner version of the given image.
py/jobs/tasks/graphs/textbox.py
RoundCorner
mabotech/mabo.task
0
python
def RoundCorner(image, radius): '\n\n \n\n ' image = image.convert('RGBA') mask = Image.new('RGBA', image.size, (0, 0, 0, 0)) draw = aggdraw.Draw(mask) brush = aggdraw.Brush('black') (width, height) = mask.size draw.rectangle((0, 0, mask.size[0], mask.size[1]), aggdraw.Brush('white')) draw.pieslice((0, 0, (radius * 2), (radius * 2)), 90, 180, None, brush) draw.pieslice(((width - (radius * 2)), 0, width, (radius * 2)), 0, 90, None, brush) draw.pieslice((0, (height - (radius * 2)), (radius * 2), height), 180, 270, None, brush) draw.pieslice(((width - (radius * 2)), (height - (radius * 2)), width, height), 270, 360, None, brush) draw.rectangle((radius, radius, (width - radius), (height - radius)), brush) draw.rectangle((radius, 0, (width - radius), radius), brush) draw.rectangle((0, radius, radius, (height - radius)), brush) draw.rectangle((radius, (height - radius), (width - radius), height), brush) draw.rectangle(((width - radius), radius, width, (height - radius)), brush) draw.flush() del draw return ImageChops.add(mask, image)
def RoundCorner(image, radius): '\n\n \n\n ' image = image.convert('RGBA') mask = Image.new('RGBA', image.size, (0, 0, 0, 0)) draw = aggdraw.Draw(mask) brush = aggdraw.Brush('black') (width, height) = mask.size draw.rectangle((0, 0, mask.size[0], mask.size[1]), aggdraw.Brush('white')) draw.pieslice((0, 0, (radius * 2), (radius * 2)), 90, 180, None, brush) draw.pieslice(((width - (radius * 2)), 0, width, (radius * 2)), 0, 90, None, brush) draw.pieslice((0, (height - (radius * 2)), (radius * 2), height), 180, 270, None, brush) draw.pieslice(((width - (radius * 2)), (height - (radius * 2)), width, height), 270, 360, None, brush) draw.rectangle((radius, radius, (width - radius), (height - radius)), brush) draw.rectangle((radius, 0, (width - radius), radius), brush) draw.rectangle((0, radius, radius, (height - radius)), brush) draw.rectangle((radius, (height - radius), (width - radius), height), brush) draw.rectangle(((width - radius), radius, width, (height - radius)), brush) draw.flush() del draw return ImageChops.add(mask, image)<|docstring|>Generate a rounded-corner version of the given image.<|endoftext|>
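A usage sketch for RoundCorner(), assuming Pillow and the aggdraw package used above are installed; file names are placeholders. The masked corners come out white because the mask rectangle is drawn with a white brush before the black shapes are overlaid:

    from PIL import Image

    im = Image.open('photo.png')          # any input image; path is illustrative
    rounded = RoundCorner(im, 20)         # 20px corner radius
    rounded.save('photo_rounded.png')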
a07b239c35fe34b14f08f7f2b5205b9dedcf4cbdcd5d32298d00a9148031183e
def reduce_opacity(im, opacity): 'Returns an image with reduced opacity.' assert ((opacity >= 0) and (opacity <= 1)) if (im.mode != 'RGBA'): im = im.convert('RGBA') else: im = im.copy() alpha = im.split()[3] alpha = ImageEnhance.Brightness(alpha).enhance(opacity) im.putalpha(alpha) return im
Returns an image with reduced opacity.
py/jobs/tasks/graphs/textbox.py
reduce_opacity
mabotech/mabo.task
0
python
def reduce_opacity(im, opacity): assert ((opacity >= 0) and (opacity <= 1)) if (im.mode != 'RGBA'): im = im.convert('RGBA') else: im = im.copy() alpha = im.split()[3] alpha = ImageEnhance.Brightness(alpha).enhance(opacity) im.putalpha(alpha) return im
def reduce_opacity(im, opacity): assert ((opacity >= 0) and (opacity <= 1)) if (im.mode != 'RGBA'): im = im.convert('RGBA') else: im = im.copy() alpha = im.split()[3] alpha = ImageEnhance.Brightness(alpha).enhance(opacity) im.putalpha(alpha) return im<|docstring|>Returns an image with reduced opacity.<|endoftext|>
2c9cf914527e96dfe01ef4e7ff092e8e57b3edd3728b36142539a20c1afcad5d
def Watermark(image, markimage, position, opacity=1): 'Adds a watermark to an image.' im = image mark = markimage if (opacity < 1): mark = reduce_opacity(mark, opacity) if (im.mode != 'RGBA'): im = im.convert('RGBA') layer = Image.new('RGBA', im.size, (0, 0, 0, 0)) if (position == 'tile'): for y in range(0, im.size[1], mark.size[1]): for x in range(0, im.size[0], mark.size[0]): layer.paste(mark, (x, y)) elif (position == 'scale'): ratio = min((float(im.size[0]) / mark.size[0]), (float(im.size[1]) / mark.size[1])) w = int((mark.size[0] * ratio)) h = int((mark.size[1] * ratio)) mark = mark.resize((w, h)) layer.paste(mark, (((im.size[0] - w) / 2), ((im.size[1] - h) / 2))) else: layer.paste(mark, position) return Image.composite(layer, im, layer)
Adds a watermark to an image.
py/jobs/tasks/graphs/textbox.py
Watermark
mabotech/mabo.task
0
python
def Watermark(image, markimage, position, opacity=1): im = image mark = markimage if (opacity < 1): mark = reduce_opacity(mark, opacity) if (im.mode != 'RGBA'): im = im.convert('RGBA') layer = Image.new('RGBA', im.size, (0, 0, 0, 0)) if (position == 'tile'): for y in range(0, im.size[1], mark.size[1]): for x in range(0, im.size[0], mark.size[0]): layer.paste(mark, (x, y)) elif (position == 'scale'): ratio = min((float(im.size[0]) / mark.size[0]), (float(im.size[1]) / mark.size[1])) w = int((mark.size[0] * ratio)) h = int((mark.size[1] * ratio)) mark = mark.resize((w, h)) layer.paste(mark, (((im.size[0] - w) / 2), ((im.size[1] - h) / 2))) else: layer.paste(mark, position) return Image.composite(layer, im, layer)
def Watermark(image, markimage, position, opacity=1): im = image mark = markimage if (opacity < 1): mark = reduce_opacity(mark, opacity) if (im.mode != 'RGBA'): im = im.convert('RGBA') layer = Image.new('RGBA', im.size, (0, 0, 0, 0)) if (position == 'tile'): for y in range(0, im.size[1], mark.size[1]): for x in range(0, im.size[0], mark.size[0]): layer.paste(mark, (x, y)) elif (position == 'scale'): ratio = min((float(im.size[0]) / mark.size[0]), (float(im.size[1]) / mark.size[1])) w = int((mark.size[0] * ratio)) h = int((mark.size[1] * ratio)) mark = mark.resize((w, h)) layer.paste(mark, (((im.size[0] - w) / 2), ((im.size[1] - h) / 2))) else: layer.paste(mark, position) return Image.composite(layer, im, layer)<|docstring|>Adds a watermark to an image.<|endoftext|>
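A usage sketch for the three Watermark() positioning modes; file names are placeholders. One caution: the 'scale' branch computes its paste offsets with '/', which yields floats under Python 3, so this code is Python 2-era as written:

    from PIL import Image

    base = Image.open('page.png')
    logo = Image.open('logo.png')

    tiled  = Watermark(base, logo, 'tile', opacity=0.3)    # repeat the mark across the image
    scaled = Watermark(base, logo, 'scale', opacity=0.5)   # fit the mark and center it
    corner = Watermark(base, logo, (10, 10), opacity=1)    # paste at an explicit (x, y)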
790c0b400abd906921cdb8cfcdd46097c848b017afd1da0296838b78d635afef
def Signature(image, type, text, position, font=None, color=(255, 0, 0)): '\n\n Imprints a PIL image with the indicated text in the lower-right corner.\n\n ' if (type not in ['c', 'v']): raise ValueError("type must be 'c' or 'v'") if (image.mode != 'RGBA'): image = image.convert('RGBA') textdraw = ImageDraw.Draw(image) j = 1 lst = text.split('\n') lst.reverse() z = 0 newlst = [] maxline = '' max = 0 for line in lst: if (len(line) > 25): l1 = line[0:25] maxline = l1 max = 25 l2 = line[25:] newlst.append(l2) newlst.append(l1) else: newlst.append(line) if (len(line) > max): max = len(line) maxline = line z = (z + 1) textsize = textdraw.textsize(maxline, font=font) for line in newlst: textpos = [0, 0] for i in [0, 1]: if (i == 1): textpos[i] = ((image.size[i] - (j * textsize[i])) - position[i]) else: textpos[i] = ((image.size[i] - textsize[i]) - position[i]) textdraw.text(textpos, line, font=font, fill=color) j = (j + 1) del textdraw return image
Imprints a PIL image with the indicated text in the lower-right corner.
py/jobs/tasks/graphs/textbox.py
Signature
mabotech/mabo.task
0
python
def Signature(image, type, text, position, font=None, color=(255, 0, 0)): '\n\n \n\n ' if (type not in ['c', 'v']): raise ValueError("type must be 'c' or 'v'") if (image.mode != 'RGBA'): image = image.convert('RGBA') textdraw = ImageDraw.Draw(image) j = 1 lst = text.split('\n') lst.reverse() z = 0 newlst = [] maxline = '' max = 0 for line in lst: if (len(line) > 25): l1 = line[0:25] maxline = l1 max = 25 l2 = line[25:] newlst.append(l2) newlst.append(l1) else: newlst.append(line) if (len(line) > max): max = len(line) maxline = line z = (z + 1) textsize = textdraw.textsize(maxline, font=font) for line in newlst: textpos = [0, 0] for i in [0, 1]: if (i == 1): textpos[i] = ((image.size[i] - (j * textsize[i])) - position[i]) else: textpos[i] = ((image.size[i] - textsize[i]) - position[i]) textdraw.text(textpos, line, font=font, fill=color) j = (j + 1) del textdraw return image
def Signature(image, type, text, position, font=None, color=(255, 0, 0)): '\n\n \n\n ' if (type not in ['c', 'v']): raise ValueError("type must be 'c' or 'v'") if (image.mode != 'RGBA'): image = image.convert('RGBA') textdraw = ImageDraw.Draw(image) j = 1 lst = text.split('\n') lst.reverse() z = 0 newlst = [] maxline = '' max = 0 for line in lst: if (len(line) > 25): l1 = line[0:25] maxline = l1 max = 25 l2 = line[25:] newlst.append(l2) newlst.append(l1) else: newlst.append(line) if (len(line) > max): max = len(line) maxline = line z = (z + 1) textsize = textdraw.textsize(maxline, font=font) for line in newlst: textpos = [0, 0] for i in [0, 1]: if (i == 1): textpos[i] = ((image.size[i] - (j * textsize[i])) - position[i]) else: textpos[i] = ((image.size[i] - textsize[i]) - position[i]) textdraw.text(textpos, line, font=font, fill=color) j = (j + 1) del textdraw return image<|docstring|>Imprints a PIL image with the indicated text in the lower-right corner.<|endoftext|>
321620fc8b66cb014e824b4028191e72c8abc805978a2484cbdc1ba7f23de33d
@property def dilute_initial(self): 'Initial atom density for nuclides with zero initial concentration' return self._dilute_initial
Initial atom density for nuclides with zero initial concentration
openmc/deplete/abc.py
dilute_initial
xiashaopeng/openmc
262
python
@property def dilute_initial(self): return self._dilute_initial
@property def dilute_initial(self): return self._dilute_initial<|docstring|>Initial atom density for nuclides with zero initial concentration<|endoftext|>
76e8736c3fbce5074cf08924601b8fdbc72ae887dad32e1010475ad682526a8d
@abstractmethod def __call__(self, vec, source_rate): 'Runs a simulation.\n\n Parameters\n ----------\n vec : list of numpy.ndarray\n Total atoms to be used in function.\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n\n Returns\n -------\n openmc.deplete.OperatorResult\n Eigenvalue and reaction rates resulting from transport operator\n\n '
Runs a simulation. Parameters ---------- vec : list of numpy.ndarray Total atoms to be used in function. source_rate : float Power in [W] or source rate in [neutron/sec] Returns ------- openmc.deplete.OperatorResult Eigenvalue and reaction rates resulting from transport operator
openmc/deplete/abc.py
__call__
xiashaopeng/openmc
262
python
@abstractmethod def __call__(self, vec, source_rate): 'Runs a simulation.\n\n Parameters\n ----------\n vec : list of numpy.ndarray\n Total atoms to be used in function.\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n\n Returns\n -------\n openmc.deplete.OperatorResult\n Eigenvalue and reaction rates resulting from transport operator\n\n '
@abstractmethod def __call__(self, vec, source_rate): 'Runs a simulation.\n\n Parameters\n ----------\n vec : list of numpy.ndarray\n Total atoms to be used in function.\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n\n Returns\n -------\n openmc.deplete.OperatorResult\n Eigenvalue and reaction rates resulting from transport operator\n\n '<|docstring|>Runs a simulation. Parameters ---------- vec : list of numpy.ndarray Total atoms to be used in function. source_rate : float Power in [W] or source rate in [neutron/sec] Returns ------- openmc.deplete.OperatorResult Eigenvalue and reaction rates resulting from transport operator<|endoftext|>
a6f351c98a89c9fa946822616d7b669d7fe4afc061ddeadcff8636923bd8f842
@abstractmethod def initial_condition(self): 'Performs final setup and returns initial condition.\n\n Returns\n -------\n list of numpy.ndarray\n Total density for initial conditions.\n '
Performs final setup and returns initial condition. Returns ------- list of numpy.ndarray Total density for initial conditions.
openmc/deplete/abc.py
initial_condition
xiashaopeng/openmc
262
python
@abstractmethod def initial_condition(self): 'Performs final setup and returns initial condition.\n\n Returns\n -------\n list of numpy.ndarray\n Total density for initial conditions.\n '
@abstractmethod def initial_condition(self): 'Performs final setup and returns initial condition.\n\n Returns\n -------\n list of numpy.ndarray\n Total density for initial conditions.\n '<|docstring|>Performs final setup and returns initial condition. Returns ------- list of numpy.ndarray Total density for initial conditions.<|endoftext|>
b1f30bf1f37c8a619e119424b3ee6627581535cbcc9cec378087df7fc99e85b4
@abstractmethod def get_results_info(self): 'Returns volume list, cell lists, and nuc lists.\n\n Returns\n -------\n volume : dict of str to float\n Volumes corresponding to materials in burn_list\n nuc_list : list of str\n A list of all nuclide names. Used for sorting the simulation.\n burn_list : list of int\n A list of all cell IDs to be burned. Used for sorting the\n simulation.\n full_burn_list : list of int\n All burnable materials in the geometry.\n '
Returns volume list, cell lists, and nuc lists. Returns ------- volume : dict of str to float Volumes corresponding to materials in burn_list nuc_list : list of str A list of all nuclide names. Used for sorting the simulation. burn_list : list of int A list of all cell IDs to be burned. Used for sorting the simulation. full_burn_list : list of int All burnable materials in the geometry.
openmc/deplete/abc.py
get_results_info
xiashaopeng/openmc
262
python
@abstractmethod def get_results_info(self): 'Returns volume list, cell lists, and nuc lists.\n\n Returns\n -------\n volume : dict of str to float\n Volumes corresponding to materials in burn_list\n nuc_list : list of str\n A list of all nuclide names. Used for sorting the simulation.\n burn_list : list of int\n A list of all cell IDs to be burned. Used for sorting the\n simulation.\n full_burn_list : list of int\n All burnable materials in the geometry.\n '
@abstractmethod def get_results_info(self): 'Returns volume list, cell lists, and nuc lists.\n\n Returns\n -------\n volume : dict of str to float\n Volumes corresponding to materials in burn_list\n nuc_list : list of str\n A list of all nuclide names. Used for sorting the simulation.\n burn_list : list of int\n A list of all cell IDs to be burned. Used for sorting the\n simulation.\n full_burn_list : list of int\n All burnable materials in the geometry.\n '<|docstring|>Returns volume list, cell lists, and nuc lists. Returns ------- volume : dict of str to float Volumes corresponding to materials in burn_list nuc_list : list of str A list of all nuclide names. Used for sorting the simulation. burn_list : list of int A list of all cell IDs to be burned. Used for sorting the simulation. full_burn_list : list of int All burnable materials in the geometry.<|endoftext|>
64aa8205aa49511d7a6e875559076a0834ae0afdab7f028416d8580d0912ba54
@abstractmethod def write_bos_data(self, step): 'Document beginning of step data for a given step\n\n Called at the beginning of a depletion step and at\n the final point in the simulation.\n\n Parameters\n ----------\n step : int\n Current depletion step including restarts\n '
Document beginning of step data for a given step Called at the beginning of a depletion step and at the final point in the simulation. Parameters ---------- step : int Current depletion step including restarts
openmc/deplete/abc.py
write_bos_data
xiashaopeng/openmc
262
python
@abstractmethod def write_bos_data(self, step): 'Document beginning of step data for a given step\n\n Called at the beginning of a depletion step and at\n the final point in the simulation.\n\n Parameters\n ----------\n step : int\n Current depletion step including restarts\n '
@abstractmethod def write_bos_data(self, step): 'Document beginning of step data for a given step\n\n Called at the beginning of a depletion step and at\n the final point in the simulation.\n\n Parameters\n ----------\n step : int\n Current depletion step including restarts\n '<|docstring|>Document beginning of step data for a given step Called at the beginning of a depletion step and at the final point in the simulation. Parameters ---------- step : int Current depletion step including restarts<|endoftext|>
a60e097dec9577a2d53f7b0c3e8544b0c0ae4106778da0776b66d5db5b852dd0
@abstractmethod def generate_tallies(self, materials, scores): 'Use the C API to build tallies needed for reaction rates'
Use the C API to build tallies needed for reaction rates
openmc/deplete/abc.py
generate_tallies
xiashaopeng/openmc
262
python
@abstractmethod def generate_tallies(self, materials, scores):
@abstractmethod def generate_tallies(self, materials, scores): <|docstring|>Use the C API to build tallies needed for reaction rates<|endoftext|>
14d377c2ecec9f80af576c8335a27d77f33add2ac8927824821c174b5e33e091
@property def nuclides(self): 'List of nuclides with requested reaction rates' return self._nuclides
List of nuclides with requested reaction rates
openmc/deplete/abc.py
nuclides
xiashaopeng/openmc
262
python
@property def nuclides(self): return self._nuclides
@property def nuclides(self): return self._nuclides<|docstring|>List of nuclides with requested reaction rates<|endoftext|>
645d97f1eea76b9618d7b30d46e5e2514d7048285e64f619decc28e9d1abd5ed
@abstractmethod def get_material_rates(self, mat_id, nuc_index, react_index): 'Return 2D array of [nuclide, reaction] reaction rates\n\n Parameters\n ----------\n mat_id : int\n Unique ID for the requested material\n nuc_index : list of str\n Ordering of desired nuclides\n react_index : list of str\n Ordering of reactions\n '
Return 2D array of [nuclide, reaction] reaction rates Parameters ---------- mat_id : int Unique ID for the requested material nuc_index : list of str Ordering of desired nuclides react_index : list of str Ordering of reactions
openmc/deplete/abc.py
get_material_rates
xiashaopeng/openmc
262
python
@abstractmethod def get_material_rates(self, mat_id, nuc_index, react_index): 'Return 2D array of [nuclide, reaction] reaction rates\n\n Parameters\n ----------\n mat_id : int\n Unique ID for the requested material\n nuc_index : list of str\n Ordering of desired nuclides\n react_index : list of str\n Ordering of reactions\n '
@abstractmethod def get_material_rates(self, mat_id, nuc_index, react_index): 'Return 2D array of [nuclide, reaction] reaction rates\n\n Parameters\n ----------\n mat_id : int\n Unique ID for the requested material\n nuc_index : list of str\n Ordering of desired nuclides\n react_index : list of str\n Ordering of reactions\n '<|docstring|>Return 2D array of [nuclide, reaction] reaction rates Parameters ---------- mat_id : int Unique ID for the requested material nuc_index : list of str Ordering of desired nuclides react_index : list of str Ordering of reactions<|endoftext|>
e5d0a86b511387bfff5403aabe2cd578beaedfec4d73de6eede4581c009b6be0
def divide_by_adens(self, number): 'Normalize reaction rates by number of nuclides\n\n Acts on the current material examined by :meth:`get_material_rates`\n\n Parameters\n ----------\n number : iterable of float\n Number density [atoms/b-cm] of each nuclide tracked in the\n calculation.\n\n Returns\n -------\n results : numpy.ndarray\n Array of reaction rates of shape ``(n_nuclides, n_rxns)``\n normalized by the number of nuclides\n ' mask = nonzero(number) results = self._results_cache for col in range(results.shape[1]): results[(mask, col)] /= number[mask] return results
Normalize reaction rates by number of nuclides Acts on the current material examined by :meth:`get_material_rates` Parameters ---------- number : iterable of float Number density [atoms/b-cm] of each nuclide tracked in the calculation. Returns ------- results : numpy.ndarray Array of reaction rates of shape ``(n_nuclides, n_rxns)`` normalized by the number of nuclides
openmc/deplete/abc.py
divide_by_adens
xiashaopeng/openmc
262
python
def divide_by_adens(self, number): 'Normalize reaction rates by number of nuclides\n\n Acts on the current material examined by :meth:`get_material_rates`\n\n Parameters\n ----------\n number : iterable of float\n Number density [atoms/b-cm] of each nuclide tracked in the\n calculation.\n\n Returns\n -------\n results : numpy.ndarray\n Array of reaction rates of shape ``(n_nuclides, n_rxns)``\n normalized by the number of nuclides\n ' mask = nonzero(number) results = self._results_cache for col in range(results.shape[1]): results[(mask, col)] /= number[mask] return results
def divide_by_adens(self, number): 'Normalize reaction rates by number of nuclides\n\n Acts on the current material examined by :meth:`get_material_rates`\n\n Parameters\n ----------\n number : iterable of float\n Number density [atoms/b-cm] of each nuclide tracked in the\n calculation.\n\n Returns\n -------\n results : numpy.ndarray\n Array of reaction rates of shape ``(n_nuclides, n_rxns)``\n normalized by the number of nuclides\n ' mask = nonzero(number) results = self._results_cache for col in range(results.shape[1]): results[(mask, col)] /= number[mask] return results<|docstring|>Normalize reaction rates by number of nuclides Acts on the current material examined by :meth:`get_material_rates` Parameters ---------- number : iterable of float Number density [atoms/b-cm] of each nuclide tracked in the calculation. Returns ------- results : numpy.ndarray Array of reaction rates of shape ``(n_nuclides, n_rxns)`` normalized by the number of nuclides<|endoftext|>
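A quick numpy-only check of the masked division above (sample densities invented); note the row for the zero-density nuclide is left untouched, avoiding division by zero:

import numpy as np

number = np.array([2.0, 0.0, 4.0])              # atom densities; one nuclide absent
results = np.array([[8.0, 2.0],
                    [5.0, 5.0],
                    [4.0, 1.0]])                # raw [nuclide, reaction] rates

mask = np.nonzero(number)                       # indices of non-zero densities
for col in range(results.shape[1]):
    results[mask, col] /= number[mask]

print(results)                                  # rows 0 and 2 normalized; row 1 kept as-is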
db7c0fdb07a0e2b952db2b647ef8f7b06645e14b95a7a2f70bea520d6b0a8cf0
def reset(self): 'Reset state for normalization'
Reset state for normalization
openmc/deplete/abc.py
reset
xiashaopeng/openmc
262
python
def reset(self):
def reset(self): <|docstring|>Reset state for normalization<|endoftext|>
5ed49aa13bbd01c767b04610e3b0d086638af8cb9cdff470c890be90acdecf94
@abstractmethod def prepare(self, chain_nucs, rate_index): 'Perform work needed to obtain energy produced\n\n This method is called prior to the transport simulations\n in :meth:`openmc.deplete.Operator.initial_condition`. Only used for\n energy-based normalization.\n\n Parameters\n ----------\n chain_nucs : list of str\n All nuclides to be tracked in this problem\n rate_index : dict of str to int\n Mapping from nuclide name to index in the\n `fission_rates` for :meth:`update`.\n '
Perform work needed to obtain energy produced This method is called prior to the transport simulations in :meth:`openmc.deplete.Operator.initial_condition`. Only used for energy-based normalization. Parameters ---------- chain_nucs : list of str All nuclides to be tracked in this problem rate_index : dict of str to int Mapping from nuclide name to index in the `fission_rates` for :meth:`update`.
openmc/deplete/abc.py
prepare
xiashaopeng/openmc
262
python
@abstractmethod def prepare(self, chain_nucs, rate_index): 'Perform work needed to obtain energy produced\n\n This method is called prior to the transport simulations\n in :meth:`openmc.deplete.Operator.initial_condition`. Only used for\n energy-based normalization.\n\n Parameters\n ----------\n chain_nucs : list of str\n All nuclides to be tracked in this problem\n rate_index : dict of str to int\n Mapping from nuclide name to index in the\n `fission_rates` for :meth:`update`.\n '
@abstractmethod def prepare(self, chain_nucs, rate_index): 'Perform work needed to obtain energy produced\n\n This method is called prior to the transport simulations\n in :meth:`openmc.deplete.Operator.initial_condition`. Only used for\n energy-based normalization.\n\n Parameters\n ----------\n chain_nucs : list of str\n All nuclides to be tracked in this problem\n rate_index : dict of str to int\n Mapping from nuclide name to index in the\n `fission_rates` for :meth:`update`.\n '<|docstring|>Perform work needed to obtain energy produced This method is called prior to the transport simulations in :meth:`openmc.deplete.Operator.initial_condition`. Only used for energy-based normalization. Parameters ---------- chain_nucs : list of str All nuclides to be tracked in this problem rate_index : dict of str to int Mapping from nuclide name to index in the `fission_rates` for :meth:`update`.<|endoftext|>
0e887dd57e74b2c7daf336a7e8ea5794b000c6571fc478bb311cd5efd386aad3
def update(self, fission_rates): 'Update the normalization based on fission rates (only used for\n energy-based normalization)\n\n Parameters\n ----------\n fission_rates : numpy.ndarray\n fission reaction rate for each isotope in the specified\n material. Should be ordered corresponding to initial\n ``rate_index`` used in :meth:`prepare`\n '
Update the normalization based on fission rates (only used for energy-based normalization) Parameters ---------- fission_rates : numpy.ndarray fission reaction rate for each isotope in the specified material. Should be ordered corresponding to initial ``rate_index`` used in :meth:`prepare`
openmc/deplete/abc.py
update
xiashaopeng/openmc
262
python
def update(self, fission_rates): 'Update the normalization based on fission rates (only used for\n energy-based normalization)\n\n Parameters\n ----------\n fission_rates : numpy.ndarray\n fission reaction rate for each isotope in the specified\n material. Should be ordered corresponding to initial\n ``rate_index`` used in :meth:`prepare`\n '
def update(self, fission_rates): 'Update the normalization based on fission rates (only used for\n energy-based normalization)\n\n Parameters\n ----------\n fission_rates : numpy.ndarray\n fission reaction rate for each isotope in the specified\n material. Should be ordered corresponding to initial\n ``rate_index`` used in :meth:`prepare`\n '<|docstring|>Update the normalization based on fission rates (only used for energy-based normalization) Parameters ---------- fission_rates : numpy.ndarray fission reaction rate for each isotope in the specified material. Should be ordered corresponding to initial ``rate_index`` used in :meth:`prepare`<|endoftext|>
a5321694c0693a034e1c8715e8885309ce9fa4288b5cdf6b891bed4ee6632291
@abstractmethod def factor(self, source_rate): 'Return normalization factor\n\n Parameters\n ----------\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n\n Returns\n -------\n float\n Normalization factor for tallies\n\n '
Return normalization factor Parameters ---------- source_rate : float Power in [W] or source rate in [neutron/sec] Returns ------- float Normalization factor for tallies
openmc/deplete/abc.py
factor
xiashaopeng/openmc
262
python
@abstractmethod def factor(self, source_rate): 'Return normalization factor\n\n Parameters\n ----------\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n\n Returns\n -------\n float\n Normalization factor for tallies\n\n '
@abstractmethod def factor(self, source_rate): 'Return normalization factor\n\n Parameters\n ----------\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n\n Returns\n -------\n float\n Normalization factor for tallies\n\n '<|docstring|>Return normalization factor Parameters ---------- source_rate : float Power in [W] or source rate in [neutron/sec] Returns ------- float Normalization factor for tallies<|endoftext|>
8b0c202449e47c680d4c5e0a52880a7ecf06b5aec127c2b7e99f861ff48a00c7
@abstractmethod def weighted_yields(self, local_mat_index): 'Return fission yields for a specific material\n\n Parameters\n ----------\n local_mat_index : int\n Index for the material with requested fission yields.\n Should correspond to the material represented in\n ``mat_indexes[local_mat_index]`` during\n :meth:`generate_tallies`.\n\n Returns\n -------\n library : collections.abc.Mapping\n Dictionary-like object mapping ``{str: {str: float}}``.\n This reflects fission yields for ``{parent: {product: fyield}}``.\n '
Return fission yields for a specific material Parameters ---------- local_mat_index : int Index for the material with requested fission yields. Should correspond to the material represented in ``mat_indexes[local_mat_index]`` during :meth:`generate_tallies`. Returns ------- library : collections.abc.Mapping Dictionary-like object mapping ``{str: {str: float}}``. This reflects fission yields for ``{parent: {product: fyield}}``.
openmc/deplete/abc.py
weighted_yields
xiashaopeng/openmc
262
python
@abstractmethod def weighted_yields(self, local_mat_index): 'Return fission yields for a specific material\n\n Parameters\n ----------\n local_mat_index : int\n Index for the material with requested fission yields.\n Should correspond to the material represented in\n ``mat_indexes[local_mat_index]`` during\n :meth:`generate_tallies`.\n\n Returns\n -------\n library : collections.abc.Mapping\n Dictionary-like object mapping ``{str: {str: float}}``.\n This reflects fission yields for ``{parent: {product: fyield}}``.\n '
@abstractmethod def weighted_yields(self, local_mat_index): 'Return fission yields for a specific material\n\n Parameters\n ----------\n local_mat_index : int\n Index for the material with requested fission yields.\n Should correspond to the material represented in\n ``mat_indexes[local_mat_index]`` during\n :meth:`generate_tallies`.\n\n Returns\n -------\n library : collections.abc.Mapping\n Dictionary-like object mapping ``{str: {str: float}}``.\n This reflects fission yields for ``{parent: {product: fyield}}``.\n '<|docstring|>Return fission yields for a specific material Parameters ---------- local_mat_index : int Index for the material with requested fission yields. Should correspond to the material represented in ``mat_indexes[local_mat_index]`` during :meth:`generate_tallies`. Returns ------- library : collections.abc.Mapping Dictionary-like object mapping ``{str: {str: float}}``. This reflects fission yields for ``{parent: {product: fyield}}``.<|endoftext|>
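The mapping shape promised above, ``{parent: {product: fyield}}``, looks like this in plain Python (yield values invented for illustration):

yields = {'U235': {'Xe135': 0.06561, 'I135': 0.02927}}   # parent -> product -> yield
print(yields['U235']['Xe135'])                           # 0.06561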
2096d3192e0b49de573770099750051139b4db3bc6e1e05b790cf889fdf393f9
@staticmethod def unpack(): 'Unpack tally data prior to computing fission yields.\n\n Called after a :meth:`openmc.deplete.Operator.__call__`\n routine during the normalization of reaction rates.\n\n Not necessary for all subclasses to implement, unless tallies\n are used.\n '
Unpack tally data prior to computing fission yields. Called after a :meth:`openmc.deplete.Operator.__call__` routine during the normalization of reaction rates. Not necessary for all subclasses to implement, unless tallies are used.
openmc/deplete/abc.py
unpack
xiashaopeng/openmc
262
python
@staticmethod def unpack(): 'Unpack tally data prior to computing fission yields.\n\n Called after a :meth:`openmc.deplete.Operator.__call__`\n routine during the normalization of reaction rates.\n\n Not necessary for all subclasses to implement, unless tallies\n are used.\n '
@staticmethod def unpack(): 'Unpack tally data prior to computing fission yields.\n\n Called after a :meth:`openmc.deplete.Operator.__call__`\n routine during the normalization of reaction rates.\n\n Not necessary for all subclasses to implement, unless tallies\n are used.\n '<|docstring|>Unpack tally data prior to computing fission yields. Called after a :meth:`openmc.deplete.Operator.__call__` routine during the normalization of reaction rates. Not necessary for all subclasses to implement, unless tallies are used.<|endoftext|>
be589c43db4130e50c5dd844e64683cd0093e3e9daff0f0b00641496c159c644
@staticmethod def generate_tallies(materials, mat_indexes): 'Construct tallies necessary for computing fission yields\n\n Called during the operator set up phase prior to depleting.\n Not necessary for subclasses to implement\n\n Parameters\n ----------\n materials : iterable of C-API materials\n Materials to be used in :class:`openmc.lib.MaterialFilter`\n mat_indexes : iterable of int\n Indices of tallied materials that will have their fission\n yields computed by this helper. Necessary as the\n :class:`openmc.deplete.Operator` that uses this helper\n may only burn a subset of all materials when running\n in parallel mode.\n '
Construct tallies necessary for computing fission yields Called during the operator set up phase prior to depleting. Not necessary for subclasses to implement Parameters ---------- materials : iterable of C-API materials Materials to be used in :class:`openmc.lib.MaterialFilter` mat_indexes : iterable of int Indices of tallied materials that will have their fission yields computed by this helper. Necessary as the :class:`openmc.deplete.Operator` that uses this helper may only burn a subset of all materials when running in parallel mode.
openmc/deplete/abc.py
generate_tallies
xiashaopeng/openmc
262
python
@staticmethod def generate_tallies(materials, mat_indexes): 'Construct tallies necessary for computing fission yields\n\n Called during the operator set up phase prior to depleting.\n Not necessary for subclasses to implement\n\n Parameters\n ----------\n materials : iterable of C-API materials\n Materials to be used in :class:`openmc.lib.MaterialFilter`\n mat_indexes : iterable of int\n Indices of tallied materials that will have their fission\n yields computed by this helper. Necessary as the\n :class:`openmc.deplete.Operator` that uses this helper\n may only burn a subset of all materials when running\n in parallel mode.\n '
@staticmethod def generate_tallies(materials, mat_indexes): 'Construct tallies necessary for computing fission yields\n\n Called during the operator set up phase prior to depleting.\n Not necessary for subclasses to implement\n\n Parameters\n ----------\n materials : iterable of C-API materials\n Materials to be used in :class:`openmc.lib.MaterialFilter`\n mat_indexes : iterable of int\n Indices of tallied materials that will have their fission\n yields computed by this helper. Necessary as the\n :class:`openmc.deplete.Operator` that uses this helper\n may only burn a subset of all materials when running\n in parallel mode.\n '<|docstring|>Construct tallies necessary for computing fission yields Called during the operator set up phase prior to depleting. Not necessary for subclasses to implement Parameters ---------- materials : iterable of C-API materials Materials to be used in :class:`openmc.lib.MaterialFilter` mat_indexes : iterable of int Indices of tallied materials that will have their fission yields computed by this helper. Necessary as the :class:`openmc.deplete.Operator` that uses this helper may only burn a subset of all materials when running in parallel mode.<|endoftext|>
4ad94b9ecbf78eb61ad3fe55995c371e736357ef6fdaf6113fbc3dea124fe72d
def update_tally_nuclides(self, nuclides): 'Return nuclides with non-zero densities and yield data\n\n Parameters\n ----------\n nuclides : iterable of str\n Nuclides with non-zero densities from the\n :class:`openmc.deplete.Operator`\n\n Returns\n -------\n nuclides : list of str\n Intersection of nuclides that the :class:`openmc.deplete.Operator`\n says have non-zero densities at this stage and those that\n have yield data. Sorted by nuclide name\n\n ' return sorted((self._chain_set & set(nuclides)))
Return nuclides with non-zero densities and yield data Parameters ---------- nuclides : iterable of str Nuclides with non-zero densities from the :class:`openmc.deplete.Operator` Returns ------- nuclides : list of str Intersection of nuclides that the :class:`openmc.deplete.Operator` says have non-zero densities at this stage and those that have yield data. Sorted by nuclide name
openmc/deplete/abc.py
update_tally_nuclides
xiashaopeng/openmc
262
python
def update_tally_nuclides(self, nuclides): 'Return nuclides with non-zero densities and yield data\n\n Parameters\n ----------\n nuclides : iterable of str\n Nuclides with non-zero densities from the\n :class:`openmc.deplete.Operator`\n\n Returns\n -------\n nuclides : list of str\n Intersection of nuclides that the :class:`openmc.deplete.Operator`\n says have non-zero densities at this stage and those that\n have yield data. Sorted by nuclide name\n\n ' return sorted((self._chain_set & set(nuclides)))
def update_tally_nuclides(self, nuclides): 'Return nuclides with non-zero densities and yield data\n\n Parameters\n ----------\n nuclides : iterable of str\n Nuclides with non-zero densities from the\n :class:`openmc.deplete.Operator`\n\n Returns\n -------\n nuclides : list of str\n Intersection of nuclides that the :class:`openmc.deplete.Operator`\n says have non-zero densities at this stage and those that\n have yield data. Sorted by nuclide name\n\n ' return sorted((self._chain_set & set(nuclides)))<|docstring|>Return nuclides with non-zero densities and yield data Parameters ---------- nuclides : iterable of str Nuclides with non-zero densities from the :class:`openmc.deplete.Operator` Returns ------- nuclides : list of str Intersection of nuclides that the :class:`openmc.deplete.Operator` says have non-zero densities at this stage and those that have yield data. Sorted by nuclide name<|endoftext|>
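The one-liner above is just a sorted set intersection; exercised standalone (nuclide names arbitrary):

chain_set = {'U235', 'U238', 'Pu239'}        # nuclides with yield data
present = ['Xe135', 'U238', 'U235']          # non-zero densities this step
print(sorted(chain_set & set(present)))      # ['U235', 'U238']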
8afcd46352de1a75ff0886f6093177a995202337d75953393a2eb89fd5cbabad
@classmethod def from_operator(cls, operator, **kwargs): 'Create a new instance by pulling data from the operator\n\n All keyword arguments should be identical to their counterpart\n in the main ``__init__`` method\n\n Parameters\n ----------\n operator : openmc.deplete.TransportOperator\n Operator with a depletion chain\n kwargs: optional\n Additional keyword arguments to be used in construction\n ' return cls(operator.chain.nuclides, **kwargs)
Create a new instance by pulling data from the operator All keyword arguments should be identical to their counterpart in the main ``__init__`` method Parameters ---------- operator : openmc.deplete.TransportOperator Operator with a depletion chain kwargs: optional Additional keyword arguments to be used in construction
openmc/deplete/abc.py
from_operator
xiashaopeng/openmc
262
python
@classmethod def from_operator(cls, operator, **kwargs): 'Create a new instance by pulling data from the operator\n\n All keyword arguments should be identical to their counterpart\n in the main ``__init__`` method\n\n Parameters\n ----------\n operator : openmc.deplete.TransportOperator\n Operator with a depletion chain\n kwargs: optional\n Additional keyword arguments to be used in construction\n ' return cls(operator.chain.nuclides, **kwargs)
@classmethod def from_operator(cls, operator, **kwargs): 'Create a new instance by pulling data from the operator\n\n All keyword arguments should be identical to their counterpart\n in the main ``__init__`` method\n\n Parameters\n ----------\n operator : openmc.deplete.TransportOperator\n Operator with a depletion chain\n kwargs: optional\n Additional keyword arguments to be used in construction\n ' return cls(operator.chain.nuclides, **kwargs)<|docstring|>Create a new instance by pulling data from the operator All keyword arguments should be identical to their counterpart in the main ``__init__`` method Parameters ---------- operator : openmc.deplete.TransportOperator Operator with a depletion chain kwargs: optional Additional keyword arguments to be used in construction<|endoftext|>
3b82ab0450c7bf6e6fd5d90febf2e06b561318589f7efe62cec64dc911f97458
def generate_tallies(self, materials, mat_indexes): 'Construct the fission rate tally\n\n Parameters\n ----------\n materials : iterable of :class:`openmc.lib.Material`\n Materials to be used in :class:`openmc.lib.MaterialFilter`\n mat_indexes : iterable of int\n Indices of tallied materials that will have their fission\n yields computed by this helper. Necessary as the\n :class:`openmc.deplete.Operator` that uses this helper\n may only burn a subset of all materials when running\n in parallel mode.\n ' self._local_indexes = asarray(mat_indexes) self._fission_rate_tally = Tally() self._fission_rate_tally.writable = False self._fission_rate_tally.scores = ['fission'] self._fission_rate_tally.filters = [MaterialFilter(materials)]
Construct the fission rate tally Parameters ---------- materials : iterable of :class:`openmc.lib.Material` Materials to be used in :class:`openmc.lib.MaterialFilter` mat_indexes : iterable of int Indices of tallied materials that will have their fission yields computed by this helper. Necessary as the :class:`openmc.deplete.Operator` that uses this helper may only burn a subset of all materials when running in parallel mode.
openmc/deplete/abc.py
generate_tallies
xiashaopeng/openmc
262
python
def generate_tallies(self, materials, mat_indexes): 'Construct the fission rate tally\n\n Parameters\n ----------\n materials : iterable of :class:`openmc.lib.Material`\n Materials to be used in :class:`openmc.lib.MaterialFilter`\n mat_indexes : iterable of int\n Indices of tallied materials that will have their fission\n yields computed by this helper. Necessary as the\n :class:`openmc.deplete.Operator` that uses this helper\n may only burn a subset of all materials when running\n in parallel mode.\n ' self._local_indexes = asarray(mat_indexes) self._fission_rate_tally = Tally() self._fission_rate_tally.writable = False self._fission_rate_tally.scores = ['fission'] self._fission_rate_tally.filters = [MaterialFilter(materials)]
def generate_tallies(self, materials, mat_indexes): 'Construct the fission rate tally\n\n Parameters\n ----------\n materials : iterable of :class:`openmc.lib.Material`\n Materials to be used in :class:`openmc.lib.MaterialFilter`\n mat_indexes : iterable of int\n Indices of tallied materials that will have their fission\n yields computed by this helper. Necessary as the\n :class:`openmc.deplete.Operator` that uses this helper\n may only burn a subset of all materials when running\n in parallel mode.\n ' self._local_indexes = asarray(mat_indexes) self._fission_rate_tally = Tally() self._fission_rate_tally.writable = False self._fission_rate_tally.scores = ['fission'] self._fission_rate_tally.filters = [MaterialFilter(materials)]<|docstring|>Construct the fission rate tally Parameters ---------- materials : iterable of :class:`openmc.lib.Material` Materials to be used in :class:`openmc.lib.MaterialFilter` mat_indexes : iterable of int Indices of tallied materials that will have their fission yields computed by this helper. Necessary as the :class:`openmc.deplete.Operator` that uses this helper may only burn a subset of all materials when running in parallel mode.<|endoftext|>
ea7650ed57800a18da3181e878b9b58b7c54e66068febd48af412a0fd45f9697
def update_tally_nuclides(self, nuclides): 'Tally nuclides with non-zero density and multiple yields\n\n Must be run after :meth:`generate_tallies`.\n\n Parameters\n ----------\n nuclides : iterable of str\n Potential nuclides to be tallied, such as those with\n non-zero density at this stage.\n\n Returns\n -------\n nuclides : list of str\n Intersection of input nuclides and those that have multiple sets\n of yield data. Sorted by nuclide name\n\n Raises\n ------\n AttributeError\n If tallies not generated\n ' assert (self._fission_rate_tally is not None), 'Run generate_tallies first' overlap = set(self._chain_nuclides).intersection(set(nuclides)) nuclides = sorted(overlap) self._tally_nucs = [self._chain_nuclides[n] for n in nuclides] self._fission_rate_tally.nuclides = nuclides return nuclides
Tally nuclides with non-zero density and multiple yields Must be run after :meth:`generate_tallies`. Parameters ---------- nuclides : iterable of str Potential nuclides to be tallied, such as those with non-zero density at this stage. Returns ------- nuclides : list of str Intersection of input nuclides and those that have multiple sets of yield data. Sorted by nuclide name Raises ------ AttributeError If tallies not generated
openmc/deplete/abc.py
update_tally_nuclides
xiashaopeng/openmc
262
python
def update_tally_nuclides(self, nuclides): 'Tally nuclides with non-zero density and multiple yields\n\n Must be run after :meth:`generate_tallies`.\n\n Parameters\n ----------\n nuclides : iterable of str\n Potential nuclides to be tallied, such as those with\n non-zero density at this stage.\n\n Returns\n -------\n nuclides : list of str\n Intersection of input nuclides and those that have multiple sets\n of yield data. Sorted by nuclide name\n\n Raises\n ------\n AttributeError\n If tallies not generated\n ' assert (self._fission_rate_tally is not None), 'Run generate_tallies first' overlap = set(self._chain_nuclides).intersection(set(nuclides)) nuclides = sorted(overlap) self._tally_nucs = [self._chain_nuclides[n] for n in nuclides] self._fission_rate_tally.nuclides = nuclides return nuclides
def update_tally_nuclides(self, nuclides): 'Tally nuclides with non-zero density and multiple yields\n\n Must be run after :meth:`generate_tallies`.\n\n Parameters\n ----------\n nuclides : iterable of str\n Potential nuclides to be tallied, such as those with\n non-zero density at this stage.\n\n Returns\n -------\n nuclides : list of str\n Intersection of input nuclides and those that have multiple sets\n of yield data. Sorted by nuclide name\n\n Raises\n ------\n AttributeError\n If tallies not generated\n ' assert (self._fission_rate_tally is not None), 'Run generate_tallies first' overlap = set(self._chain_nuclides).intersection(set(nuclides)) nuclides = sorted(overlap) self._tally_nucs = [self._chain_nuclides[n] for n in nuclides] self._fission_rate_tally.nuclides = nuclides return nuclides<|docstring|>Tally nuclides with non-zero density and multiple yields Must be run after :meth:`generate_tallies`. Parameters ---------- nuclides : iterable of str Potential nuclides to be tallied, such as those with non-zero density at this stage. Returns ------- nuclides : list of str Intersection of input nuclides and those that have multiple sets of yield data. Sorted by nuclide name Raises ------ AttributeError If tallies not generated<|endoftext|>
6afd8466e19dd35c224e01ed65ac2a91c2edd631f3cd7ac78b059b4065cc47ea
@abstractmethod def unpack(self): 'Unpack tallies after a transport run.\n\n Abstract because each subclass will need to arrange its\n tally data.\n '
Unpack tallies after a transport run. Abstract because each subclass will need to arrange its tally data.
openmc/deplete/abc.py
unpack
xiashaopeng/openmc
262
python
@abstractmethod def unpack(self): 'Unpack tallies after a transport run.\n\n Abstract because each subclass will need to arrange its\n tally data.\n '
@abstractmethod def unpack(self): 'Unpack tallies after a transport run.\n\n Abstract because each subclass will need to arrange its\n tally data.\n '<|docstring|>Unpack tallies after a transport run. Abstract because each subclass will need to arrange its tally data.<|endoftext|>
c3b8d4da4ac1ea3d8df8a33c782a2252191e29e946834fe26c7cf22a23f22c6d
@abstractmethod def __call__(self, conc, rates, dt, source_rate, i): 'Perform the integration across one time step\n\n Parameters\n ----------\n conc : numpy.ndarray\n Initial concentrations for all nuclides in [atom]\n rates : openmc.deplete.ReactionRates\n Reaction rates from operator\n dt : float\n Time in [s] for the entire depletion interval\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n i : int\n Current depletion step index\n\n Returns\n -------\n proc_time : float\n Time spent in CRAM routines for all materials in [s]\n conc_list : list of numpy.ndarray\n Concentrations at each of the intermediate points with\n the final concentration as the last element\n op_results : list of openmc.deplete.OperatorResult\n Eigenvalue and reaction rates from intermediate transport\n simulations\n '
Perform the integration across one time step Parameters ---------- conc : numpy.ndarray Initial concentrations for all nuclides in [atom] rates : openmc.deplete.ReactionRates Reaction rates from operator dt : float Time in [s] for the entire depletion interval source_rate : float Power in [W] or source rate in [neutron/sec] i : int Current depletion step index Returns ------- proc_time : float Time spent in CRAM routines for all materials in [s] conc_list : list of numpy.ndarray Concentrations at each of the intermediate points with the final concentration as the last element op_results : list of openmc.deplete.OperatorResult Eigenvalue and reaction rates from intermediate transport simulations
openmc/deplete/abc.py
__call__
xiashaopeng/openmc
262
python
@abstractmethod def __call__(self, conc, rates, dt, source_rate, i): 'Perform the integration across one time step\n\n Parameters\n ----------\n conc : numpy.ndarray\n Initial concentrations for all nuclides in [atom]\n rates : openmc.deplete.ReactionRates\n Reaction rates from operator\n dt : float\n Time in [s] for the entire depletion interval\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n i : int\n Current depletion step index\n\n Returns\n -------\n proc_time : float\n Time spent in CRAM routines for all materials in [s]\n conc_list : list of numpy.ndarray\n Concentrations at each of the intermediate points with\n the final concentration as the last element\n op_results : list of openmc.deplete.OperatorResult\n Eigenvalue and reaction rates from intermediate transport\n simulations\n '
@abstractmethod def __call__(self, conc, rates, dt, source_rate, i): 'Perform the integration across one time step\n\n Parameters\n ----------\n conc : numpy.ndarray\n Initial concentrations for all nuclides in [atom]\n rates : openmc.deplete.ReactionRates\n Reaction rates from operator\n dt : float\n Time in [s] for the entire depletion interval\n source_rate : float\n Power in [W] or source rate in [neutron/sec]\n i : int\n Current depletion step index\n\n Returns\n -------\n proc_time : float\n Time spent in CRAM routines for all materials in [s]\n conc_list : list of numpy.ndarray\n Concentrations at each of the intermediate points with\n the final concentration as the last element\n op_results : list of openmc.deplete.OperatorResult\n Eigenvalue and reaction rates from intermediate transport\n simulations\n '<|docstring|>Perform the integration across one time step Parameters ---------- conc : numpy.ndarray Initial concentrations for all nuclides in [atom] rates : openmc.deplete.ReactionRates Reaction rates from operator dt : float Time in [s] for the entire depletion interval source_rate : float Power in [W] or source rate in [neutron/sec] i : int Current depletion step index Returns ------- proc_time : float Time spent in CRAM routines for all materials in [s] conc_list : list of numpy.ndarray Concentrations at each of the intermediate points with the final concentration as the last element op_results : list of openmc.deplete.OperatorResult Eigenvalue and reaction rates from intermediate transport simulations<|endoftext|>
41d5a579c72327888fcac9fad2b9f79c0795373f4c74121cea1bbb40b4af87f1
@property @abstractmethod def _num_stages(self): 'Number of intermediate transport solutions\n\n Needed to ensure schemes are consistent with restarts\n '
Number of intermediate transport solutions Needed to ensure schemes are consistent with restarts
openmc/deplete/abc.py
_num_stages
xiashaopeng/openmc
262
python
@property @abstractmethod def _num_stages(self): 'Number of intermediate transport solutions\n\n Needed to ensure schemes are consistent with restarts\n '
@property @abstractmethod def _num_stages(self): 'Number of intermediate transport solutions\n\n Needed to ensure schemes are consistent with restarts\n '<|docstring|>Number of intermediate transport solutions Needed to ensure schemes are consistent with restarts<|endoftext|>
cdd32e6261ed1956aaa09f997efc826974281c35b4fd9388bb1b769dbfe7c65f
def __iter__(self): 'Return pair of time step in [s] and source rate in [W] or [neutron/sec]' return zip(self.timesteps, self.source_rates)
Return pair of time step in [s] and source rate in [W] or [neutron/sec]
openmc/deplete/abc.py
__iter__
xiashaopeng/openmc
262
python
def __iter__(self): return zip(self.timesteps, self.source_rates)
def __iter__(self): return zip(self.timesteps, self.source_rates)<|docstring|>Return pair of time step in [s] and source rate in [W] or [neutron/sec]<|endoftext|>
eabc84defb760079a69468062d2ebd1b2690d6a58867b1da8cd644983da84908
def __len__(self): 'Return integer number of depletion intervals' return len(self.timesteps)
Return integer number of depletion intervals
openmc/deplete/abc.py
__len__
xiashaopeng/openmc
262
python
def __len__(self): return len(self.timesteps)
def __len__(self): return len(self.timesteps)<|docstring|>Return integer number of depletion intervals<|endoftext|>
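The two dunders above make an integrator iterable over (timestep, source rate) pairs and sized by its number of depletion intervals; in isolation the pattern is just (values invented):

timesteps = [86400.0, 86400.0, 172800.0]       # step lengths in [s]
source_rates = [1.0e6, 1.0e6, 0.0]             # [W]; a final decay-only step
assert len(timesteps) == 3                     # what __len__ reports
for dt, rate in zip(timesteps, source_rates):  # what __iter__ yields
    print(dt, rate)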
a843297f5d8c2a74f63028030b023247931ab269fc9936b7132e92c7665f5d30
def _get_bos_data_from_operator(self, step_index, source_rate, bos_conc): 'Get beginning of step concentrations, reaction rates from Operator\n ' x = deepcopy(bos_conc) res = self.operator(x, source_rate) self.operator.write_bos_data((step_index + self._i_res)) return (x, res)
Get beginning of step concentrations, reaction rates from Operator
openmc/deplete/abc.py
_get_bos_data_from_operator
xiashaopeng/openmc
262
python
def _get_bos_data_from_operator(self, step_index, source_rate, bos_conc): '\n ' x = deepcopy(bos_conc) res = self.operator(x, source_rate) self.operator.write_bos_data((step_index + self._i_res)) return (x, res)
def _get_bos_data_from_operator(self, step_index, source_rate, bos_conc): '\n ' x = deepcopy(bos_conc) res = self.operator(x, source_rate) self.operator.write_bos_data((step_index + self._i_res)) return (x, res)<|docstring|>Get beginning of step concentrations, reaction rates from Operator<|endoftext|>
dffc200c1368037a762517b09cdba61d9fe72939cba3fa7ca42f4a9cdd818e7e
def _get_bos_data_from_restart(self, step_index, source_rate, bos_conc): 'Get beginning of step concentrations, reaction rates from restart' res = self.operator.prev_res[(- 1)] bos_conc = list(res.data[0]) rates = res.rates[0] k = ufloat(res.k[(0, 0)], res.k[(0, 1)]) rates *= (source_rate / res.source_rate[0]) return (bos_conc, OperatorResult(k, rates))
Get beginning of step concentrations, reaction rates from restart
openmc/deplete/abc.py
_get_bos_data_from_restart
xiashaopeng/openmc
262
python
def _get_bos_data_from_restart(self, step_index, source_rate, bos_conc): res = self.operator.prev_res[(- 1)] bos_conc = list(res.data[0]) rates = res.rates[0] k = ufloat(res.k[(0, 0)], res.k[(0, 1)]) rates *= (source_rate / res.source_rate[0]) return (bos_conc, OperatorResult(k, rates))
def _get_bos_data_from_restart(self, step_index, source_rate, bos_conc): res = self.operator.prev_res[(- 1)] bos_conc = list(res.data[0]) rates = res.rates[0] k = ufloat(res.k[(0, 0)], res.k[(0, 1)]) rates *= (source_rate / res.source_rate[0]) return (bos_conc, OperatorResult(k, rates))<|docstring|>Get beginning of step concentrations, reaction rates from restart<|endoftext|>
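The restart path above scales the stored rates to the new source rate and rebuilds k as an uncertain number via ufloat from the third-party uncertainties package (assumed installed). The essential ufloat behavior, with invented numbers:

from uncertainties import ufloat

k = ufloat(1.02345, 0.00081)   # nominal value, one-sigma standard deviation
print(k)                       # 1.02345+/-0.00081
print(2 * k)                   # arithmetic propagates the uncertainty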
77b9f29d3e265724adecbe3b6a8c7848edc329611b7738d7739a5fd2250670bd
def integrate(self, final_step=True): 'Perform the entire depletion process across all steps\n\n Parameters\n ----------\n final_step : bool, optional\n Indicate whether or not a transport solve should be run at the end\n of the last timestep.\n\n .. versionadded:: 0.12.1\n\n ' with self.operator as conc: (t, self._i_res) = self._get_start_data() for (i, (dt, source_rate)) in enumerate(self): if ((i > 0) or (self.operator.prev_res is None)): (conc, res) = self._get_bos_data_from_operator(i, source_rate, conc) else: (conc, res) = self._get_bos_data_from_restart(i, source_rate, conc) (proc_time, conc_list, res_list) = self(conc, res.rates, dt, source_rate, i) conc_list.insert(0, conc) res_list.insert(0, res) conc = conc_list.pop() Results.save(self.operator, conc_list, res_list, [t, (t + dt)], source_rate, (self._i_res + i), proc_time) t += dt res_list = [self.operator(conc, (source_rate if final_step else 0.0))] Results.save(self.operator, [conc], res_list, [t, t], source_rate, (self._i_res + len(self)), proc_time) self.operator.write_bos_data((len(self) + self._i_res))
Perform the entire depletion process across all steps Parameters ---------- final_step : bool, optional Indicate whether or not a transport solve should be run at the end of the last timestep. .. versionadded:: 0.12.1
openmc/deplete/abc.py
integrate
xiashaopeng/openmc
262
python
def integrate(self, final_step=True): 'Perform the entire depletion process across all steps\n\n Parameters\n ----------\n final_step : bool, optional\n Indicate whether or not a transport solve should be run at the end\n of the last timestep.\n\n .. versionadded:: 0.12.1\n\n ' with self.operator as conc: (t, self._i_res) = self._get_start_data() for (i, (dt, source_rate)) in enumerate(self): if ((i > 0) or (self.operator.prev_res is None)): (conc, res) = self._get_bos_data_from_operator(i, source_rate, conc) else: (conc, res) = self._get_bos_data_from_restart(i, source_rate, conc) (proc_time, conc_list, res_list) = self(conc, res.rates, dt, source_rate, i) conc_list.insert(0, conc) res_list.insert(0, res) conc = conc_list.pop() Results.save(self.operator, conc_list, res_list, [t, (t + dt)], source_rate, (self._i_res + i), proc_time) t += dt res_list = [self.operator(conc, (source_rate if final_step else 0.0))] Results.save(self.operator, [conc], res_list, [t, t], source_rate, (self._i_res + len(self)), proc_time) self.operator.write_bos_data((len(self) + self._i_res))
def integrate(self, final_step=True): 'Perform the entire depletion process across all steps\n\n Parameters\n ----------\n final_step : bool, optional\n Indicate whether or not a transport solve should be run at the end\n of the last timestep.\n\n .. versionadded:: 0.12.1\n\n ' with self.operator as conc: (t, self._i_res) = self._get_start_data() for (i, (dt, source_rate)) in enumerate(self): if ((i > 0) or (self.operator.prev_res is None)): (conc, res) = self._get_bos_data_from_operator(i, source_rate, conc) else: (conc, res) = self._get_bos_data_from_restart(i, source_rate, conc) (proc_time, conc_list, res_list) = self(conc, res.rates, dt, source_rate, i) conc_list.insert(0, conc) res_list.insert(0, res) conc = conc_list.pop() Results.save(self.operator, conc_list, res_list, [t, (t + dt)], source_rate, (self._i_res + i), proc_time) t += dt res_list = [self.operator(conc, (source_rate if final_step else 0.0))] Results.save(self.operator, [conc], res_list, [t, t], source_rate, (self._i_res + len(self)), proc_time) self.operator.write_bos_data((len(self) + self._i_res))<|docstring|>Perform the entire depletion process across all steps Parameters ---------- final_step : bool, optional Indicate whether or not a transport solve should be run at the end of the last timestep. .. versionadded:: 0.12.1<|endoftext|>
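As a usage sketch for the driver loop above: user code normally reaches integrate() through a concrete integrator such as PredictorIntegrator. A hypothetical wrapper, assuming openmc is installed and an operator has been configured elsewhere:

import openmc.deplete

def run_depletion(op, days=(30, 30, 30), power=1.0e6):
    """Hypothetical helper: three 30-day steps at 1 MW by default."""
    timesteps = [d * 24 * 3600 for d in days]   # days -> seconds
    integrator = openmc.deplete.PredictorIntegrator(op, timesteps, power=power)
    integrator.integrate()                      # drives the loop shown above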
617bc4fb26c7f8126a636ccdb30c6d79544a6858d20510a0f61d240ff568a579
def integrate(self): 'Perform the entire depletion process across all steps' with self.operator as conc: (t, self._i_res) = self._get_start_data() for (i, (dt, p)) in enumerate(self): if (i == 0): if (self.operator.prev_res is None): (conc, res) = self._get_bos_data_from_operator(i, p, conc) else: (conc, res) = self._get_bos_data_from_restart(i, p, conc) else: res = res_list[(- 1)] (proc_time, conc_list, res_list) = self(conc, res.rates, dt, p, i) conc_list.insert(0, conc) res_list.insert(0, res) conc = conc_list.pop() Results.save(self.operator, conc_list, res_list, [t, (t + dt)], p, (self._i_res + i), proc_time) t += dt Results.save(self.operator, [conc], [res_list[(- 1)]], [t, t], p, (self._i_res + len(self)), proc_time) self.operator.write_bos_data((self._i_res + len(self)))
Perform the entire depletion process across all steps
openmc/deplete/abc.py
integrate
xiashaopeng/openmc
262
python
def integrate(self): with self.operator as conc: (t, self._i_res) = self._get_start_data() for (i, (dt, p)) in enumerate(self): if (i == 0): if (self.operator.prev_res is None): (conc, res) = self._get_bos_data_from_operator(i, p, conc) else: (conc, res) = self._get_bos_data_from_restart(i, p, conc) else: res = res_list[(- 1)] (proc_time, conc_list, res_list) = self(conc, res.rates, dt, p, i) conc_list.insert(0, conc) res_list.insert(0, res) conc = conc_list.pop() Results.save(self.operator, conc_list, res_list, [t, (t + dt)], p, (self._i_res + i), proc_time) t += dt Results.save(self.operator, [conc], [res_list[(- 1)]], [t, t], p, (self._i_res + len(self)), proc_time) self.operator.write_bos_data((self._i_res + len(self)))
def integrate(self): with self.operator as conc: (t, self._i_res) = self._get_start_data() for (i, (dt, p)) in enumerate(self): if (i == 0): if (self.operator.prev_res is None): (conc, res) = self._get_bos_data_from_operator(i, p, conc) else: (conc, res) = self._get_bos_data_from_restart(i, p, conc) else: res = res_list[(- 1)] (proc_time, conc_list, res_list) = self(conc, res.rates, dt, p, i) conc_list.insert(0, conc) res_list.insert(0, res) conc = conc_list.pop() Results.save(self.operator, conc_list, res_list, [t, (t + dt)], p, (self._i_res + i), proc_time) t += dt Results.save(self.operator, [conc], [res_list[(- 1)]], [t, t], p, (self._i_res + len(self)), proc_time) self.operator.write_bos_data((self._i_res + len(self)))<|docstring|>Perform the entire depletion process across all steps<|endoftext|>
e980591384adc922e7cf1fea3aad378c2453ec886f2cd184e1e48b39d860172b
@abstractmethod def __call__(self, A, n0, dt): 'Solve the linear system of equations for depletion\n\n Parameters\n ----------\n A : scipy.sparse.csr_matrix\n Sparse transmutation matrix ``A[j, i]`` describing rates at\n which isotope ``i`` transmutes to isotope ``j``\n n0 : numpy.ndarray\n Initial compositions, typically given in number of atoms in some\n material or an atom density\n dt : float\n Time [s] of the specific interval to be solved\n\n Returns\n -------\n numpy.ndarray\n Final compositions after ``dt``. Should be of identical shape\n to ``n0``.\n\n '
Solve the linear system of equations for depletion Parameters ---------- A : scipy.sparse.csr_matrix Sparse transmutation matrix ``A[j, i]`` describing rates at which isotope ``i`` transmutes to isotope ``j`` n0 : numpy.ndarray Initial compositions, typically given in number of atoms in some material or an atom density dt : float Time [s] of the specific interval to be solved Returns ------- numpy.ndarray Final compositions after ``dt``. Should be of identical shape to ``n0``.
openmc/deplete/abc.py
__call__
xiashaopeng/openmc
262
python
@abstractmethod def __call__(self, A, n0, dt): 'Solve the linear system of equations for depletion\n\n Parameters\n ----------\n A : scipy.sparse.csr_matrix\n Sparse transmutation matrix ``A[j, i]`` describing rates at\n which isotope ``i`` transmutes to isotope ``j``\n n0 : numpy.ndarray\n Initial compositions, typically given in number of atoms in some\n material or an atom density\n dt : float\n Time [s] of the specific interval to be solved\n\n Returns\n -------\n numpy.ndarray\n Final compositions after ``dt``. Should be of identical shape\n to ``n0``.\n\n '
@abstractmethod def __call__(self, A, n0, dt): 'Solve the linear system of equations for depletion\n\n Parameters\n ----------\n A : scipy.sparse.csr_matrix\n Sparse transmutation matrix ``A[j, i]`` describing rates at\n which isotope ``i`` transmutes to isotope ``j``\n n0 : numpy.ndarray\n Initial compositions, typically given in number of atoms in some\n material or an atom density\n dt : float\n Time [s] of the specific interval to be solved\n\n Returns\n -------\n numpy.ndarray\n Final compositions after ``dt``. Should be of identical shape\n to ``n0``.\n\n '<|docstring|>Solve the linear system of equations for depletion Parameters ---------- A : scipy.sparse.csr_matrix Sparse transmutation matrix ``A[j, i]`` describing rates at which isotope ``i`` transmutes to isotope ``j`` n0 : numpy.ndarray Initial compositions, typically given in number of atoms in some material or an atom density dt : float Time [s] of the specific interval to be solved Returns ------- numpy.ndarray Final compositions after ``dt``. Should be of identical shape to ``n0``.<|endoftext|>
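The solver contract above amounts to advancing n0 through exp(A*dt). openmc's production solver uses CRAM; as a rough stand-in under the same interface, scipy's matrix-exponential action works for small problems (a sketch, not the real solver):

import numpy as np
from scipy.sparse import csr_matrix
from scipy.sparse.linalg import expm_multiply

class ExpmSolver:
    """Sketch solver matching the __call__(A, n0, dt) contract above."""
    def __call__(self, A, n0, dt):
        # n(dt) = exp(A * dt) @ n0, without forming the dense exponential
        return expm_multiply(A * dt, n0)

lam = 1.0e-3                                          # decay constant [1/s]
A = csr_matrix(np.array([[-lam, 0.0], [lam, 0.0]]))   # nuclide 0 decays into 1
n = ExpmSolver()(A, np.array([1.0e10, 0.0]), dt=1000.0)
print(n)  # ~[3.68e9, 6.32e9]: a fraction exp(-1) of nuclide 0 remains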
8b143c10dfb072928e8c29dc8f9342f55b1c8e91d61e9fcef1398ffff7d64f85
def test_get_instance_metrics_lt_92(check): '\n check output when < 9.2\n ' check._is_9_2_or_above.return_value = False res = check._get_instance_metrics(False, False) assert (res['metrics'] == util.COMMON_METRICS)
check output when < 9.2
postgres/tests/test_unit.py
test_get_instance_metrics_lt_92
Nordstrom/integrations-core
0
python
def test_get_instance_metrics_lt_92(check): '\n \n ' check._is_9_2_or_above.return_value = False res = check._get_instance_metrics(False, False) assert (res['metrics'] == util.COMMON_METRICS)
def test_get_instance_metrics_lt_92(check): '\n \n ' check._is_9_2_or_above.return_value = False res = check._get_instance_metrics(False, False) assert (res['metrics'] == util.COMMON_METRICS)<|docstring|>check output when < 9.2<|endoftext|>
be79761cdff9056014633054ba80fac536ba79c13346048ecf9e9bb4086d1581
def test_get_instance_metrics_92(check): '\n check output when 9.2+\n ' check._is_9_2_or_above.return_value = True res = check._get_instance_metrics(False, False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS))
check output when 9.2+
postgres/tests/test_unit.py
test_get_instance_metrics_92
Nordstrom/integrations-core
0
python
def test_get_instance_metrics_92(check): '\n \n ' check._is_9_2_or_above.return_value = True res = check._get_instance_metrics(False, False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS))
def test_get_instance_metrics_92(check): '\n \n ' check._is_9_2_or_above.return_value = True res = check._get_instance_metrics(False, False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS))<|docstring|>check output when 9.2+<|endoftext|>
8a5a92ea1d2d72698a6b4c289811b7ab926c1abca9b4ebbbb646c788a18b2ade
def test_get_instance_metrics_state(check): '\n Ensure data is consistent when the function is called more than once\n ' res = check._get_instance_metrics(False, False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS)) check._is_9_2_or_above.side_effect = Exception res = check._get_instance_metrics([], False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS))
Ensure data is consistent when the function is called more than once
postgres/tests/test_unit.py
test_get_instance_metrics_state
Nordstrom/integrations-core
0
python
def test_get_instance_metrics_state(check): '\n \n ' res = check._get_instance_metrics(False, False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS)) check._is_9_2_or_above.side_effect = Exception res = check._get_instance_metrics([], False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS))
def test_get_instance_metrics_state(check): '\n \n ' res = check._get_instance_metrics(False, False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS)) check._is_9_2_or_above.side_effect = Exception res = check._get_instance_metrics([], False) assert (res['metrics'] == dict(util.COMMON_METRICS, **util.NEWER_92_METRICS))<|docstring|>Ensure data is consistent when the function is called more than once<|endoftext|>
6bd8d4ab672ba496926538183ce8b30738fd8ccdc4c12e4f8ead68e113fd16a2
def test_get_instance_metrics_database_size_metrics(check): '\n Test the function behaves correctly when `database_size_metrics` is passed\n ' expected = util.COMMON_METRICS expected.update(util.NEWER_92_METRICS) expected.update(util.DATABASE_SIZE_METRICS) res = check._get_instance_metrics(True, False) assert (res['metrics'] == expected)
Test the function behaves correctly when `database_size_metrics` is passed
postgres/tests/test_unit.py
test_get_instance_metrics_database_size_metrics
Nordstrom/integrations-core
0
python
def test_get_instance_metrics_database_size_metrics(check): '\n \n ' expected = util.COMMON_METRICS expected.update(util.NEWER_92_METRICS) expected.update(util.DATABASE_SIZE_METRICS) res = check._get_instance_metrics(True, False) assert (res['metrics'] == expected)
def test_get_instance_metrics_database_size_metrics(check): '\n \n ' expected = util.COMMON_METRICS expected.update(util.NEWER_92_METRICS) expected.update(util.DATABASE_SIZE_METRICS) res = check._get_instance_metrics(True, False) assert (res['metrics'] == expected)<|docstring|>Test the function behaves correctly when `database_size_metrics` is passed<|endoftext|>
114f1dc5f7773940280df663b87bf10216ebde843d322fb81256c14eb30f91c5
def test_get_instance_with_default(check): '\n Test the contents of the query string with different `collect_default_db` values\n ' collect_default_db = False res = check._get_instance_metrics(False, collect_default_db) assert (" AND psd.datname not ilike 'postgres'" in res['query']) collect_default_db = True res = check._get_instance_metrics(False, collect_default_db) assert (" AND psd.datname not ilike 'postgres'" not in res['query'])
Test the contents of the query string with different `collect_default_db` values
postgres/tests/test_unit.py
test_get_instance_with_default
Nordstrom/integrations-core
0
python
def test_get_instance_with_default(check): '\n \n ' collect_default_db = False res = check._get_instance_metrics(False, collect_default_db) assert (" AND psd.datname not ilike 'postgres'" in res['query']) collect_default_db = True res = check._get_instance_metrics(False, collect_default_db) assert (" AND psd.datname not ilike 'postgres'" not in res['query'])
def test_get_instance_with_default(check): '\n \n ' collect_default_db = False res = check._get_instance_metrics(False, collect_default_db) assert (" AND psd.datname not ilike 'postgres'" in res['query']) collect_default_db = True res = check._get_instance_metrics(False, collect_default_db) assert (" AND psd.datname not ilike 'postgres'" not in res['query'])<|docstring|>Test the contents of the query string with different `collect_default_db` values<|endoftext|>
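Taken together, the metric-map tests above pin down two behaviours: optional metric groups are merged into a base map, and the generated query excludes the default 'postgres' database unless collect_default_db is set. A hedged sketch of logic that would satisfy those assertions (simplified stand-in metric maps, not the real check):

    COMMON_METRICS = {'numbackends': ('postgresql.connections', 'gauge')}
    NEWER_92_METRICS = {'deadlocks': ('postgresql.deadlocks', 'rate')}
    DATABASE_SIZE_METRICS = {'pg_database_size(psd.datname)': ('postgresql.database_size', 'gauge')}

    def get_instance_metrics(database_size_metrics, collect_default_db):
        metrics = dict(COMMON_METRICS)
        metrics.update(NEWER_92_METRICS)  # assume a 9.2+ server for this sketch
        if database_size_metrics:
            metrics.update(DATABASE_SIZE_METRICS)
        query = "SELECT psd.datname FROM pg_stat_database psd WHERE psd.datname not ilike 'template%'"
        if not collect_default_db:
            # the exact substring the test asserts on
            query += " AND psd.datname not ilike 'postgres'"
        return {'metrics': metrics, 'query': query}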
c4812ff2b5220e7c3801b5137ecfb85f4bfd3393155ff20bd18df729ac65a7b2
def test_get_version(check): '\n Test _get_version() to make sure the check is properly parsing Postgres versions\n ' db = MagicMock() check.db = db db.cursor().fetchone.return_value = ['9.5.3'] assert (check._get_version() == [9, 5, 3]) db.cursor().fetchone.return_value = ['10.2'] check._clean_state() assert (check._get_version() == [10, 2]) db.cursor().fetchone.return_value = ['11beta3'] check._clean_state() assert (check._get_version() == [11, (- 1), 3]) db.cursor().fetchone.return_value = ['11rc1'] check._clean_state() assert (check._get_version() == [11, (- 1), 1]) db.cursor().fetchone.return_value = ['11nightly3'] check._clean_state() assert (check._get_version() == [11, (- 1), 3])
Test _get_version() to make sure the check is properly parsing Postgres versions
postgres/tests/test_unit.py
test_get_version
Nordstrom/integrations-core
0
python
def test_get_version(check): '\n \n ' db = MagicMock() check.db = db db.cursor().fetchone.return_value = ['9.5.3'] assert (check._get_version() == [9, 5, 3]) db.cursor().fetchone.return_value = ['10.2'] check._clean_state() assert (check._get_version() == [10, 2]) db.cursor().fetchone.return_value = ['11beta3'] check._clean_state() assert (check._get_version() == [11, (- 1), 3]) db.cursor().fetchone.return_value = ['11rc1'] check._clean_state() assert (check._get_version() == [11, (- 1), 1]) db.cursor().fetchone.return_value = ['11nightly3'] check._clean_state() assert (check._get_version() == [11, (- 1), 3])
def test_get_version(check): '\n \n ' db = MagicMock() check.db = db db.cursor().fetchone.return_value = ['9.5.3'] assert (check._get_version() == [9, 5, 3]) db.cursor().fetchone.return_value = ['10.2'] check._clean_state() assert (check._get_version() == [10, 2]) db.cursor().fetchone.return_value = ['11beta3'] check._clean_state() assert (check._get_version() == [11, (- 1), 3]) db.cursor().fetchone.return_value = ['11rc1'] check._clean_state() assert (check._get_version() == [11, (- 1), 1]) db.cursor().fetchone.return_value = ['11nightly3'] check._clean_state() assert (check._get_version() == [11, (- 1), 3])<|docstring|>Test _get_version() to make sure the check is properly parsing Postgres versions<|endoftext|>
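The expected values in this test imply a parser in which a pre-release tag ('beta', 'rc', 'nightly') collapses to -1 in the middle position. One way to reproduce that mapping, as a hypothetical helper rather than the check's actual implementation:

    import re

    def parse_pg_version(raw):
        try:
            return [int(part) for part in raw.split('.')]  # '9.5.3' -> [9, 5, 3]
        except ValueError:
            pass
        # '11beta3' -> [11, -1, 3]; '11rc1' -> [11, -1, 1]; '11nightly3' -> [11, -1, 3]
        match = re.match(r'(\d+)[a-zA-Z]+(\d+)$', raw)
        if match:
            return [int(match.group(1)), -1, int(match.group(2))]
        return None  # unparseable input such as 'foo'

    assert parse_pg_version('10.2') == [10, 2]
    assert parse_pg_version('11beta3') == [11, -1, 3]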
e7551c4b37db51e2f0b6e63f19b58e0044a8023c1412caad6b309a6d073cd3ce
def test_is_above(check): '\n Test _is_above() to make sure the check is properly determining order of versions\n ' db = MagicMock() check.db = db db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([9, 5, 4]) assert (check._is_above([11, 0, 0]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 4, 4]) assert (check._is_above([10, 6, 4]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 5, 3]) assert (check._is_above([10, 5, 5]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 5, 4]) db.cursor().fetchone.return_value = ['11beta4'] check._clean_state() assert check._is_above([11, (- 1), 3]) db.cursor().fetchone.return_value = ['11.0.0'] check._clean_state() assert check._is_above([11, (- 1), 3]) db.cursor().fetchone.return_value = ['10.0'] check._clean_state() assert check._is_above([10, 0]) assert check._is_above([10, 0, 0]) assert (check._is_above([10, 0, 1]) is False) db.cursor().fetchone.return_value = 'foo' check._clean_state() assert (check._is_above([10, 0]) is False)
Test _is_above() to make sure the check is properly determining order of versions
postgres/tests/test_unit.py
test_is_above
Nordstrom/integrations-core
0
python
def test_is_above(check): '\n \n ' db = MagicMock() check.db = db db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([9, 5, 4]) assert (check._is_above([11, 0, 0]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 4, 4]) assert (check._is_above([10, 6, 4]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 5, 3]) assert (check._is_above([10, 5, 5]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 5, 4]) db.cursor().fetchone.return_value = ['11beta4'] check._clean_state() assert check._is_above([11, (- 1), 3]) db.cursor().fetchone.return_value = ['11.0.0'] check._clean_state() assert check._is_above([11, (- 1), 3]) db.cursor().fetchone.return_value = ['10.0'] check._clean_state() assert check._is_above([10, 0]) assert check._is_above([10, 0, 0]) assert (check._is_above([10, 0, 1]) is False) db.cursor().fetchone.return_value = 'foo' check._clean_state() assert (check._is_above([10, 0]) is False)
def test_is_above(check): '\n \n ' db = MagicMock() check.db = db db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([9, 5, 4]) assert (check._is_above([11, 0, 0]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 4, 4]) assert (check._is_above([10, 6, 4]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 5, 3]) assert (check._is_above([10, 5, 5]) is False) db.cursor().fetchone.return_value = ['10.5.4'] assert check._is_above([10, 5, 4]) db.cursor().fetchone.return_value = ['11beta4'] check._clean_state() assert check._is_above([11, (- 1), 3]) db.cursor().fetchone.return_value = ['11.0.0'] check._clean_state() assert check._is_above([11, (- 1), 3]) db.cursor().fetchone.return_value = ['10.0'] check._clean_state() assert check._is_above([10, 0]) assert check._is_above([10, 0, 0]) assert (check._is_above([10, 0, 1]) is False) db.cursor().fetchone.return_value = 'foo' check._clean_state() assert (check._is_above([10, 0]) is False)<|docstring|>Test _is_above() to make sure the check is properly determining order of versions<|endoftext|>
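The subtlest assertions here are that [10, 0] counts as at-or-above [10, 0, 0] but not [10, 0, 1], which rules out plain sequence comparison; the shorter version has to be zero-padded first. A sketch that satisfies every case in the test, assuming the parsed-version format from the previous example:

    from itertools import zip_longest

    def is_above(current, threshold):
        if not isinstance(current, list):
            return False  # covers the unparseable 'foo' case
        # zero-pad so [10, 0] compares equal to [10, 0, 0]
        pairs = list(zip_longest(current, threshold, fillvalue=0))
        return tuple(c for c, _ in pairs) >= tuple(t for _, t in pairs)

    assert is_above([10, 0], [10, 0, 0])
    assert not is_above([10, 0], [10, 0, 1])
    assert is_above([11, -1, 4], [11, -1, 3])  # '11beta4' parsed as [11, -1, 4]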
e6f123f73845c48cc79181a4324373c8f95c4e82643b972879a4a6ddd0b71c4c
def test_malformed_get_custom_queries(check): '\n Test early-exit conditions for _get_custom_queries()\n ' check.log = MagicMock() db = MagicMock() check.db = db malformed_custom_query = {} check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `metric_prefix` is required') check.log.reset_mock() malformed_custom_query['metric_prefix'] = 'postgresql' check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `query` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query['query'] = 'SELECT num FROM sometable' check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `columns` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column = {} malformed_custom_query['columns'] = [malformed_custom_query_column] db.cursor().execute.side_effect = psycopg2.ProgrammingError('FOO') check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('Error executing query for metric_prefix %s: %s', malformed_custom_query['metric_prefix'], 'FOO') check.log.reset_mock() malformed_custom_query_column = {} malformed_custom_query['columns'] = [malformed_custom_query_column] query_return = ['num', 1337] db.cursor().execute.side_effect = None db.cursor().__iter__.return_value = iter([query_return]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('query result for metric_prefix %s: expected %s columns, got %s', malformed_custom_query['metric_prefix'], len(malformed_custom_query['columns']), len(query_return)) check.log.reset_mock() db.cursor().__iter__.return_value = iter([[]]) check._get_custom_queries([], [malformed_custom_query]) check.log.debug.assert_called_with('query result for metric_prefix %s: returned an empty result', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['some_key'] = 'some value' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('column field `name` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['name'] = 'num' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('column field `type` is required for column `%s` of metric_prefix `%s`', malformed_custom_query_column['name'], malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['type'] = 'invalid_type' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('invalid submission method `%s` for column `%s` of metric_prefix `%s`', malformed_custom_query_column['type'], malformed_custom_query_column['name'], malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['type'] = 'gauge' query_return = MagicMock() query_return.__float__.side_effect = ValueError('Mocked exception') db.cursor().__iter__.return_value = iter([[query_return]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('non-numeric value `%s` for metric column `%s` of metric_prefix `%s`', query_return, malformed_custom_query_column['name'], malformed_custom_query['metric_prefix'])
Test early-exit conditions for _get_custom_queries()
postgres/tests/test_unit.py
test_malformed_get_custom_queries
Nordstrom/integrations-core
0
python
def test_malformed_get_custom_queries(check): '\n \n ' check.log = MagicMock() db = MagicMock() check.db = db malformed_custom_query = {} check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `metric_prefix` is required') check.log.reset_mock() malformed_custom_query['metric_prefix'] = 'postgresql' check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `query` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query['query'] = 'SELECT num FROM sometable' check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `columns` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column = {} malformed_custom_query['columns'] = [malformed_custom_query_column] db.cursor().execute.side_effect = psycopg2.ProgrammingError('FOO') check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('Error executing query for metric_prefix %s: %s', malformed_custom_query['metric_prefix'], 'FOO') check.log.reset_mock() malformed_custom_query_column = {} malformed_custom_query['columns'] = [malformed_custom_query_column] query_return = ['num', 1337] db.cursor().execute.side_effect = None db.cursor().__iter__.return_value = iter([query_return]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('query result for metric_prefix %s: expected %s columns, got %s', malformed_custom_query['metric_prefix'], len(malformed_custom_query['columns']), len(query_return)) check.log.reset_mock() db.cursor().__iter__.return_value = iter([[]]) check._get_custom_queries([], [malformed_custom_query]) check.log.debug.assert_called_with('query result for metric_prefix %s: returned an empty result', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['some_key'] = 'some value' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('column field `name` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['name'] = 'num' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('column field `type` is required for column `%s` of metric_prefix `%s`', malformed_custom_query_column['name'], malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['type'] = 'invalid_type' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('invalid submission method `%s` for column `%s` of metric_prefix `%s`', malformed_custom_query_column['type'], malformed_custom_query_column['name'], malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['type'] = 'gauge' query_return = MagicMock() query_return.__float__.side_effect = ValueError('Mocked exception') db.cursor().__iter__.return_value = iter([[query_return]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('non-numeric value `%s` for metric column `%s` of metric_prefix `%s`', query_return, malformed_custom_query_column['name'], malformed_custom_query['metric_prefix'])
def test_malformed_get_custom_queries(check): '\n \n ' check.log = MagicMock() db = MagicMock() check.db = db malformed_custom_query = {} check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `metric_prefix` is required') check.log.reset_mock() malformed_custom_query['metric_prefix'] = 'postgresql' check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `query` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query['query'] = 'SELECT num FROM sometable' check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('custom query field `columns` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column = {} malformed_custom_query['columns'] = [malformed_custom_query_column] db.cursor().execute.side_effect = psycopg2.ProgrammingError('FOO') check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('Error executing query for metric_prefix %s: %s', malformed_custom_query['metric_prefix'], 'FOO') check.log.reset_mock() malformed_custom_query_column = {} malformed_custom_query['columns'] = [malformed_custom_query_column] query_return = ['num', 1337] db.cursor().execute.side_effect = None db.cursor().__iter__.return_value = iter([query_return]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('query result for metric_prefix %s: expected %s columns, got %s', malformed_custom_query['metric_prefix'], len(malformed_custom_query['columns']), len(query_return)) check.log.reset_mock() db.cursor().__iter__.return_value = iter([[]]) check._get_custom_queries([], [malformed_custom_query]) check.log.debug.assert_called_with('query result for metric_prefix %s: returned an empty result', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['some_key'] = 'some value' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('column field `name` is required for metric_prefix `%s`', malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['name'] = 'num' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('column field `type` is required for column `%s` of metric_prefix `%s`', malformed_custom_query_column['name'], malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['type'] = 'invalid_type' db.cursor().__iter__.return_value = iter([[1337]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('invalid submission method `%s` for column `%s` of metric_prefix `%s`', malformed_custom_query_column['type'], malformed_custom_query_column['name'], malformed_custom_query['metric_prefix']) check.log.reset_mock() malformed_custom_query_column['type'] = 'gauge' query_return = MagicMock() query_return.__float__.side_effect = ValueError('Mocked exception') db.cursor().__iter__.return_value = iter([[query_return]]) check._get_custom_queries([], [malformed_custom_query]) check.log.error.assert_called_once_with('non-numeric value `%s` for metric column `%s` of metric_prefix `%s`', query_return, malformed_custom_query_column['name'], malformed_custom_query['metric_prefix'])<|docstring|>Test early-exit conditions for _get_custom_queries()<|endoftext|>
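Each early exit above guards one required field, so a configuration that clears all of them needs a metric_prefix, a query, and a columns list whose entries each carry a name and a valid type. An illustrative well-formed entry (the values are made up):

    custom_query = {
        'metric_prefix': 'postgresql.custom',               # checked first
        'query': 'SELECT tag_col, num_col FROM sometable',  # checked second
        'columns': [                                        # checked third; one entry per selected column
            {'name': 'tag_col', 'type': 'tag'},             # non-metric columns become tags
            {'name': 'num_col', 'type': 'gauge'},           # metric columns must be castable to float
        ],
    }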
ad747399eae06b631aec286d3008393c9898d0847cdada130c3a80ea9ce8ee62
@patch('cloudcourseproject.src.model.User') def test_create_token_should_get_user_model_and_return_token_with_username(user_mock): ' create_token should create a token out of the model ' user_mock.user_id = 1 token = create_token(user_mock) decoded_token = jwt.decode(token, config['JWT_SECRET_KEY'], algorithms=[ALGORITHM]) assert (decoded_token['user_id'] == user_mock.user_id)
create_token should create a token out of the model
tests/test_auth.py
test_create_token_should_get_user_model_and_return_token_with_username
dmitrijbozhkov/cloudcourseproject
0
python
@patch('cloudcourseproject.src.model.User') def test_create_token_should_get_user_model_and_return_token_with_username(user_mock): ' ' user_mock.user_id = 1 token = create_token(user_mock) decoded_token = jwt.decode(token, config['JWT_SECRET_KEY'], algorithms=[ALGORITHM]) assert (decoded_token['user_id'] == user_mock.user_id)
@patch('cloudcourseproject.src.model.User') def test_create_token_should_get_user_model_and_return_token_with_username(user_mock): ' ' user_mock.user_id = 1 token = create_token(user_mock) decoded_token = jwt.decode(token, config['JWT_SECRET_KEY'], algorithms=[ALGORITHM]) assert (decoded_token['user_id'] == user_mock.user_id)<|docstring|>create_token should create a token out of the model<|endoftext|>
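From the assertions, create_token evidently signs a payload containing the model's user_id with the configured secret and algorithm. A minimal PyJWT sketch consistent with this test and the two below (the config values are placeholders; the project's real implementation may differ):

    from datetime import datetime, timedelta
    import jwt  # PyJWT

    ALGORITHM = 'HS256'  # assumed; the tests only show the name ALGORITHM
    config = {'JWT_SECRET_KEY': 'change-me', 'JWT_ACCESS_TOKEN_EXPIRES': 7}

    def create_token(user):
        payload = {
            'user_id': user.user_id,  # the claim the test decodes and compares
            'iat': datetime.utcnow(),
            'exp': datetime.utcnow() + timedelta(days=config['JWT_ACCESS_TOKEN_EXPIRES']),
        }
        return jwt.encode(payload, config['JWT_SECRET_KEY'], algorithm=ALGORITHM)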
0ff6217fbe1202962ec9f084db1dc5cc8168c2256577af30e488843d905e7e7d
def test_identify_should_raise_error_if_token_has_no_user_id_in_payload(): " identify gets a token and raises InvalidTokenError if it doesn't have a user_id field" invalid_token = jwt.encode({'iat': datetime.utcnow(), 'exp': (datetime.utcnow() + timedelta(days=config['JWT_ACCESS_TOKEN_EXPIRES']))}, config['JWT_SECRET_KEY'], algorithm=ALGORITHM) with pytest.raises(InvalidTokenError) as errorval: identify(invalid_token) assert isinstance(errorval.value, InvalidTokenError)
identify gets a token and raises InvalidTokenError if it doesn't have a user_id field
tests/test_auth.py
test_identify_should_raise_error_if_token_has_no_user_id_in_payload
dmitrijbozhkov/cloudcourseproject
0
python
def test_identify_should_raise_error_if_token_has_no_user_id_in_payload(): " " invalid_token = jwt.encode({'iat': datetime.utcnow(), 'exp': (datetime.utcnow() + timedelta(days=config['JWT_ACCESS_TOKEN_EXPIRES']))}, config['JWT_SECRET_KEY'], algorithm=ALGORITHM) with pytest.raises(InvalidTokenError) as errorval: identify(invalid_token) assert isinstance(errorval.value, InvalidTokenError)
def test_identify_should_raise_error_if_token_has_no_user_id_in_payload(): " " invalid_token = jwt.encode({'iat': datetime.utcnow(), 'exp': (datetime.utcnow() + timedelta(days=config['JWT_ACCESS_TOKEN_EXPIRES']))}, config['JWT_SECRET_KEY'], algorithm=ALGORITHM) with pytest.raises(InvalidTokenError) as errorval: identify(invalid_token) assert isinstance(errorval.value, InvalidTokenError)<|docstring|>identify gets a token and raises InvalidTokenError if it doesn't have a user_id field<|endoftext|>
322be970f591bc47e73dfee9cb6f744a518be4ac71f17358dfb9bed7fbeb6713
def test_identify_should_raise_tokenexpired_if_token_is_expired(): ' identify should raise TokenExpired if exp is lower than iat ' invalid_token = jwt.encode({'user_id': 123, 'iat': datetime.utcnow(), 'exp': (datetime.utcnow() - timedelta(days=config['JWT_ACCESS_TOKEN_EXPIRES']))}, config['JWT_SECRET_KEY'], algorithm=ALGORITHM) with pytest.raises(TokenExpired) as errorval: identify(invalid_token) assert isinstance(errorval.value, TokenExpired)
identify should raise TokenExpired if exp is lower than iat
tests/test_auth.py
test_identify_should_raise_tokenexpired_if_token_is_expired
dmitrijbozhkov/cloudcourseproject
0
python
def test_identify_should_raise_tokenexpired_if_token_is_expired(): ' ' invalid_token = jwt.encode({'user_id': 123, 'iat': datetime.utcnow(), 'exp': (datetime.utcnow() - timedelta(days=config['JWT_ACCESS_TOKEN_EXPIRES']))}, config['JWT_SECRET_KEY'], algorithm=ALGORITHM) with pytest.raises(TokenExpired) as errorval: identify(invalid_token) assert isinstance(errorval.value, TokenExpired)
def test_identify_should_raise_tokenexpired_if_token_is_expired(): ' ' invalid_token = jwt.encode({'user_id': 123, 'iat': datetime.utcnow(), 'exp': (datetime.utcnow() - timedelta(days=config['JWT_ACCESS_TOKEN_EXPIRES']))}, config['JWT_SECRET_KEY'], algorithm=ALGORITHM) with pytest.raises(TokenExpired) as errorval: identify(invalid_token) assert isinstance(errorval.value, TokenExpired)<|docstring|>identify should raise TokenExpired if exp is lower than iat<|endoftext|>
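These two identify tests fix its contract: a decodable token without a user_id claim raises InvalidTokenError, and an expired token surfaces as TokenExpired. A sketch matching that contract, where TokenExpired and InvalidTokenError are assumed to be the project's own exception classes and config/ALGORITHM come from the previous sketch:

    import jwt  # PyJWT

    def identify(token):
        try:
            # PyJWT validates the exp claim itself and raises ExpiredSignatureError
            payload = jwt.decode(token, config['JWT_SECRET_KEY'], algorithms=[ALGORITHM])
        except jwt.ExpiredSignatureError:
            raise TokenExpired('token has expired')
        if 'user_id' not in payload:
            raise InvalidTokenError('token payload is missing user_id')
        return payload['user_id']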
7c7da42a58731a9bdcc2fd1e3a4fe874f57b5a33a6f67a38852aff765c6f3c7b
def users_userId_address_addressId_get(self, addressId, userId, headers=None, query_params=None, content_type='application/json'): '\n get address id\n of address\n It is method for GET /users/{userId}/address/{addressId}\n ' if (query_params is None): query_params = {} uri = ((((self.client.base_url + '/users/') + userId) + '/address/') + addressId) return self.client.get(uri, None, headers, query_params, content_type)
get address id of address It is method for GET /users/{userId}/address/{addressId}
codegen/python/fixtures/client/requests_client/users_service.py
users_userId_address_addressId_get
feeltheajf/go-raml
0
python
def users_userId_address_addressId_get(self, addressId, userId, headers=None, query_params=None, content_type='application/json'): '\n get address id\n of address\n It is method for GET /users/{userId}/address/{addressId}\n ' if (query_params is None): query_params = {} uri = ((((self.client.base_url + '/users/') + userId) + '/address/') + addressId) return self.client.get(uri, None, headers, query_params, content_type)
def users_userId_address_addressId_get(self, addressId, userId, headers=None, query_params=None, content_type='application/json'): '\n get address id\n of address\n It is method for GET /users/{userId}/address/{addressId}\n ' if (query_params is None): query_params = {} uri = ((((self.client.base_url + '/users/') + userId) + '/address/') + addressId) return self.client.get(uri, None, headers, query_params, content_type)<|docstring|>get address id of address It is method for GET /users/{userId}/address/{addressId}<|endoftext|>
298b7ecc19e7cc3cca05332c580334ee21234d205625669be6f117dfdc277702
def users_userId_delete(self, userId, headers=None, query_params=None, content_type='application/json'): '\n It is method for DELETE /users/{userId}\n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.delete(uri, None, headers, query_params, content_type)
It is method for DELETE /users/{userId}
codegen/python/fixtures/client/requests_client/users_service.py
users_userId_delete
feeltheajf/go-raml
0
python
def users_userId_delete(self, userId, headers=None, query_params=None, content_type='application/json'): '\n \n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.delete(uri, None, headers, query_params, content_type)
def users_userId_delete(self, userId, headers=None, query_params=None, content_type='application/json'): '\n \n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.delete(uri, None, headers, query_params, content_type)<|docstring|>It is method for DELETE /users/{userId}<|endoftext|>
ab026e9bf0bcd23349600dc80be203b56dcfd3b0d512df5886bae4dd74cc554a
def getuserid(self, userId, headers=None, query_params=None, content_type='application/json'): '\n get id\n It is method for GET /users/{userId}\n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.get(uri, None, headers, query_params, content_type)
get id It is method for GET /users/{userId}
codegen/python/fixtures/client/requests_client/users_service.py
getuserid
feeltheajf/go-raml
0
python
def getuserid(self, userId, headers=None, query_params=None, content_type='application/json'): '\n get id\n It is method for GET /users/{userId}\n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.get(uri, None, headers, query_params, content_type)
def getuserid(self, userId, headers=None, query_params=None, content_type='application/json'): '\n get id\n It is method for GET /users/{userId}\n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.get(uri, None, headers, query_params, content_type)<|docstring|>get id It is method for GET /users/{userId}<|endoftext|>
119a3d7a9edd574cd263998d99ca6fbf5776168c7f6bab457211e55b309333a3
def users_userId_post(self, data, userId, headers=None, query_params=None, content_type='application/json'): '\n post without request body\n It is method for POST /users/{userId}\n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.post(uri, data, headers, query_params, content_type)
post without request body It is method for POST /users/{userId}
codegen/python/fixtures/client/requests_client/users_service.py
users_userId_post
feeltheajf/go-raml
0
python
def users_userId_post(self, data, userId, headers=None, query_params=None, content_type='application/json'): '\n post without request body\n It is method for POST /users/{userId}\n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.post(uri, data, headers, query_params, content_type)
def users_userId_post(self, data, userId, headers=None, query_params=None, content_type='application/json'): '\n post without request body\n It is method for POST /users/{userId}\n ' if (query_params is None): query_params = {} uri = ((self.client.base_url + '/users/') + userId) return self.client.post(uri, data, headers, query_params, content_type)<|docstring|>post without request body It is method for POST /users/{userId}<|endoftext|>
e21919c49ac2ca3875cc5cea6fbf7d10e750aaf9fce34a747715d4b328492819
def users_delete(self, data, headers=None, query_params=None, content_type='application/json'): '\n delete with request body\n It is method for DELETE /users\n ' if (query_params is None): query_params = {} uri = (self.client.base_url + '/users') return self.client.delete(uri, data, headers, query_params, content_type)
delete with request body It is method for DELETE /users
codegen/python/fixtures/client/requests_client/users_service.py
users_delete
feeltheajf/go-raml
0
python
def users_delete(self, data, headers=None, query_params=None, content_type='application/json'): '\n delete with request body\n It is method for DELETE /users\n ' if (query_params is None): query_params = {} uri = (self.client.base_url + '/users') return self.client.delete(uri, data, headers, query_params, content_type)
def users_delete(self, data, headers=None, query_params=None, content_type='application/json'): '\n delete with request body\n It is method for DELETE /users\n ' if (query_params is None): query_params = {} uri = (self.client.base_url + '/users') return self.client.delete(uri, data, headers, query_params, content_type)<|docstring|>delete with request body It is method for DELETE /users<|endoftext|>
e7b1cf810cb8a767dd78a2b7e574cc760565301e4ddd56436a81cca6a6b857dd
def get_users(self, data, headers=None, query_params=None, content_type='application/json'): '\n First line of comment.\n Second line of comment\n It is method for GET /users\n ' if (query_params is None): query_params = {} uri = (self.client.base_url + '/users') return self.client.get(uri, data, headers, query_params, content_type)
First line of comment. Second line of comment It is method for GET /users
codegen/python/fixtures/client/requests_client/users_service.py
get_users
feeltheajf/go-raml
0
python
def get_users(self, data, headers=None, query_params=None, content_type='application/json'): '\n First line of comment.\n Second line of comment\n It is method for GET /users\n ' if (query_params is None): query_params = {} uri = (self.client.base_url + '/users') return self.client.get(uri, data, headers, query_params, content_type)
def get_users(self, data, headers=None, query_params=None, content_type='application/json'): '\n First line of comment.\n Second line of comment\n It is method for GET /users\n ' if (query_params is None): query_params = {} uri = (self.client.base_url + '/users') return self.client.get(uri, data, headers, query_params, content_type)<|docstring|>First line of comment. Second line of comment It is method for GET /users<|endoftext|>
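All six service methods above share the generated go-raml shape: build a URI from base_url plus path parameters, default query_params to an empty dict, and delegate to a shared HTTP client. A hedged usage sketch; the Client constructor, its users attribute, and the import path are assumptions about the generated package rather than confirmed API:

    from requests_client import Client  # hypothetical import path

    client = Client(base_url='http://localhost:8080')

    # GET /users/{userId}/address/{addressId}
    resp = client.users.users_userId_address_addressId_get('addr-7', 'user-42')
    print(resp.status_code, resp.json())

    # DELETE /users with a request body
    client.users.users_delete({'ids': ['user-42']})

    # GET /users with extra headers and query parameters
    client.users.get_users(None, headers={'Authorization': 'token ...'}, query_params={'page': 1})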