text_prompt
stringlengths
100
17.7k
code_prompt
stringlengths
7
9.86k
def short_alg(algebraic_string, input_color, position):
    """
    Converts a string written in short algebraic form, the color
    of the side whose turn it is, and the corresponding position
    into a complete move that can be played. If no moves match,
    None is returned.

    Examples: e4, Nf3, exd5, Qxf3, 00, 000, e8=Q

    :type: algebraic_string: str
    :type: input_color: Color
    :type: position: Board
    """
    # First expand the abbreviated notation into a partial move, then
    # validate/complete it against the current board position.
    partial_move = incomplete_alg(algebraic_string, input_color, position)
    return make_legal(partial_move, position)
def implicify_hydrogens(self):
    """
    remove explicit hydrogen if possible

    :return: number of removed hydrogens
    """
    # Map each heavy atom to the explicit H neighbours attached to it.
    explicit = defaultdict(list)
    c = 0
    for n, atom in self.atoms():
        if atom.element == 'H':
            for m in self.neighbors(n):
                if self._node[m].element != 'H':
                    explicit[m].append(n)
    for n, h in explicit.items():
        atom = self._node[n]
        len_h = len(h)
        # Try to drop as many explicit hydrogens as possible: from all of
        # them down to one, keeping the first count that matches the atom's
        # computed implicit-H valence (ignoring the bonds to the dropped Hs).
        for i in range(len_h, 0, -1):
            hi = h[:i]
            if atom.get_implicit_h([y.order for x, y in self._adj[n].items() if x not in hi]) == i:
                for x in hi:
                    self.remove_node(x)
                    c += 1
                break
    # Invalidate cached derived data after mutating the graph.
    self.flush_cache()
    return c
def explicify_hydrogens(self):
    """
    add explicit hydrogens to atoms

    :return: number of added atoms
    """
    # Record one entry per hydrogen that has to be materialised.
    pending = []
    for n, atom in self.atoms():
        if atom.element == 'H':
            continue
        bond_orders = [x.order for x in self._adj[n].values()]
        pending.extend([n] * atom.get_implicit_h(bond_orders))
    # Attach a fresh explicit H atom for every recorded entry.
    for n in pending:
        self.add_bond(n, self.add_atom(H), Bond())
    self.flush_cache()
    return len(pending)
def to_datetime(date):
    """Turn a date into a datetime at midnight.
    """
    midnight = datetime.datetime.min.time()
    return datetime.datetime.combine(date, midnight)
def iter_size_changes(self, issue):
    """Yield an IssueSizeSnapshot for each time the issue size changed.

    :param issue: a JIRA issue fetched with an expanded changelog
    """
    # Find the first size change, if any
    try:
        size_changes = list(filter(
            lambda h: h.field == 'Story Points',
            itertools.chain.from_iterable([c.items for c in issue.changelog.histories])))
    except AttributeError:
        # Issue has no changelog at all -- nothing to yield.
        return

    # If we have no size changes and the issue has a current size then a size
    # must have been specified at issue creation time: report that size.
    try:
        current_size = issue.fields.__dict__[self.fields['StoryPoints']]
    except (AttributeError, KeyError):
        # Narrowed from a bare ``except``: only "field not present" is
        # expected here; anything else should propagate.
        current_size = None

    size = size_changes[0].fromString if size_changes else current_size

    # Snapshot for issue creation.
    yield IssueSizeSnapshot(
        change=None,
        key=issue.key,
        date=dateutil.parser.parse(issue.fields.created),
        size=size
    )

    # One snapshot per subsequent 'Story Points' change.
    for change in issue.changelog.histories:
        change_date = dateutil.parser.parse(change.created)
        for item in change.items:
            if item.field == 'Story Points':
                # StoryPoints value was changed
                size = item.toString
                yield IssueSizeSnapshot(
                    change=item.field,
                    key=issue.key,
                    date=change_date,
                    size=size
                )
def iter_changes(self, issue, include_resolution_changes=True):
    """Yield an IssueSnapshot for each time the issue changed status or
    resolution

    :param issue: a JIRA issue fetched with an expanded changelog
    :param include_resolution_changes: when True, resolution changes also
        produce snapshots (status changes always do)
    """
    is_resolved = False

    # Find the first status change, if any
    try:
        status_changes = list(filter(
            lambda h: h.field == 'status',
            itertools.chain.from_iterable([c.items for c in issue.changelog.histories])))
    except AttributeError:
        # No changelog on the issue -- nothing to yield.
        return

    # Status at creation time: the 'from' side of the first status change,
    # or the current status if the status never changed.
    last_status = status_changes[0].fromString if len(status_changes) > 0 else issue.fields.status.name
    last_resolution = None

    # Issue was created
    yield IssueSnapshot(
        change=None,
        key=issue.key,
        date=dateutil.parser.parse(issue.fields.created),
        status=last_status,
        resolution=None,
        is_resolved=is_resolved
    )

    for change in issue.changelog.histories:
        change_date = dateutil.parser.parse(change.created)

        # Track resolved-ness from the last resolution item in this change,
        # carrying the previous value forward when none is present.
        resolutions = list(filter(lambda i: i.field == 'resolution', change.items))
        is_resolved = (resolutions[-1].to is not None) if len(resolutions) > 0 else is_resolved

        for item in change.items:
            if item.field == 'status':
                # Status was changed
                last_status = item.toString
                yield IssueSnapshot(
                    change=item.field,
                    key=issue.key,
                    date=change_date,
                    status=last_status,
                    resolution=last_resolution,
                    is_resolved=is_resolved
                )
            elif item.field == 'resolution':
                last_resolution = item.toString
                if include_resolution_changes:
                    yield IssueSnapshot(
                        change=item.field,
                        key=issue.key,
                        date=change_date,
                        status=last_status,
                        resolution=last_resolution,
                        is_resolved=is_resolved
                    )
def find_issues(self, criteria=None, jql=None, order='KEY ASC', verbose=False, changelog=True):
    """Return a list of issues with changelog metadata.

    Searches for the `issue_types`, `project`, `valid_resolutions` and
    'jql_filter' set in the passed-in `criteria` object. Pass a JQL string
    to further qualify the query results.

    :param criteria: dict of query criteria (defaults to no criteria)
    :param jql: extra JQL clause ANDed onto the generated query
    :param order: ORDER BY clause for the query
    :param verbose: print progress information when True
    :param changelog: expand each issue's changelog when True
    :return: list of issues, or [] on a JIRA query error
    """
    # ``criteria`` previously defaulted to a mutable ``{}``; use a None
    # sentinel to avoid the shared-mutable-default pitfall. Call-compatible.
    criteria = criteria if criteria is not None else {}

    query = []
    if criteria.get('project', False):
        query.append('project IN (%s)' % ', '.join(['"%s"' % p for p in criteria['project']]))
    if criteria.get('issue_types', False):
        query.append('issueType IN (%s)' % ', '.join(['"%s"' % t for t in criteria['issue_types']]))
    if criteria.get('valid_resolutions', False):
        query.append('(resolution IS EMPTY OR resolution IN (%s))' %
                     ', '.join(['"%s"' % r for r in criteria['valid_resolutions']]))
    if criteria.get('jql_filter') is not None:
        query.append('(%s)' % criteria['jql_filter'])
    if jql is not None:
        query.append('(%s)' % jql)

    queryString = "%s ORDER BY %s" % (' AND '.join(query), order,)
    if verbose:
        print("Fetching issues with query:", queryString)

    # Page through results until an empty page is returned.
    fromRow = 0
    issues = []
    while True:
        try:
            if changelog:
                pageofissues = self.jira.search_issues(
                    queryString, expand='changelog',
                    maxResults=self.settings['max_results'], startAt=fromRow)
            else:
                pageofissues = self.jira.search_issues(
                    queryString, maxResults=self.settings['max_results'], startAt=fromRow)
            fromRow = fromRow + int(self.settings['max_results'])
            issues += pageofissues
            if verbose:
                print("Got %s lines per jira query from result starting at line number %s " %
                      (self.settings['max_results'], fromRow))
            if len(pageofissues) == 0:
                break
        except JIRAError as e:
            # Preserve original behaviour: report and return an empty result.
            print("Jira query error with: {}\n{}".format(queryString, e))
            return []

    if verbose:
        print("Fetched", len(issues), "issues")
    return issues
def list_catalogs(self):
    """
    Lists existing catalogs respect to ui view template format
    """
    form = CatalogSelectForm(current=self.current)
    # Every stored catalog key becomes a (value, label) choice pair.
    choices = [(key, key) for key in fixture_bucket.get_keys()]
    form.set_choices_of('catalog', choices)
    self.form_out(form)
def get_catalog(self):
    """
    Get existing catalog and fill the form with the model data.

    If given key not found as catalog, it generates an empty catalog
    data form.
    """
    catalog_data = fixture_bucket.get(self.input['form']['catalog'])

    # define add or edit based on catalog data exists
    add_or_edit = "Edit" if catalog_data.exists else "Add"

    # generate form
    catalog_edit_form = CatalogEditForm(
        current=self.current,
        title='%s: %s' % (add_or_edit, self.input['form']['catalog']))

    # add model data to form
    if catalog_data.exists:
        if type(catalog_data.data) == list:
            # if catalog data is an array it means no other language of value
            # defined, therefore the value is Turkish
            for key, data in enumerate(catalog_data.data):
                catalog_edit_form.CatalogDatas(catalog_key=key or "0", en='', tr=data)
        if type(catalog_data.data) == dict:
            # dict form carries per-language values keyed by catalog key
            for key, data in catalog_data.data.items():
                catalog_edit_form.CatalogDatas(catalog_key=key, en=data['en'], tr=data['tr'])
    else:
        # unknown key: present a single empty row to start a new catalog
        catalog_edit_form.CatalogDatas(catalog_key="0", en='', tr='')

    self.form_out(catalog_edit_form)
    # schema key for get back what key will be saved, used in save_catalog form
    self.output["object_key"] = self.input['form']['catalog']
def save_catalog(self):
    """
    Saves the catalog data to given key

    Cancels if the cmd is cancel
    Notifies user with the process.
    """
    if self.input["cmd"] == 'save_catalog':
        try:
            # Re-key the submitted rows by their catalog_key.
            edited_object = dict()
            for i in self.input["form"]["CatalogDatas"]:
                edited_object[i["catalog_key"]] = {"en": i["en"], "tr": i["tr"]}
            newobj = fixture_bucket.get(self.input["object_key"])
            newobj.data = edited_object
            newobj.store()
            # notify user by passing notify in output object
            self.output["notify"] = "catalog: %s successfully updated." % self.input[
                "object_key"]
        except Exception:
            # Narrowed from a bare ``except`` so SystemExit/KeyboardInterrupt
            # are not converted into an HTTP 500.
            raise HTTPError(500, "Form object could not be saved")
    if self.input["cmd"] == 'cancel':
        self.output["notify"] = "catalog: %s canceled." % self.input["object_key"]
def merge_truthy(*dicts):
    """Merge multiple dictionaries, keeping the truthy values in case of key
    collisions.

    Accepts any number of dictionaries, or any other object that returns a
    2-tuple of key and value pairs when its `.items()` method is called.

    If a key exists in multiple dictionaries passed to this function, the
    values from the latter dictionary are kept, unless that value is falsy,
    in which case the previously-seen value wins.

    >>> merge_truthy({'a': 1, 'c': 4}, {'a': None, 'b': 2}, {'b': 3})
    {'a': 1, 'b': 3, 'c': 4}
    """
    merged = {}
    for mapping in dicts:
        for key, value in mapping.items():
            # A falsy value only lands if there is no earlier value to keep.
            merged[key] = value if value else merged.get(key, value)
    return merged
def perform(self):
    """Perform the version upgrade on the database.

    Skips versions already recorded as processed unless the configured
    force_version matches. On failure, records the accumulated logs plus
    the traceback before re-raising.
    """
    db_versions = self.table.versions()

    version = self.version
    # Skip already-installed versions, unless this exact version is forced.
    if (version.is_processed(db_versions) and
            not self.config.force_version == self.version.number):
        self.log(
            u'version {} is already installed'.format(version.number)
        )
        return

    self.start()
    try:
        self._perform_version(version)
    except Exception:
        # Python < 3.4: format_exc() returns bytes-ish text that must be
        # decoded before joining with unicode log lines.
        if sys.version_info < (3, 4):
            msg = traceback.format_exc().decode('utf8', errors='ignore')
        else:
            msg = traceback.format_exc()
        error = u'\n'.join(self.logs + [u'\n', msg])
        # Persist the failure log for this version, then propagate.
        self.table.record_log(version.number, error)
        raise

    self.finish()
def _perform_version(self, version):
    """Inner method for version upgrade.

    Not intended for standalone use. This method performs the actual
    version upgrade with all the pre, post operations and addons
    upgrades.

    :param version: The migration version to upgrade to
    :type version: Instance of Version class
    """
    if version.is_noop():
        self.log(u'version {} is a noop'.format(version.number))
    else:
        # Order matters: base pre-ops, mode pre-ops, addons, base post-ops,
        # mode post-ops.
        self.log(u'execute base pre-operations')
        for operation in version.pre_operations():
            operation.execute(self.log)

        if self.config.mode:
            self.log(u'execute %s pre-operations' % self.config.mode)
            for operation in version.pre_operations(mode=self.config.mode):
                operation.execute(self.log)

        self.perform_addons()

        self.log(u'execute base post-operations')
        for operation in version.post_operations():
            operation.execute(self.log)

        if self.config.mode:
            self.log(u'execute %s post-operations' % self.config.mode)
            # NOTE(review): mode is passed positionally here but by keyword to
            # pre_operations above -- presumably the same parameter; confirm
            # against the Version API.
            for operation in version.post_operations(self.config.mode):
                operation.execute(self.log)
def _do_upgrade(self):
    """ open websocket connection

    Marks the response as a protocol upgrade, kicks any existing login for
    this user, binds the user's private channel to the current session and
    flags the user online.
    """
    self.current.output['cmd'] = 'upgrade'
    self.current.output['user_id'] = self.current.user_id
    # Only one active login per user: drop any previous session first.
    self.terminate_existing_login()
    self.current.user.bind_private_channel(self.current.session.sess_id)
    # Remember which session now belongs to this user.
    user_sess = UserSessionID(self.current.user_id)
    user_sess.set(self.current.session.sess_id)
    self.current.user.is_online(True)
    # Clean up the locale from session to allow it to be re-read from the
    # user preferences after login
    for k in translation.DEFAULT_PREFS.keys():
        self.current.session[k] = ''
def do_view(self):
    """
    Authenticate user with given credentials.
    Connects user's queue and exchange

    Sets ``task_data['login_successful']`` and, on failure, responds with
    HTTP status 403.
    """
    self.current.output['login_process'] = True
    self.current.task_data['login_successful'] = False
    if self.current.is_auth:
        # Already authenticated: jump straight to the websocket upgrade.
        self._do_upgrade()
    else:
        try:
            auth_result = self.current.auth.authenticate(
                self.current.input['username'],
                self.current.input['password'])
            self.current.task_data['login_successful'] = auth_result
            if auth_result:
                self._do_upgrade()
        except ObjectDoesNotExist:
            # Unknown user: log it and fall through to the 403 below.
            self.current.log.exception("Wrong username or another error occurred")
            pass
        except:
            # NOTE(review): bare re-raise is a no-op pass-through; presumably
            # kept to document that other errors must propagate.
            raise
        if self.current.output.get('cmd') != 'upgrade':
            # Login did not succeed (no upgrade issued): deny access.
            self.current.output['status_code'] = 403
        else:
            # Successful login resets the keepalive timer for this user.
            KeepAlive(self.current.user_id).reset()
def __get_mapping(self, structures):
    """
    match each pattern to each molecule. if all patterns matches with all molecules
    return generator of all possible mapping.

    :param structures: disjoint molecules
    :return: mapping generator
    """
    pattern_count = len(self.__patterns)
    # Try every assignment of molecules to patterns, then every combination
    # of the per-pair substructure mappings for that assignment.
    for ordering in permutations(structures, pattern_count):
        per_pair_maps = (pattern.get_substructure_mapping(molecule, limit=0)
                         for pattern, molecule in zip(self.__patterns, ordering))
        for combination in product(*per_pair_maps):
            merged = {}
            for partial in combination:
                merged.update(partial)
            if merged:
                yield merged
def get(self, default=None):
    """
    return the cached value or default if it can't be found

    :param default: default value
    :return: cached value
    """
    raw = cache.get(self.key)
    if raw is None:
        return default
    # Serialized entries are stored as UTF-8 encoded JSON.
    if self.serialize:
        return json.loads(raw.decode('utf-8'))
    return raw
def set(self, val, lifetime=None):
    """
    set cache value

    :param val: any picklable object
    :param lifetime: expiration time in sec
    :return: val
    """
    payload = json.dumps(val) if self.serialize else val
    expire = lifetime or settings.DEFAULT_CACHE_EXPIRE_TIME
    cache.set(self.key, payload, expire)
    return val
def remove_item(self, val):
    """
    Removes given item from the list.

    Args:
        val: Item

    Returns:
        Cache backend response.
    """
    # Items are stored JSON-serialized, so serialize before matching.
    serialized = json.dumps(val)
    return cache.lrem(self.key, serialized)
def flush(cls, *args):
    """
    Removes all keys of this namespace

    Without args, clears all keys starting with cls.PREFIX
    if called with args, clears keys starting with given cls.PREFIX + args

    Args:
        *args: Arbitrary number of arguments.

    Returns:
        List of removed keys.
    """
    prefix = cls._make_key(args) if args else cls.PREFIX
    return _remove_keys([], [prefix + '*'])
def update_or_expire_session(self):
    """
    Deletes session if keepalive request expired
    otherwise updates the keepalive timestamp value
    """
    if not hasattr(self, 'key'):
        return
    current_time = time.time()
    # An absent/zero timestamp counts as "just seen now".
    last_seen = float(self.get() or 0) or current_time
    session_key = self.sess_id or UserSessionID(self.user_id).get()
    expired = session_key and current_time - last_seen > self.SESSION_EXPIRE_TIME
    if expired:
        Session(session_key).delete()
        return False
    self.set(current_time)
    return True
def send_message_for_lane_change(sender, **kwargs):
    """
    Sends a message to possible owners of the current workflows
    next lane.

    Args:
        **kwargs: ``current`` and ``possible_owners`` are required.
        sender (User): User object
    """
    current = kwargs['current']
    owners = kwargs['possible_owners']
    # A workflow may carry a custom invite message; otherwise use the default.
    if 'lane_change_invite' in current.task_data:
        msg_context = current.task_data.pop('lane_change_invite')
    else:
        msg_context = DEFAULT_LANE_CHANGE_INVITE_MSG
    wfi = WFCache(current).get_instance()
    # Deletion of used passive task invitation which belongs to previous lane.
    TaskInvitation.objects.filter(instance=wfi, role=current.role, wf_name=wfi.wf.name).delete()
    today = datetime.today()
    for recipient in owners:
        # 15-day window; progress=30 marks a pending lane-change invitation.
        inv = TaskInvitation(
            instance=wfi,
            role=recipient,
            wf_name=wfi.wf.name,
            progress=30,
            start_date=today,
            finish_date=today + timedelta(15)
        )
        inv.title = current.task_data.get('INVITATION_TITLE') or wfi.wf.title
        inv.save()
        # try to send notification, if it fails go on
        try:
            recipient.send_notification(title=msg_context['title'],
                                        message="%s %s" % (wfi.wf.title, msg_context['body']),
                                        typ=1,  # info
                                        url='',
                                        sender=sender
                                        )
        except:  # todo: specify which exception
            # Deliberate best-effort: a failed notification must not abort
            # the remaining invitations.
            pass
def set_password(sender, **kwargs):
    """
    Encrypts password of the user.

    Only acts on User model saves; passwords already hashed (pbkdf2
    prefix) are left untouched.
    """
    if sender.model_class.__name__ != 'User':
        return
    user = kwargs['object']
    # A password not starting with the hash marker is still plain text.
    if not user.password.startswith('$pbkdf2'):
        user.set_password(user.password)
        user.save()
def channel_list(self):
    """
    Main screen for channel management.
    Channels listed and operations can be chosen on the screen.

    If there is an error message like non-choice, it is shown here.
    """
    # A pending message means either a completed operation (info box) or a
    # validation error from a previous step (warning box).
    if self.current.task_data.get('msg', False):
        if self.current.task_data.get('target_channel_key', False):
            self.current.output['msgbox'] = {'type': 'info',
                                             "title": _(u"Successful Operation"),
                                             "msg": self.current.task_data['msg']}
            del self.current.task_data['msg']
        else:
            self.show_warning_messages()
    self.current.task_data['new_channel'] = False
    _form = ChannelListForm(title=_(u'Public Channel List'), help_text=CHANNEL_CHOICE_TEXT)
    # typ=15 selects public channels.
    for channel in Channel.objects.filter(typ=15):
        owner_name = channel.owner.username
        _form.ChannelList(choice=False, name=channel.name, owner=owner_name, key=channel.key)
    _form.new_channel = fields.Button(_(u"Merge At New Channel"), cmd="create_new_channel")
    _form.existing_channel = fields.Button(_(u"Merge With An Existing Channel"),
                                           cmd="choose_existing_channel")
    _form.find_chosen_channel = fields.Button(_(u"Split Channel"), cmd="find_chosen_channel")
    self.form_out(_form)
def channel_choice_control(self):
    """
    It controls errors. If there is an error,
    returns channel list screen with error message.
    """
    task_data = self.current.task_data
    is_valid, message = self.selection_error_control(self.input['form'])
    task_data['control'] = is_valid
    task_data['msg'] = message
    if is_valid:
        # Remember which operation was requested and the selected channels.
        task_data['option'] = self.input['cmd']
        task_data['split_operation'] = False
        keys, names = self.return_selected_form_items(self.input['form']['ChannelList'])
        task_data['chosen_channels'] = keys
        task_data['chosen_channels_names'] = names
def create_new_channel(self):
    """
    Features of new channel are specified like channel's name, owner etc.
    """
    self.current.task_data['new_channel'] = True
    form = NewChannelForm(Channel(), current=self.current)
    form.title = _(u"Specify Features of New Channel to Create")
    form.forward = fields.Button(_(u"Create"), flow="find_target_channel")
    self.form_out(form)
def save_new_channel(self):
    """
    It saves new channel according to specified channel features.
    """
    submitted = self.input['form']
    # typ=15 marks the channel as public.
    channel = Channel(typ=15,
                      name=submitted['name'],
                      description=submitted['description'],
                      owner_id=submitted['owner_id'])
    channel.blocking_save()
    self.current.task_data['target_channel_key'] = channel.key
def choose_existing_channel(self):
    """
    It is a channel choice list and chosen channels
    at previous step shouldn't be on the screen.
    """
    if self.current.task_data.get('msg', False):
        self.show_warning_messages()
    form = ChannelListForm()
    form.title = _(u"Choose a Channel Which Will Be Merged With Chosen Channels")
    # The channels already picked for merging are excluded from the list.
    already_chosen = self.current.task_data['chosen_channels']
    for channel in Channel.objects.filter(typ=15).exclude(key__in=already_chosen):
        form.ChannelList(choice=False, name=channel.name,
                         owner=channel.owner.username, key=channel.key)
    form.choose = fields.Button(_(u"Choose"))
    self.form_out(form)
def existing_choice_control(self):
    """
    It controls errors. It generates an error message
    if zero or more than one channels are selected.
    """
    task_data = self.current.task_data
    # Preset failure state; it is overwritten only on a valid selection.
    task_data['existing'] = False
    task_data['msg'] = _(u"You should choose just one channel to do operation.")
    keys, names = self.return_selected_form_items(self.input['form']['ChannelList'])
    if len(keys) == 1:
        task_data['existing'] = True
        task_data['target_channel_key'] = keys[0]
def split_channel(self):
    """
    A channel can be splitted to new channel or other existing channel.
    It creates subscribers list as selectable to moved.
    """
    if self.current.task_data.get('msg', False):
        self.show_warning_messages()
    self.current.task_data['split_operation'] = True
    source_channel = Channel.objects.get(self.current.task_data['chosen_channels'][0])
    form = SubscriberListForm(title=_(u'Choose Subscribers to Migrate'))
    for subscriber in Subscriber.objects.filter(channel=source_channel):
        form.SubscriberList(choice=False, name=subscriber.user.username, key=subscriber.key)
    form.new_channel = fields.Button(_(u"Move to a New Channel"), cmd="create_new_channel")
    form.existing_channel = fields.Button(_(u"Move to an Existing Channel"),
                                          cmd="choose_existing_channel")
    self.form_out(form)
def subscriber_choice_control(self):
    """
    It controls subscribers choice and generates
    error message if there is a non-choice.
    """
    self.current.task_data['option'] = None
    self.current.task_data['chosen_subscribers'], names = self.return_selected_form_items(
        self.input['form']['SubscriberList'])
    # Consistency fix: every other user-facing message in these views is
    # wrapped in the ``_()`` translation helper; this one was plain text.
    self.current.task_data['msg'] = _(
        u"You should choose at least one subscriber for migration operation.")
    if self.current.task_data['chosen_subscribers']:
        self.current.task_data['option'] = self.input['cmd']
        del self.current.task_data['msg']
def move_complete_channel(self):
    """
    Channels and theirs subscribers are moved
    completely to new channel or existing channel.
    """
    to_channel = Channel.objects.get(self.current.task_data['target_channel_key'])
    chosen_channels = self.current.task_data['chosen_channels']
    chosen_channels_names = self.current.task_data['chosen_channels_names']
    # Re-point all subscribers of the source channels at the target channel;
    # BlockSave batches the saves for the given query scope.
    with BlockSave(Subscriber, query_dict={'channel_id': to_channel.key}):
        for s in Subscriber.objects.filter(channel_id__in=chosen_channels, typ=15):
            s.channel = to_channel
            s.save()
    # Messages of the merged channels are removed, then the channels themselves.
    with BlockDelete(Message):
        Message.objects.filter(channel_id__in=chosen_channels, typ=15).delete()
    with BlockDelete(Channel):
        Channel.objects.filter(key__in=chosen_channels).delete()
    self.current.task_data[
        'msg'] = _(u"Chosen channels(%s) have been merged to '%s' channel successfully.") % \
                 (', '.join(chosen_channels_names), to_channel.name)
def move_chosen_subscribers(self):
    """
    After splitting operation, only chosen subscribers
    are moved to new channel or existing channel.
    """
    from_channel = Channel.objects.get(self.current.task_data['chosen_channels'][0])
    to_channel = Channel.objects.get(self.current.task_data['target_channel_key'])
    # Re-point only the selected subscribers at the target channel.
    with BlockSave(Subscriber, query_dict={'channel_id': to_channel.key}):
        for subscriber in Subscriber.objects.filter(
                key__in=self.current.task_data['chosen_subscribers']):
            subscriber.channel = to_channel
            subscriber.save()
    # When the target is a freshly created channel, carry message history over.
    if self.current.task_data['new_channel']:
        self.copy_and_move_messages(from_channel, to_channel)
    self.current.task_data[
        'msg'] = _(u"Chosen subscribers and messages of them migrated from '%s' channel to "
                   u"'%s' channel successfully.") % (from_channel.name, to_channel.name)
def copy_and_move_messages(from_channel, to_channel):
    """
    While splitting channel and moving chosen subscribers to new channel,
    old channel's messages are copied and moved to new channel.

    Args:
        from_channel (Channel object): move messages from channel
        to_channel (Channel object): move messages to channel
    """
    with BlockSave(Message, query_dict={'channel_id': to_channel.key}):
        for original in Message.objects.filter(channel=from_channel, typ=15):
            # Clearing the key makes the save create a copy in the target
            # channel instead of moving the original record.
            original.key = ''
            original.channel = to_channel
            original.save()
def show_warning_messages(self, title=_(u"Incorrect Operation"), box_type='warning'):
    """
    It shows incorrect operations or successful operation messages.

    Args:
        title (string): title of message box
        box_type (string): type of message box (warning, info)
    """
    # NOTE: the ``_()`` default for ``title`` is evaluated once at import
    # time, so it is translated with the locale active at import.
    task_data = self.current.task_data
    self.current.output['msgbox'] = {'type': box_type,
                                     "title": title,
                                     "msg": task_data['msg']}
    # The message is one-shot: consume it so it is not shown twice.
    del task_data['msg']
def return_selected_form_items(form_info):
    """
    It returns chosen keys list from a given form.

    Args:
        form_info: serialized list of dict form data

    Returns:
        selected_keys(list): Chosen keys list
        selected_names(list): Chosen channels' or subscribers' names.
    """
    chosen = [(entry['key'], entry['name']) for entry in form_info if entry['choice']]
    selected_keys = [key for key, _name in chosen]
    selected_names = [name for _key, name in chosen]
    return selected_keys, selected_names
def selection_error_control(self, form_info):
    """
    It controls the selection from the form according
    to the operations, and returns an error message
    if it does not comply with the rules.

    Args:
        form_info: Channel or subscriber form from the user

    Returns:
        True or False
        error message
    """
    keys, names = self.return_selected_form_items(form_info['ChannelList'])
    selection_count = len(keys)
    # Each operation has its own minimum/exact selection requirement.
    if form_info['new_channel'] and selection_count < 2:
        return False, _(
            u"You should choose at least two channel to merge operation at a new channel.")
    if form_info['existing_channel'] and selection_count == 0:
        return False, _(
            u"You should choose at least one channel to merge operation with existing channel.")
    if form_info['find_chosen_channel'] and selection_count != 1:
        return False, _(u"You should choose one channel for split operation.")
    return True, None
<SYSTEM_TASK:> Yields the sequence of prime numbers via the Sieve of Eratosthenes. <END_TASK> <USER_TASK:> Description: def _eratosthenes(): """Yields the sequence of prime numbers via the Sieve of Eratosthenes."""
d = {} # map each composite integer to its first-found prime factor for q in count(2): # q gets 2, 3, 4, 5, ... ad infinitum p = d.pop(q, None) if p is None: # q not a key in D, so q is prime, therefore, yield it yield q # mark q squared as not-prime (with q as first-found prime factor) d[q * q] = q else: # let x <- smallest (N*p)+q which wasn't yet known to be composite # we just learned x is composite, with p first-found prime factor, # since p is the first-found prime factor of q -- find and mark it x = p + q while x in d: x += p d[x] = p
def atoms_order(self):
    """
    Morgan like algorithm for graph nodes ordering

    :return: dict of atom-weight pairs
    """
    if not len(self):  # for empty containers
        return {}
    elif len(self) == 1:  # optimize single atom containers
        return dict.fromkeys(self, 2)

    # Initial invariant per atom: (atom code, sorted tuple of incident bond
    # codes). Equal invariants get the same prime number as starting weight.
    params = {n: (int(node), tuple(sorted(int(edge) for edge in self._adj[n].values())))
              for n, node in self.atoms()}
    newlevels = {}
    countprime = iter(primes)
    weights = {x: newlevels.get(y) or newlevels.setdefault(y, next(countprime))
               for x, y in sorted(params.items(), key=itemgetter(1))}

    tries = len(self) * 4
    numb = len(set(weights.values()))
    stab = 0

    while tries:
        oldnumb = numb
        neweights = {}
        countprime = iter(primes)

        # weights[n] ** 2 NEED for differentiation of molecules like A-B or
        # any other complete graphs.
        tmp = {n: reduce(mul, (weights[x] for x in m), weights[n] ** 2)
               for n, m in self._adj.items()}

        # Re-rank: equal products collapse to the same fresh prime.
        weights = {x: (neweights.get(y) or neweights.setdefault(y, next(countprime)))
                   for x, y in sorted(tmp.items(), key=itemgetter(1))}

        numb = len(set(weights.values()))
        if numb == len(self):  # each atom now unique
            break
        elif numb == oldnumb:
            # No refinement this round: track stability before giving up.
            x = Counter(weights.values())
            if x[min(x)] > 1:
                if stab == 3:
                    break
            elif stab >= 2:
                break
            stab += 1
        elif stab:
            stab = 0

        tries -= 1
        if not tries and numb < oldnumb:
            warning('morgan. number of attempts exceeded. uniqueness has decreased. '
                    'next attempt will be made')
            tries = 1
    else:
        # NOTE(review): loop exhausted without break -- reconstruction of this
        # while/else from the flattened source is inferred; confirm upstream.
        warning('morgan. number of attempts exceeded')

    return weights
def get_field_cache(self, cache_type='es'):
    """Return a list of fields' mappings

    :param cache_type: 'kibana' reads the cached index-pattern document from
        the .kibana index; 'es'/'elastic*' derives the cache from the live
        Elasticsearch mappings.
    :return: list of field mapping dicts, [] on kibana HTTP error,
        None for an unknown cache_type
    """
    if cache_type == 'kibana':
        try:
            search_results = urlopen(self.get_url).read().decode('utf-8')
        except HTTPError:  # as e:
            # self.pr_err("get_field_cache(kibana), HTTPError: %s" % e)
            return []
        index_pattern = json.loads(search_results)
        # Result shape: {"_index":".kibana","_type":"index-pattern",...,
        #   "_source":{"title":"...","fields":"<JSON string we want>"}}
        fields_str = index_pattern['_source']['fields']
        # 'fields' is itself a JSON-encoded string, hence the second loads.
        return json.loads(fields_str)
    elif cache_type == 'es' or cache_type.startswith('elastic'):
        search_results = urlopen(self.es_get_url).read().decode('utf-8')
        es_mappings = json.loads(search_results)
        # Result shape: {"<index_name>":{"mappings":{"<doc_type>":{...}}}, ...}
        # now convert the mappings into the .kibana format
        field_cache = []
        for (index_name, val) in iteritems(es_mappings):
            if index_name != self.index:  # only get non-'.kibana' indices
                m_dict = es_mappings[index_name]['mappings']
                mappings = self.get_index_mappings(m_dict)
                field_cache.extend(mappings)
        # Multiple indices can define the same field; keep one entry each.
        field_cache = self.dedup_field_cache(field_cache)
        return field_cache
    self.pr_err("Unknown cache type: %s" % cache_type)
    return None
def post_field_cache(self, field_cache):
    """Where field_cache is a list of fields' mappings

    Serializes the cache into a .kibana index-pattern document and POSTs it.

    :return: always 0
    """
    index_pattern = self.field_cache_to_index_pattern(field_cache)
    # self.pr_dbg("request/post: %s" % index_pattern)
    resp = requests.post(self.post_url, data=index_pattern).text
    # Response shape: {"_index":".kibana","_type":"index-pattern",
    #   "_id":"...","_version":1,"created":true}
    # The parsed response is discarded; json.loads only serves to raise on a
    # malformed/non-JSON response body.
    resp = json.loads(resp)
    return 0
<SYSTEM_TASK:> Assert minimum set of fields in cache, does not validate contents <END_TASK> <USER_TASK:> Description: def check_mapping(self, m): """Assert minimum set of fields in cache, does not validate contents"""
if 'name' not in m:
            self.pr_dbg("Missing %s" % "name")
            return False
        # self.pr_dbg("Checking %s" % m['name'])
        # All of these keys must exist and be non-empty strings.
        for x in ['analyzed', 'indexed', 'type', 'scripted', 'count']:
            if x not in m or m[x] == "":
                self.pr_dbg("Missing %s" % x)
                self.pr_dbg("Full %s" % m)
                return False
        if 'doc_values' not in m or m['doc_values'] == "":
            if not m['name'].startswith('_'):
                self.pr_dbg("Missing %s" % "doc_values")
                return False
            # NOTE: mutates the caller's dict — underscore-prefixed (meta) fields
            # get a defaulted doc_values of False instead of failing the check.
            m['doc_values'] = False
        return True
<SYSTEM_TASK:> Converts ES field mappings to .kibana field mappings <END_TASK> <USER_TASK:> Description: def get_field_mappings(self, field): """Converts ES field mappings to .kibana field mappings"""
retdict = {}
        # Defaults: a field is neither indexed nor analyzed unless ES says so.
        retdict['indexed'] = False
        retdict['analyzed'] = False
        for (key, val) in iteritems(field):
            if key in self.mappings:
                # Kibana collapses all numeric ES types into a single "number" type.
                if (key == 'type' and
                        (val == "long" or val == "integer" or
                         val == "double" or val == "float")):
                    val = "number"
                # self.pr_dbg("\t\t\tkey: %s" % key)
                # self.pr_dbg("\t\t\t\tval: %s" % val)
                retdict[key] = val
            # ES 'index' values: "no" / "not_analyzed" / "analyzed";
            # anything other than "no" means the field is indexed.
            if key == 'index' and val != "no":
                retdict['indexed'] = True
                # self.pr_dbg("\t\t\tkey: %s" % key)
                # self.pr_dbg("\t\t\t\tval: %s" % val)
                if val == "analyzed":
                    retdict['analyzed'] = True
        return retdict
<SYSTEM_TASK:> Test if k_cache is incomplete <END_TASK> <USER_TASK:> Description: def is_kibana_cache_incomplete(self, es_cache, k_cache): """Test if k_cache is incomplete Assume k_cache is always correct, but could be missing new fields that es_cache has """
# convert list into dict, with each item's ['name'] as key k_dict = {} for field in k_cache: # self.pr_dbg("field: %s" % field) k_dict[field['name']] = field for ign_f in self.mappings_ignore: k_dict[field['name']][ign_f] = 0 es_dict = {} for field in es_cache: es_dict[field['name']] = field for ign_f in self.mappings_ignore: es_dict[field['name']][ign_f] = 0 es_set = set(es_dict.keys()) k_set = set(k_dict.keys()) # reasons why kibana cache could be incomplete: # k_dict is missing keys that are within es_dict # We don't care if k has keys that es doesn't # es {1,2} k {1,2,3}; intersection {1,2}; len(es-{}) 0 # es {1,2} k {1,2}; intersection {1,2}; len(es-{}) 0 # es {1,2} k {}; intersection {}; len(es-{}) 2 # es {1,2} k {1}; intersection {1}; len(es-{}) 1 # es {2,3} k {1}; intersection {}; len(es-{}) 2 # es {2,3} k {1,2}; intersection {2}; len(es-{}) 1 return len(es_set - k_set.intersection(es_set)) > 0
<SYSTEM_TASK:> Convert list into a data structure we can query easier <END_TASK> <USER_TASK:> Description: def list_to_compare_dict(self, list_form): """Convert list into a data structure we can query easier"""
compare_dict = {}
        for field in list_form:
            if field['name'] in compare_dict:
                self.pr_dbg("List has duplicate field %s:\n%s" %
                            (field['name'], compare_dict[field['name']]))
                # Duplicates with identical content are tolerated (last one wins);
                # only *conflicting* duplicates abort with None.
                if compare_dict[field['name']] != field:
                    self.pr_dbg("And values are different:\n%s" % field)
                    return None
            compare_dict[field['name']] = field
            # Zero out ignored fields so later equality checks skip them.
            # NOTE: mutates the dicts inside the caller's list in place.
            for ign_f in self.mappings_ignore:
                compare_dict[field['name']][ign_f] = 0
        return compare_dict
<SYSTEM_TASK:> Verify original is subset of replica <END_TASK> <USER_TASK:> Description: def compare_field_caches(self, replica, original): """Verify original is subset of replica"""
if original is None: original = [] if replica is None: replica = [] self.pr_dbg("Comparing orig with %s fields to replica with %s fields" % (len(original), len(replica))) # convert list into dict, with each item's ['name'] as key orig = self.list_to_compare_dict(original) if orig is None: self.pr_dbg("Original has duplicate fields") return 1 repl = self.list_to_compare_dict(replica) if repl is None: self.pr_dbg("Replica has duplicate fields") return 1 # search orig for each item in repl # if any items in repl not within orig or vice versa, then complain # make sure contents of each item match orig_found = {} for (key, field) in iteritems(repl): field_name = field['name'] if field_name not in orig: self.pr_dbg("Replica has field not found in orig %s: %s" % (field_name, field)) return 1 orig_found[field_name] = True if orig[field_name] != field: self.pr_dbg("Field in replica doesn't match orig:") self.pr_dbg("orig:%s\nrepl:%s" % (orig[field_name], field)) return 1 unfound = set(orig_found.keys()) - set(repl.keys()) if len(unfound) > 0: self.pr_dbg("Orig contains fields that were not in replica") self.pr_dbg('%s' % unfound) return 1 # We don't care about case when replica has more fields than orig # unfound = set(repl.keys()) - set(orig_found.keys()) # if len(unfound) > 0: # self.pr_dbg("Replica contains fields that were not in orig") # self.pr_dbg('%s' % unfound) # return 1 self.pr_dbg("Original matches replica") return 0
<SYSTEM_TASK:> starts a daemon thread for a given target function and arguments. <END_TASK> <USER_TASK:> Description: def start_daemon_thread(target, args=()): """starts a daemon thread for a given target function and arguments."""
th = Thread(target=target, args=args) th.daemon = True th.start() return th
<SYSTEM_TASK:> returns all the keys in a dictionary. <END_TASK> <USER_TASK:> Description: def serialize_dict_keys(d, prefix=""): """returns all the keys in a dictionary. >>> serialize_dict_keys({"a": {"b": {"c": 1, "b": 2} } }) ['a', 'a.b', 'a.b.c', 'a.b.b'] """
keys = [] for k, v in d.iteritems(): fqk = '%s%s' % (prefix, k) keys.append(fqk) if isinstance(v, dict): keys.extend(serialize_dict_keys(v, prefix="%s." % fqk)) return keys
<SYSTEM_TASK:> Finds if square on the board is occupied by a ``Piece`` <END_TASK> <USER_TASK:> Description: def contains_opposite_color_piece(self, square, position): """ Finds if square on the board is occupied by a ``Piece`` belonging to the opponent. :type: square: Location :type: position: Board :rtype: bool """
# Short-circuits: piece_at_square is only consulted when the square is
        # occupied, so an empty square safely yields False.
        return not position.is_square_empty(square) and \
            position.piece_at_square(square).color != self.color
<SYSTEM_TASK:> Mark a message as translateable, and translate it. <END_TASK> <USER_TASK:> Description: def gettext(message, domain=DEFAULT_DOMAIN): """Mark a message as translateable, and translate it. All messages in the application that are translateable should be wrapped with this function. When importing this function, it should be renamed to '_'. For example: .. code-block:: python from zengine.lib.translation import gettext as _ print(_('Hello, world!')) 'Merhaba, dünya!' For the messages that will be formatted later on, instead of using the position-based formatting, key-based formatting should be used. This gives the translator an idea what the variables in the format are going to be, and makes it possible for the translator to reorder the variables. For example: .. code-block:: python name, number = 'Elizabeth', 'II' _('Queen %(name)s %(number)s') % {'name': name, 'number': number} 'Kraliçe II. Elizabeth' The message returned by this function depends on the language of the current user. If this function is called before a language is installed (which is normally done by ZEngine when the user connects), this function will simply return the message without modification. If there are messages containing unicode characters, in Python 2 these messages must be marked as unicode. Otherwise, python will not be able to correctly match these messages with translations. For example: .. code-block:: python print(_('Café')) 'Café' print(_(u'Café')) 'Kahve' Args: message (basestring, unicode): The input message. domain (basestring): The domain of the message. Defaults to 'messages', which is the domain where all application messages should be located. Returns: unicode: The translated message. """
if six.PY2:
            # Python 2: ugettext returns unicode rather than a byte string.
            return InstalledLocale._active_catalogs[domain].ugettext(message)
        else:
            # Python 3: gettext already returns str (unicode).
            return InstalledLocale._active_catalogs[domain].gettext(message)
<SYSTEM_TASK:> Mark a message as translatable, but delay the translation until the message is used. <END_TASK> <USER_TASK:> Description: def gettext_lazy(message, domain=DEFAULT_DOMAIN): """Mark a message as translatable, but delay the translation until the message is used. Sometimes, there are some messages that need to be translated, but the translation can't be done at the point the message itself is written. For example, the names of the fields in a Model can't be translated at the point they are written, otherwise the translation would be done when the file is imported, long before a user even connects. To avoid this, `gettext_lazy` should be used. For example: .. code-block:: python from zengine.lib.translation import gettext_lazy, InstalledLocale from pyoko import model, fields class User(model.Model): name = fields.String(gettext_lazy('User Name')) print(User.name.title) 'User Name' InstalledLocale.install_language('tr') print(User.name.title) 'Kullanıcı Adı' Args: message (basestring, unicode): The input message. domain (basestring): The domain of the message. Defaults to 'messages', which is the domain where all application messages should be located. Returns: unicode: The translated message, with the translation itself being delayed until the text is actually used. """
# enable_cache=False: the proxy re-evaluates gettext() on every access, so
        # the language active at *use* time (not definition time) is honored.
        return LazyProxy(gettext, message, domain=domain, enable_cache=False)
<SYSTEM_TASK:> Mark a message as translateable, and translate it considering plural forms. <END_TASK> <USER_TASK:> Description: def ngettext(singular, plural, n, domain=DEFAULT_DOMAIN): """Mark a message as translateable, and translate it considering plural forms. Some messages may need to change based on a number. For example, consider a message like the following: .. code-block:: python def alert_msg(msg_count): print( 'You have %d %s' % (msg_count, 'message' if msg_count == 1 else 'messages')) alert_msg(1) 'You have 1 message' alert_msg(5) 'You have 5 messages' To translate this message, you can use ngettext to consider the plural forms: .. code-block:: python from zengine.lib.translation import ngettext def alert_msg(msg_count): print(ngettext('You have %(count)d message', 'You have %(count)d messages', msg_count) % {'count': msg_count}) alert_msg(1) '1 mesajınız var' alert_msg(5) '5 mesajlarınız var' When doing formatting, both singular and plural forms of the message should have the exactly same variables. Args: singular (unicode): The singular form of the message. plural (unicode): The plural form of the message. n (int): The number that is used to decide which form should be used. domain (basestring): The domain of the message. Defaults to 'messages', which is the domain where all application messages should be located. Returns: unicode: The correct pluralization, translated. """
if six.PY2:
            # Python 2: ungettext returns unicode rather than a byte string.
            return InstalledLocale._active_catalogs[domain].ungettext(singular, plural, n)
        else:
            # Python 3: ngettext already returns str (unicode).
            return InstalledLocale._active_catalogs[domain].ngettext(singular, plural, n)
<SYSTEM_TASK:> Mark a message with plural forms translateable, and delay the translation <END_TASK> <USER_TASK:> Description: def ngettext_lazy(singular, plural, n, domain=DEFAULT_DOMAIN): """Mark a message with plural forms translateable, and delay the translation until the message is used. Works the same was a `ngettext`, with a delaying functionality similiar to `gettext_lazy`. Args: singular (unicode): The singular form of the message. plural (unicode): The plural form of the message. n (int): The number that is used to decide which form should be used. domain (basestring): The domain of the message. Defaults to 'messages', which is the domain where all application messages should be located. Returns: unicode: The correct pluralization, with the translation being delayed until the message is used. """
# enable_cache=False: re-evaluate on every access so the pluralization uses
        # the language installed at use time, mirroring gettext_lazy.
        return LazyProxy(ngettext, singular, plural, n, domain=domain, enable_cache=False)
<SYSTEM_TASK:> Install the translations for language specified by `language_code`. <END_TASK> <USER_TASK:> Description: def install_language(cls, language_code): """Install the translations for language specified by `language_code`. If we don't have translations for this language, then the default language will be used. If the language specified is already installed, then this is a no-op. """
# Skip if the language is already installed if language_code == cls.language: return try: cls._active_catalogs = cls._translation_catalogs[language_code] cls.language = language_code log.debug('Installed language %s', language_code) except KeyError: default = settings.DEFAULT_LANG log.warning('Unknown language %s, falling back to %s', language_code, default) cls._active_catalogs = cls._translation_catalogs[default] cls.language = default
<SYSTEM_TASK:> Install the locale specified by `language_code`, for localizations of type `locale_type`. <END_TASK> <USER_TASK:> Description: def install_locale(cls, locale_code, locale_type): """Install the locale specified by `language_code`, for localizations of type `locale_type`. If we can't perform localized formatting for the specified locale, then the default localization format will be used. If the locale specified is already installed for the selected type, then this is a no-op. """
# Skip if the locale is already installed if locale_code == getattr(cls, locale_type): return try: # We create a Locale instance to see if the locale code is supported locale = Locale(locale_code) log.debug('Installed locale %s', locale_code) except UnknownLocaleError: default = settings.DEFAULT_LOCALIZATION_FORMAT log.warning('Unknown locale %s, falling back to %s', locale_code, default) locale = Locale(default) setattr(cls, locale_type, locale.language)
<SYSTEM_TASK:> Returns the entry specified by index <END_TASK> <USER_TASK:> Description: def get_by_index(self, index): """ Returns the entry specified by index Note that the table is 1-based ie an index of 0 is invalid. This is due to the fact that a zero value index signals that a completely unindexed header follows. The entry will either be from the static table or the dynamic table depending on the value of index. """
# Convert the 1-based wire index to a 0-based offset into the static table.
        index -= 1
        if 0 <= index < len(CocaineHeaders.STATIC_TABLE):
            return CocaineHeaders.STATIC_TABLE[index]
        # Past the static table: re-base into the dynamic table.
        index -= len(CocaineHeaders.STATIC_TABLE)
        if 0 <= index < len(self.dynamic_entries):
            return self.dynamic_entries[index]
        # NOTE(review): by this point `index` has been shifted twice, so the
        # message reports the re-based value, not the caller's original index.
        raise InvalidTableIndex("Invalid table index %d" % index)
<SYSTEM_TASK:> Adds a new entry to the table <END_TASK> <USER_TASK:> Description: def add(self, name, value): """ Adds a new entry to the table We reduce the table size if the entry will make the table size greater than maxsize. """
# We just clear the table if the entry is too big size = table_entry_size(name, value) if size > self._maxsize: self.dynamic_entries.clear() self._current_size = 0 # Add new entry if the table actually has a size elif self._maxsize > 0: self.dynamic_entries.appendleft((name, value)) self._current_size += size self._shrink()
<SYSTEM_TASK:> Searches the table for the entry specified by name <END_TASK> <USER_TASK:> Description: def search(self, name, value): """ Searches the table for the entry specified by name and value Returns one of the following: - ``None``, no match at all - ``(index, name, None)`` for partial matches on name only. - ``(index, name, value)`` for perfect matches. """
partial = None
        # STATIC_TABLE_MAPPING values are (name_index, {value: index}) pairs,
        # as established by the [0]/[1] accesses below.
        header_name_search_result = CocaineHeaders.STATIC_TABLE_MAPPING.get(name)
        if header_name_search_result:
            index = header_name_search_result[1].get(value)
            if index is not None:
                # Perfect static-table match.
                return index, name, value
            # Name-only static match; keep looking for a perfect dynamic match.
            partial = (header_name_search_result[0], name, None)
        # Dynamic-table indices start right after the static table (1-based).
        offset = len(CocaineHeaders.STATIC_TABLE)
        for (i, (n, v)) in enumerate(self.dynamic_entries):
            if n == name:
                if v == value:
                    return i + offset + 1, n, v
                elif partial is None:
                    partial = (i + offset + 1, n, None)
        # Either a name-only partial match or None if nothing matched at all.
        return partial
<SYSTEM_TASK:> Shrinks the dynamic table to be at or below maxsize <END_TASK> <USER_TASK:> Description: def _shrink(self): """ Shrinks the dynamic table to be at or below maxsize """
cursize = self._current_size
        # Evict oldest entries (right end — new entries are appendleft'ed)
        # until the table fits within maxsize again.
        while cursize > self._maxsize:
            name, value = self.dynamic_entries.pop()
            cursize -= table_entry_size(name, value)
        self._current_size = cursize
<SYSTEM_TASK:> Safely print a unicode string <END_TASK> <USER_TASK:> Description: def safe_print(ustring, errors='replace', **kwargs): """ Safely print a unicode string """
# stdout.encoding can be None (e.g. when piped); fall back to utf-8.
        # Only the Python 2 branch actually uses this value.
        encoding = sys.stdout.encoding or 'utf-8'
        if sys.version_info[0] == 3:
            # Python 3 handles unicode output natively.
            print(ustring, **kwargs)
        else:
            # Python 2: encode explicitly so unsupported characters follow the
            # requested `errors` policy instead of raising UnicodeEncodeError.
            bytestr = ustring.encode(encoding, errors=errors)
            print(bytestr, **kwargs)
<SYSTEM_TASK:> Creates the view used to edit permissions. <END_TASK> <USER_TASK:> Description: def edit_permissions(self): """Creates the view used to edit permissions. To create the view, data in the following format is passed to the UI in the objects field: .. code-block:: python { "type": "tree-toggle", "action": "set_permission", "tree": [ { "checked": true, "name": "Workflow 1 Name", "id": "workflow1", "children": [ { "checked": true, "name": "Task 1 Name", "id": "workflow1..task1", "children": [] }, { "checked": false, "id": "workflow1..task2", "name": "Task 2 Name", "children": [] } ] }, { "checked": true, "name": "Workflow 2 Name", "id": "workflow2", "children": [ { "checked": true, "name": "Workflow 2 Lane 1 Name", "id": "workflow2.lane1", "children": [ { "checked": true, "name": "Workflow 2 Task 1 Name", "id": "workflow2.lane1.task1", "children": [] }, { "checked": false, "name": "Workflow 2 Task 2 Name", "id": "workflow2.lane1.task2", "children": [] } ] } ] } ] } "type" field denotes that the object is a tree view which has elements that can be toggled. "action" field is the "name" field is the human readable name. "id" field is used to make requests to the backend. "checked" field shows whether the role has the permission or not. "children" field is the sub-permissions of the permission. """
# Get the role that was selected in the CRUD view key = self.current.input['object_id'] self.current.task_data['role_id'] = key role = RoleModel.objects.get(key=key) # Get the cached permission tree, or build a new one if there is none cached # TODO: Add an extra view in case there was no cache, as in 'please wait calculating permissions' permission_tree = self._permission_trees(PermissionModel.objects) # Apply the selected role to the permission tree, setting the 'checked' field # of the permission the role has role_tree = self._apply_role_tree(permission_tree, role) # Apply final formatting, and output the tree to the UI self.output['objects'] = [ { 'type': 'tree-toggle', 'action': 'apply_change', 'trees': self._format_tree_output(role_tree), }, ] self.form_out(PermissionForm())
<SYSTEM_TASK:> Get the cached permission tree, or build a new one if necessary. <END_TASK> <USER_TASK:> Description: def _permission_trees(permissions): """Get the cached permission tree, or build a new one if necessary."""
treecache = PermissionTreeCache()
        cached = treecache.get()
        if not cached:
            # Cache miss: build the tree from all permissions, serialize it,
            # and store the serialized form for subsequent calls.
            tree = PermissionTreeBuilder()
            for permission in permissions:
                tree.insert(permission)
            result = tree.serialize()
            treecache.set(result)
            return result
        return cached
<SYSTEM_TASK:> Traverses the permission tree, returning the permission at given permission path. <END_TASK> <USER_TASK:> Description: def _traverse_tree(tree, path): """Traverses the permission tree, returning the permission at given permission path."""
path_steps = (step for step in path.split('.') if step != '') # Special handling for first step, because the first step isn't under 'objects' first_step = path_steps.next() subtree = tree[first_step] for step in path_steps: subtree = subtree['children'][step] return subtree
<SYSTEM_TASK:> Recursively format all subtrees. <END_TASK> <USER_TASK:> Description: def _format_subtree(self, subtree): """Recursively format all subtrees."""
# Convert the 'children' dict into a list for the UI, then recurse.
        # NOTE: mutates `subtree` in place and also returns it for convenience.
        subtree['children'] = list(subtree['children'].values())
        for child in subtree['children']:
            self._format_subtree(child)
        return subtree
<SYSTEM_TASK:> Applies changes to the permissions of the role. <END_TASK> <USER_TASK:> Description: def apply_change(self): """Applies changes to the permissions of the role. To make a change to the permission of the role, a request in the following format should be sent: .. code-block:: python { 'change': { 'id': 'workflow2.lane1.task1', 'checked': false }, } The 'id' field of the change is the id of the tree element that was sent to the UI (see `Permissions.edit_permissions`). 'checked' field is the new state of the element. """
# NOTE(review): the docstring example shows a single change object, but
        # the code iterates — presumably the UI sends a list of changes; confirm.
        changes = self.input['change']
        # role_id was stashed in task_data by edit_permissions.
        key = self.current.task_data['role_id']
        role = RoleModel.objects.get(key=key)
        for change in changes:
            permission = PermissionModel.objects.get(code=change['id'])
            if change['checked'] is True:
                role.add_permission(permission)
            else:
                role.remove_permission(permission)
        # Persist all permission changes in one save.
        role.save()
<SYSTEM_TASK:> write single molecule into file <END_TASK> <USER_TASK:> Description: def write(self, data): """ write single molecule into file """
m = self._convert_structure(data)
        self._file.write(self._format_mol(*m))
        # Molblock terminator, then one data item per metadata entry, then the
        # SDF record separator — presumably MDL SDF layout; confirm spacing of
        # the 'M END' / '> <name>' markers against the MDL spec.
        self._file.write('M END\n')
        for k, v in data.meta.items():
            self._file.write(f'> <{k}>\n{v}\n')
        self._file.write('$$$$\n')
<SYSTEM_TASK:> If we aren't come to the end of the wf, <END_TASK> <USER_TASK:> Description: def save_workflow_to_cache(self, serialized_wf_instance): """ If we aren't come to the end of the wf, saves the wf state and task_data to cache Task_data items that starts with underscore "_" are treated as local and does not passed to subsequent task steps. """
# self.current.task_data['flow'] = None task_data = self.current.task_data.copy() for k, v in list(task_data.items()): if k.startswith('_'): del task_data[k] if 'cmd' in task_data: del task_data['cmd'] self.wf_state.update({'step': serialized_wf_instance, 'data': task_data, 'name': self.current.workflow_name, 'wf_id': self.workflow_spec.wf_id }) if self.current.lane_id: self.current.pool[self.current.lane_id] = self.current.role.key self.wf_state['pool'] = self.current.pool self.current.log.debug("POOL Content before WF Save: %s" % self.current.pool) self.current.wf_cache.save(self.wf_state)
<SYSTEM_TASK:> loads the serialized wf state and data from cache <END_TASK> <USER_TASK:> Description: def load_workflow_from_cache(self): """ loads the serialized wf state and data from cache updates the self.current.task_data """
# Only resume from cache for existing tokens; for a new token this falls
        # through and implicitly returns None (caller then creates a fresh WF).
        if not self.current.new_token:
            self.wf_state = self.current.wf_cache.get(self.wf_state)
            self.current.task_data = self.wf_state['data']
            self.current.set_client_cmds()
            self.current.pool = self.wf_state['pool']
            return self.wf_state['step']
<SYSTEM_TASK:> Serializes the current WF. <END_TASK> <USER_TASK:> Description: def serialize_workflow(self): """ Serializes the current WF. Returns: WF state data. """
# Bring WAITING tasks up to date so the serialized snapshot is current.
        self.workflow.refresh_waiting_tasks()
        # include_spec=False: only the state is serialized; the spec is rebuilt
        # from the BPMN diagram on load (see get_worfklow_spec).
        return CompactWorkflowSerializer().serialize_workflow(self.workflow, include_spec=False)
<SYSTEM_TASK:> Tries to find the path of the workflow diagram file <END_TASK> <USER_TASK:> Description: def find_workflow_path(self): """ Tries to find the path of the workflow diagram file in `WORKFLOW_PACKAGES_PATHS`. Returns: Path of the workflow spec file (BPMN diagram) """
# First matching package path wins.
        for pth in settings.WORKFLOW_PACKAGES_PATHS:
            path = "%s/%s.bpmn" % (pth, self.current.workflow_name)
            if os.path.exists(path):
                return path
        err_msg = "BPMN file cannot found: %s" % self.current.workflow_name
        log.error(err_msg)
        raise RuntimeError(err_msg)
<SYSTEM_TASK:> Generates and caches the workflow spec package from <END_TASK> <USER_TASK:> Description: def get_worfklow_spec(self): """ Generates and caches the workflow spec package from BPMN diagrams that read from disk Returns: SpiffWorkflow Spec object. """
# TODO: convert from in-process to redis based caching if self.current.workflow_name not in self.workflow_spec_cache: # path = self.find_workflow_path() # spec_package = InMemoryPackager.package_in_memory(self.current.workflow_name, path) # spec = BpmnSerializer().deserialize_workflow_spec(spec_package) try: self.current.wf_object = BPMNWorkflow.objects.get(name=self.current.workflow_name) except ObjectDoesNotExist: self.current.wf_object = BPMNWorkflow.objects.get(name='not_found') self.current.task_data['non-existent-wf'] = self.current.workflow_name self.current.workflow_name = 'not_found' xml_content = self.current.wf_object.xml.body spec = ZopsSerializer().deserialize_workflow_spec(xml_content, self.current.workflow_name) spec.wf_id = self.current.wf_object.key self.workflow_spec_cache[self.current.workflow_name] = spec return self.workflow_spec_cache[self.current.workflow_name]
<SYSTEM_TASK:> Calls the real save method if we pass the beginning of the wf <END_TASK> <USER_TASK:> Description: def _save_or_delete_workflow(self): """ Calls the real save method if we pass the beginning of the wf """
# Nothing to persist while still on a Start* task.
        if not self.current.task_type.startswith('Start'):
            # End of the top-level WF (not a subprocess End event): mark finished
            # and clean up invitations for non-ephemeral, non-external WFs.
            if self.current.task_name.startswith('End') and not self.are_we_in_subprocess():
                self.wf_state['finished'] = True
                self.wf_state['finish_date'] = datetime.now().strftime(
                    settings.DATETIME_DEFAULT_FORMAT)
                if self.current.workflow_name not in settings.EPHEMERAL_WORKFLOWS and not \
                        self.wf_state['in_external']:
                    wfi = WFCache(self.current).get_instance()
                    TaskInvitation.objects.filter(instance=wfi, role=self.current.role,
                                                  wf_name=wfi.wf.name).delete()
                self.current.log.info("Delete WFCache: %s %s" % (self.current.workflow_name,
                                                                 self.current.token))
            # Persist current state (save_workflow_to_cache also handles the
            # finished flag set above).
            self.save_workflow_to_cache(self.serialize_workflow())
<SYSTEM_TASK:> Initializes the workflow with given request, response objects and diagram name. <END_TASK> <USER_TASK:> Description: def start_engine(self, **kwargs): """ Initializes the workflow with given request, response objects and diagram name. Args: session: input: workflow_name (str): Name of workflow diagram without ".bpmn" suffix. File must be placed under one of configured :py:attr:`~zengine.settings.WORKFLOW_PACKAGES_PATHS` """
self.current = WFCurrent(**kwargs)
        # Fresh state by default; overwritten from cache for resumed tokens.
        self.wf_state = {'in_external': False, 'finished': False}
        if not self.current.new_token:
            self.wf_state = self.current.wf_cache.get(self.wf_state)
            self.current.workflow_name = self.wf_state['name']
            # if we have a pre-selected object to work with,
            # inserting it as current.input['id'] and task_data['object_id']
            if 'subject' in self.wf_state:
                self.current.input['id'] = self.wf_state['subject']
                self.current.task_data['object_id'] = self.wf_state['subject']
        self.check_for_authentication()
        self.check_for_permission()
        self.workflow = self.load_or_create_workflow()

        # if form data exists in input (user submitted)
        # put form data in wf task_data
        if 'form' in self.current.input:
            form = self.current.input['form']
            if 'form_name' in form:
                self.current.task_data[form['form_name']] = form

        # in wf diagram, if property is stated as init = True
        # demanded initial values are assigned and put to cache
        # (note: the BPMN property value is the *string* 'True')
        start_init_values = self.workflow_spec.wf_properties.get('init', 'False') == 'True'
        if start_init_values:
            WFInit = get_object_from_path(settings.WF_INITIAL_VALUES)()
            WFInit.assign_wf_initial_values(self.current)

        log_msg = ("\n\n::::::::::: ENGINE STARTED :::::::::::\n"
                   "\tWF: %s (Possible) TASK:%s\n"
                   "\tCMD:%s\n"
                   "\tSUBCMD:%s" % (
                       self.workflow.name,
                       self.workflow.get_tasks(Task.READY),
                       self.current.input.get('cmd'),
                       self.current.input.get('subcmd')))
        log.debug(log_msg)
        # Stash the banner on sys so later state logging can append to it.
        sys._zops_wf_state_log = log_msg
        self.current.workflow = self.workflow
<SYSTEM_TASK:> Logs the state of workflow and content of task_data. <END_TASK> <USER_TASK:> Description: def generate_wf_state_log(self): """ Logs the state of workflow and content of task_data. """
output = '\n- - - - - -\n'
        output += "WORKFLOW: %s ( %s )" % (self.current.workflow_name.upper(),
                                           self.current.workflow.name)
        output += "\nTASK: %s ( %s )\n" % (self.current.task_name, self.current.task_type)
        output += "DATA:"
        # Only log task_data entries with truthy values to keep the dump short.
        for k, v in self.current.task_data.items():
            if v:
                output += "\n\t%s: %s" % (k, v)
        output += "\nCURRENT:"
        output += "\n\tACTIVITY: %s" % self.current.activity
        output += "\n\tPOOL: %s" % self.current.pool
        output += "\n\tIN EXTERNAL: %s" % self.wf_state['in_external']
        output += "\n\tLANE: %s" % self.current.lane_name
        output += "\n\tTOKEN: %s" % self.current.token
        # Also stash on sys for out-of-band inspection (see start_engine).
        sys._zops_wf_state_log = output
        return output
<SYSTEM_TASK:> Main workflow switcher. <END_TASK> <USER_TASK:> Description: def switch_from_external_to_main_wf(self): """ Main workflow switcher. This method recreates main workflow from `main wf` dict which was set by external workflow switcher previously. """
# in external assigned as True in switch_to_external_wf. # external_wf should finish EndEvent and it's name should be # also EndEvent for switching again to main wf. if self.wf_state['in_external'] and self.current.task_type == 'EndEvent' and \ self.current.task_name == 'EndEvent': # main_wf information was copied in switch_to_external_wf and it takes this information. main_wf = self.wf_state['main_wf'] # main_wf_name is assigned to current workflow name again. self.current.workflow_name = main_wf['name'] # For external WF, check permission and authentication. But after cleaning current task. self._clear_current_task() # check for auth and perm. current task cleared, do against new workflow_name self.check_for_authentication() self.check_for_permission() # WF knowledge is taken for main wf. self.workflow_spec = self.get_worfklow_spec() # WF instance is started again where leave off. self.workflow = self.deserialize_workflow(main_wf['step']) # Current WF is this WF instance. self.current.workflow = self.workflow # in_external is assigned as False self.wf_state['in_external'] = False # finished is assigned as False, because still in progress. self.wf_state['finished'] = False # pool info of main_wf is assigned. self.wf_state['pool'] = main_wf['pool'] self.current.pool = self.wf_state['pool'] # With main_wf is executed. self.run()
<SYSTEM_TASK:> External workflow switcher. <END_TASK> <USER_TASK:> Description: def switch_to_external_wf(self): """ External workflow switcher. This method copies main workflow information into a temporary dict `main_wf` and makes external workflow acting as main workflow. """
# External WF name should be stated at main wf diagram and type should be service task. if (self.current.task_type == 'ServiceTask' and self.current.task.task_spec.type == 'external'): log.debug("Entering to EXTERNAL WF") # Main wf information is copied to main_wf. main_wf = self.wf_state.copy() # workflow name from main wf diagram is assigned to current workflow name. # workflow name must be either in task_data with key 'external_wf' or in main diagram's # topic. self.current.workflow_name = self.current.task_data.pop('external_wf', False) or self.\ current.task.task_spec.topic # For external WF, check permission and authentication. But after cleaning current task. self._clear_current_task() # check for auth and perm. current task cleared, do against new workflow_name self.check_for_authentication() self.check_for_permission() # wf knowledge is taken for external wf. self.workflow_spec = self.get_worfklow_spec() # New WF instance is created for external wf. self.workflow = self.create_workflow() # Current WF is this WF instance. self.current.workflow = self.workflow # main_wf: main wf information. # in_external: it states external wf in progress. # finished: it shows that main wf didn't finish still progress in external wf. self.wf_state = {'main_wf': main_wf, 'in_external': True, 'finished': False}
<SYSTEM_TASK:> Clear tasks related attributes, checks permissions <END_TASK> <USER_TASK:> Description: def _clear_current_task(self): """ Clear tasks related attributes, checks permissions While switching WF to WF, authentication and permissions are checked for new WF. """
self.current.task_name = None self.current.task_type = None self.current.task = None
<SYSTEM_TASK:> Main loop of the workflow engine <END_TASK> <USER_TASK:> Description: def run(self): """ Main loop of the workflow engine - Updates ::class:`~WFCurrent` object. - Checks for Permissions. - Activates all READY tasks. - Runs referenced activities (method calls). - Saves WF states. - Stops if current task is a UserTask or EndTask. - Deletes state object if we finish the WF. """
# FIXME: raise if first task after line change isn't a UserTask # FIXME: raise if last task of a workflow is a UserTask # actually this check should be done at parser is_lane_changed = False while self._should_we_run(): self.check_for_rerun_user_task() task = None for task in self.workflow.get_tasks(state=Task.READY): self.current.old_lane = self.current.lane_name self.current._update_task(task) if self.catch_lane_change(): return self.check_for_permission() self.check_for_lane_permission() self.log_wf_state() self.switch_lang() self.run_activity() self.parse_workflow_messages() self.workflow.complete_task_from_id(self.current.task.id) self._save_or_delete_workflow() self.switch_to_external_wf() if task is None: break self.switch_from_external_to_main_wf() self.current.output['token'] = self.current.token # look for incoming ready task(s) for task in self.workflow.get_tasks(state=Task.READY): self.current._update_task(task) self.catch_lane_change() self.handle_wf_finalization()
<SYSTEM_TASK:> Switch to the language of the current user. <END_TASK> <USER_TASK:> Description: def switch_lang(self): """Switch to the language of the current user. If the current language is already the specified one, nothing will be done. """
# Pull the user's locale preferences once, then install each component.
prefs = self.current.locale
translation.InstalledLocale.install_language(prefs['locale_language'])
# Datetime and number formats are installed as separate locale scopes.
for pref_key, scope in (('locale_datetime', 'datetime'),
                        ('locale_number', 'number')):
    translation.InstalledLocale.install_locale(prefs[pref_key], scope)
<SYSTEM_TASK:> trigger a lane_user_change signal if we switched to a new lane <END_TASK> <USER_TASK:> Description: def catch_lane_change(self): """ trigger a lane_user_change signal if we switched to a new lane and new lane's user is different from current one """
if self.current.lane_name: if self.current.old_lane and self.current.lane_name != self.current.old_lane: # if lane_name not found in pool or it's user different from the current(old) user if (self.current.lane_id not in self.current.pool or self.current.pool[self.current.lane_id] != self.current.user_id): self.current.log.info("LANE CHANGE : %s >> %s" % (self.current.old_lane, self.current.lane_name)) if self.current.lane_auto_sendoff: self.current.sendoff_current_user() self.current.flow_enabled = False if self.current.lane_auto_invite: self.current.invite_other_parties(self._get_possible_lane_owners()) return True
<SYSTEM_TASK:> Transmits client message that defined in <END_TASK> <USER_TASK:> Description: def parse_workflow_messages(self): """ Transmits client message that defined in a workflow task's inputOutput extension .. code-block:: xml <bpmn2:extensionElements> <camunda:inputOutput> <camunda:inputParameter name="client_message"> <camunda:map> <camunda:entry key="title">Teşekkürler</camunda:entry> <camunda:entry key="body">İşlem Başarılı</camunda:entry> <camunda:entry key="type">info</camunda:entry> </camunda:map> </camunda:inputParameter> </camunda:inputOutput> </bpmn2:extensionElements> """
if 'client_message' in self.current.spec.data: m = self.current.spec.data['client_message'] self.current.msg_box(title=m.get('title'), msg=m.get('body'), typ=m.get('type', 'info'))
<SYSTEM_TASK:> runs the method that referenced from current task <END_TASK> <USER_TASK:> Description: def run_activity(self): """ runs the method that referenced from current task """
activity = self.current.activity if activity: if activity not in self.wf_activities: self._load_activity(activity) self.current.log.debug( "Calling Activity %s from %s" % (activity, self.wf_activities[activity])) self.wf_activities[self.current.activity](self.current)
<SYSTEM_TASK:> Imports the module that contains the referenced method. <END_TASK> <USER_TASK:> Description: def _import_object(self, path, look_for_cls_method): """ Imports the module that contains the referenced method. Args: path: python path of class/function look_for_cls_method (bool): If True, treat the last part of path as class method. Returns: Tuple. (class object, class name, method to be called) """
last_nth = 2 if look_for_cls_method else 1 path = path.split('.') module_path = '.'.join(path[:-last_nth]) class_name = path[-last_nth] module = importlib.import_module(module_path) if look_for_cls_method and path[-last_nth:][0] == path[-last_nth]: class_method = path[-last_nth:][1] else: class_method = None return getattr(module, class_name), class_name, class_method
<SYSTEM_TASK:> Iterates trough the all enabled `~zengine.settings.ACTIVITY_MODULES_IMPORT_PATHS` to find the given path. <END_TASK> <USER_TASK:> Description: def _load_activity(self, activity): """ Iterates trough the all enabled `~zengine.settings.ACTIVITY_MODULES_IMPORT_PATHS` to find the given path. """
# Candidate dotted paths tried so far (used in the final error message).
fpths = []
full_path = ''
# One formatted traceback per failed import attempt.
errors = []
paths = settings.ACTIVITY_MODULES_IMPORT_PATHS
number_of_paths = len(paths)
for index_no in range(number_of_paths):
    full_path = "%s.%s" % (paths[index_no], activity)
    # Try each path twice: as a plain callable/class (0), then as a
    # "Class.method" reference (1); see _import_object().
    for look4kls in (0, 1):
        try:
            self.current.log.info("try to load from %s[%s]" % (full_path, look4kls))
            kls, cls_name, cls_method = self._import_object(full_path, look4kls)
            if cls_method:
                # Wrap so the class is instantiated with ``current`` and the
                # method is invoked on every activity call.
                self.current.log.info("WILLCall %s(current).%s()" % (kls, cls_method))
                self.wf_activities[activity] = lambda crnt: getattr(kls(crnt), cls_method)()
            else:
                self.wf_activities[activity] = kls
            return
        except (ImportError, AttributeError):
            fpths.append(full_path)
            errmsg = "{activity} not found under these paths:\n\n >>> {paths} \n\n" \
                     "Error Messages:\n {errors}"
            errors.append("\n========================================================>\n"
                          "| PATH | %s"
                          "\n========================================================>\n\n"
                          "%s" % (full_path, traceback.format_exc()))
            # NOTE(review): failure of the *last* candidate is reported via
            # ``assert``, which disappears under ``python -O`` — confirm this
            # is intended before relying on it for error reporting.
            assert index_no != number_of_paths - 1, errmsg.format(activity=activity,
                                                                  paths='\n >>> '.join(
                                                                      set(fpths)),
                                                                  errors='\n\n'.join(errors)
                                                                  )
        except:
            # NOTE(review): bare except also swallows SystemExit/KeyboardInterrupt
            # and lets the loop continue after logging.
            self.current.log.exception("Cannot found the %s" % activity)
<SYSTEM_TASK:> One or more permissions can be associated with a lane <END_TASK> <USER_TASK:> Description: def check_for_lane_permission(self): """ One or more permissions can be associated with a lane of a workflow. In a similar way, a lane can be restricted with relation to other lanes of the workflow. This method called on lane changes and checks user has required permissions and relations. Raises: HTTPForbidden: if the current user hasn't got the required permissions and proper relations """
# TODO: Cache lane_data in app memory if self.current.lane_permission: log.debug("HAS LANE PERM: %s" % self.current.lane_permission) perm = self.current.lane_permission if not self.current.has_permission(perm): raise HTTPError(403, "You don't have required lane permission: %s" % perm) if self.current.lane_relations: context = self.get_pool_context() log.debug("HAS LANE RELS: %s" % self.current.lane_relations) try: cond_result = eval(self.current.lane_relations, context) except: log.exception("CONDITION EVAL ERROR : %s || %s" % ( self.current.lane_relations, context)) raise if not cond_result: log.debug("LANE RELATION ERR: %s %s" % (self.current.lane_relations, context)) raise HTTPError(403, "You aren't qualified for this lane: %s" % self.current.lane_relations)
<SYSTEM_TASK:> Removes the ``token`` key from ``current.output`` if WF is over. <END_TASK> <USER_TASK:> Description: def handle_wf_finalization(self): """ Removes the ``token`` key from ``current.output`` if WF is over. """
# The WF counts as over when the flow was explicitly disabled, or we reached
# an End* task of the top-level (non-subprocess) workflow.
finished = (not self.current.flow_enabled or
            (self.current.task_type.startswith('End') and
             not self.are_we_in_subprocess()))
if finished and 'token' in self.current.output:
    del self.current.output['token']
<SYSTEM_TASK:> modified NX dfs <END_TASK> <USER_TASK:> Description: def __dfs(self, start, weights, depth_limit): """ modified NX dfs """
adj = self._adj
# Stack entries: (node, remaining depth budget, iterator over neighbors
# sorted by the ``weights`` key function).
stack = [(start, depth_limit, iter(sorted(adj[start], key=weights)))]
visited = {start}
# Children seen after the depth budget ran out, keyed by their parent.
disconnected = defaultdict(list)
# Tree edges of the traversal, keyed by parent node.
edges = defaultdict(list)
while stack:
    parent, depth_now, children = stack[-1]
    try:
        child = next(children)
    except StopIteration:
        # Parent's neighbors exhausted; backtrack.
        stack.pop()
    else:
        if child not in visited:
            edges[parent].append(child)
            visited.add(child)
            if depth_now > 1:
                # Continue from ``child``, skipping the edge we came from.
                front = adj[child].keys() - {parent}
                if front:
                    stack.append((child, depth_now - 1, iter(sorted(front, key=weights))))
            elif child not in disconnected:
                # NOTE(review): this membership test checks ``child`` against the
                # *keys* of ``disconnected`` (parents), not against recorded
                # children — confirm that asymmetry is intended.
                disconnected[parent].append(child)
return visited, edges, disconnected
<SYSTEM_TASK:> Constructor from command line args. <END_TASK> <USER_TASK:> Description: def from_parse_args(cls, args): """Constructor from command line args. :param args: parse command line arguments :type args: argparse.ArgumentParser """
return cls(args.migration_file, args.database, db_user=args.db_user, db_password=args.db_password, db_port=args.db_port, db_host=args.db_host, mode=args.mode, allow_serie=args.allow_serie, force_version=args.force_version, web_host=args.web_host, web_port=args.web_port, web_custom_html=args.web_custom_html, )
<SYSTEM_TASK:> Creates some aliases for attributes of ``current``. <END_TASK> <USER_TASK:> Description: def set_current(self, current): """ Creates some aliases for attributes of ``current``. Args: current: :attr:`~zengine.engine.WFCurrent` object. """
self.current = current
self.input = current.input
self.output = current.output
# Split a chained command on NEXT_CMD_SPLITTER into the immediate command
# and the one queued to run after it.
cmd = current.task_data['cmd']
if cmd and NEXT_CMD_SPLITTER in cmd:
    self.cmd, self.next_cmd = cmd.split(NEXT_CMD_SPLITTER)
else:
    self.cmd = cmd
    self.next_cmd = None
<SYSTEM_TASK:> Renders form. Applies form modifiers, then writes <END_TASK> <USER_TASK:> Description: def form_out(self, _form=None): """ Renders form. Applies form modifiers, then writes result to response payload. If supplied, given form object instance will be used instead of view's default ObjectForm. Args: _form (:py:attr:`~zengine.forms.json_form.JsonForm`): Form object to override `self.object_form` """
# Fall back to the view's default form when none was supplied.
form = _form or self.object_form
self.output['forms'] = form.serialize()
self._add_meta_props(form)
self.output['forms']['grouping'] = form.Meta.grouping
self.output['forms']['constraints'] = form.Meta.constraints
self._patch_form(self.output['forms'])
self.set_client_cmd('form')
<SYSTEM_TASK:> Prepare a helper dictionary for the domain to temporarily hold some information. <END_TASK> <USER_TASK:> Description: def _prepare_domain(mapping): """Prepare a helper dictionary for the domain to temporarily hold some information."""
# Parse the domain-directory mapping try: domain, dir = mapping.split(':') except ValueError: print("Please provide the sources in the form of '<domain>:<directory>'") sys.exit(1) try: default_language = settings.TRANSLATION_DOMAINS[domain] except KeyError: print("Unknown domain {domain}, check the settings file to make sure" " this domain is set in TRANSLATION_DOMAINS".format(domain=domain)) sys.exit(1) # Create a temporary file to hold the `.pot` file for this domain handle, path = tempfile.mkstemp(prefix='zengine_i18n_', suffix='.pot') return (domain, { 'default': default_language, 'pot': path, 'source': dir, })
<SYSTEM_TASK:> Check that all domains specified in the settings was provided in the options. <END_TASK> <USER_TASK:> Description: def _validate_domains(domains): """Check that all domains specified in the settings was provided in the options."""
missing = set(settings.TRANSLATION_DOMAINS.keys()) - set(domains.keys()) if missing: print('The following domains have been set in the configuration, ' 'but their sources were not provided, use the `--source` ' 'option to specify their sources: {domains}'.format(domains=', '.join(missing))) sys.exit(1)
<SYSTEM_TASK:> Extract the translations into `.pot` files <END_TASK> <USER_TASK:> Description: def _extract_translations(self, domains): """Extract the translations into `.pot` files"""
# Run one Babel extraction pass per translation domain.
for domain, options in domains.items():
    # Create the extractor
    extractor = babel_frontend.extract_messages()
    extractor.initialize_options()
    # The temporary location to write the `.pot` file
    extractor.output_file = options['pot']
    # Add the comments marked with 'tn:' to the translation file for translators to read. Strip the marker.
    extractor.add_comments = ['tn:']
    extractor.strip_comments = True
    # The directory where the sources for this domain are located
    extractor.input_paths = [options['source']]
    # Pass the metadata to the translator
    extractor.msgid_bugs_address = self.manager.args.contact
    extractor.copyright_holder = self.manager.args.copyright
    extractor.version = self.manager.args.version
    extractor.project = self.manager.args.project
    # Babel validates/derives settings here, so all options above must be
    # assigned before this call.
    extractor.finalize_options()
    # Add keywords for lazy translation functions, based on their non-lazy variants
    # (patched after finalize_options() so the default keyword map exists).
    extractor.keywords.update({
        'gettext_lazy': extractor.keywords['gettext'],
        'ngettext_lazy': extractor.keywords['ngettext'],
        '__': extractor.keywords['gettext'],  # double underscore for lazy
    })
    # Do the extraction
    _run_babel_command(extractor)
<SYSTEM_TASK:> Update or initialize the `.po` translation files <END_TASK> <USER_TASK:> Description: def _init_update_po_files(self, domains): """Update or initialize the `.po` translation files"""
for language in settings.TRANSLATIONS:
    for domain, options in domains.items():
        # The domain's default language needs no catalog of its own.
        if language == options['default']:
            continue
        pot_file = options['pot']
        if os.path.isfile(_po_path(language, domain)):
            # Existing catalog: merge in new messages, keeping translations.
            self._update_po_file(language, domain, pot_file)
        else:
            # No catalog yet: bootstrap one from the `.pot` template.
            self._init_po_file(language, domain, pot_file)
<SYSTEM_TASK:> Remove the temporary '.pot' files that were created for the domains. <END_TASK> <USER_TASK:> Description: def _cleanup(self, domains): """Remove the temporary '.pot' files that were created for the domains."""
for domain_options in domains.values():
    try:
        os.remove(domain_options['pot'])
    except (IOError, OSError):
        # Best effort: a leftover temp file is harmless.
        pass
<SYSTEM_TASK:> read workflows, checks if it's updated, <END_TASK> <USER_TASK:> Description: def run(self): """ read workflows, checks if it's updated, tries to update if there aren't any running instances of that wf """
from zengine.lib.cache import WFSpecNames if self.manager.args.clear: self._clear_models() return if self.manager.args.wf_path: paths = self.get_wf_from_path(self.manager.args.wf_path) else: paths = self.get_workflows() self.count = 0 self.do_with_submit(self.load_diagram, paths, threads=self.manager.args.threads) WFSpecNames().refresh() print("%s BPMN file loaded" % self.count)
<SYSTEM_TASK:> Scans and loads all wf found under WORKFLOW_PACKAGES_PATHS <END_TASK> <USER_TASK:> Description: def get_workflows(self): """ Scans and loads all wf found under WORKFLOW_PACKAGES_PATHS Yields: XML content of diagram file """
for pth in settings.WORKFLOW_PACKAGES_PATHS: for f in glob.glob("%s/*.bpmn" % pth): with open(f) as fp: yield os.path.basename(os.path.splitext(f)[0]), fp.read()
<SYSTEM_TASK:> The model or models are checked for migrations that need to be done. <END_TASK> <USER_TASK:> Description: def check_migration_and_solr(self): """ The model or models are checked for migrations that need to be done. Solr is also checked. """
from pyoko.db.schema_update import SchemaUpdater
from socket import error as socket_error
from pyoko.conf import settings
from importlib import import_module
# Importing the models module registers every model with the registry.
import_module(settings.MODELS_MODULE)
registry = import_module('pyoko.model').model_registry
models = list(registry.get_base_models())
try:
    print(__(u"Checking migration and solr ..."))
    # Dry run: report pending schema changes without applying them.
    SchemaUpdater(models, 1, False).run(check_only=True)
except socket_error as e:
    print(__(u"{0}Error not connected, open redis and rabbitmq{1}").format(CheckList.FAIL,
                                                                           CheckList.ENDC))
<SYSTEM_TASK:> Checks the Redis connection <END_TASK> <USER_TASK:> Description: def check_redis(): """ Checks the Redis connection It displays on the screen whether or not you have a connection. """
from pyoko.db.connection import cache
from redis.exceptions import ConnectionError
try:
    cache.ping()
    # Report success through the translated, colorized template. The original
    # concatenated the color codes around a raw "{0}...{1}" string without
    # ever calling .format(), so the literal placeholders were printed; this
    # now matches the pattern used by check_riak().
    print(__(u"{0}Redis is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
except ConnectionError as e:
    print(__(u"{0}Redis is not working{1} ").format(CheckList.FAIL, CheckList.ENDC),
          e.message)
<SYSTEM_TASK:> Checks the Riak connection <END_TASK> <USER_TASK:> Description: def check_riak(): """ Checks the Riak connection It displays on the screen whether or not you have a connection. """
from pyoko.db.connection import client
from socket import error as socket_error
try:
    riak_alive = client.ping()
except socket_error as e:
    # Could not reach the node at all: include the socket error detail.
    print(__(u"{0}Riak is not working{1}").format(CheckList.FAIL, CheckList.ENDC),
          e.message)
else:
    if riak_alive:
        print(__(u"{0}Riak is working{1}").format(CheckList.OKGREEN, CheckList.ENDC))
    else:
        print(__(u"{0}Riak is not working{1}").format(CheckList.FAIL, CheckList.ENDC))