def get_version():
    """
    Return formatted version string.

    Returns:
        str: string with project version or empty string.
    """
    if all([
        VERSION,
        UPDATED,
        any([isinstance(UPDATED, date), isinstance(UPDATED, datetime)]),
    ]):
        return FORMAT_STRING.format(**{"version": VERSION, "updated": UPDATED})
    elif VERSION:
        return VERSION
    elif UPDATED:
        return localize(UPDATED) if any([isinstance(UPDATED, date), isinstance(UPDATED, datetime)]) else ""
    else:
        return ""

[ "def", "get_version", "(", ")", ":", "if", "all", "(", "[", "VERSION", ",", "UPDATED", ",", "any", "(", "[", "isinstance", "(", "UPDATED", ",", "date", ")", ",", "isinstance", "(", "UPDATED", ",", "datetime", ")", ",", "]", ")", ",", "]", ")", ":", "return", "FORMAT_STRING", ".", "format", "(", "*", "*", "{", "\"version\"", ":", "VERSION", ",", "\"updated\"", ":", "UPDATED", ",", "}", ")", "elif", "VERSION", ":", "return", "VERSION", "elif", "UPDATED", ":", "return", "localize", "(", "UPDATED", ")", "if", "any", "(", "[", "isinstance", "(", "UPDATED", ",", "date", ")", ",", "isinstance", "(", "UPDATED", ",", "datetime", ")", ",", "]", ")", "else", "\"\"", "else", ":", "return", "\"\"" ]
25.45
29.95
def _inject_conversion(self, value, conversion):
    """
    value: '{x}', conversion: 's' -> '{x!s}'
    """
    t = type(value)
    return value[:-1] + t(u'!') + conversion + t(u'}')

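A standalone trace of the same transformation, assuming a str value (the type() round-trip exists to preserve str vs. unicode on Python 2):

value, conversion = '{x}', 's'
t = type(value)
result = value[:-1] + t('!') + conversion + t('}')  # drop '}', splice in '!s', close again
assert result == '{x!s}'
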
[ "def", "_inject_conversion", "(", "self", ",", "value", ",", "conversion", ")", ":", "t", "=", "type", "(", "value", ")", "return", "value", "[", ":", "-", "1", "]", "+", "t", "(", "u'!'", ")", "+", "conversion", "+", "t", "(", "u'}'", ")" ]
33.166667
8.5
def transmit(self, payload, **kwargs):
    """
    Send a completion status call to the integrated channel using the client.

    Args:
        payload: The learner completion data payload to send to the integrated channel.
        kwargs: Contains integrated channel-specific information for customized transmission variables.
            - app_label: The app label of the integrated channel for which to store learner data records.
            - model_name: The name of the specific learner data record model to use.
            - remote_user_id: The remote ID field name of the learner on the audit model.
    """
    IntegratedChannelLearnerDataTransmissionAudit = apps.get_model(  # pylint: disable=invalid-name
        app_label=kwargs.get('app_label', 'integrated_channel'),
        model_name=kwargs.get('model_name', 'LearnerDataTransmissionAudit'),
    )
    # Since we have started sending courses to integrated channels instead of course runs,
    # we need to attempt to send transmissions with course keys and course run ids in order to
    # ensure that we account for whether courses or course runs exist in the integrated channel.
    # The exporters have been changed to return multiple transmission records to attempt,
    # one by course key and one by course run id.
    # If the transmission with the course key succeeds, the next one will get skipped.
    # If it fails, the one with the course run id will be attempted and (presumably) succeed.
    for learner_data in payload.export():
        serialized_payload = learner_data.serialize(enterprise_configuration=self.enterprise_configuration)
        LOGGER.debug('Attempting to transmit serialized payload: %s', serialized_payload)

        enterprise_enrollment_id = learner_data.enterprise_course_enrollment_id
        if learner_data.completed_timestamp is None:
            # The user has not completed the course, so we shouldn't send a completion status call.
            LOGGER.info('Skipping in-progress enterprise enrollment {}'.format(enterprise_enrollment_id))
            continue

        previous_transmissions = IntegratedChannelLearnerDataTransmissionAudit.objects.filter(
            enterprise_course_enrollment_id=enterprise_enrollment_id,
            error_message=''
        )
        if previous_transmissions.exists():
            # We've already sent a completion status call for this enrollment.
            LOGGER.info('Skipping previously sent enterprise enrollment {}'.format(enterprise_enrollment_id))
            continue

        try:
            code, body = self.client.create_course_completion(
                getattr(learner_data, kwargs.get('remote_user_id')),
                serialized_payload
            )
            LOGGER.info(
                'Successfully sent completion status call for enterprise enrollment {}'.format(
                    enterprise_enrollment_id,
                )
            )
        except RequestException as request_exception:
            code = 500
            body = str(request_exception)
            self.handle_transmission_error(learner_data, request_exception)

        learner_data.status = str(code)
        learner_data.error_message = body if code >= 400 else ''
        learner_data.save()

[ "def", "transmit", "(", "self", ",", "payload", ",", "*", "*", "kwargs", ")", ":", "IntegratedChannelLearnerDataTransmissionAudit", "=", "apps", ".", "get_model", "(", "# pylint: disable=invalid-name", "app_label", "=", "kwargs", ".", "get", "(", "'app_label'", ",", "'integrated_channel'", ")", ",", "model_name", "=", "kwargs", ".", "get", "(", "'model_name'", ",", "'LearnerDataTransmissionAudit'", ")", ",", ")", "# Since we have started sending courses to integrated channels instead of course runs,", "# we need to attempt to send transmissions with course keys and course run ids in order to", "# ensure that we account for whether courses or course runs exist in the integrated channel.", "# The exporters have been changed to return multiple transmission records to attempt,", "# one by course key and one by course run id.", "# If the transmission with the course key succeeds, the next one will get skipped.", "# If it fails, the one with the course run id will be attempted and (presumably) succeed.", "for", "learner_data", "in", "payload", ".", "export", "(", ")", ":", "serialized_payload", "=", "learner_data", ".", "serialize", "(", "enterprise_configuration", "=", "self", ".", "enterprise_configuration", ")", "LOGGER", ".", "debug", "(", "'Attempting to transmit serialized payload: %s'", ",", "serialized_payload", ")", "enterprise_enrollment_id", "=", "learner_data", ".", "enterprise_course_enrollment_id", "if", "learner_data", ".", "completed_timestamp", "is", "None", ":", "# The user has not completed the course, so we shouldn't send a completion status call", "LOGGER", ".", "info", "(", "'Skipping in-progress enterprise enrollment {}'", ".", "format", "(", "enterprise_enrollment_id", ")", ")", "continue", "previous_transmissions", "=", "IntegratedChannelLearnerDataTransmissionAudit", ".", "objects", ".", "filter", "(", "enterprise_course_enrollment_id", "=", "enterprise_enrollment_id", ",", "error_message", "=", "''", ")", "if", "previous_transmissions", ".", "exists", "(", ")", ":", "# We've already sent a completion status call for this enrollment", "LOGGER", ".", "info", "(", "'Skipping previously sent enterprise enrollment {}'", ".", "format", "(", "enterprise_enrollment_id", ")", ")", "continue", "try", ":", "code", ",", "body", "=", "self", ".", "client", ".", "create_course_completion", "(", "getattr", "(", "learner_data", ",", "kwargs", ".", "get", "(", "'remote_user_id'", ")", ")", ",", "serialized_payload", ")", "LOGGER", ".", "info", "(", "'Successfully sent completion status call for enterprise enrollment {}'", ".", "format", "(", "enterprise_enrollment_id", ",", ")", ")", "except", "RequestException", "as", "request_exception", ":", "code", "=", "500", "body", "=", "str", "(", "request_exception", ")", "self", ".", "handle_transmission_error", "(", "learner_data", ",", "request_exception", ")", "learner_data", ".", "status", "=", "str", "(", "code", ")", "learner_data", ".", "error_message", "=", "body", "if", "code", ">=", "400", "else", "''", "learner_data", ".", "save", "(", ")" ]
57.728814
34.067797
def run(self, order=None):
    """
    self.runner must be present
    """
    for event in self.runner.run(order=order):
        self.receive(event)

[ "def", "run", "(", "self", ",", "order", "=", "None", ")", ":", "for", "event", "in", "self", ".", "runner", ".", "run", "(", "order", "=", "order", ")", ":", "self", ".", "receive", "(", "event", ")" ]
23.666667
7.333333
def rnd_date(start=date(1970, 1, 1), end=None, **kwargs):
    """
    Generate a random date between ``start`` and ``end``.

    :param start: Left bound
    :type start: string or datetime.date, (default date(1970, 1, 1))
    :param end: Right bound
    :type end: string or datetime.date, (default date.today())
    :return: a datetime.date object
    """
    if end is None:
        end = date.today()
    start = parser.parse_date(start)
    end = parser.parse_date(end)
    _assert_correct_start_end(start, end)
    return _rnd_date(start, end)

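A minimal usage sketch, assuming parser.parse_date accepts ISO-format strings as well as date objects:

from datetime import date

d = rnd_date("2014-06-01", date(2015, 1, 1))  # hypothetical bounds
assert date(2014, 6, 1) <= d <= date(2015, 1, 1)
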
[ "def", "rnd_date", "(", "start", "=", "date", "(", "1970", ",", "1", ",", "1", ")", ",", "end", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "end", "is", "None", ":", "end", "=", "date", ".", "today", "(", ")", "start", "=", "parser", ".", "parse_date", "(", "start", ")", "end", "=", "parser", ".", "parse_date", "(", "end", ")", "_assert_correct_start_end", "(", "start", ",", "end", ")", "return", "_rnd_date", "(", "start", ",", "end", ")" ]
29.2
15.9
def get_success_url(self):
    """Reverses the ``redis_metric_aggregate_detail`` URL using
    ``self.metric_slugs`` as an argument."""
    slugs = '+'.join(self.metric_slugs)
    url = reverse('redis_metric_aggregate_detail', args=[slugs])
    # Django 1.6 quotes reversed URLs, which changes + into %2B. We want
    # to keep the + in the url (it's ok according to RFC 1738).
    # https://docs.djangoproject.com/en/1.6/releases/1.6/#quoting-in-reverse
    return url.replace("%2B", "+")

[ "def", "get_success_url", "(", "self", ")", ":", "slugs", "=", "'+'", ".", "join", "(", "self", ".", "metric_slugs", ")", "url", "=", "reverse", "(", "'redis_metric_aggregate_detail'", ",", "args", "=", "[", "slugs", "]", ")", "# Django 1.6 quotes reversed URLs, which changes + into %2B. We want", "# want to keep the + in the url (it's ok according to RFC 1738)", "# https://docs.djangoproject.com/en/1.6/releases/1.6/#quoting-in-reverse", "return", "url", ".", "replace", "(", "\"%2B\"", ",", "\"+\"", ")" ]
57.444444
17.111111
def _get_parts_list(to_go, so_far=None, ticker=None):
    """
    Iterates over to_go, building the list of parts.
    To provide items for the beginning, use so_far.
    """
    if so_far is None:
        # Avoid the mutable-default pitfall: a shared [[]] default would
        # leak state between top-level calls.
        so_far = [[]]
    try:
        part = to_go.pop(0)
    except IndexError:
        return so_far, ticker
    # Lists of input groups
    if isinstance(part, list) and any(isinstance(e, list) for e in part):
        while len(part) > 0:
            so_far, ticker = _get_parts_list(part, so_far, ticker)
            ticker.tick()
    # Input Group
    elif isinstance(part, list) and any(isinstance(e, Input) for e in part):
        while len(part) > 0:
            so_far, ticker = _get_parts_list(part, so_far, ticker)
    # Magic Inputs
    elif isinstance(part, Input) and part.is_magic:
        inputs = part.eval()
        while len(inputs) > 0:
            so_far, ticker = _get_parts_list(inputs, so_far, ticker)
            ticker.tick()
    # Normal inputs
    elif isinstance(part, Input) and not part.is_magic:
        so_far[ticker.value].append(part)
    # Everything else
    else:
        so_far = _append(so_far, part)
    return so_far, ticker

[ "def", "_get_parts_list", "(", "to_go", ",", "so_far", "=", "[", "[", "]", "]", ",", "ticker", "=", "None", ")", ":", "try", ":", "part", "=", "to_go", ".", "pop", "(", "0", ")", "except", "IndexError", ":", "return", "so_far", ",", "ticker", "# Lists of input groups", "if", "isinstance", "(", "part", ",", "list", ")", "and", "any", "(", "isinstance", "(", "e", ",", "list", ")", "for", "e", "in", "part", ")", ":", "while", "len", "(", "part", ")", ">", "0", ":", "so_far", ",", "ticker", "=", "_get_parts_list", "(", "part", ",", "so_far", ",", "ticker", ")", "ticker", ".", "tick", "(", ")", "# Input Group", "elif", "isinstance", "(", "part", ",", "list", ")", "and", "any", "(", "isinstance", "(", "e", ",", "Input", ")", "for", "e", "in", "part", ")", ":", "while", "len", "(", "part", ")", ">", "0", ":", "so_far", ",", "ticker", "=", "_get_parts_list", "(", "part", ",", "so_far", ",", "ticker", ")", "# Magic Inputs", "elif", "isinstance", "(", "part", ",", "Input", ")", "and", "part", ".", "is_magic", ":", "inputs", "=", "part", ".", "eval", "(", ")", "while", "len", "(", "inputs", ")", ">", "0", ":", "so_far", ",", "ticker", "=", "_get_parts_list", "(", "inputs", ",", "so_far", ",", "ticker", ")", "ticker", ".", "tick", "(", ")", "# Normal inputs", "elif", "isinstance", "(", "part", ",", "Input", ")", "and", "not", "part", ".", "is_magic", ":", "so_far", "[", "ticker", ".", "value", "]", ".", "append", "(", "part", ")", "# Everything else", "else", ":", "so_far", "=", "_append", "(", "so_far", ",", "part", ")", "return", "so_far", ",", "ticker" ]
33.96875
17.65625
def bmgs(ctx, event):
    """ [bookie] List betting market groups for an event

    :param str event: Event id
    """
    eg = Event(event, peerplays_instance=ctx.peerplays)
    click.echo(pretty_print(eg.bettingmarketgroups, ctx=ctx))

[ "def", "bmgs", "(", "ctx", ",", "event", ")", ":", "eg", "=", "Event", "(", "event", ",", "peerplays_instance", "=", "ctx", ".", "peerplays", ")", "click", ".", "echo", "(", "pretty_print", "(", "eg", ".", "bettingmarketgroups", ",", "ctx", "=", "ctx", ")", ")" ]
33.428571
14.428571
def qxq(q1, q2):
    """
    Multiply two quaternions.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/qxq_c.html

    :param q1: First SPICE quaternion.
    :type q1: 4-Element Array of floats
    :param q2: Second SPICE quaternion.
    :type q2: 4-Element Array of floats
    :return: Product of q1 and q2.
    :rtype: 4-Element Array of floats
    """
    q1 = stypes.toDoubleVector(q1)
    q2 = stypes.toDoubleVector(q2)
    vout = stypes.emptyDoubleVector(4)
    libspice.qxq_c(q1, q2, vout)
    return stypes.cVectorToPython(vout)

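A minimal usage sketch, assuming the scalar-first SPICE convention where (1, 0, 0, 0) is the identity quaternion (the spiceypy module name is an assumption):

import spiceypy as spice  # assumed binding exposing qxq

identity = [1.0, 0.0, 0.0, 0.0]
q = [0.0, 1.0, 0.0, 0.0]  # 180-degree rotation about the x-axis
print(spice.qxq(identity, q))  # multiplying by the identity returns q unchanged
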
[ "def", "qxq", "(", "q1", ",", "q2", ")", ":", "q1", "=", "stypes", ".", "toDoubleVector", "(", "q1", ")", "q2", "=", "stypes", ".", "toDoubleVector", "(", "q2", ")", "vout", "=", "stypes", ".", "emptyDoubleVector", "(", "4", ")", "libspice", ".", "qxq_c", "(", "q1", ",", "q2", ",", "vout", ")", "return", "stypes", ".", "cVectorToPython", "(", "vout", ")" ]
29.555556
10.111111
def on_status_update(self, channel, callback):
    """
    Register a callback to execute on a status update of a channel
    """
    if channel not in self._callbacks:
        self._callbacks[channel] = []
    self._callbacks[channel].append(callback)

[ "def", "on_status_update", "(", "self", ",", "channel", ",", "callback", ")", ":", "if", "channel", "not", "in", "self", ".", "_callbacks", ":", "self", ".", "_callbacks", "[", "channel", "]", "=", "[", "]", "self", ".", "_callbacks", "[", "channel", "]", ".", "append", "(", "callback", ")" ]
36.857143
5.142857
def pop(self):
    """Leave the current scope

    :returns: TODO
    """
    res = self._scope_stack.pop()
    self._dlog("popping scope, scope level = {}".format(self.level()))
    self._curr_scope = self._scope_stack[-1]
    return res

[ "def", "pop", "(", "self", ")", ":", "res", "=", "self", ".", "_scope_stack", ".", "pop", "(", ")", "self", ".", "_dlog", "(", "\"popping scope, scope level = {}\"", ".", "format", "(", "self", ".", "level", "(", ")", ")", ")", "self", ".", "_curr_scope", "=", "self", ".", "_scope_stack", "[", "-", "1", "]", "return", "res" ]
28.666667
16.777778
def from_sequence(chain, list_of_residues, sequence_type=None):
    '''Takes in a chain identifier and protein sequence and returns a Sequence object of Residues, indexed from 1.'''
    s = Sequence(sequence_type)
    count = 1
    for ResidueAA in list_of_residues:
        s.add(Residue(chain, count, ResidueAA, sequence_type))
        count += 1
    return s

[ "def", "from_sequence", "(", "chain", ",", "list_of_residues", ",", "sequence_type", "=", "None", ")", ":", "s", "=", "Sequence", "(", "sequence_type", ")", "count", "=", "1", "for", "ResidueAA", "in", "list_of_residues", ":", "s", ".", "add", "(", "Residue", "(", "chain", ",", "count", ",", "ResidueAA", ",", "sequence_type", ")", ")", "count", "+=", "1", "return", "s" ]
48
25.5
def _convert_agg_to_wx_bitmap(agg, bbox):
    """
    Convert the region of the agg buffer bounded by bbox to a wx.Bitmap.
    If bbox is None, the entire buffer is converted.

    Note: agg must be a backend_agg.RendererAgg instance.
    """
    if bbox is None:
        # agg => rgba buffer -> bitmap
        return wx.BitmapFromBufferRGBA(int(agg.width), int(agg.height),
                                       agg.buffer_rgba())
    else:
        # agg => rgba buffer -> bitmap => clipped bitmap
        return _WX28_clipped_agg_as_bitmap(agg, bbox)

[ "def", "_convert_agg_to_wx_bitmap", "(", "agg", ",", "bbox", ")", ":", "if", "bbox", "is", "None", ":", "# agg => rgba buffer -> bitmap", "return", "wx", ".", "BitmapFromBufferRGBA", "(", "int", "(", "agg", ".", "width", ")", ",", "int", "(", "agg", ".", "height", ")", ",", "agg", ".", "buffer_rgba", "(", ")", ")", "else", ":", "# agg => rgba buffer -> bitmap => clipped bitmap", "return", "_WX28_clipped_agg_as_bitmap", "(", "agg", ",", "bbox", ")" ]
36.714286
16.142857
def _recurse_on_row(self, col_dict, nested_value):
    """Apply the schema specified by the given dict to the nested value
    by recursing on it.

    Parameters
    ----------
    col_dict : dict
        The schema to apply to the nested value.
    nested_value : A value nested in a BigQuery row.

    Returns
    -------
    Union[dict, list]
        ``dict`` or ``list`` of ``dict`` objects from applied schema.
    """
    row_value = None
    # Multiple nested records
    if col_dict['mode'] == 'REPEATED' and isinstance(nested_value, list):
        row_value = [self._transform_row(record['v'], col_dict['fields'])
                     for record in nested_value]
    # A single nested record
    else:
        row_value = self._transform_row(nested_value, col_dict['fields'])
    return row_value

[ "def", "_recurse_on_row", "(", "self", ",", "col_dict", ",", "nested_value", ")", ":", "row_value", "=", "None", "# Multiple nested records", "if", "col_dict", "[", "'mode'", "]", "==", "'REPEATED'", "and", "isinstance", "(", "nested_value", ",", "list", ")", ":", "row_value", "=", "[", "self", ".", "_transform_row", "(", "record", "[", "'v'", "]", ",", "col_dict", "[", "'fields'", "]", ")", "for", "record", "in", "nested_value", "]", "# A single nested record", "else", ":", "row_value", "=", "self", ".", "_transform_row", "(", "nested_value", ",", "col_dict", "[", "'fields'", "]", ")", "return", "row_value" ]
30.964286
23.214286
def load_value(self, key, binary=False):
    """
    Load an arbitrary value identified by `key`.

    :param str key: The key that identifies the value
    :return: The loaded value
    """
    with self.load_stream(key, binary=binary) as s:
        return s.read()

[ "def", "load_value", "(", "self", ",", "key", ",", "binary", "=", "False", ")", ":", "with", "self", ".", "load_stream", "(", "key", ",", "binary", "=", "binary", ")", "as", "s", ":", "return", "s", ".", "read", "(", ")" ]
31.777778
11.555556
def convert(cls, obj, flatten=True):
    """
    This function converts an object into a dict, optionally flattening it

    :param obj: Object to be converted
    :param flatten: boolean to specify if the dict has to be flattened
    :return dict: the dict of the object (flattened or un-flattened)
    """
    dict_result = cls.object_to_dict(obj)
    if flatten:
        dict_result = FlatDict(dict_result)
    return dict_result

[ "def", "convert", "(", "cls", ",", "obj", ",", "flatten", "=", "True", ")", ":", "dict_result", "=", "cls", ".", "object_to_dict", "(", "obj", ")", "if", "flatten", ":", "dict_result", "=", "FlatDict", "(", "dict_result", ")", "return", "dict_result" ]
43
15.363636
def path_get(p: tcod.path.AStar, idx: int) -> Tuple[int, int]:
    """Get a point on a path.

    Args:
        p (AStar): An AStar instance.
        idx (int): Should be in range: 0 <= idx < :any:`path_size`
    """
    x = ffi.new("int *")
    y = ffi.new("int *")
    lib.TCOD_path_get(p._path_c, idx, x, y)
    return x[0], y[0]

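A short usage sketch, assuming a companion path_size function from the same classic functional API and a precomputed AStar instance (astar is hypothetical here):

# Walk every point on a previously computed path.
for i in range(path_size(astar)):
    x, y = path_get(astar, i)
    print(x, y)
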
[ "def", "path_get", "(", "p", ":", "tcod", ".", "path", ".", "AStar", ",", "idx", ":", "int", ")", "->", "Tuple", "[", "int", ",", "int", "]", ":", "x", "=", "ffi", ".", "new", "(", "\"int *\"", ")", "y", "=", "ffi", ".", "new", "(", "\"int *\"", ")", "lib", ".", "TCOD_path_get", "(", "p", ".", "_path_c", ",", "idx", ",", "x", ",", "y", ")", "return", "x", "[", "0", "]", ",", "y", "[", "0", "]" ]
29.272727
16
def get_context_data(self, **kwargs):
    """Add context data to view"""
    context = super().get_context_data(**kwargs)
    context.update({
        'title': self.title,
        'submit_value': self.submit_value,
        'cancel_url': self.get_cancel_url(),
        'model_verbose_name': self.get_model_class()._meta.verbose_name
    })
    return context

[ "def", "get_context_data", "(", "self", ",", "*", "*", "kwargs", ")", ":", "context", "=", "super", "(", ")", ".", "get_context_data", "(", "*", "*", "kwargs", ")", "context", ".", "update", "(", "{", "'title'", ":", "self", ".", "title", ",", "'submit_value'", ":", "self", ".", "submit_value", ",", "'cancel_url'", ":", "self", ".", "get_cancel_url", "(", ")", ",", "'model_verbose_name'", ":", "self", ".", "get_model_class", "(", ")", ".", "_meta", ".", "verbose_name", "}", ")", "return", "context" ]
34.909091
16
def unlock_kinetis_read_until_ack(jlink, address):
    """Polls the device until the request is acknowledged.

    Sends a read request to the connected device to read the register at the
    given 'address'. Polls indefinitely until either the request is ACK'd or
    the request ends in a fault.

    Args:
      jlink (JLink): the connected J-Link
      address (int): the address of the register to poll

    Returns:
      ``SWDResponse`` object on success.

    Raises:
      KinetisException: when the read exits with a non-ack or non-wait status.

    Note:
      This function is required in order to avoid reading corrupt or otherwise
      invalid data from registers when communicating over SWD.
    """
    request = swd.ReadRequest(address, ap=True)
    response = None
    while True:
        response = request.send(jlink)
        if response.ack():
            break
        elif response.wait():
            continue
        raise KinetisException('Read exited with status: %s' % response.status)
    return response

[ "def", "unlock_kinetis_read_until_ack", "(", "jlink", ",", "address", ")", ":", "request", "=", "swd", ".", "ReadRequest", "(", "address", ",", "ap", "=", "True", ")", "response", "=", "None", "while", "True", ":", "response", "=", "request", ".", "send", "(", "jlink", ")", "if", "response", ".", "ack", "(", ")", ":", "break", "elif", "response", ".", "wait", "(", ")", ":", "continue", "raise", "KinetisException", "(", "'Read exited with status: %s'", ",", "response", ".", "status", ")", "return", "response" ]
31.15625
23.125
def is_unclaimed(work):
    """Returns True if work piece is unclaimed."""
    if work['is_completed']:
        return False
    cutoff_time = time.time() - MAX_PROCESSING_TIME
    if (work['claimed_worker_id']
            and work['claimed_worker_start_time'] is not None
            and work['claimed_worker_start_time'] >= cutoff_time):
        return False
    return True

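A quick illustration of the rule, assuming MAX_PROCESSING_TIME is a module-level constant in seconds:

import time

MAX_PROCESSING_TIME = 600  # assumed value: ten minutes

stale = {
    'is_completed': False,
    'claimed_worker_id': 'worker-1',
    'claimed_worker_start_time': time.time() - 2 * MAX_PROCESSING_TIME,
}
assert is_unclaimed(stale)  # the claim expired, so the work is up for grabs again
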
[ "def", "is_unclaimed", "(", "work", ")", ":", "if", "work", "[", "'is_completed'", "]", ":", "return", "False", "cutoff_time", "=", "time", ".", "time", "(", ")", "-", "MAX_PROCESSING_TIME", "if", "(", "work", "[", "'claimed_worker_id'", "]", "and", "work", "[", "'claimed_worker_start_time'", "]", "is", "not", "None", "and", "work", "[", "'claimed_worker_start_time'", "]", ">=", "cutoff_time", ")", ":", "return", "False", "return", "True" ]
33.7
15.1
def set_LObj(self, LObj=None):
    """ Set the LObj attribute, storing objects the instance depends on

    For example:
    A Detect object depends on a vessel and some apertures.
    That link between them should be stored somewhere (for saving/loading).
    LObj does this: it stores the ID (as dict) of all objects depended on.

    Parameters
    ----------
    LObj : None / dict / :class:`~tofu.pathfile.ID` / list of such
        Provide either:
            - A dict (derived from :meth:`~tofu.pathfile.ID._todict`)
            - A :class:`~tofu.pathfile.ID` instance
            - A list of dict or :class:`~tofu.pathfile.ID` instances
    """
    self._LObj = {}
    if LObj is not None:
        if type(LObj) is not list:
            LObj = [LObj]
        for ii in range(0, len(LObj)):
            if type(LObj[ii]) is ID:
                LObj[ii] = LObj[ii]._todict()
        ClsU = list(set([oo['Cls'] for oo in LObj]))
        for c in ClsU:
            self._LObj[c] = [oo for oo in LObj if oo['Cls'] == c]

[ "def", "set_LObj", "(", "self", ",", "LObj", "=", "None", ")", ":", "self", ".", "_LObj", "=", "{", "}", "if", "LObj", "is", "not", "None", ":", "if", "type", "(", "LObj", ")", "is", "not", "list", ":", "LObj", "=", "[", "LObj", "]", "for", "ii", "in", "range", "(", "0", ",", "len", "(", "LObj", ")", ")", ":", "if", "type", "(", "LObj", "[", "ii", "]", ")", "is", "ID", ":", "LObj", "[", "ii", "]", "=", "LObj", "[", "ii", "]", ".", "_todict", "(", ")", "ClsU", "=", "list", "(", "set", "(", "[", "oo", "[", "'Cls'", "]", "for", "oo", "in", "LObj", "]", ")", ")", "for", "c", "in", "ClsU", ":", "self", ".", "_LObj", "[", "c", "]", "=", "[", "oo", "for", "oo", "in", "LObj", "if", "oo", "[", "'Cls'", "]", "==", "c", "]" ]
40
19.333333
def do_alarm_definition_create(mc, args):
    '''Create an alarm definition.'''
    fields = {}
    fields['name'] = args.name
    if args.description:
        fields['description'] = args.description
    fields['expression'] = args.expression
    if args.alarm_actions:
        fields['alarm_actions'] = args.alarm_actions
    if args.ok_actions:
        fields['ok_actions'] = args.ok_actions
    if args.undetermined_actions:
        fields['undetermined_actions'] = args.undetermined_actions
    if args.severity:
        if not _validate_severity(args.severity):
            return
        fields['severity'] = args.severity
    if args.match_by:
        fields['match_by'] = args.match_by.split(',')
    try:
        alarm = mc.alarm_definitions.create(**fields)
    except (osc_exc.ClientException, k_exc.HttpError) as he:
        raise osc_exc.CommandError('%s\n%s' % (he.message, he.details))
    else:
        print(jsonutils.dumps(alarm, indent=2))

[ "def", "do_alarm_definition_create", "(", "mc", ",", "args", ")", ":", "fields", "=", "{", "}", "fields", "[", "'name'", "]", "=", "args", ".", "name", "if", "args", ".", "description", ":", "fields", "[", "'description'", "]", "=", "args", ".", "description", "fields", "[", "'expression'", "]", "=", "args", ".", "expression", "if", "args", ".", "alarm_actions", ":", "fields", "[", "'alarm_actions'", "]", "=", "args", ".", "alarm_actions", "if", "args", ".", "ok_actions", ":", "fields", "[", "'ok_actions'", "]", "=", "args", ".", "ok_actions", "if", "args", ".", "undetermined_actions", ":", "fields", "[", "'undetermined_actions'", "]", "=", "args", ".", "undetermined_actions", "if", "args", ".", "severity", ":", "if", "not", "_validate_severity", "(", "args", ".", "severity", ")", ":", "return", "fields", "[", "'severity'", "]", "=", "args", ".", "severity", "if", "args", ".", "match_by", ":", "fields", "[", "'match_by'", "]", "=", "args", ".", "match_by", ".", "split", "(", "','", ")", "try", ":", "alarm", "=", "mc", ".", "alarm_definitions", ".", "create", "(", "*", "*", "fields", ")", "except", "(", "osc_exc", ".", "ClientException", ",", "k_exc", ".", "HttpError", ")", "as", "he", ":", "raise", "osc_exc", ".", "CommandError", "(", "'%s\\n%s'", "%", "(", "he", ".", "message", ",", "he", ".", "details", ")", ")", "else", ":", "print", "(", "jsonutils", ".", "dumps", "(", "alarm", ",", "indent", "=", "2", ")", ")" ]
37.4
14.6
def change_vartype(self, vartype, inplace=True):
    """Create a binary quadratic model with the specified vartype.

    Args:
        vartype (:class:`.Vartype`/str/set, optional):
            Variable type for the changed model. Accepted input values:

            * :class:`.Vartype.SPIN`, ``'SPIN'``, ``{-1, 1}``
            * :class:`.Vartype.BINARY`, ``'BINARY'``, ``{0, 1}``

        inplace (bool, optional, default=True):
            If True, the binary quadratic model is updated in-place;
            otherwise, a new binary quadratic model is returned.

    Returns:
        :class:`.BinaryQuadraticModel`. A new binary quadratic model with
        vartype matching input 'vartype'.

    Examples:
        This example creates an Ising model and then creates a QUBO from it.

        >>> import dimod
        ...
        >>> bqm_spin = dimod.BinaryQuadraticModel({1: 1, 2: 2}, {(1, 2): 0.5}, 0.5, dimod.SPIN)
        >>> bqm_qubo = bqm_spin.change_vartype('BINARY', inplace=False)
        >>> bqm_spin.offset, bqm_spin.vartype
        (0.5, <Vartype.SPIN: frozenset({1, -1})>)
        >>> bqm_qubo.offset, bqm_qubo.vartype
        (-2.0, <Vartype.BINARY: frozenset({0, 1})>)

    """
    if not inplace:
        # create a new model of the appropriate type, then add self's biases to it
        new_model = BinaryQuadraticModel({}, {}, 0.0, vartype)

        new_model.add_variables_from(self.linear, vartype=self.vartype)
        new_model.add_interactions_from(self.quadratic, vartype=self.vartype)
        new_model.add_offset(self.offset)

        return new_model

    # in this case we are doing things in-place, if the desired vartype matches self.vartype,
    # then we don't need to do anything
    if vartype is self.vartype:
        return self

    if self.vartype is Vartype.SPIN and vartype is Vartype.BINARY:
        linear, quadratic, offset = self.spin_to_binary(self.linear, self.quadratic, self.offset)
    elif self.vartype is Vartype.BINARY and vartype is Vartype.SPIN:
        linear, quadratic, offset = self.binary_to_spin(self.linear, self.quadratic, self.offset)
    else:
        raise RuntimeError("something has gone wrong. unknown vartype conversion.")

    # drop everything
    for v in linear:
        self.remove_variable(v)
    self.add_offset(-self.offset)

    self.vartype = vartype
    self.add_variables_from(linear)
    self.add_interactions_from(quadratic)
    self.add_offset(offset)

    return self

[ "def", "change_vartype", "(", "self", ",", "vartype", ",", "inplace", "=", "True", ")", ":", "if", "not", "inplace", ":", "# create a new model of the appropriate type, then add self's biases to it", "new_model", "=", "BinaryQuadraticModel", "(", "{", "}", ",", "{", "}", ",", "0.0", ",", "vartype", ")", "new_model", ".", "add_variables_from", "(", "self", ".", "linear", ",", "vartype", "=", "self", ".", "vartype", ")", "new_model", ".", "add_interactions_from", "(", "self", ".", "quadratic", ",", "vartype", "=", "self", ".", "vartype", ")", "new_model", ".", "add_offset", "(", "self", ".", "offset", ")", "return", "new_model", "# in this case we are doing things in-place, if the desired vartype matches self.vartype,", "# then we don't need to do anything", "if", "vartype", "is", "self", ".", "vartype", ":", "return", "self", "if", "self", ".", "vartype", "is", "Vartype", ".", "SPIN", "and", "vartype", "is", "Vartype", ".", "BINARY", ":", "linear", ",", "quadratic", ",", "offset", "=", "self", ".", "spin_to_binary", "(", "self", ".", "linear", ",", "self", ".", "quadratic", ",", "self", ".", "offset", ")", "elif", "self", ".", "vartype", "is", "Vartype", ".", "BINARY", "and", "vartype", "is", "Vartype", ".", "SPIN", ":", "linear", ",", "quadratic", ",", "offset", "=", "self", ".", "binary_to_spin", "(", "self", ".", "linear", ",", "self", ".", "quadratic", ",", "self", ".", "offset", ")", "else", ":", "raise", "RuntimeError", "(", "\"something has gone wrong. unknown vartype conversion.\"", ")", "# drop everything", "for", "v", "in", "linear", ":", "self", ".", "remove_variable", "(", "v", ")", "self", ".", "add_offset", "(", "-", "self", ".", "offset", ")", "self", ".", "vartype", "=", "vartype", "self", ".", "add_variables_from", "(", "linear", ")", "self", ".", "add_interactions_from", "(", "quadratic", ")", "self", ".", "add_offset", "(", "offset", ")", "return", "self" ]
39.815385
26.538462
def export(g, csv_fname):
    """ export a graph to CSV for simpler viewing """
    with open(csv_fname, "w") as f:
        num_tuples = 0
        f.write('"num","subject","predicate","object"\n')
        for subj, pred, obj in g:
            num_tuples += 1
            f.write('"' + str(num_tuples) + '",')
            f.write('"' + get_string_from_rdf(subj) + '",')
            f.write('"' + get_string_from_rdf(pred) + '",')
            f.write('"' + get_string_from_rdf(obj) + '"\n')
    print("Finished exporting", num_tuples, "tuples")

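A minimal usage sketch, assuming g is an rdflib.Graph and get_string_from_rdf renders each RDF term as plain text:

from rdflib import Graph

g = Graph()
g.parse("card.ttl")  # hypothetical local RDF file
export(g, "triples.csv")  # writes one quoted, numbered row per triple
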
[ "def", "export", "(", "g", ",", "csv_fname", ")", ":", "with", "open", "(", "csv_fname", ",", "\"w\"", ")", "as", "f", ":", "num_tuples", "=", "0", "f", ".", "write", "(", "'\"num\",\"subject\",\"predicate\",\"object\"\\n'", ")", "for", "subj", ",", "pred", ",", "obj", "in", "g", ":", "num_tuples", "+=", "1", "f", ".", "write", "(", "'\"'", "+", "str", "(", "num_tuples", ")", "+", "'\",'", ")", "f", ".", "write", "(", "'\"'", "+", "get_string_from_rdf", "(", "subj", ")", "+", "'\",'", ")", "f", ".", "write", "(", "'\"'", "+", "get_string_from_rdf", "(", "pred", ")", "+", "'\",'", ")", "f", ".", "write", "(", "'\"'", "+", "get_string_from_rdf", "(", "obj", ")", "+", "'\"\\n'", ")", "print", "(", "\"Finished exporting \"", ",", "num_tuples", ",", "\" tuples\"", ")" ]
44.5
13.083333
def register_function_hooks(self, func):
    """Looks at an object method and registers it for relevant transitions."""
    for hook_kind, hooks in func.xworkflows_hook.items():
        for field_name, hook in hooks:
            if field_name and field_name != self.state_field:
                continue
            for transition in self.workflow.transitions:
                if hook.applies_to(transition):
                    implem = self.implementations[transition.name]
                    implem.add_hook(hook)

[ "def", "register_function_hooks", "(", "self", ",", "func", ")", ":", "for", "hook_kind", ",", "hooks", "in", "func", ".", "xworkflows_hook", ".", "items", "(", ")", ":", "for", "field_name", ",", "hook", "in", "hooks", ":", "if", "field_name", "and", "field_name", "!=", "self", ".", "state_field", ":", "continue", "for", "transition", "in", "self", ".", "workflow", ".", "transitions", ":", "if", "hook", ".", "applies_to", "(", "transition", ")", ":", "implem", "=", "self", ".", "implementations", "[", "transition", ".", "name", "]", "implem", ".", "add_hook", "(", "hook", ")" ]
54.4
12.6
def iter_segments(obj, neurite_filter=None, neurite_order=NeuriteIter.FileOrder):
    '''Return an iterator to the segments in a collection of neurites

    Parameters:
        obj: neuron, population, neurite, section, or iterable containing neurite objects
        neurite_filter: optional top level filter on properties of neurite objects
        neurite_order: order upon which neurites should be iterated. Values:
            - NeuriteIter.FileOrder: order of appearance in the file
            - NeuriteIter.NRN: NRN simulator order: soma -> axon -> basal -> apical

    Note:
        This is a convenience function provided for generic access to
        neuron segments. It may have a performance overhead WRT custom-made
        segment analysis functions that leverage numpy and section-wise iteration.
    '''
    sections = iter((obj,) if isinstance(obj, Section) else
                    iter_sections(obj,
                                  neurite_filter=neurite_filter,
                                  neurite_order=neurite_order))

    return chain.from_iterable(zip(sec.points[:-1], sec.points[1:])
                               for sec in sections)

[ "def", "iter_segments", "(", "obj", ",", "neurite_filter", "=", "None", ",", "neurite_order", "=", "NeuriteIter", ".", "FileOrder", ")", ":", "sections", "=", "iter", "(", "(", "obj", ",", ")", "if", "isinstance", "(", "obj", ",", "Section", ")", "else", "iter_sections", "(", "obj", ",", "neurite_filter", "=", "neurite_filter", ",", "neurite_order", "=", "neurite_order", ")", ")", "return", "chain", ".", "from_iterable", "(", "zip", "(", "sec", ".", "points", "[", ":", "-", "1", "]", ",", "sec", ".", "points", "[", "1", ":", "]", ")", "for", "sec", "in", "sections", ")" ]
52.454545
31.636364
def prepare(self, context, stream_id):
    """Invoke prepare() of this custom grouping"""
    self.grouping.prepare(context, self.source_comp_name, stream_id, self.task_ids)

[ "def", "prepare", "(", "self", ",", "context", ",", "stream_id", ")", ":", "self", ".", "grouping", ".", "prepare", "(", "context", ",", "self", ".", "source_comp_name", ",", "stream_id", ",", "self", ".", "task_ids", ")" ]
57
15
def value_from_datadict(self, data, files, name):
    """
    Given a dictionary of data and this widget's name, returns the value
    of this widget. Returns None if it's not provided.
    """
    value = super(FileSizeWidget, self).value_from_datadict(data, files, name)
    if value not in EMPTY_VALUES:
        try:
            return parse_size(value)
        except ValueError:
            pass
    return value

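A small illustration of the fallback behavior, assuming parse_size converts strings like '10 KiB' to bytes ('upload_limit' is a hypothetical field name):

widget = FileSizeWidget()
print(widget.value_from_datadict({'upload_limit': '10 KiB'}, {}, 'upload_limit'))
# -> 10240 if parse_size understands the string; the raw value if it raises ValueError
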
[ "def", "value_from_datadict", "(", "self", ",", "data", ",", "files", ",", "name", ")", ":", "value", "=", "super", "(", "FileSizeWidget", ",", "self", ")", ".", "value_from_datadict", "(", "data", ",", "files", ",", "name", ")", "if", "value", "not", "in", "EMPTY_VALUES", ":", "try", ":", "return", "parse_size", "(", "value", ")", "except", "ValueError", ":", "pass", "return", "value" ]
37.5
15.166667
def wait_travel(self, character, thing, dest, cb=None):
    """Schedule a thing to travel someplace, then wait for it to finish,
    and call ``cb`` if provided

    :param character: name of the character
    :param thing: name of the thing
    :param dest: name of the destination (a place)
    :param cb: function to be called when I'm done
    :return: ``None``
    """
    self.wait_turns(self.engine.character[character].thing[thing].travel_to(dest), cb=cb)

[ "def", "wait_travel", "(", "self", ",", "character", ",", "thing", ",", "dest", ",", "cb", "=", "None", ")", ":", "self", ".", "wait_turns", "(", "self", ".", "engine", ".", "character", "[", "character", "]", ".", "thing", "[", "thing", "]", ".", "travel_to", "(", "dest", ")", ",", "cb", "=", "cb", ")" ]
44.545455
17.363636
def run(self):
    """Run the App using the current profile.

    The current profile has the install_json and args pre-loaded.
    """
    install_json = self.profile.get('install_json')
    program_language = self.profile.get('install_json').get('programLanguage', 'python').lower()

    print('{}{}'.format(c.Style.BRIGHT, '-' * 100))

    if install_json.get('programMain') is not None:
        program_main = install_json.get('programMain').replace('.py', '')
    elif self.profile.get('script') is not None:
        # TODO: remove this option on version 1.0.0
        program_main = self.profile.get('script').replace('.py', '')
    else:
        print('{}{}No Program Main or Script defined.'.format(c.Style.BRIGHT, c.Fore.RED))
        sys.exit(1)

    self.run_display_profile(program_main)
    self.run_display_description()
    self.run_validate_program_main(program_main)

    # get the commands
    commands = self.run_commands(program_language, program_main)
    self.log.info('[run] Running command {}'.format(commands.get('print_command')))

    # output command
    print(
        'Executing: {}{}{}'.format(c.Style.BRIGHT, c.Fore.GREEN, commands.get('print_command'))
    )

    if self.args.docker:
        return self.run_docker(commands)
    return self.run_local(commands)

[ "def", "run", "(", "self", ")", ":", "install_json", "=", "self", ".", "profile", ".", "get", "(", "'install_json'", ")", "program_language", "=", "self", ".", "profile", ".", "get", "(", "'install_json'", ")", ".", "get", "(", "'programLanguage'", ",", "'python'", ")", ".", "lower", "(", ")", "print", "(", "'{}{}'", ".", "format", "(", "c", ".", "Style", ".", "BRIGHT", ",", "'-'", "*", "100", ")", ")", "if", "install_json", ".", "get", "(", "'programMain'", ")", "is", "not", "None", ":", "program_main", "=", "install_json", ".", "get", "(", "'programMain'", ")", ".", "replace", "(", "'.py'", ",", "''", ")", "elif", "self", ".", "profile", ".", "get", "(", "'script'", ")", "is", "not", "None", ":", "# TODO: remove this option on version 1.0.0", "program_main", "=", "self", ".", "profile", ".", "get", "(", "'script'", ")", ".", "replace", "(", "'.py'", ",", "''", ")", "else", ":", "print", "(", "'{}{}No Program Main or Script defined.'", ".", "format", "(", "c", ".", "Style", ".", "BRIGHT", ",", "c", ".", "Fore", ".", "RED", ")", ")", "sys", ".", "exit", "(", "1", ")", "self", ".", "run_display_profile", "(", "program_main", ")", "self", ".", "run_display_description", "(", ")", "self", ".", "run_validate_program_main", "(", "program_main", ")", "# get the commands", "commands", "=", "self", ".", "run_commands", "(", "program_language", ",", "program_main", ")", "self", ".", "log", ".", "info", "(", "'[run] Running command {}'", ".", "format", "(", "commands", ".", "get", "(", "'print_command'", ")", ")", ")", "# output command", "print", "(", "'Executing: {}{}{}'", ".", "format", "(", "c", ".", "Style", ".", "BRIGHT", ",", "c", ".", "Fore", ".", "GREEN", ",", "commands", ".", "get", "(", "'print_command'", ")", ")", ")", "if", "self", ".", "args", ".", "docker", ":", "return", "self", ".", "run_docker", "(", "commands", ")", "return", "self", ".", "run_local", "(", "commands", ")" ]
38
25.888889
def close(self):
    """Close a port on dummy_serial."""
    if VERBOSE:
        _print_out('\nDummy_serial: Closing port\n')

    if not self._isOpen:
        raise IOError('Dummy_serial: The port is already closed')

    self._isOpen = False
    self.port = None

[ "def", "close", "(", "self", ")", ":", "if", "VERBOSE", ":", "_print_out", "(", "'\\nDummy_serial: Closing port\\n'", ")", "if", "not", "self", ".", "_isOpen", ":", "raise", "IOError", "(", "'Dummy_serial: The port is already closed'", ")", "self", ".", "_isOpen", "=", "False", "self", ".", "port", "=", "None" ]
29.5
19.8
def _get_recurrence_model(input_model):
    """
    Returns the annual and cumulative recurrence rates predicted by the
    recurrence model
    """
    if not isinstance(input_model, (TruncatedGRMFD,
                                    EvenlyDiscretizedMFD,
                                    YoungsCoppersmith1985MFD)):
        raise ValueError('Recurrence model not recognised')
    # Get model annual occurrence rates
    annual_rates = input_model.get_annual_occurrence_rates()
    annual_rates = np.array([[val[0], val[1]] for val in annual_rates])
    # Get cumulative rates
    cumulative_rates = np.array([np.sum(annual_rates[iloc:, 1])
                                 for iloc in range(0, len(annual_rates), 1)])
    return annual_rates, cumulative_rates

[ "def", "_get_recurrence_model", "(", "input_model", ")", ":", "if", "not", "isinstance", "(", "input_model", ",", "(", "TruncatedGRMFD", ",", "EvenlyDiscretizedMFD", ",", "YoungsCoppersmith1985MFD", ")", ")", ":", "raise", "ValueError", "(", "'Recurrence model not recognised'", ")", "# Get model annual occurrence rates", "annual_rates", "=", "input_model", ".", "get_annual_occurrence_rates", "(", ")", "annual_rates", "=", "np", ".", "array", "(", "[", "[", "val", "[", "0", "]", ",", "val", "[", "1", "]", "]", "for", "val", "in", "annual_rates", "]", ")", "# Get cumulative rates", "cumulative_rates", "=", "np", ".", "array", "(", "[", "np", ".", "sum", "(", "annual_rates", "[", "iloc", ":", ",", "1", "]", ")", "for", "iloc", "in", "range", "(", "0", ",", "len", "(", "annual_rates", ")", ",", "1", ")", "]", ")", "return", "annual_rates", ",", "cumulative_rates" ]
46.9375
15.5625
def find_notignored_git_files(self, context, silent_build):
    """
    Return a list of files that are not ignored by git
    """
    def git(args, error_message, cwd=context.parent_dir, **error_kwargs):
        output, status = command_output("git {0}".format(args), cwd=cwd)
        if status != 0:
            error_kwargs['output'] = output
            error_kwargs['directory'] = context.parent_dir
            raise HarpoonError(error_message, **error_kwargs)
        return output

    changed_files = git("diff --name-only", "Failed to determine what files have changed")
    untracked_files = git("ls-files --others --exclude-standard", "Failed to find untracked files")

    valid = set()
    under_source_control = git("ls-files --exclude-standard", "Failed to find all the files under source control")
    git_submodules = [
        regexes["whitespace"].split(line.strip())[1]
        for line in git("submodule status", "Failed to find submodules", cwd=context.git_root)
    ]
    git_submodules = [
        os.path.normpath(
            os.path.relpath(
                os.path.abspath(p),
                os.path.abspath(os.path.relpath(context.parent_dir, context.git_root))
            )
        )
        for p in git_submodules
    ]

    valid = under_source_control + untracked_files

    for filename in list(valid):
        matched = False
        if context.exclude:
            for excluder in context.exclude:
                if fnmatch.fnmatch(filename, excluder):
                    matched = True
                    break

        if matched:
            continue

        location = os.path.join(context.parent_dir, filename)
        if os.path.islink(location) and os.path.isdir(location):
            actual_path = os.path.abspath(os.path.realpath(location))
            parent_dir = os.path.abspath(os.path.realpath(context.parent_dir))
            include_from = os.path.relpath(actual_path, parent_dir)

            to_include = git(
                "ls-files --exclude-standard -- {0}".format(include_from),
                "Failed to find files under a symlink"
            )
            for found in to_include:
                valid += [os.path.join(filename, os.path.relpath(found, include_from))]
        elif os.path.isdir(location) and filename in git_submodules:
            to_include = git("ls-files --exclude-standard", "Failed to find files in a submodule", cwd=location)
            valid = [v for v in valid if v != filename]
            for found in to_include:
                valid.append(os.path.join(filename, found))

    return set(self.convert_nonascii(valid))

[ "def", "find_notignored_git_files", "(", "self", ",", "context", ",", "silent_build", ")", ":", "def", "git", "(", "args", ",", "error_message", ",", "cwd", "=", "context", ".", "parent_dir", ",", "*", "*", "error_kwargs", ")", ":", "output", ",", "status", "=", "command_output", "(", "\"git {0}\"", ".", "format", "(", "args", ")", ",", "cwd", "=", "cwd", ")", "if", "status", "!=", "0", ":", "error_kwargs", "[", "'output'", "]", "=", "output", "error_kwargs", "[", "'directory'", "]", "=", "context", ".", "parent_dir", "raise", "HarpoonError", "(", "error_message", ",", "*", "*", "error_kwargs", ")", "return", "output", "changed_files", "=", "git", "(", "\"diff --name-only\"", ",", "\"Failed to determine what files have changed\"", ")", "untracked_files", "=", "git", "(", "\"ls-files --others --exclude-standard\"", ",", "\"Failed to find untracked files\"", ")", "valid", "=", "set", "(", ")", "under_source_control", "=", "git", "(", "\"ls-files --exclude-standard\"", ",", "\"Failed to find all the files under source control\"", ")", "git_submodules", "=", "[", "regexes", "[", "\"whitespace\"", "]", ".", "split", "(", "line", ".", "strip", "(", ")", ")", "[", "1", "]", "for", "line", "in", "git", "(", "\"submodule status\"", ",", "\"Failed to find submodules\"", ",", "cwd", "=", "context", ".", "git_root", ")", "]", "git_submodules", "=", "[", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "relpath", "(", "os", ".", "path", ".", "abspath", "(", "p", ")", ",", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "relpath", "(", "context", ".", "parent_dir", ",", "context", ".", "git_root", ")", ")", ")", ")", "for", "p", "in", "git_submodules", "]", "valid", "=", "under_source_control", "+", "untracked_files", "for", "filename", "in", "list", "(", "valid", ")", ":", "matched", "=", "False", "if", "context", ".", "exclude", ":", "for", "excluder", "in", "context", ".", "exclude", ":", "if", "fnmatch", ".", "fnmatch", "(", "filename", ",", "excluder", ")", ":", "matched", "=", "True", "break", "if", "matched", ":", "continue", "location", "=", "os", ".", "path", ".", "join", "(", "context", ".", "parent_dir", ",", "filename", ")", "if", "os", ".", "path", ".", "islink", "(", "location", ")", "and", "os", ".", "path", ".", "isdir", "(", "location", ")", ":", "actual_path", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "realpath", "(", "location", ")", ")", "parent_dir", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "realpath", "(", "context", ".", "parent_dir", ")", ")", "include_from", "=", "os", ".", "path", ".", "relpath", "(", "actual_path", ",", "parent_dir", ")", "to_include", "=", "git", "(", "\"ls-files --exclude-standard -- {0}\"", ".", "format", "(", "include_from", ")", ",", "\"Failed to find files under a symlink\"", ")", "for", "found", "in", "to_include", ":", "valid", "+=", "[", "os", ".", "path", ".", "join", "(", "filename", ",", "os", ".", "path", ".", "relpath", "(", "found", ",", "include_from", ")", ")", "]", "elif", "os", ".", "path", ".", "isdir", "(", "location", ")", "and", "filename", "in", "git_submodules", ":", "to_include", "=", "git", "(", "\"ls-files --exclude-standard\"", ",", "\"Failed to find files in a submodule\"", ",", "cwd", "=", "location", ")", "valid", "=", "[", "v", "for", "v", "in", "valid", "if", "v", "!=", "filename", "]", "for", "found", "in", "to_include", ":", "valid", ".", "append", "(", "os", ".", "path", ".", "join", "(", "filename", ",", "found", ")", ")", "return", 
"set", "(", "self", ".", "convert_nonascii", "(", "valid", ")", ")" ]
52.571429
31.673469
def update_version(self, service_id, version_number, **kwargs):
    """Update a particular version for a particular service."""
    body = self._formdata(kwargs, FastlyVersion.FIELDS)
    content = self._fetch("/service/%s/version/%d/" % (service_id, version_number), method="PUT", body=body)
    return FastlyVersion(self, content)

[ "def", "update_version", "(", "self", ",", "service_id", ",", "version_number", ",", "*", "*", "kwargs", ")", ":", "body", "=", "self", ".", "_formdata", "(", "kwargs", ",", "FastlyVersion", ".", "FIELDS", ")", "content", "=", "self", ".", "_fetch", "(", "\"/service/%s/version/%d/\"", "%", "(", "service_id", ",", "version_number", ")", ",", "method", "=", "\"PUT\"", ",", "body", "=", "body", ")", "return", "FastlyVersion", "(", "self", ",", "content", ")" ]
64
21
def do_translate(parser, token):
    """
    This will mark a string for translation and will
    translate the string for the current language.

    Usage::

        {% trans "this is a test" %}

    This will mark the string for translation so it will
    be pulled out by mark-messages.py into the .po files
    and will run the string through the translation engine.

    There is a second form::

        {% trans "this is a test" noop %}

    This will only mark for translation, but will return
    the string unchanged. Use it when you need to store
    values into forms that should be translated later on.

    You can use variables instead of constant strings
    to translate stuff you marked somewhere else::

        {% trans variable %}

    This will just try to translate the contents of
    the variable ``variable``. Make sure that the string
    in there is something that is in the .po file.

    It is possible to store the translated string into a variable::

        {% trans "this is a test" as var %}
        {{ var }}

    Contextual translations are also supported::

        {% trans "this is a test" context "greeting" %}

    This is equivalent to calling pgettext instead of (u)gettext.
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise TemplateSyntaxError("'%s' takes at least one argument" % bits[0])
    message_string = parser.compile_filter(bits[1])
    remaining = bits[2:]

    noop = False
    asvar = None
    message_context = None
    seen = set()
    invalid_context = {'as', 'noop'}

    while remaining:
        option = remaining.pop(0)
        if option in seen:
            raise TemplateSyntaxError(
                "The '%s' option was specified more than once." % option,
            )
        elif option == 'noop':
            noop = True
        elif option == 'context':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the context option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            if value in invalid_context:
                raise TemplateSyntaxError(
                    "Invalid argument '%s' provided to the '%s' tag for the context option" % (value, bits[0]),
                )
            message_context = parser.compile_filter(value)
        elif option == 'as':
            try:
                value = remaining.pop(0)
            except IndexError:
                msg = "No argument provided to the '%s' tag for the as option." % bits[0]
                six.reraise(TemplateSyntaxError, TemplateSyntaxError(msg), sys.exc_info()[2])
            asvar = value
        else:
            raise TemplateSyntaxError(
                "Unknown argument for '%s' tag: '%s'. The only options "
                "available are 'noop', 'context' \"xxx\", and 'as VAR'." % (
                    bits[0], option,
                )
            )
        seen.add(option)

    if phrase_settings.PHRASE_ENABLED:
        return PhraseTranslateNode(message_string, noop, asvar, message_context)
    else:
        return TranslateNode(message_string, noop, asvar, message_context)

[ "def", "do_translate", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "len", "(", "bits", ")", "<", "2", ":", "raise", "TemplateSyntaxError", "(", "\"'%s' takes at least one argument\"", "%", "bits", "[", "0", "]", ")", "message_string", "=", "parser", ".", "compile_filter", "(", "bits", "[", "1", "]", ")", "remaining", "=", "bits", "[", "2", ":", "]", "noop", "=", "False", "asvar", "=", "None", "message_context", "=", "None", "seen", "=", "set", "(", ")", "invalid_context", "=", "{", "'as'", ",", "'noop'", "}", "while", "remaining", ":", "option", "=", "remaining", ".", "pop", "(", "0", ")", "if", "option", "in", "seen", ":", "raise", "TemplateSyntaxError", "(", "\"The '%s' option was specified more than once.\"", "%", "option", ",", ")", "elif", "option", "==", "'noop'", ":", "noop", "=", "True", "elif", "option", "==", "'context'", ":", "try", ":", "value", "=", "remaining", ".", "pop", "(", "0", ")", "except", "IndexError", ":", "msg", "=", "\"No argument provided to the '%s' tag for the context option.\"", "%", "bits", "[", "0", "]", "six", ".", "reraise", "(", "TemplateSyntaxError", ",", "TemplateSyntaxError", "(", "msg", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")", "if", "value", "in", "invalid_context", ":", "raise", "TemplateSyntaxError", "(", "\"Invalid argument '%s' provided to the '%s' tag for the context option\"", "%", "(", "value", ",", "bits", "[", "0", "]", ")", ",", ")", "message_context", "=", "parser", ".", "compile_filter", "(", "value", ")", "elif", "option", "==", "'as'", ":", "try", ":", "value", "=", "remaining", ".", "pop", "(", "0", ")", "except", "IndexError", ":", "msg", "=", "\"No argument provided to the '%s' tag for the as option.\"", "%", "bits", "[", "0", "]", "six", ".", "reraise", "(", "TemplateSyntaxError", ",", "TemplateSyntaxError", "(", "msg", ")", ",", "sys", ".", "exc_info", "(", ")", "[", "2", "]", ")", "asvar", "=", "value", "else", ":", "raise", "TemplateSyntaxError", "(", "\"Unknown argument for '%s' tag: '%s'. The only options \"", "\"available are 'noop', 'context' \\\"xxx\\\", and 'as VAR'.\"", "%", "(", "bits", "[", "0", "]", ",", "option", ",", ")", ")", "seen", ".", "add", "(", "option", ")", "if", "phrase_settings", ".", "PHRASE_ENABLED", ":", "return", "PhraseTranslateNode", "(", "message_string", ",", "noop", ",", "asvar", ",", "message_context", ")", "else", ":", "return", "TranslateNode", "(", "message_string", ",", "noop", ",", "asvar", ",", "message_context", ")" ]
40.320513
18.961538
def wait(timeout=None, flush=True):
    """Wait for an event.

    Args:
        timeout (Optional[int]): The time in seconds that this function will
            wait before giving up and returning None.

            With the default value of None, this will block forever.
        flush (bool): If True a call to :any:`tdl.flush` will be made before
            listening for events.

    Returns:
        Type[Event]: An event, or None if the function has timed out.
            Anything added via :any:`push` will also be returned.
    """
    if timeout is not None:
        timeout = timeout + _time.clock()  # timeout at this time
    while True:
        if _eventQueue:
            return _eventQueue.pop(0)
        if flush:
            # a full 'round' of events needs to be processed before flushing
            _tdl.flush()
        if timeout and _time.clock() >= timeout:
            return None  # return None on timeout
        _time.sleep(0.001)  # sleep 1ms
        _processEvents()

[ "def", "wait", "(", "timeout", "=", "None", ",", "flush", "=", "True", ")", ":", "if", "timeout", "is", "not", "None", ":", "timeout", "=", "timeout", "+", "_time", ".", "clock", "(", ")", "# timeout at this time", "while", "True", ":", "if", "_eventQueue", ":", "return", "_eventQueue", ".", "pop", "(", "0", ")", "if", "flush", ":", "# a full 'round' of events need to be processed before flushing", "_tdl", ".", "flush", "(", ")", "if", "timeout", "and", "_time", ".", "clock", "(", ")", ">=", "timeout", ":", "return", "None", "# return None on timeout", "_time", ".", "sleep", "(", "0.001", ")", "# sleep 1ms", "_processEvents", "(", ")" ]
36.074074
19.407407
def split_add_ops(text):
    """Specialized function splitting text at add/sub operators.
    Operands are *not* translated. Example result ['op1', '+', 'op2', '-', 'op3']"""
    n = 0
    text = text.replace('++', '##').replace('--', '@@')  # text does not normally contain any of these
    spotted = False  # set to true if noticed anything other than +- or white space
    last = 0
    while n < len(text):
        e = text[n]
        if e == '+' or e == '-':
            if spotted:
                yield text[last:n].replace('##', '++').replace('@@', '--')
                yield e
                last = n + 1
                spotted = False
        elif e == '/' or e == '*' or e == '%':
            spotted = False
        elif e != ' ':
            spotted = True
        n += 1
    yield text[last:n].replace('##', '++').replace('@@', '--')

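Two quick traces of the generator (note that a sign directly after another operator is treated as unary and stays attached to its operand):

print(list(split_add_ops("a + b - c++")))  # -> ['a ', '+', ' b ', '-', ' c++']
print(list(split_add_ops("a + -b")))       # -> ['a ', '+', ' -b']
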
[ "def", "split_add_ops", "(", "text", ")", ":", "n", "=", "0", "text", "=", "text", ".", "replace", "(", "'++'", ",", "'##'", ")", ".", "replace", "(", "'--'", ",", "'@@'", ")", "#text does not normally contain any of these", "spotted", "=", "False", "# set to true if noticed anything other than +- or white space", "last", "=", "0", "while", "n", "<", "len", "(", "text", ")", ":", "e", "=", "text", "[", "n", "]", "if", "e", "==", "'+'", "or", "e", "==", "'-'", ":", "if", "spotted", ":", "yield", "text", "[", "last", ":", "n", "]", ".", "replace", "(", "'##'", ",", "'++'", ")", ".", "replace", "(", "'@@'", ",", "'--'", ")", "yield", "e", "last", "=", "n", "+", "1", "spotted", "=", "False", "elif", "e", "==", "'/'", "or", "e", "==", "'*'", "or", "e", "==", "'%'", ":", "spotted", "=", "False", "elif", "e", "!=", "' '", ":", "spotted", "=", "True", "n", "+=", "1", "yield", "text", "[", "last", ":", "n", "]", ".", "replace", "(", "'##'", ",", "'++'", ")", ".", "replace", "(", "'@@'", ",", "'--'", ")" ]
38
17.272727
def candidate_pair_priority(local, remote, ice_controlling):
    """
    See RFC 5245 - 5.7.2. Computing Pair Priority and Ordering Pairs
    """
    G = ice_controlling and local.priority or remote.priority
    D = ice_controlling and remote.priority or local.priority
    return (1 << 32) * min(G, D) + 2 * max(G, D) + (G > D and 1 or 0)

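The return value implements the RFC 5245 formula 2^32 * MIN(G, D) + 2 * MAX(G, D) + (G > D ? 1 : 0), where G and D are the controlling and controlled agents' candidate priorities. A tiny check with stand-in objects (the priority values are hypothetical):

from types import SimpleNamespace

local = SimpleNamespace(priority=126)
remote = SimpleNamespace(priority=110)
# Controlling side: G = 126, D = 110.
assert candidate_pair_priority(local, remote, True) == (1 << 32) * 110 + 2 * 126 + 1
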
[ "def", "candidate_pair_priority", "(", "local", ",", "remote", ",", "ice_controlling", ")", ":", "G", "=", "ice_controlling", "and", "local", ".", "priority", "or", "remote", ".", "priority", "D", "=", "ice_controlling", "and", "remote", ".", "priority", "or", "local", ".", "priority", "return", "(", "1", "<<", "32", ")", "*", "min", "(", "G", ",", "D", ")", "+", "2", "*", "max", "(", "G", ",", "D", ")", "+", "(", "G", ">", "D", "and", "1", "or", "0", ")" ]
47.571429
17
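To make the RFC 5245 pair-priority formula above concrete, here is a worked check with hypothetical priorities; Candidate is only a stand-in for whatever object carries a `priority` attribute.

.. code-block:: python

    from collections import namedtuple

    Candidate = namedtuple('Candidate', 'priority')  # hypothetical stand-in
    local, remote = Candidate(126), Candidate(110)

    # Controlling agent: G = local = 126, D = remote = 110, and G > D adds the +1.
    expected = (1 << 32) * 110 + 2 * 126 + 1
    assert candidate_pair_priority(local, remote, True) == expected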
def reciprocal_rank(truth, recommend): """Reciprocal Rank (RR). Args: truth (numpy 1d array): Set of truth samples. recommend (numpy 1d array): Ordered set of recommended samples. Returns: float: RR. """ for n in range(recommend.size): if recommend[n] in truth: return 1. / (n + 1) return 0.
[ "def", "reciprocal_rank", "(", "truth", ",", "recommend", ")", ":", "for", "n", "in", "range", "(", "recommend", ".", "size", ")", ":", "if", "recommend", "[", "n", "]", "in", "truth", ":", "return", "1.", "/", "(", "n", "+", "1", ")", "return", "0." ]
23.2
19.666667
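A small usage sketch for reciprocal_rank above: the first relevant recommendation sits at rank 2, so RR is 1/2; with no hit the metric falls back to 0.

.. code-block:: python

    import numpy as np

    truth = np.array([30, 42])
    recommend = np.array([10, 42, 7])
    assert reciprocal_rank(truth, recommend) == 0.5
    assert reciprocal_rank(np.array([99]), recommend) == 0.0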
def serialize(cls, installation): """ :type installation: core.Installation :rtype: list """ return [ {cls._FIELD_ID: converter.serialize(installation.id_)}, {cls._FIELD_TOKEN: converter.serialize(installation.token)}, { cls._FIELD_SERVER_PUBLIC_KEY: converter.serialize( installation.server_public_key ), }, ]
[ "def", "serialize", "(", "cls", ",", "installation", ")", ":", "return", "[", "{", "cls", ".", "_FIELD_ID", ":", "converter", ".", "serialize", "(", "installation", ".", "id_", ")", "}", ",", "{", "cls", ".", "_FIELD_TOKEN", ":", "converter", ".", "serialize", "(", "installation", ".", "token", ")", "}", ",", "{", "cls", ".", "_FIELD_SERVER_PUBLIC_KEY", ":", "converter", ".", "serialize", "(", "installation", ".", "server_public_key", ")", ",", "}", ",", "]" ]
27.8125
21.0625
def crypto_hash_sha512(message): """ Hashes and returns the message ``message``. :param message: bytes :rtype: bytes """ digest = ffi.new("unsigned char[]", crypto_hash_sha512_BYTES) rc = lib.crypto_hash_sha512(digest, message, len(message)) ensure(rc == 0, 'Unexpected library error', raising=exc.RuntimeError) return ffi.buffer(digest, crypto_hash_sha512_BYTES)[:]
[ "def", "crypto_hash_sha512", "(", "message", ")", ":", "digest", "=", "ffi", ".", "new", "(", "\"unsigned char[]\"", ",", "crypto_hash_sha512_BYTES", ")", "rc", "=", "lib", ".", "crypto_hash_sha512", "(", "digest", ",", "message", ",", "len", "(", "message", ")", ")", "ensure", "(", "rc", "==", "0", ",", "'Unexpected library error'", ",", "raising", "=", "exc", ".", "RuntimeError", ")", "return", "ffi", ".", "buffer", "(", "digest", ",", "crypto_hash_sha512_BYTES", ")", "[", ":", "]" ]
31.769231
14.230769
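libsodium's crypto_hash_sha512 is plain SHA-512, so its output should agree with the standard library. A hedged sanity check, assuming the binding defined in the record above is importable:

.. code-block:: python

    import hashlib

    message = b'attack at dawn'
    expected = hashlib.sha512(message).digest()
    # crypto_hash_sha512 as defined in the record above (e.g. from nacl.bindings).
    assert crypto_hash_sha512(message) == expected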
def use_active_composition_view(self): """Pass through to provider CompositionLookupSession.use_active_composition_view""" self._operable_views['composition'] = ACTIVE # self._get_provider_session('composition_lookup_session') # To make sure the session is tracked for session in self._get_provider_sessions(): try: session.use_active_composition_view() except AttributeError: pass
[ "def", "use_active_composition_view", "(", "self", ")", ":", "self", ".", "_operable_views", "[", "'composition'", "]", "=", "ACTIVE", "# self._get_provider_session('composition_lookup_session') # To make sure the session is tracked", "for", "session", "in", "self", ".", "_get_provider_sessions", "(", ")", ":", "try", ":", "session", ".", "use_active_composition_view", "(", ")", "except", "AttributeError", ":", "pass" ]
51.333333
17.222222
def disable(self): """Disable a NApp if it is enabled.""" core_napps_manager = CoreNAppsManager(base_path=self._enabled) core_napps_manager.disable(self.user, self.napp)
[ "def", "disable", "(", "self", ")", ":", "core_napps_manager", "=", "CoreNAppsManager", "(", "base_path", "=", "self", ".", "_enabled", ")", "core_napps_manager", ".", "disable", "(", "self", ".", "user", ",", "self", ".", "napp", ")" ]
47.5
17
def domain_name(self, domain_name): """ Sets the domain_name of this RegisterDomainRequest. A domain name as described in RFC-1034 that will be registered with ApplePay :param domain_name: The domain_name of this RegisterDomainRequest. :type: str """ if domain_name is None: raise ValueError("Invalid value for `domain_name`, must not be `None`") if len(domain_name) > 255: raise ValueError("Invalid value for `domain_name`, length must be less than `255`") if len(domain_name) < 1: raise ValueError("Invalid value for `domain_name`, length must be greater than or equal to `1`") self._domain_name = domain_name
[ "def", "domain_name", "(", "self", ",", "domain_name", ")", ":", "if", "domain_name", "is", "None", ":", "raise", "ValueError", "(", "\"Invalid value for `domain_name`, must not be `None`\"", ")", "if", "len", "(", "domain_name", ")", ">", "255", ":", "raise", "ValueError", "(", "\"Invalid value for `domain_name`, length must be less than `255`\"", ")", "if", "len", "(", "domain_name", ")", "<", "1", ":", "raise", "ValueError", "(", "\"Invalid value for `domain_name`, length must be greater than or equal to `1`\"", ")", "self", ".", "_domain_name", "=", "domain_name" ]
42
25.529412
def index_of(self, data):
        """
        Finds the position of a node in the list. The index of the first
        occurrence of the data is returned (indexes start at 0)

        :param data: data of the sought node
        :type: object
        :returns: the int index or -1 if the node is not in the list
        """
        current_node = self._first_node
        pos = 0
        while current_node:
            if current_node.data() == data:
                return pos
            else:
                current_node = current_node.next()
                pos += 1
        return -1
[ "def", "index_of", "(", "self", ",", "data", ")", ":", "current_node", "=", "self", ".", "_first_node", "pos", "=", "0", "while", "current_node", ":", "if", "current_node", ".", "data", "(", ")", "==", "data", ":", "return", "pos", "else", ":", "current_node", "=", "current_node", ".", "next", "(", ")", "pos", "+=", "1", "return", "-", "1" ]
30.157895
17.315789
def past_trades(self, timestamp=0, symbol='btcusd'): """ Fetch past trades :param timestamp: :param symbol: :return: """ payload = { "request": "/v1/mytrades", "nonce": self._nonce, "symbol": symbol, "timestamp": timestamp } signed_payload = self._sign_payload(payload) r = requests.post(self.URL + "/mytrades", headers=signed_payload, verify=True) json_resp = r.json() return json_resp
[ "def", "past_trades", "(", "self", ",", "timestamp", "=", "0", ",", "symbol", "=", "'btcusd'", ")", ":", "payload", "=", "{", "\"request\"", ":", "\"/v1/mytrades\"", ",", "\"nonce\"", ":", "self", ".", "_nonce", ",", "\"symbol\"", ":", "symbol", ",", "\"timestamp\"", ":", "timestamp", "}", "signed_payload", "=", "self", ".", "_sign_payload", "(", "payload", ")", "r", "=", "requests", ".", "post", "(", "self", ".", "URL", "+", "\"/mytrades\"", ",", "headers", "=", "signed_payload", ",", "verify", "=", "True", ")", "json_resp", "=", "r", ".", "json", "(", ")", "return", "json_resp" ]
27.052632
17.263158
def indent(self): """ Indents text at cursor position. """ cursor = self.editor.textCursor() assert isinstance(cursor, QtGui.QTextCursor) if cursor.hasSelection(): self.indent_selection(cursor) else: # simply insert indentation at the cursor position tab_len = self.editor.tab_length cursor.beginEditBlock() if self.editor.use_spaces_instead_of_tabs: nb_space_to_add = tab_len - cursor.positionInBlock() % tab_len cursor.insertText(nb_space_to_add * " ") else: cursor.insertText('\t') cursor.endEditBlock()
[ "def", "indent", "(", "self", ")", ":", "cursor", "=", "self", ".", "editor", ".", "textCursor", "(", ")", "assert", "isinstance", "(", "cursor", ",", "QtGui", ".", "QTextCursor", ")", "if", "cursor", ".", "hasSelection", "(", ")", ":", "self", ".", "indent_selection", "(", "cursor", ")", "else", ":", "# simply insert indentation at the cursor position", "tab_len", "=", "self", ".", "editor", ".", "tab_length", "cursor", ".", "beginEditBlock", "(", ")", "if", "self", ".", "editor", ".", "use_spaces_instead_of_tabs", ":", "nb_space_to_add", "=", "tab_len", "-", "cursor", ".", "positionInBlock", "(", ")", "%", "tab_len", "cursor", ".", "insertText", "(", "nb_space_to_add", "*", "\" \"", ")", "else", ":", "cursor", ".", "insertText", "(", "'\\t'", ")", "cursor", ".", "endEditBlock", "(", ")" ]
37.611111
11.166667
def _concatenate_spike_clusters(*pairs): """Concatenate a list of pairs (spike_ids, spike_clusters).""" pairs = [(_as_array(x), _as_array(y)) for (x, y) in pairs] concat = np.vstack(np.hstack((x[:, None], y[:, None])) for x, y in pairs) reorder = np.argsort(concat[:, 0]) concat = concat[reorder, :] return concat[:, 0].astype(np.int64), concat[:, 1].astype(np.int64)
[ "def", "_concatenate_spike_clusters", "(", "*", "pairs", ")", ":", "pairs", "=", "[", "(", "_as_array", "(", "x", ")", ",", "_as_array", "(", "y", ")", ")", "for", "(", "x", ",", "y", ")", "in", "pairs", "]", "concat", "=", "np", ".", "vstack", "(", "np", ".", "hstack", "(", "(", "x", "[", ":", ",", "None", "]", ",", "y", "[", ":", ",", "None", "]", ")", ")", "for", "x", ",", "y", "in", "pairs", ")", "reorder", "=", "np", ".", "argsort", "(", "concat", "[", ":", ",", "0", "]", ")", "concat", "=", "concat", "[", "reorder", ",", ":", "]", "return", "concat", "[", ":", ",", "0", "]", ".", "astype", "(", "np", ".", "int64", ")", ",", "concat", "[", ":", ",", "1", "]", ".", "astype", "(", "np", ".", "int64", ")" ]
50.875
10.375
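A worked example for _concatenate_spike_clusters above, with deliberately interleaved spike ids; this assumes the module's _as_array helper (not shown here) simply coerces its argument to an ndarray.

.. code-block:: python

    import numpy as np

    pairs = [(np.array([0, 2]), np.array([10, 20])),
             (np.array([1]), np.array([15]))]
    spikes, clusters = _concatenate_spike_clusters(*pairs)
    assert spikes.tolist() == [0, 1, 2]
    assert clusters.tolist() == [10, 15, 20]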
def app_template_global(self, name: Optional[str]=None) -> Callable: """Add an application wide template global. This is designed to be used as a decorator, and has the same arguments as :meth:`~quart.Quart.template_global`. An example usage, .. code-block:: python blueprint = Blueprint(__name__) @blueprint.app_template_global() def global(value): ... """ def decorator(func: Callable) -> Callable: self.add_app_template_global(func, name=name) return func return decorator
[ "def", "app_template_global", "(", "self", ",", "name", ":", "Optional", "[", "str", "]", "=", "None", ")", "->", "Callable", ":", "def", "decorator", "(", "func", ":", "Callable", ")", "->", "Callable", ":", "self", ".", "add_app_template_global", "(", "func", ",", "name", "=", "name", ")", "return", "func", "return", "decorator" ]
34.941176
18.823529
def access_array(self, id_, lineno, scope=None, default_type=None): """ Called whenever an accessed variable is expected to be an array. ZX BASIC requires arrays to be declared before usage, so they're checked. Also checks for class array. """ if not self.check_is_declared(id_, lineno, 'array', scope): return None if not self.check_class(id_, CLASS.array, lineno, scope): return None return self.access_id(id_, lineno, scope=scope, default_type=default_type)
[ "def", "access_array", "(", "self", ",", "id_", ",", "lineno", ",", "scope", "=", "None", ",", "default_type", "=", "None", ")", ":", "if", "not", "self", ".", "check_is_declared", "(", "id_", ",", "lineno", ",", "'array'", ",", "scope", ")", ":", "return", "None", "if", "not", "self", ".", "check_class", "(", "id_", ",", "CLASS", ".", "array", ",", "lineno", ",", "scope", ")", ":", "return", "None", "return", "self", ".", "access_id", "(", "id_", ",", "lineno", ",", "scope", "=", "scope", ",", "default_type", "=", "default_type", ")" ]
36.333333
24.466667
def mk_req(self, url, **kwargs): """ Helper function to create a tornado HTTPRequest object, kwargs get passed in to create the HTTPRequest object. See: http://tornado.readthedocs.org/en/latest/httpclient.html#request-objects """ req_url = self.base_url + url req_kwargs = kwargs req_kwargs['ca_certs'] = req_kwargs.get('ca_certs', self.certs) # have to do this because tornado's HTTP client doesn't # play nice with elasticsearch req_kwargs['allow_nonstandard_methods'] = req_kwargs.get( 'allow_nonstandard_methods', True ) return HTTPRequest(req_url, **req_kwargs)
[ "def", "mk_req", "(", "self", ",", "url", ",", "*", "*", "kwargs", ")", ":", "req_url", "=", "self", ".", "base_url", "+", "url", "req_kwargs", "=", "kwargs", "req_kwargs", "[", "'ca_certs'", "]", "=", "req_kwargs", ".", "get", "(", "'ca_certs'", ",", "self", ".", "certs", ")", "# have to do this because tornado's HTTP client doesn't", "# play nice with elasticsearch", "req_kwargs", "[", "'allow_nonstandard_methods'", "]", "=", "req_kwargs", ".", "get", "(", "'allow_nonstandard_methods'", ",", "True", ")", "return", "HTTPRequest", "(", "req_url", ",", "*", "*", "req_kwargs", ")" ]
42.4375
16.1875
def get_field_definition(node):
    """node is a class attribute that is a mongoengine field.

    Returns the definition statement for the attribute
    """
    name = node.attrname
    cls = get_node_parent_class(node)
    definition = cls.lookup(name)[1][0].statement()
    return definition
[ "def", "get_field_definition", "(", "node", ")", ":", "name", "=", "node", ".", "attrname", "cls", "=", "get_node_parent_class", "(", "node", ")", "definition", "=", "cls", ".", "lookup", "(", "name", ")", "[", "1", "]", "[", "0", "]", ".", "statement", "(", ")", "return", "definition" ]
31.333333
11.666667
def restore_instances(self, instances): """Restore a set of instances into the CLIPS data base. The Python equivalent of the CLIPS restore-instances command. Instances can be passed as a set of strings or as a file. """ instances = instances.encode() if os.path.exists(instances): ret = lib.EnvRestoreInstances(self._env, instances) if ret == -1: raise CLIPSError(self._env) else: ret = lib.EnvRestoreInstancesFromString(self._env, instances, -1) if ret == -1: raise CLIPSError(self._env) return ret
[ "def", "restore_instances", "(", "self", ",", "instances", ")", ":", "instances", "=", "instances", ".", "encode", "(", ")", "if", "os", ".", "path", ".", "exists", "(", "instances", ")", ":", "ret", "=", "lib", ".", "EnvRestoreInstances", "(", "self", ".", "_env", ",", "instances", ")", "if", "ret", "==", "-", "1", ":", "raise", "CLIPSError", "(", "self", ".", "_env", ")", "else", ":", "ret", "=", "lib", ".", "EnvRestoreInstancesFromString", "(", "self", ".", "_env", ",", "instances", ",", "-", "1", ")", "if", "ret", "==", "-", "1", ":", "raise", "CLIPSError", "(", "self", ".", "_env", ")", "return", "ret" ]
31.45
20.25
def paint( self, painter, option, widget ): """ Draws this item with the inputed painter. :param painter | <QPainter> rect | <QRect> """ if self._dirty: self.rebuild() scene = self.scene() if not scene: return grid = scene.gridRect() typ = self.chartType() # draw the line chart if typ == XChartScene.Type.Line: painter.setRenderHint(painter.Antialiasing) # draw the path area area = self._buildData.get('path_area') if area and self.isShaded(): clr = QColor(self.color()) clr.setAlpha(120) painter.setPen(Qt.NoPen) painter.setBrush(clr) painter.drawPath(area) # draw the line data pen = QPen(self.color()) pen.setWidth(2) painter.setPen(pen) painter.setBrush(Qt.NoBrush) painter.drawPath(self.path()) if ( self.showPointsInLine() ): palette = QApplication.palette() pen = QPen(palette.color(palette.Base)) pen.setWidth(2) painter.setBrush(self.color()) painter.setPen(pen) for point in self._ellipses: painter.drawEllipse(point, self.pointRadius(), self.pointRadius()) # draw a bar chart elif typ == XChartScene.Type.Bar: painter.setRenderHint(painter.Antialiasing) pen = QPen(self.color()) pen.setWidth(1) painter.setPen(pen) for key, value, sub_path in self._subpaths: gradient = QLinearGradient() clr = QColor(self.color()) if ( sub_path != self._hoveredPath ): clr.setAlpha(130) gradient.setColorAt(0.0, clr.lighter(140)) gradient.setColorAt(0.1, clr.lighter(120)) gradient.setColorAt(0.25, clr.lighter(110)) gradient.setColorAt(1.0, clr.lighter(105)) if ( self.orientation() == Qt.Horizontal ): gradient.setStart(0, sub_path.boundingRect().top()) gradient.setFinalStop(0, sub_path.boundingRect().bottom()) else: gradient.setStart(sub_path.boundingRect().left(), 0) gradient.setFinalStop(sub_path.boundingRect().right(), 0) painter.setBrush(gradient) painter.drawPath(sub_path) # draw a simple pie chart (calculated by scene) elif typ == XChartScene.Type.Pie: painter.setRenderHint(painter.Antialiasing) center = self.pieCenter() radius = self.radius() for key, value, sub_path in self._subpaths: clr = self.keyColor(key) gradient = QRadialGradient(QPointF(0, 0), radius) a = QColor(clr.lighter(140)) b = QColor(clr.lighter(110)) a.setAlpha(40) b.setAlpha(80) # look for mouse over if ( sub_path == self._hoveredPath ): a.setAlpha(100) b.setAlpha(200) gradient.setColorAt(0, a) gradient.setColorAt(1, b) pen = QPen(clr) pen.setWidth(1) painter.setBrush(gradient) painter.setPen(pen) painter.drawPath(sub_path)
[ "def", "paint", "(", "self", ",", "painter", ",", "option", ",", "widget", ")", ":", "if", "self", ".", "_dirty", ":", "self", ".", "rebuild", "(", ")", "scene", "=", "self", ".", "scene", "(", ")", "if", "not", "scene", ":", "return", "grid", "=", "scene", ".", "gridRect", "(", ")", "typ", "=", "self", ".", "chartType", "(", ")", "# draw the line chart\r", "if", "typ", "==", "XChartScene", ".", "Type", ".", "Line", ":", "painter", ".", "setRenderHint", "(", "painter", ".", "Antialiasing", ")", "# draw the path area\r", "area", "=", "self", ".", "_buildData", ".", "get", "(", "'path_area'", ")", "if", "area", "and", "self", ".", "isShaded", "(", ")", ":", "clr", "=", "QColor", "(", "self", ".", "color", "(", ")", ")", "clr", ".", "setAlpha", "(", "120", ")", "painter", ".", "setPen", "(", "Qt", ".", "NoPen", ")", "painter", ".", "setBrush", "(", "clr", ")", "painter", ".", "drawPath", "(", "area", ")", "# draw the line data\r", "pen", "=", "QPen", "(", "self", ".", "color", "(", ")", ")", "pen", ".", "setWidth", "(", "2", ")", "painter", ".", "setPen", "(", "pen", ")", "painter", ".", "setBrush", "(", "Qt", ".", "NoBrush", ")", "painter", ".", "drawPath", "(", "self", ".", "path", "(", ")", ")", "if", "(", "self", ".", "showPointsInLine", "(", ")", ")", ":", "palette", "=", "QApplication", ".", "palette", "(", ")", "pen", "=", "QPen", "(", "palette", ".", "color", "(", "palette", ".", "Base", ")", ")", "pen", ".", "setWidth", "(", "2", ")", "painter", ".", "setBrush", "(", "self", ".", "color", "(", ")", ")", "painter", ".", "setPen", "(", "pen", ")", "for", "point", "in", "self", ".", "_ellipses", ":", "painter", ".", "drawEllipse", "(", "point", ",", "self", ".", "pointRadius", "(", ")", ",", "self", ".", "pointRadius", "(", ")", ")", "# draw a bar chart\r", "elif", "typ", "==", "XChartScene", ".", "Type", ".", "Bar", ":", "painter", ".", "setRenderHint", "(", "painter", ".", "Antialiasing", ")", "pen", "=", "QPen", "(", "self", ".", "color", "(", ")", ")", "pen", ".", "setWidth", "(", "1", ")", "painter", ".", "setPen", "(", "pen", ")", "for", "key", ",", "value", ",", "sub_path", "in", "self", ".", "_subpaths", ":", "gradient", "=", "QLinearGradient", "(", ")", "clr", "=", "QColor", "(", "self", ".", "color", "(", ")", ")", "if", "(", "sub_path", "!=", "self", ".", "_hoveredPath", ")", ":", "clr", ".", "setAlpha", "(", "130", ")", "gradient", ".", "setColorAt", "(", "0.0", ",", "clr", ".", "lighter", "(", "140", ")", ")", "gradient", ".", "setColorAt", "(", "0.1", ",", "clr", ".", "lighter", "(", "120", ")", ")", "gradient", ".", "setColorAt", "(", "0.25", ",", "clr", ".", "lighter", "(", "110", ")", ")", "gradient", ".", "setColorAt", "(", "1.0", ",", "clr", ".", "lighter", "(", "105", ")", ")", "if", "(", "self", ".", "orientation", "(", ")", "==", "Qt", ".", "Horizontal", ")", ":", "gradient", ".", "setStart", "(", "0", ",", "sub_path", ".", "boundingRect", "(", ")", ".", "top", "(", ")", ")", "gradient", ".", "setFinalStop", "(", "0", ",", "sub_path", ".", "boundingRect", "(", ")", ".", "bottom", "(", ")", ")", "else", ":", "gradient", ".", "setStart", "(", "sub_path", ".", "boundingRect", "(", ")", ".", "left", "(", ")", ",", "0", ")", "gradient", ".", "setFinalStop", "(", "sub_path", ".", "boundingRect", "(", ")", ".", "right", "(", ")", ",", "0", ")", "painter", ".", "setBrush", "(", "gradient", ")", "painter", ".", "drawPath", "(", "sub_path", ")", "# draw a simple pie chart (calculated by scene)\r", "elif", "typ", "==", "XChartScene", ".", "Type", ".", "Pie", ":", "painter", 
".", "setRenderHint", "(", "painter", ".", "Antialiasing", ")", "center", "=", "self", ".", "pieCenter", "(", ")", "radius", "=", "self", ".", "radius", "(", ")", "for", "key", ",", "value", ",", "sub_path", "in", "self", ".", "_subpaths", ":", "clr", "=", "self", ".", "keyColor", "(", "key", ")", "gradient", "=", "QRadialGradient", "(", "QPointF", "(", "0", ",", "0", ")", ",", "radius", ")", "a", "=", "QColor", "(", "clr", ".", "lighter", "(", "140", ")", ")", "b", "=", "QColor", "(", "clr", ".", "lighter", "(", "110", ")", ")", "a", ".", "setAlpha", "(", "40", ")", "b", ".", "setAlpha", "(", "80", ")", "# look for mouse over\r", "if", "(", "sub_path", "==", "self", ".", "_hoveredPath", ")", ":", "a", ".", "setAlpha", "(", "100", ")", "b", ".", "setAlpha", "(", "200", ")", "gradient", ".", "setColorAt", "(", "0", ",", "a", ")", "gradient", ".", "setColorAt", "(", "1", ",", "b", ")", "pen", "=", "QPen", "(", "clr", ")", "pen", ".", "setWidth", "(", "1", ")", "painter", ".", "setBrush", "(", "gradient", ")", "painter", ".", "setPen", "(", "pen", ")", "painter", ".", "drawPath", "(", "sub_path", ")" ]
35.067797
13.915254
def from_name(cls, name):
        """Retrieve the datacenter id associated with a name."""
        result = cls.list()
        dc_names = {}
        for dc in result:
            dc_names[dc['name']] = dc['id']
        return dc_names.get(name)
[ "def", "from_name", "(", "cls", ",", "name", ")", ":", "result", "=", "cls", ".", "list", "(", ")", "dc_names", "=", "{", "}", "for", "dc", "in", "result", ":", "dc_names", "[", "dc", "[", "'name'", "]", "]", "=", "dc", "[", "'id'", "]", "return", "dc_names", ".", "get", "(", "name", ")" ]
29
14
def BuildServiceStub(self, cls): """Constructs the stub class. Args: cls: The class that will be constructed. """ def _ServiceStubInit(stub, rpc_channel): stub.rpc_channel = rpc_channel self.cls = cls cls.__init__ = _ServiceStubInit for method in self.descriptor.methods: setattr(cls, method.name, self._GenerateStubMethod(method))
[ "def", "BuildServiceStub", "(", "self", ",", "cls", ")", ":", "def", "_ServiceStubInit", "(", "stub", ",", "rpc_channel", ")", ":", "stub", ".", "rpc_channel", "=", "rpc_channel", "self", ".", "cls", "=", "cls", "cls", ".", "__init__", "=", "_ServiceStubInit", "for", "method", "in", "self", ".", "descriptor", ".", "methods", ":", "setattr", "(", "cls", ",", "method", ".", "name", ",", "self", ".", "_GenerateStubMethod", "(", "method", ")", ")" ]
28.230769
14.384615
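The pattern in BuildServiceStub above — swapping in a new __init__ and attaching one generated method per descriptor entry with setattr — can be illustrated without protobuf. All names below are hypothetical stand-ins.

.. code-block:: python

    class Stub:
        pass

    def _stub_init(self, rpc_channel):
        self.rpc_channel = rpc_channel

    def _make_method(name):
        def method(self, request):
            # Stand-in for issuing an RPC over self.rpc_channel.
            return (self.rpc_channel, name, request)
        return method

    Stub.__init__ = _stub_init
    for name in ('GetUser', 'ListUsers'):  # plays the role of descriptor.methods
        setattr(Stub, name, _make_method(name))

    stub = Stub('fake-channel')
    print(stub.GetUser({'id': 1}))  # ('fake-channel', 'GetUser', {'id': 1})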
def get_optional_attribute(self, element, attribute): """Attempt to retrieve an optional attribute from the xml and return None on failure.""" try: return self.get_attribute(element, attribute) except self.XmlError: return None
[ "def", "get_optional_attribute", "(", "self", ",", "element", ",", "attribute", ")", ":", "try", ":", "return", "self", ".", "get_attribute", "(", "element", ",", "attribute", ")", "except", "self", ".", "XmlError", ":", "return", "None" ]
41
15.666667
def get(self, namespacePrefix): """Get a specific configuration namespace""" ns = db.ConfigNamespace.find_one(ConfigNamespace.namespace_prefix == namespacePrefix) if not ns: return self.make_response('No such namespace: {}'.format(namespacePrefix), HTTP.NOT_FOUND) return self.make_response({ 'message': None, 'namespace': ns })
[ "def", "get", "(", "self", ",", "namespacePrefix", ")", ":", "ns", "=", "db", ".", "ConfigNamespace", ".", "find_one", "(", "ConfigNamespace", ".", "namespace_prefix", "==", "namespacePrefix", ")", "if", "not", "ns", ":", "return", "self", ".", "make_response", "(", "'No such namespace: {}'", ".", "format", "(", "namespacePrefix", ")", ",", "HTTP", ".", "NOT_FOUND", ")", "return", "self", ".", "make_response", "(", "{", "'message'", ":", "None", ",", "'namespace'", ":", "ns", "}", ")" ]
39.6
24.6
def network_xml(identifier, xml, address=None): """Fills the XML file with the required fields. * name * uuid * bridge * ip ** dhcp """ netname = identifier[:8] network = etree.fromstring(xml) subelement(network, './/name', 'name', identifier) subelement(network, './/uuid', 'uuid', identifier) subelement(network, './/bridge', 'bridge', None, name='virbr-%s' % netname) if address is not None: set_address(network, address) return etree.tostring(network).decode('utf-8')
[ "def", "network_xml", "(", "identifier", ",", "xml", ",", "address", "=", "None", ")", ":", "netname", "=", "identifier", "[", ":", "8", "]", "network", "=", "etree", ".", "fromstring", "(", "xml", ")", "subelement", "(", "network", ",", "'.//name'", ",", "'name'", ",", "identifier", ")", "subelement", "(", "network", ",", "'.//uuid'", ",", "'uuid'", ",", "identifier", ")", "subelement", "(", "network", ",", "'.//bridge'", ",", "'bridge'", ",", "None", ",", "name", "=", "'virbr-%s'", "%", "netname", ")", "if", "address", "is", "not", "None", ":", "set_address", "(", "network", ",", "address", ")", "return", "etree", ".", "tostring", "(", "network", ")", ".", "decode", "(", "'utf-8'", ")" ]
25
21.952381
def buckets(bucket=None, account=None, matched=False, kdenied=False, errors=False, dbpath=None, size=None, denied=False, format=None, incomplete=False, oversize=False, region=(), not_region=(), inventory=None, output=None, config=None, sort=None, tagprefix=None, not_bucket=None): """Report on stats by bucket""" d = db.db(dbpath) if tagprefix and not config: raise ValueError( "account tag value inclusion requires account config file") if config and tagprefix: with open(config) as fh: data = json.load(fh).get('accounts') account_data = {} for a in data: for t in a['tags']: if t.startswith(tagprefix): account_data[a['name']] = t[len(tagprefix):] buckets = [] for b in sorted(d.buckets(account), key=operator.attrgetter('bucket_id')): if bucket and b.name not in bucket: continue if not_bucket and b.name in not_bucket: continue if matched and not b.matched: continue if kdenied and not b.keys_denied: continue if errors and not b.error_count: continue if size and b.size < size: continue if inventory and not b.using_inventory: continue if denied and not b.denied: continue if oversize and b.scanned <= b.size: continue if incomplete and b.percent_scanned >= incomplete: continue if region and b.region not in region: continue if not_region and b.region in not_region: continue if tagprefix: setattr(b, tagprefix[:-1], account_data[b.account]) buckets.append(b) if sort: key = operator.attrgetter(sort) buckets = list(reversed(sorted(buckets, key=key))) formatter = format == 'csv' and format_csv or format_plain keys = tagprefix and (tagprefix[:-1],) or () formatter(buckets, output, keys=keys)
[ "def", "buckets", "(", "bucket", "=", "None", ",", "account", "=", "None", ",", "matched", "=", "False", ",", "kdenied", "=", "False", ",", "errors", "=", "False", ",", "dbpath", "=", "None", ",", "size", "=", "None", ",", "denied", "=", "False", ",", "format", "=", "None", ",", "incomplete", "=", "False", ",", "oversize", "=", "False", ",", "region", "=", "(", ")", ",", "not_region", "=", "(", ")", ",", "inventory", "=", "None", ",", "output", "=", "None", ",", "config", "=", "None", ",", "sort", "=", "None", ",", "tagprefix", "=", "None", ",", "not_bucket", "=", "None", ")", ":", "d", "=", "db", ".", "db", "(", "dbpath", ")", "if", "tagprefix", "and", "not", "config", ":", "raise", "ValueError", "(", "\"account tag value inclusion requires account config file\"", ")", "if", "config", "and", "tagprefix", ":", "with", "open", "(", "config", ")", "as", "fh", ":", "data", "=", "json", ".", "load", "(", "fh", ")", ".", "get", "(", "'accounts'", ")", "account_data", "=", "{", "}", "for", "a", "in", "data", ":", "for", "t", "in", "a", "[", "'tags'", "]", ":", "if", "t", ".", "startswith", "(", "tagprefix", ")", ":", "account_data", "[", "a", "[", "'name'", "]", "]", "=", "t", "[", "len", "(", "tagprefix", ")", ":", "]", "buckets", "=", "[", "]", "for", "b", "in", "sorted", "(", "d", ".", "buckets", "(", "account", ")", ",", "key", "=", "operator", ".", "attrgetter", "(", "'bucket_id'", ")", ")", ":", "if", "bucket", "and", "b", ".", "name", "not", "in", "bucket", ":", "continue", "if", "not_bucket", "and", "b", ".", "name", "in", "not_bucket", ":", "continue", "if", "matched", "and", "not", "b", ".", "matched", ":", "continue", "if", "kdenied", "and", "not", "b", ".", "keys_denied", ":", "continue", "if", "errors", "and", "not", "b", ".", "error_count", ":", "continue", "if", "size", "and", "b", ".", "size", "<", "size", ":", "continue", "if", "inventory", "and", "not", "b", ".", "using_inventory", ":", "continue", "if", "denied", "and", "not", "b", ".", "denied", ":", "continue", "if", "oversize", "and", "b", ".", "scanned", "<=", "b", ".", "size", ":", "continue", "if", "incomplete", "and", "b", ".", "percent_scanned", ">=", "incomplete", ":", "continue", "if", "region", "and", "b", ".", "region", "not", "in", "region", ":", "continue", "if", "not_region", "and", "b", ".", "region", "in", "not_region", ":", "continue", "if", "tagprefix", ":", "setattr", "(", "b", ",", "tagprefix", "[", ":", "-", "1", "]", ",", "account_data", "[", "b", ".", "account", "]", ")", "buckets", ".", "append", "(", "b", ")", "if", "sort", ":", "key", "=", "operator", ".", "attrgetter", "(", "sort", ")", "buckets", "=", "list", "(", "reversed", "(", "sorted", "(", "buckets", ",", "key", "=", "key", ")", ")", ")", "formatter", "=", "format", "==", "'csv'", "and", "format_csv", "or", "format_plain", "keys", "=", "tagprefix", "and", "(", "tagprefix", "[", ":", "-", "1", "]", ",", ")", "or", "(", ")", "formatter", "(", "buckets", ",", "output", ",", "keys", "=", "keys", ")" ]
34.966102
16.542373
def backtrace_on_usr1 (): """Install a signal handler such that this program prints a Python traceback upon receipt of SIGUSR1. This could be useful for checking that long-running programs are behaving properly, or for discovering where an infinite loop is occurring. Note, however, that the Python interpreter does not invoke Python signal handlers exactly when the process is signaled. For instance, a signal delivered in the midst of a time.sleep() call will only be seen by Python code after that call completes. This means that this feature may not be as helpful as one might like for debugging certain kinds of problems. """ import signal try: signal.signal (signal.SIGUSR1, _print_backtrace_signal_handler) except Exception as e: warn ('failed to set up Python backtraces on SIGUSR1: %s', e)
[ "def", "backtrace_on_usr1", "(", ")", ":", "import", "signal", "try", ":", "signal", ".", "signal", "(", "signal", ".", "SIGUSR1", ",", "_print_backtrace_signal_handler", ")", "except", "Exception", "as", "e", ":", "warn", "(", "'failed to set up Python backtraces on SIGUSR1: %s'", ",", "e", ")" ]
47.277778
26.111111
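The record above registers _print_backtrace_signal_handler without showing it; a minimal sketch of such a handler (a plausible implementation, not the library's actual one):

.. code-block:: python

    import traceback

    def _print_backtrace_signal_handler(signum, frame):
        # Dump the Python stack of the frame that was interrupted by SIGUSR1.
        traceback.print_stack(frame)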
def work(self, actions_queue, returns_queue, control_queue=None):  # pragma: no cover
        """Wrapper function for do_work in order to catch any exception.
        To see the real work, look at do_work.

        :param actions_queue: Global Queue Master->Slave
        :type actions_queue: Queue.Queue
        :param returns_queue: queue managed by manager
        :type returns_queue: Queue.Queue
        :return: None
        """
        try:
            logger.info("[%s] (pid=%d) starting my job...", self._id, os.getpid())
            self.do_work(actions_queue, returns_queue, control_queue)
            logger.info("[%s] (pid=%d) stopped", self._id, os.getpid())
        except ActionError as exp:
            logger.error("[%s] exited with an ActionError exception : %s", self._id, str(exp))
            logger.exception(exp)
            raise
        # Catch any exception, log the exception and exit anyway
        except Exception as exp:  # pragma: no cover, this should never happen indeed ;)
            logger.error("[%s] exited with an unmanaged exception : %s", self._id, str(exp))
            logger.exception(exp)
            raise
[ "def", "work", "(", "self", ",", "actions_queue", ",", "returns_queue", ",", "control_queue", "=", "None", ")", ":", "# pragma: no cover", "try", ":", "logger", ".", "info", "(", "\"[%s] (pid=%d) starting my job...\"", ",", "self", ".", "_id", ",", "os", ".", "getpid", "(", ")", ")", "self", ".", "do_work", "(", "actions_queue", ",", "returns_queue", ",", "control_queue", ")", "logger", ".", "info", "(", "\"[%s] (pid=%d) stopped\"", ",", "self", ".", "_id", ",", "os", ".", "getpid", "(", ")", ")", "except", "ActionError", "as", "exp", ":", "logger", ".", "error", "(", "\"[%s] exited with an ActionError exception : %s\"", ",", "self", ".", "_id", ",", "str", "(", "exp", ")", ")", "logger", ".", "exception", "(", "exp", ")", "raise", "# Catch any exception, log the exception and exit anyway", "except", "Exception", "as", "exp", ":", "# pragma: no cover, this should never happen indeed ;)", "logger", ".", "error", "(", "\"[%s] exited with an unmanaged exception : %s\"", ",", "self", ".", "_id", ",", "str", "(", "exp", ")", ")", "logger", ".", "exception", "(", "exp", ")", "raise" ]
49.086957
22.304348
def data(self, where, start, end, archiver="", timeout=DEFAULT_TIMEOUT):
        """
        With the given WHERE clause, retrieves all RAW data between the two given timestamps

        Arguments:
        [where]: the where clause (e.g. 'path like "keti"', 'SourceName = "TED Main"')
        [start, end]: the start and end time references
        [archiver]: if specified, this is the archiver to use. Else, it will run on the first archiver passed
                    into the constructor for the client
        [timeout]: time in seconds to wait for a response from the archiver
        """
        return self.query("select data in ({0}, {1}) where {2}".format(start, end, where), archiver, timeout).get('timeseries',{})
[ "def", "data", "(", "self", ",", "where", ",", "start", ",", "end", ",", "archiver", "=", "\"\"", ",", "timeout", "=", "DEFAULT_TIMEOUT", ")", ":", "return", "self", ".", "query", "(", "\"select data in ({0}, {1}) where {2}\"", ".", "format", "(", "start", ",", "end", ",", "where", ")", ",", "archiver", ",", "timeout", ")", ".", "get", "(", "'timeseries'", ",", "{", "}", ")" ]
57.916667
33.416667
def local(tool, slug, config_loader, offline=False): """ Create/update local copy of github.com/org/repo/branch. Returns path to local copy """ # Parse slug slug = Slug(slug, offline=offline) local_path = Path(LOCAL_PATH).expanduser() / slug.org / slug.repo git = Git(f"-C {shlex.quote(str(local_path))}") if not local_path.exists(): _run(Git()(f"init {shlex.quote(str(local_path))}")) _run(git(f"remote add origin https://github.com/{slug.org}/{slug.repo}")) if not offline: # Get latest version of checks _run(git(f"fetch origin {slug.branch}")) # Ensure that local copy of the repo is identical to remote copy _run(git(f"checkout -f -B {slug.branch} origin/{slug.branch}")) _run(git(f"reset --hard HEAD")) problem_path = (local_path / slug.problem).absolute() if not problem_path.exists(): raise InvalidSlugError(_("{} does not exist at {}/{}").format(slug.problem, slug.org, slug.repo)) # Get config try: with open(problem_path / ".cs50.yaml") as f: try: config = config_loader.load(f.read()) except InvalidConfigError: raise InvalidSlugError( _("Invalid slug for {}. Did you mean something else?").format(tool)) except FileNotFoundError: raise InvalidSlugError(_("Invalid slug. Did you mean something else?")) return problem_path
[ "def", "local", "(", "tool", ",", "slug", ",", "config_loader", ",", "offline", "=", "False", ")", ":", "# Parse slug", "slug", "=", "Slug", "(", "slug", ",", "offline", "=", "offline", ")", "local_path", "=", "Path", "(", "LOCAL_PATH", ")", ".", "expanduser", "(", ")", "/", "slug", ".", "org", "/", "slug", ".", "repo", "git", "=", "Git", "(", "f\"-C {shlex.quote(str(local_path))}\"", ")", "if", "not", "local_path", ".", "exists", "(", ")", ":", "_run", "(", "Git", "(", ")", "(", "f\"init {shlex.quote(str(local_path))}\"", ")", ")", "_run", "(", "git", "(", "f\"remote add origin https://github.com/{slug.org}/{slug.repo}\"", ")", ")", "if", "not", "offline", ":", "# Get latest version of checks", "_run", "(", "git", "(", "f\"fetch origin {slug.branch}\"", ")", ")", "# Ensure that local copy of the repo is identical to remote copy", "_run", "(", "git", "(", "f\"checkout -f -B {slug.branch} origin/{slug.branch}\"", ")", ")", "_run", "(", "git", "(", "f\"reset --hard HEAD\"", ")", ")", "problem_path", "=", "(", "local_path", "/", "slug", ".", "problem", ")", ".", "absolute", "(", ")", "if", "not", "problem_path", ".", "exists", "(", ")", ":", "raise", "InvalidSlugError", "(", "_", "(", "\"{} does not exist at {}/{}\"", ")", ".", "format", "(", "slug", ".", "problem", ",", "slug", ".", "org", ",", "slug", ".", "repo", ")", ")", "# Get config", "try", ":", "with", "open", "(", "problem_path", "/", "\".cs50.yaml\"", ")", "as", "f", ":", "try", ":", "config", "=", "config_loader", ".", "load", "(", "f", ".", "read", "(", ")", ")", "except", "InvalidConfigError", ":", "raise", "InvalidSlugError", "(", "_", "(", "\"Invalid slug for {}. Did you mean something else?\"", ")", ".", "format", "(", "tool", ")", ")", "except", "FileNotFoundError", ":", "raise", "InvalidSlugError", "(", "_", "(", "\"Invalid slug. Did you mean something else?\"", ")", ")", "return", "problem_path" ]
35.275
22.475
def save(self, *args): """ Save cache to file using pickle. Parameters ---------- *args: All but the last argument are inputs to the cached function. The last is the actual value of the function. """ with open(self.file_root + '.pkl', "wb") as f: pickle.dump(args, f, protocol=pickle.HIGHEST_PROTOCOL)
[ "def", "save", "(", "self", ",", "*", "args", ")", ":", "with", "open", "(", "self", ".", "file_root", "+", "'.pkl'", ",", "\"wb\"", ")", "as", "f", ":", "pickle", ".", "dump", "(", "args", ",", "f", ",", "protocol", "=", "pickle", ".", "HIGHEST_PROTOCOL", ")" ]
34.181818
19.727273
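save() above pickles the (inputs..., value) tuple to '<file_root>.pkl'; a matching loader is sketched below under the same file-naming assumption (the real class may define this differently).

.. code-block:: python

    import pickle

    def load(self):
        """Return the cached args tuple, or None if no cache file exists."""
        try:
            with open(self.file_root + '.pkl', 'rb') as f:
                return pickle.load(f)
        except FileNotFoundError:
            return None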
def _detach(self, item): """Hide items from treeview that do not match the search string.""" to_detach = [] children_det = [] children_match = False match_found = False value = self.filtervar.get() txt = self.treeview.item(item, 'text').lower() if value in txt: match_found = True else: class_txt = self.treedata[item].get_class().lower() if value in class_txt: match_found = True parent = self.treeview.parent(item) idx = self.treeview.index(item) children = self.treeview.get_children(item) if children: for child in children: match, detach = self._detach(child) children_match = children_match | match if detach: children_det.extend(detach) if match_found: if children_det: to_detach.extend(children_det) else: if children_match: if children_det: to_detach.extend(children_det) else: to_detach.append((item, parent, idx)) match_found = match_found | children_match return match_found, to_detach
[ "def", "_detach", "(", "self", ",", "item", ")", ":", "to_detach", "=", "[", "]", "children_det", "=", "[", "]", "children_match", "=", "False", "match_found", "=", "False", "value", "=", "self", ".", "filtervar", ".", "get", "(", ")", "txt", "=", "self", ".", "treeview", ".", "item", "(", "item", ",", "'text'", ")", ".", "lower", "(", ")", "if", "value", "in", "txt", ":", "match_found", "=", "True", "else", ":", "class_txt", "=", "self", ".", "treedata", "[", "item", "]", ".", "get_class", "(", ")", ".", "lower", "(", ")", "if", "value", "in", "class_txt", ":", "match_found", "=", "True", "parent", "=", "self", ".", "treeview", ".", "parent", "(", "item", ")", "idx", "=", "self", ".", "treeview", ".", "index", "(", "item", ")", "children", "=", "self", ".", "treeview", ".", "get_children", "(", "item", ")", "if", "children", ":", "for", "child", "in", "children", ":", "match", ",", "detach", "=", "self", ".", "_detach", "(", "child", ")", "children_match", "=", "children_match", "|", "match", "if", "detach", ":", "children_det", ".", "extend", "(", "detach", ")", "if", "match_found", ":", "if", "children_det", ":", "to_detach", ".", "extend", "(", "children_det", ")", "else", ":", "if", "children_match", ":", "if", "children_det", ":", "to_detach", ".", "extend", "(", "children_det", ")", "else", ":", "to_detach", ".", "append", "(", "(", "item", ",", "parent", ",", "idx", ")", ")", "match_found", "=", "match_found", "|", "children_match", "return", "match_found", ",", "to_detach" ]
33.405405
14.189189
def ssn(self, min_age=18, max_age=90):
        """
        Returns a 10 digit Swedish SSN, "Personnummer".

        It consists of 10 digits in the form YYMMDD-SSSQ, where
        YYMMDD is the date of birth, SSS is a serial number
        and Q is a control character (Luhn checksum).

        http://en.wikipedia.org/wiki/Personal_identity_number_(Sweden)
        """

        def _luhn_checksum(number):
            def digits_of(n):
                return [int(d) for d in str(n)]
            digits = digits_of(number)
            odd_digits = digits[-1::-2]
            even_digits = digits[-2::-2]
            checksum = 0
            checksum += sum(odd_digits)
            for d in even_digits:
                checksum += sum(digits_of(d * 2))
            return checksum % 10

        def _calculate_luhn(partial_number):
            check_digit = _luhn_checksum(int(partial_number) * 10)
            return check_digit if check_digit == 0 else 10 - check_digit

        age = datetime.timedelta(
            days=self.generator.random.randrange(min_age * 365, max_age * 365))
        birthday = datetime.datetime.now() - age
        pnr_date = birthday.strftime('%y%m%d')
        suffix = str(self.generator.random.randrange(0, 999)).zfill(3)
        luhn_checksum = str(_calculate_luhn(pnr_date + suffix))
        pnr = '{0}-{1}{2}'.format(pnr_date, suffix, luhn_checksum)

        return pnr
[ "def", "ssn", "(", "self", ",", "min_age", "=", "18", ",", "max_age", "=", "90", ")", ":", "def", "_luhn_checksum", "(", "number", ")", ":", "def", "digits_of", "(", "n", ")", ":", "return", "[", "int", "(", "d", ")", "for", "d", "in", "str", "(", "n", ")", "]", "digits", "=", "digits_of", "(", "number", ")", "odd_digits", "=", "digits", "[", "-", "1", ":", ":", "-", "2", "]", "even_digits", "=", "digits", "[", "-", "2", ":", ":", "-", "2", "]", "checksum", "=", "0", "checksum", "+=", "sum", "(", "odd_digits", ")", "for", "d", "in", "even_digits", ":", "checksum", "+=", "sum", "(", "digits_of", "(", "d", "*", "2", ")", ")", "return", "checksum", "%", "10", "def", "_calculate_luhn", "(", "partial_number", ")", ":", "check_digit", "=", "_luhn_checksum", "(", "int", "(", "partial_number", ")", "*", "10", ")", "return", "check_digit", "if", "check_digit", "==", "0", "else", "10", "-", "check_digit", "age", "=", "datetime", ".", "timedelta", "(", "days", "=", "self", ".", "generator", ".", "random", ".", "randrange", "(", "min_age", "*", "365", ",", "max_age", "*", "365", ")", ")", "birthday", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "-", "age", "pnr_date", "=", "birthday", ".", "strftime", "(", "'%y%m%d'", ")", "suffix", "=", "str", "(", "self", ".", "generator", ".", "random", ".", "randrange", "(", "0", ",", "999", ")", ")", ".", "zfill", "(", "3", ")", "luhn_checksum", "=", "str", "(", "_calculate_luhn", "(", "pnr_date", "+", "suffix", ")", ")", "pnr", "=", "'{0}-{1}{2}'", ".", "format", "(", "pnr_date", ",", "suffix", ",", "luhn_checksum", ")", "return", "pnr" ]
39.142857
16.914286
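Because the Luhn helpers above are nested inside ssn(), a standalone re-check is sketched here: a valid personnummer makes the Luhn checksum over all ten digits come out to zero. The sample value is hand-checked, not produced by the generator.

.. code-block:: python

    def luhn_checksum(number):
        digits = [int(d) for d in str(number)]
        odd, even = digits[-1::-2], digits[-2::-2]
        # sum(divmod(d * 2, 10)) is the digit sum of d * 2 for d <= 9.
        return (sum(odd) + sum(sum(divmod(d * 2, 10)) for d in even)) % 10

    pnr = '870101-1234'  # YYMMDD-SSSQ
    assert luhn_checksum(pnr.replace('-', '')) == 0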
def get_field_type(f): """Obtain the type name of a GRPC Message field.""" types = (t[5:] for t in dir(f) if t[:4] == 'TYPE' and getattr(f, t) == f.type) return next(types)
[ "def", "get_field_type", "(", "f", ")", ":", "types", "=", "(", "t", "[", "5", ":", "]", "for", "t", "in", "dir", "(", "f", ")", "if", "t", "[", ":", "4", "]", "==", "'TYPE'", "and", "getattr", "(", "f", ",", "t", ")", "==", "f", ".", "type", ")", "return", "next", "(", "types", ")" ]
38.6
11.2
def module_ids(self, rev=False): """Gets a list of module ids guaranteed to be sorted by run_order, ignoring conn modules (run order < 0). """ shutit_global.shutit_global_object.yield_to_draw() ids = sorted(list(self.shutit_map.keys()),key=lambda module_id: self.shutit_map[module_id].run_order) if rev: return list(reversed(ids)) return ids
[ "def", "module_ids", "(", "self", ",", "rev", "=", "False", ")", ":", "shutit_global", ".", "shutit_global_object", ".", "yield_to_draw", "(", ")", "ids", "=", "sorted", "(", "list", "(", "self", ".", "shutit_map", ".", "keys", "(", ")", ")", ",", "key", "=", "lambda", "module_id", ":", "self", ".", "shutit_map", "[", "module_id", "]", ".", "run_order", ")", "if", "rev", ":", "return", "list", "(", "reversed", "(", "ids", ")", ")", "return", "ids" ]
38.888889
19.444444
def generate(self, text):
        """Generate and save avatars, return a list of file names:
        [filename_s, filename_m, filename_l].

        :param text: The text used to generate image.
        """
        sizes = current_app.config['AVATARS_SIZE_TUPLE']
        path = current_app.config['AVATARS_SAVE_PATH']
        suffix = {sizes[0]: 's', sizes[1]: 'm', sizes[2]: 'l'}
        for size in sizes:
            image_byte_array = self.get_image(
                string=str(text),
                width=int(size),
                height=int(size),
                pad=int(size * 0.1))
            self.save(image_byte_array, save_location=os.path.join(path, '%s_%s.png' % (text, suffix[size])))
        return [text + '_s.png', text + '_m.png', text + '_l.png']
[ "def", "generate", "(", "self", ",", "text", ")", ":", "sizes", "=", "current_app", ".", "config", "[", "'AVATARS_SIZE_TUPLE'", "]", "path", "=", "current_app", ".", "config", "[", "'AVATARS_SAVE_PATH'", "]", "suffix", "=", "{", "sizes", "[", "0", "]", ":", "'s'", ",", "sizes", "[", "1", "]", ":", "'m'", ",", "sizes", "[", "2", "]", ":", "'l'", "}", "for", "size", "in", "sizes", ":", "image_byte_array", "=", "self", ".", "get_image", "(", "string", "=", "str", "(", "text", ")", ",", "width", "=", "int", "(", "size", ")", ",", "height", "=", "int", "(", "size", ")", ",", "pad", "=", "int", "(", "size", "*", "0.1", ")", ")", "self", ".", "save", "(", "image_byte_array", ",", "save_location", "=", "os", ".", "path", ".", "join", "(", "path", ",", "'%s_%s.png'", "%", "(", "text", ",", "suffix", "[", "size", "]", ")", ")", ")", "return", "[", "text", "+", "'_s.png'", ",", "text", "+", "'_m.png'", ",", "text", "+", "'_l.png'", "]" ]
43.823529
17.705882
def paga_adjacency( adata, adjacency='connectivities', adjacency_tree='connectivities_tree', as_heatmap=True, color_map=None, show=None, save=None): """Connectivity of paga groups. """ connectivity = adata.uns[adjacency].toarray() connectivity_select = adata.uns[adjacency_tree] if as_heatmap: matrix(connectivity, color_map=color_map, show=False) for i in range(connectivity_select.shape[0]): neighbors = connectivity_select[i].nonzero()[1] pl.scatter([i for j in neighbors], neighbors, color='black', s=1) # as a stripplot else: pl.figure() for i, cs in enumerate(connectivity): x = [i for j, d in enumerate(cs) if i != j] y = [c for j, c in enumerate(cs) if i != j] pl.scatter(x, y, color='gray', s=1) neighbors = connectivity_select[i].nonzero()[1] pl.scatter([i for j in neighbors], cs[neighbors], color='black', s=1) utils.savefig_or_show('paga_connectivity', show=show, save=save)
[ "def", "paga_adjacency", "(", "adata", ",", "adjacency", "=", "'connectivities'", ",", "adjacency_tree", "=", "'connectivities_tree'", ",", "as_heatmap", "=", "True", ",", "color_map", "=", "None", ",", "show", "=", "None", ",", "save", "=", "None", ")", ":", "connectivity", "=", "adata", ".", "uns", "[", "adjacency", "]", ".", "toarray", "(", ")", "connectivity_select", "=", "adata", ".", "uns", "[", "adjacency_tree", "]", "if", "as_heatmap", ":", "matrix", "(", "connectivity", ",", "color_map", "=", "color_map", ",", "show", "=", "False", ")", "for", "i", "in", "range", "(", "connectivity_select", ".", "shape", "[", "0", "]", ")", ":", "neighbors", "=", "connectivity_select", "[", "i", "]", ".", "nonzero", "(", ")", "[", "1", "]", "pl", ".", "scatter", "(", "[", "i", "for", "j", "in", "neighbors", "]", ",", "neighbors", ",", "color", "=", "'black'", ",", "s", "=", "1", ")", "# as a stripplot", "else", ":", "pl", ".", "figure", "(", ")", "for", "i", ",", "cs", "in", "enumerate", "(", "connectivity", ")", ":", "x", "=", "[", "i", "for", "j", ",", "d", "in", "enumerate", "(", "cs", ")", "if", "i", "!=", "j", "]", "y", "=", "[", "c", "for", "j", ",", "c", "in", "enumerate", "(", "cs", ")", "if", "i", "!=", "j", "]", "pl", ".", "scatter", "(", "x", ",", "y", ",", "color", "=", "'gray'", ",", "s", "=", "1", ")", "neighbors", "=", "connectivity_select", "[", "i", "]", ".", "nonzero", "(", ")", "[", "1", "]", "pl", ".", "scatter", "(", "[", "i", "for", "j", "in", "neighbors", "]", ",", "cs", "[", "neighbors", "]", ",", "color", "=", "'black'", ",", "s", "=", "1", ")", "utils", ".", "savefig_or_show", "(", "'paga_connectivity'", ",", "show", "=", "show", ",", "save", "=", "save", ")" ]
38.821429
16.035714
def create_db_entry(self, comment=''): """Create a db entry for this task file info and link it with a optional comment :param comment: a comment for the task file entry :type comment: str :returns: The created TaskFile django instance and the comment. If the comment was empty, None is returned instead :rtype: tuple of :class:`dj.models.TaskFile` and :class:`dj.models.Note` :raises: ValidationError, If the comment could not be created, the TaskFile is deleted and the Exception is propagated. """ jbfile = JB_File(self) p = jbfile.get_fullpath() user = dj.get_current_user() tf = dj.models.TaskFile(path=p, task=self.task, version=self.version, releasetype=self.releasetype, descriptor=self.descriptor, typ=self.typ, user=user) tf.full_clean() tf.save() note = None if comment: try: note = dj.models.Note(user=user, parent=tf, content=comment) note.full_clean() note.save() except Exception, e: tf.delete() raise e return tf, note
[ "def", "create_db_entry", "(", "self", ",", "comment", "=", "''", ")", ":", "jbfile", "=", "JB_File", "(", "self", ")", "p", "=", "jbfile", ".", "get_fullpath", "(", ")", "user", "=", "dj", ".", "get_current_user", "(", ")", "tf", "=", "dj", ".", "models", ".", "TaskFile", "(", "path", "=", "p", ",", "task", "=", "self", ".", "task", ",", "version", "=", "self", ".", "version", ",", "releasetype", "=", "self", ".", "releasetype", ",", "descriptor", "=", "self", ".", "descriptor", ",", "typ", "=", "self", ".", "typ", ",", "user", "=", "user", ")", "tf", ".", "full_clean", "(", ")", "tf", ".", "save", "(", ")", "note", "=", "None", "if", "comment", ":", "try", ":", "note", "=", "dj", ".", "models", ".", "Note", "(", "user", "=", "user", ",", "parent", "=", "tf", ",", "content", "=", "comment", ")", "note", ".", "full_clean", "(", ")", "note", ".", "save", "(", ")", "except", "Exception", ",", "e", ":", "tf", ".", "delete", "(", ")", "raise", "e", "return", "tf", ",", "note" ]
43.785714
22.821429
def normalize_topic(topic): """ Get a canonical representation of a Wikipedia topic, which may include a disambiguation string in parentheses. Returns (name, disambig), where "name" is the normalized topic name, and "disambig" is a string corresponding to the disambiguation text or None. """ # find titles of the form Foo (bar) topic = topic.replace('_', ' ') match = re.match(r'([^(]+) \(([^)]+)\)', topic) if not match: return normalize(topic), None else: return normalize(match.group(1)), 'n/' + match.group(2).strip(' _')
[ "def", "normalize_topic", "(", "topic", ")", ":", "# find titles of the form Foo (bar)", "topic", "=", "topic", ".", "replace", "(", "'_'", ",", "' '", ")", "match", "=", "re", ".", "match", "(", "r'([^(]+) \\(([^)]+)\\)'", ",", "topic", ")", "if", "not", "match", ":", "return", "normalize", "(", "topic", ")", ",", "None", "else", ":", "return", "normalize", "(", "match", ".", "group", "(", "1", ")", ")", ",", "'n/'", "+", "match", ".", "group", "(", "2", ")", ".", "strip", "(", "' _'", ")" ]
36
18.5
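A usage sketch for normalize_topic above; the module's normalize() is not shown in this record, so a trivial lower-casing stand-in is assumed here.

.. code-block:: python

    def normalize(text):  # hypothetical stand-in for the module's normalizer
        return text.lower()

    print(normalize_topic('Crane_(bird)'))  # ('crane', 'n/bird')
    print(normalize_topic('Banana'))        # ('banana', None)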
def get_list_continuous_queries(self): """Get the list of continuous queries in InfluxDB. :return: all CQs in InfluxDB :rtype: list of dictionaries :Example: :: >> cqs = client.get_list_cqs() >> cqs [ { u'db1': [] }, { u'db2': [ { u'name': u'vampire', u'query': u'CREATE CONTINUOUS QUERY vampire ON ' 'mydb BEGIN SELECT count(dracula) INTO ' 'mydb.autogen.all_of_them FROM ' 'mydb.autogen.one GROUP BY time(5m) END' } ] } ] """ query_string = "SHOW CONTINUOUS QUERIES" return [{sk[0]: list(p)} for sk, p in self.query(query_string).items()]
[ "def", "get_list_continuous_queries", "(", "self", ")", ":", "query_string", "=", "\"SHOW CONTINUOUS QUERIES\"", "return", "[", "{", "sk", "[", "0", "]", ":", "list", "(", "p", ")", "}", "for", "sk", ",", "p", "in", "self", ".", "query", "(", "query_string", ")", ".", "items", "(", ")", "]" ]
31.322581
21.354839
def statuscategories(self): """Get a list of status category Resources from the server. :rtype: List[StatusCategory] """ r_json = self._get_json('statuscategory') statuscategories = [StatusCategory(self._options, self._session, raw_stat_json) for raw_stat_json in r_json] return statuscategories
[ "def", "statuscategories", "(", "self", ")", ":", "r_json", "=", "self", ".", "_get_json", "(", "'statuscategory'", ")", "statuscategories", "=", "[", "StatusCategory", "(", "self", ".", "_options", ",", "self", ".", "_session", ",", "raw_stat_json", ")", "for", "raw_stat_json", "in", "r_json", "]", "return", "statuscategories" ]
40.444444
15.333333
def Pareto2(q, b, tag=None): """ A Pareto random variate (second kind). This form always starts at the origin. Parameters ---------- q : scalar The scale parameter b : scalar The shape parameter """ assert q > 0 and b > 0, 'Pareto2 "q" and "b" must be positive scalars' return Pareto(q, b, tag) - b
[ "def", "Pareto2", "(", "q", ",", "b", ",", "tag", "=", "None", ")", ":", "assert", "q", ">", "0", "and", "b", ">", "0", ",", "'Pareto2 \"q\" and \"b\" must be positive scalars'", "return", "Pareto", "(", "q", ",", "b", ",", "tag", ")", "-", "b" ]
24.714286
20.142857
def get_checkcode(cls, id_number_str):
        """
        Compute the check digit for a Chinese ID card number.

        :param:
            * id_number_str: (string) the first 17 digits of the ID number, e.g. 32012419870101001

        :returns:
            * return type (tuple)
            * flag: (bool) True if the ID number format is valid; False if the format is invalid
            * checkcode: the check code computed from the first 17 digits

        Example::

            from fishbase.fish_data import *

            print('--- fish_data get_checkcode demo ---')

            # id number
            id1 = '32012419870101001'
            print(id1, IdCard.get_checkcode(id1)[1])

            # id number
            id2 = '13052219840731647'
            print(id2, IdCard.get_checkcode(id2)[1])

            print('---')

        Output::

            --- fish_data get_checkcode demo ---
            32012419870101001 5
            13052219840731647 1
            ---

        """
        # check the length; if it is not 17 digits, fail immediately
        if len(id_number_str) != 17:
            return False, -1

        id_regex = '[1-9][0-9]{14}([0-9]{2}[0-9X])?'

        if not re.match(id_regex, id_number_str):
            return False, -1

        items = [int(item) for item in id_number_str]

        # weighting factor table
        factors = (7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2)

        # sum of the products of each of the 17 digits and its weighting factor
        copulas = sum([a * b for a, b in zip(factors, items)])

        # check code table
        check_codes = ('1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2')

        checkcode = check_codes[copulas % 11].upper()

        return True, checkcode
[ "def", "get_checkcode", "(", "cls", ",", "id_number_str", ")", ":", "# check the length; if it is not 17 digits, fail immediately", "if", "len", "(", "id_number_str", ")", "!=", "17", ":", "return", "False", ",", "-", "1", "id_regex", "=", "'[1-9][0-9]{14}([0-9]{2}[0-9X])?'", "if", "not", "re", ".", "match", "(", "id_regex", ",", "id_number_str", ")", ":", "return", "False", ",", "-", "1", "items", "=", "[", "int", "(", "item", ")", "for", "item", "in", "id_number_str", "]", "# weighting factor table", "factors", "=", "(", "7", ",", "9", ",", "10", ",", "5", ",", "8", ",", "4", ",", "2", ",", "1", ",", "6", ",", "3", ",", "7", ",", "9", ",", "10", ",", "5", ",", "8", ",", "4", ",", "2", ")", "# sum of the products of each of the 17 digits and its weighting factor", "copulas", "=", "sum", "(", "[", "a", "*", "b", "for", "a", ",", "b", "in", "zip", "(", "factors", ",", "items", ")", "]", ")", "# check code table", "check_codes", "=", "(", "'1'", ",", "'0'", ",", "'X'", ",", "'9'", ",", "'8'", ",", "'7'", ",", "'6'", ",", "'5'", ",", "'4'", ",", "'3'", ",", "'2'", ")", "checkcode", "=", "check_codes", "[", "copulas", "%", "11", "]", ".", "upper", "(", ")", "return", "True", ",", "checkcode" ]
24.322034
22.694915
def exists_alias(self, index=None, name=None, params=None): """ Return a boolean indicating whether given alias exists. `<http://www.elastic.co/guide/en/elasticsearch/reference/current/indices-aliases.html>`_ :arg index: A comma-separated list of index names to filter aliases :arg name: A comma-separated list of alias names to return :arg allow_no_indices: Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified) :arg expand_wildcards: Whether to expand wildcard expression to concrete indices that are open, closed or both., default 'all', valid choices are: 'open', 'closed', 'none', 'all' :arg ignore_unavailable: Whether specified concrete indices should be ignored when unavailable (missing or closed) :arg local: Return local information, do not retrieve the state from master node (default: false) """ return self.transport.perform_request( "HEAD", _make_path(index, "_alias", name), params=params )
[ "def", "exists_alias", "(", "self", ",", "index", "=", "None", ",", "name", "=", "None", ",", "params", "=", "None", ")", ":", "return", "self", ".", "transport", ".", "perform_request", "(", "\"HEAD\"", ",", "_make_path", "(", "index", ",", "\"_alias\"", ",", "name", ")", ",", "params", "=", "params", ")" ]
55.619048
25.142857
def from_csv(self, csv_source, delimiter=","): """ Set tabular attributes to the writer from a character-separated values (CSV) data source. Following attributes are set to the writer by the method: - :py:attr:`~.headers`. - :py:attr:`~.value_matrix`. :py:attr:`~.table_name` also be set if the CSV data source is a file. In that case, :py:attr:`~.table_name` is as same as the filename. :param str csv_source: Input CSV data source either can be designated CSV text or CSV file path. :Examples: :ref:`example-from-csv` :Dependency Packages: - `pytablereader <https://github.com/thombashi/pytablereader>`__ """ import pytablereader as ptr loader = ptr.CsvTableTextLoader(csv_source, quoting_flags=self._quoting_flags) loader.delimiter = delimiter try: for table_data in loader.load(): self.from_tabledata(table_data, is_overwrite_table_name=False) return except ptr.DataError: pass loader = ptr.CsvTableFileLoader(csv_source, quoting_flags=self._quoting_flags) loader.delimiter = delimiter for table_data in loader.load(): self.from_tabledata(table_data)
[ "def", "from_csv", "(", "self", ",", "csv_source", ",", "delimiter", "=", "\",\"", ")", ":", "import", "pytablereader", "as", "ptr", "loader", "=", "ptr", ".", "CsvTableTextLoader", "(", "csv_source", ",", "quoting_flags", "=", "self", ".", "_quoting_flags", ")", "loader", ".", "delimiter", "=", "delimiter", "try", ":", "for", "table_data", "in", "loader", ".", "load", "(", ")", ":", "self", ".", "from_tabledata", "(", "table_data", ",", "is_overwrite_table_name", "=", "False", ")", "return", "except", "ptr", ".", "DataError", ":", "pass", "loader", "=", "ptr", ".", "CsvTableFileLoader", "(", "csv_source", ",", "quoting_flags", "=", "self", ".", "_quoting_flags", ")", "loader", ".", "delimiter", "=", "delimiter", "for", "table_data", "in", "loader", ".", "load", "(", ")", ":", "self", ".", "from_tabledata", "(", "table_data", ")" ]
34.864865
23.081081
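Since the method first tries `CsvTableTextLoader` and only falls back to `CsvTableFileLoader` on a `DataError`, the same call accepts either CSV text or a file path. A usage sketch, assuming the pytablewriter and pytablereader packages are installed and `MarkdownTableWriter` is the concrete writer in use:

import pytablewriter as ptw

writer = ptw.MarkdownTableWriter()
# CSV passed as text is handled by CsvTableTextLoader, so table_name
# is left unset.
writer.from_csv('"a","b"\n1,2\n3,4\n')
writer.write_table()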
def mod(value, arg): """Return the modulo value.""" try: return valid_numeric(value) % valid_numeric(arg) except (ValueError, TypeError): try: return value % arg except Exception: return ''
[ "def", "mod", "(", "value", ",", "arg", ")", ":", "try", ":", "return", "valid_numeric", "(", "value", ")", "%", "valid_numeric", "(", "arg", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "try", ":", "return", "value", "%", "arg", "except", "Exception", ":", "return", "''" ]
26.777778
16.111111
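The double fallback means the filter first tries numeric coercion via `valid_numeric`, then Python's native `%`, and finally returns an empty string. A self-contained sketch with a hypothetical stand-in for `valid_numeric` (the real helper lives elsewhere in the filter module):

def valid_numeric(value):
    # Hypothetical stand-in: coerce to float, raising on failure.
    return float(value)

def mod(value, arg):
    try:
        return valid_numeric(value) % valid_numeric(arg)
    except (ValueError, TypeError):
        try:
            return value % arg
        except Exception:
            return ''

print(mod('10', '3'))  # 1.0 -- both operands coerced to float
print(mod('a', 3))     # '' -- neither branch succeeds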
def normalize_reference_name(name): """ Search the dictionary of species-specific references to find a reference name that matches aside from capitalization. If no matching reference is found, raise an exception. """ lower_name = name.strip().lower() for reference in Species._reference_names_to_species.keys(): if reference.lower() == lower_name: return reference raise ValueError("Reference genome '%s' not found" % name)
[ "def", "normalize_reference_name", "(", "name", ")", ":", "lower_name", "=", "name", ".", "strip", "(", ")", ".", "lower", "(", ")", "for", "reference", "in", "Species", ".", "_reference_names_to_species", ".", "keys", "(", ")", ":", "if", "reference", ".", "lower", "(", ")", "==", "lower_name", ":", "return", "reference", "raise", "ValueError", "(", "\"Reference genome '%s' not found\"", "%", "name", ")" ]
38.75
14.25
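The lookup is a linear scan that compares lower-cased keys and returns the canonically-cased name. A self-contained sketch of the same pattern, with a hypothetical stand-in dict in place of `Species._reference_names_to_species`:

# Hypothetical registry; the real one is populated by the library's
# Species definitions.
_reference_names_to_species = {"GRCh38": None, "GRCm38": None}

def normalize_reference_name(name):
    lower_name = name.strip().lower()
    for reference in _reference_names_to_species.keys():
        if reference.lower() == lower_name:
            return reference
    raise ValueError("Reference genome '%s' not found" % name)

print(normalize_reference_name("  grch38 "))  # 'GRCh38'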
def gen_secret(length=64):
    """
    Generate a secret of the given length.
    """
    charset = string.ascii_letters + string.digits
    return ''.join(random.SystemRandom().choice(charset) for _ in range(length))
[ "def", "gen_secret", "(", "length", "=", "64", ")", ":", "charset", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", "return", "''", ".", "join", "(", "random", ".", "SystemRandom", "(", ")", ".", "choice", "(", "charset", ")", "for", "_", "in", "range", "(", "length", ")", ")" ]
37.166667
7
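`random.SystemRandom` draws from the OS entropy source (`os.urandom`), so the output is usable for secrets, unlike the default Mersenne Twister PRNG. A runnable copy with a usage line:

import random
import string

def gen_secret(length=64):
    # SystemRandom uses os.urandom, making this suitable for tokens.
    charset = string.ascii_letters + string.digits
    return ''.join(random.SystemRandom().choice(charset) for _ in range(length))

token = gen_secret(32)
print(len(token), token.isalnum())  # 32 True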
def click_button(self, index_or_name): """ Click button """ _platform_class_dict = {'ios': 'UIAButton', 'android': 'android.widget.Button'} if self._is_support_platform(_platform_class_dict): class_name = self._get_class(_platform_class_dict) self._click_element_by_class_name(class_name, index_or_name)
[ "def", "click_button", "(", "self", ",", "index_or_name", ")", ":", "_platform_class_dict", "=", "{", "'ios'", ":", "'UIAButton'", ",", "'android'", ":", "'android.widget.Button'", "}", "if", "self", ".", "_is_support_platform", "(", "_platform_class_dict", ")", ":", "class_name", "=", "self", ".", "_get_class", "(", "_platform_class_dict", ")", "self", ".", "_click_element_by_class_name", "(", "class_name", ",", "index_or_name", ")" ]
54.714286
16.571429
def get_fba_flux(self, objective): """Return a dictionary of all the fluxes solved by FBA. Dictionary of fluxes is used in :meth:`.lin_moma` and :meth:`.moma` to minimize changes in the flux distributions following model perturbation. Args: objective: The objective reaction that is maximized. Returns: Dictionary of fluxes for each reaction in the model. """ flux_result = self.solve_fba(objective) fba_fluxes = {} # Place all the flux values in a dictionary for key in self._model.reactions: fba_fluxes[key] = flux_result.get_value(self._v_wt[key]) return fba_fluxes
[ "def", "get_fba_flux", "(", "self", ",", "objective", ")", ":", "flux_result", "=", "self", ".", "solve_fba", "(", "objective", ")", "fba_fluxes", "=", "{", "}", "# Place all the flux values in a dictionary", "for", "key", "in", "self", ".", "_model", ".", "reactions", ":", "fba_fluxes", "[", "key", "]", "=", "flux_result", ".", "get_value", "(", "self", ".", "_v_wt", "[", "key", "]", ")", "return", "fba_fluxes" ]
34.25
21.35
def register_suite(): """ Call this method in a module containing a test suite. The stack trace from which call descriptor hashes are derived will be truncated at this module. """ global test_suite frm = inspect.stack()[1] test_suite = ".".join(os.path.basename(frm[1]).split('.')[0:-1])
[ "def", "register_suite", "(", ")", ":", "global", "test_suite", "frm", "=", "inspect", ".", "stack", "(", ")", "[", "1", "]", "test_suite", "=", "\".\"", ".", "join", "(", "os", ".", "path", ".", "basename", "(", "frm", "[", "1", "]", ")", ".", "split", "(", "'.'", ")", "[", "0", ":", "-", "1", "]", ")" ]
34.222222
21.555556
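The `inspect.stack()[1]` frame record identifies the caller, and element `[1]` of that record is the caller's source filename; stripping the extension yields the suite name. A minimal sketch of the same caller-filename trick, wrapped in a function so the stack index is well defined:

import inspect
import os

def caller_module_name():
    frm = inspect.stack()[1]  # frame record of whoever called us
    # frm[1] is the source filename; drop the extension.
    return ".".join(os.path.basename(frm[1]).split('.')[0:-1])

# Called from a file named my_suite.py, this prints "my_suite".
print(caller_module_name())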
def get_config(repo_root): """Gets the configuration file either from the repository or the default.""" config = os.path.join(os.path.dirname(__file__), 'configs', 'config.yaml') if repo_root: repo_config = os.path.join(repo_root, '.gitlint.yaml') if os.path.exists(repo_config): config = repo_config with open(config) as f: # We have to read the content first as yaml hangs up when reading from # MockOpen content = f.read() # Yaml.load will return None when the input is empty. if not content: yaml_config = {} else: yaml_config = yaml.load(content) return linters.parse_yaml_config(yaml_config, repo_root)
[ "def", "get_config", "(", "repo_root", ")", ":", "config", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "'configs'", ",", "'config.yaml'", ")", "if", "repo_root", ":", "repo_config", "=", "os", ".", "path", ".", "join", "(", "repo_root", ",", "'.gitlint.yaml'", ")", "if", "os", ".", "path", ".", "exists", "(", "repo_config", ")", ":", "config", "=", "repo_config", "with", "open", "(", "config", ")", "as", "f", ":", "# We have to read the content first as yaml hangs up when reading from", "# MockOpen", "content", "=", "f", ".", "read", "(", ")", "# Yaml.load will return None when the input is empty.", "if", "not", "content", ":", "yaml_config", "=", "{", "}", "else", ":", "yaml_config", "=", "yaml", ".", "load", "(", "content", ")", "return", "linters", ".", "parse_yaml_config", "(", "yaml_config", ",", "repo_root", ")" ]
35.6
20.7
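Two details worth noting: the repo-local `.gitlint.yaml` silently overrides the packaged default, and `yaml.load` without an explicit `Loader` is deprecated in modern PyYAML. A hedged sketch of the same resolve-then-parse pattern (hypothetical function name) using `yaml.safe_load`, which also returns `None` on empty input:

import os
import yaml

def load_config(repo_root, default_path):
    # Prefer a repo-local .gitlint.yaml, falling back to the default.
    config = default_path
    if repo_root:
        repo_config = os.path.join(repo_root, '.gitlint.yaml')
        if os.path.exists(repo_config):
            config = repo_config
    with open(config) as f:
        content = f.read()
    # safe_load avoids arbitrary object construction and yields None
    # for empty files, which we normalize to an empty dict.
    return yaml.safe_load(content) or {}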
def initialize_fields(self, content): """ Initializes the :class:`Field` elements in the `Array` with the *values* in the *content* list. If the *content* list is shorter than the `Array` then the *content* list is used as a rotating fill pattern for the :class:`Field` elements in the `Array`. :param list content: a list contains the :class:`Field` values for each element in the `Array` or one :class:`Field` value for all elements in the `Array`. """ if isinstance(content, (list, tuple)): capacity = len(content) for i in range(0, len(self), capacity): for name, pair in enumerate(zip(self[i:i + capacity], content), start=i): item, value = pair if is_mixin(item): # Container or Pointer item.initialize_fields(value) elif is_field(item): # Fields item.value = value else: raise MemberTypeError(self, item, name) else: for name, item in enumerate(self): if is_mixin(item): # Container or Pointer item.initialize_fields(content) elif is_field(item): # Fields item.value = content else: raise MemberTypeError(self, item, name)
[ "def", "initialize_fields", "(", "self", ",", "content", ")", ":", "if", "isinstance", "(", "content", ",", "(", "list", ",", "tuple", ")", ")", ":", "capacity", "=", "len", "(", "content", ")", "for", "i", "in", "range", "(", "0", ",", "len", "(", "self", ")", ",", "capacity", ")", ":", "for", "name", ",", "pair", "in", "enumerate", "(", "zip", "(", "self", "[", "i", ":", "i", "+", "capacity", "]", ",", "content", ")", ",", "start", "=", "i", ")", ":", "item", ",", "value", "=", "pair", "if", "is_mixin", "(", "item", ")", ":", "# Container or Pointer", "item", ".", "initialize_fields", "(", "value", ")", "elif", "is_field", "(", "item", ")", ":", "# Fields", "item", ".", "value", "=", "value", "else", ":", "raise", "MemberTypeError", "(", "self", ",", "item", ",", "name", ")", "else", ":", "for", "name", ",", "item", "in", "enumerate", "(", "self", ")", ":", "if", "is_mixin", "(", "item", ")", ":", "# Container or Pointer", "item", ".", "initialize_fields", "(", "content", ")", "elif", "is_field", "(", "item", ")", ":", "# Fields", "item", ".", "value", "=", "content", "else", ":", "raise", "MemberTypeError", "(", "self", ",", "item", ",", "name", ")" ]
41.657895
14.868421
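The rotating-fill semantics (a *content* list shorter than the `Array` wraps around) can be shown without the surrounding Field/Array machinery by pairing elements with `itertools.cycle`; a plain-list sketch:

from itertools import cycle

elements = [0] * 7
pattern = [1, 2, 3]  # shorter than the array, so it rotates
for i, value in zip(range(len(elements)), cycle(pattern)):
    elements[i] = value
print(elements)  # [1, 2, 3, 1, 2, 3, 1]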
def build(template_directories): """ Build a template from the source template directories. :param template_directories: source template directories :return: template workflow """ template = load_template(template_directories[0]) for directory in template_directories[1:]: template.update(load_template(directory)) return template
[ "def", "build", "(", "template_directories", ")", ":", "template", "=", "load_template", "(", "template_directories", "[", "0", "]", ")", "for", "directory", "in", "template_directories", "[", "1", ":", "]", ":", "template", ".", "update", "(", "load_template", "(", "directory", ")", ")", "return", "template" ]
32.818182
13.181818
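Later template directories win on key collisions, because each `update` overwrites earlier entries. The same layering with plain dicts standing in for `load_template` results:

layers = [{"a": 1, "b": 2}, {"b": 3}, {"c": 4}]  # stand-ins for loaded templates
template = dict(layers[0])
for layer in layers[1:]:
    template.update(layer)
print(template)  # {'a': 1, 'b': 3, 'c': 4}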
def logical_chassis_fwdl_status_output_cluster_fwdl_entries_fwdl_entries_index(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") logical_chassis_fwdl_status = ET.Element("logical_chassis_fwdl_status") config = logical_chassis_fwdl_status output = ET.SubElement(logical_chassis_fwdl_status, "output") cluster_fwdl_entries = ET.SubElement(output, "cluster-fwdl-entries") fwdl_entries = ET.SubElement(cluster_fwdl_entries, "fwdl-entries") index = ET.SubElement(fwdl_entries, "index") index.text = kwargs.pop('index') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "logical_chassis_fwdl_status_output_cluster_fwdl_entries_fwdl_entries_index", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "logical_chassis_fwdl_status", "=", "ET", ".", "Element", "(", "\"logical_chassis_fwdl_status\"", ")", "config", "=", "logical_chassis_fwdl_status", "output", "=", "ET", ".", "SubElement", "(", "logical_chassis_fwdl_status", ",", "\"output\"", ")", "cluster_fwdl_entries", "=", "ET", ".", "SubElement", "(", "output", ",", "\"cluster-fwdl-entries\"", ")", "fwdl_entries", "=", "ET", ".", "SubElement", "(", "cluster_fwdl_entries", ",", "\"fwdl-entries\"", ")", "index", "=", "ET", ".", "SubElement", "(", "fwdl_entries", ",", "\"index\"", ")", "index", ".", "text", "=", "kwargs", ".", "pop", "(", "'index'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
49.642857
19.857143
def create(cls, name, engines, include_core_files=False, include_slapcat_output=False, comment=None): """ Create an sginfo task. :param str name: name of task :param engines: list of engines to apply the sginfo task :type engines: list(Engine) :param bool include_core_files: include core files in the sginfo backup (default: False) :param bool include_slapcat_output: include output from a slapcat command in output (default: False) :raises ElementNotFound: engine not found :raises CreateElementFailed: create the task failed :return: the task :rtype: SGInfoTask """ json = { 'name': name, 'comment': comment, 'resources': [engine.href for engine in engines], 'include_core_files': include_core_files, 'include_slapcat_output': include_slapcat_output} return ElementCreator(cls, json)
[ "def", "create", "(", "cls", ",", "name", ",", "engines", ",", "include_core_files", "=", "False", ",", "include_slapcat_output", "=", "False", ",", "comment", "=", "None", ")", ":", "json", "=", "{", "'name'", ":", "name", ",", "'comment'", ":", "comment", ",", "'resources'", ":", "[", "engine", ".", "href", "for", "engine", "in", "engines", "]", ",", "'include_core_files'", ":", "include_core_files", ",", "'include_slapcat_output'", ":", "include_slapcat_output", "}", "return", "ElementCreator", "(", "cls", ",", "json", ")" ]
39.68
14.64
def camera_list(self, **kwargs): """Return a list of cameras.""" api = self._api_info['camera'] payload = dict({ '_sid': self._sid, 'api': api['name'], 'method': 'List', 'version': api['version'], }, **kwargs) response = self._get_json_with_retry(api['url'], payload) cameras = [] for data in response['data']['cameras']: cameras.append(Camera(data, self._video_stream_url)) return cameras
[ "def", "camera_list", "(", "self", ",", "*", "*", "kwargs", ")", ":", "api", "=", "self", ".", "_api_info", "[", "'camera'", "]", "payload", "=", "dict", "(", "{", "'_sid'", ":", "self", ".", "_sid", ",", "'api'", ":", "api", "[", "'name'", "]", ",", "'method'", ":", "'List'", ",", "'version'", ":", "api", "[", "'version'", "]", ",", "}", ",", "*", "*", "kwargs", ")", "response", "=", "self", ".", "_get_json_with_retry", "(", "api", "[", "'url'", "]", ",", "payload", ")", "cameras", "=", "[", "]", "for", "data", "in", "response", "[", "'data'", "]", "[", "'cameras'", "]", ":", "cameras", ".", "append", "(", "Camera", "(", "data", ",", "self", ".", "_video_stream_url", ")", ")", "return", "cameras" ]
29.411765
17.235294
def patch_cmdline_parser(): """ Patches the ``luigi.cmdline_parser.CmdlineParser`` to store the original command line arguments for later processing in the :py:class:`law.config.Config`. """ # store original functions _init = luigi.cmdline_parser.CmdlineParser.__init__ # patch init def __init__(self, cmdline_args): _init(self, cmdline_args) self.cmdline_args = cmdline_args luigi.cmdline_parser.CmdlineParser.__init__ = __init__
[ "def", "patch_cmdline_parser", "(", ")", ":", "# store original functions", "_init", "=", "luigi", ".", "cmdline_parser", ".", "CmdlineParser", ".", "__init__", "# patch init", "def", "__init__", "(", "self", ",", "cmdline_args", ")", ":", "_init", "(", "self", ",", "cmdline_args", ")", "self", ".", "cmdline_args", "=", "cmdline_args", "luigi", ".", "cmdline_parser", ".", "CmdlineParser", ".", "__init__", "=", "__init__" ]
33.642857
17.928571
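The patch follows the classic monkey-patching recipe: capture the original function from the class, define a wrapper that delegates to it, then reassign the attribute. A generic, runnable sketch of the same recipe on a stand-in class:

# Stand-in class in place of luigi's CmdlineParser.
class Parser:
    def __init__(self, args):
        self.parsed = list(args)

_init = Parser.__init__

def __init__(self, args):
    _init(self, args)
    self.raw_args = args  # extra attribute added by the patch

Parser.__init__ = __init__

p = Parser(["--local-scheduler"])
print(p.raw_args)  # ['--local-scheduler']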
def ndvi(self): """ Normalized difference vegetation index. :return: NDVI """ red, nir = self.reflectance(3), self.reflectance(4) ndvi = self._divide_zero((nir - red), (nir + red), nan) return ndvi
[ "def", "ndvi", "(", "self", ")", ":", "red", ",", "nir", "=", "self", ".", "reflectance", "(", "3", ")", ",", "self", ".", "reflectance", "(", "4", ")", "ndvi", "=", "self", ".", "_divide_zero", "(", "(", "nir", "-", "red", ")", ",", "(", "nir", "+", "red", ")", ",", "nan", ")", "return", "ndvi" ]
29.875
18.375
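NDVI is (NIR - red) / (NIR + red), with `_divide_zero` presumably mapping zero denominators to `nan`. A numpy sketch reproducing the formula and the zero-denominator handling:

import numpy as np

red = np.array([0.10, 0.20, 0.0])
nir = np.array([0.50, 0.60, 0.0])
with np.errstate(divide='ignore', invalid='ignore'):
    ndvi = np.where((nir + red) == 0.0, np.nan, (nir - red) / (nir + red))
print(ndvi)  # [0.66666667 0.5        nan]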
def disbatch(self): ''' Disbatch all lowstates to the appropriate clients ''' ret = [] # check clients before going, we want to throw 400 if one is bad for low in self.lowstate: if not self._verify_client(low): return # Make sure we have 'token' or 'username'/'password' in each low chunk. # Salt will verify the credentials are correct. if self.token is not None and 'token' not in low: low['token'] = self.token if not (('token' in low) or ('username' in low and 'password' in low and 'eauth' in low)): ret.append('Failed to authenticate') break # disbatch to the correct handler try: chunk_ret = yield getattr(self, '_disbatch_{0}'.format(low['client']))(low) ret.append(chunk_ret) except (AuthenticationError, AuthorizationError, EauthAuthenticationError): ret.append('Failed to authenticate') break except Exception as ex: ret.append('Unexpected exception while handling request: {0}'.format(ex)) log.error('Unexpected exception while handling request:', exc_info=True) if not self._finished: self.write(self.serialize({'return': ret})) self.finish()
[ "def", "disbatch", "(", "self", ")", ":", "ret", "=", "[", "]", "# check clients before going, we want to throw 400 if one is bad", "for", "low", "in", "self", ".", "lowstate", ":", "if", "not", "self", ".", "_verify_client", "(", "low", ")", ":", "return", "# Make sure we have 'token' or 'username'/'password' in each low chunk.", "# Salt will verify the credentials are correct.", "if", "self", ".", "token", "is", "not", "None", "and", "'token'", "not", "in", "low", ":", "low", "[", "'token'", "]", "=", "self", ".", "token", "if", "not", "(", "(", "'token'", "in", "low", ")", "or", "(", "'username'", "in", "low", "and", "'password'", "in", "low", "and", "'eauth'", "in", "low", ")", ")", ":", "ret", ".", "append", "(", "'Failed to authenticate'", ")", "break", "# disbatch to the correct handler", "try", ":", "chunk_ret", "=", "yield", "getattr", "(", "self", ",", "'_disbatch_{0}'", ".", "format", "(", "low", "[", "'client'", "]", ")", ")", "(", "low", ")", "ret", ".", "append", "(", "chunk_ret", ")", "except", "(", "AuthenticationError", ",", "AuthorizationError", ",", "EauthAuthenticationError", ")", ":", "ret", ".", "append", "(", "'Failed to authenticate'", ")", "break", "except", "Exception", "as", "ex", ":", "ret", ".", "append", "(", "'Unexpected exception while handling request: {0}'", ".", "format", "(", "ex", ")", ")", "log", ".", "error", "(", "'Unexpected exception while handling request:'", ",", "exc_info", "=", "True", ")", "if", "not", "self", ".", "_finished", ":", "self", ".", "write", "(", "self", ".", "serialize", "(", "{", "'return'", ":", "ret", "}", ")", ")", "self", ".", "finish", "(", ")" ]
39.828571
24.228571
def show_image(self, name): """Show image (item is a PIL image)""" command = "%s.show()" % name sw = self.shellwidget if sw._reading: sw.kernel_client.input(command) else: sw.execute(command)
[ "def", "show_image", "(", "self", ",", "name", ")", ":", "command", "=", "\"%s.show()\"", "%", "name", "sw", "=", "self", ".", "shellwidget", "if", "sw", ".", "_reading", ":", "sw", ".", "kernel_client", ".", "input", "(", "command", ")", "else", ":", "sw", ".", "execute", "(", "command", ")" ]
31.875
10
def decrypt(self, data, decode=False):
		""" Decrypt the given data with the cipher obtained from the AES.cipher call.

		:param data: data to decrypt
		:param decode: whether to decode the resulting bytes to str or not
		:return: bytes or str (depends on decode flag)
		"""
		#result = self.cipher().decrypt(data)
		result = self.cipher().decrypt_block(data)
		padding = self.mode().padding()
		if padding is not None:
			result = padding.reverse_pad(result, WAESMode.__data_padding_length__)
		return result.decode() if decode else result
[ "def", "decrypt", "(", "self", ",", "data", ",", "decode", "=", "False", ")", ":", "#result = self.cipher().decrypt(data)", "result", "=", "self", ".", "cipher", "(", ")", ".", "decrypt_block", "(", "data", ")", "padding", "=", "self", ".", "mode", "(", ")", ".", "padding", "(", ")", "if", "padding", "is", "not", "None", ":", "result", "=", "padding", ".", "reverse_pad", "(", "result", ",", "WAESMode", ".", "__data_padding_length__", ")", "return", "result", ".", "decode", "(", ")", "if", "decode", "else", "result" ]
31.8125
16.25
def disconnect(self): """Disconnect from the server.""" # here we just request the disconnection # later in _handle_eio_disconnect we invoke the disconnect handler for n in self.namespaces: self._send_packet(packet.Packet(packet.DISCONNECT, namespace=n)) self._send_packet(packet.Packet( packet.DISCONNECT, namespace='/')) self.eio.disconnect(abort=True)
[ "def", "disconnect", "(", "self", ")", ":", "# here we just request the disconnection", "# later in _handle_eio_disconnect we invoke the disconnect handler", "for", "n", "in", "self", ".", "namespaces", ":", "self", ".", "_send_packet", "(", "packet", ".", "Packet", "(", "packet", ".", "DISCONNECT", ",", "namespace", "=", "n", ")", ")", "self", ".", "_send_packet", "(", "packet", ".", "Packet", "(", "packet", ".", "DISCONNECT", ",", "namespace", "=", "'/'", ")", ")", "self", ".", "eio", ".", "disconnect", "(", "abort", "=", "True", ")" ]
46.444444
12.333333
def read_json(file_or_path):
    """Parse the JSON contents of a string, file object, or file path and return nested Python dicts/lists"""
    try:
        with (open(file_or_path, 'r') if isinstance(file_or_path, (str, bytes)) else file_or_path) as f:
            obj = json.load(f)
    except IOError:
        obj = json.loads(file_or_path)
    return obj
[ "def", "read_json", "(", "file_or_path", ")", ":", "try", ":", "with", "(", "open", "(", "file_or_path", ",", "'r'", ")", "if", "isinstance", "(", "file_or_path", ",", "(", "str", ",", "bytes", ")", ")", "else", "file_or_path", ")", "as", "f", ":", "obj", "=", "json", ".", "load", "(", "f", ")", "except", "IOError", ":", "obj", "=", "json", ".", "loads", "(", "file_or_path", ")", "return", "obj" ]
43
20.875
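Because a nonexistent path raises `IOError` (an alias of `OSError` in Python 3), passing raw JSON text falls through to `json.loads`. A runnable copy exercising both call styles:

import io
import json

def read_json(file_or_path):
    try:
        with (open(file_or_path, 'r') if isinstance(file_or_path, (str, bytes)) else file_or_path) as f:
            obj = json.load(f)
    except IOError:
        obj = json.loads(file_or_path)
    return obj

print(read_json('{"a": 1}'))             # no such file, so json.loads runs
print(read_json(io.StringIO('[1, 2]')))  # file-like object path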
def _xray_clean_up_entries_for_driver(self, driver_id): """Remove this driver's object/task entries from redis. Removes control-state entries of all tasks and task return objects belonging to the driver. Args: driver_id: The driver id. """ xray_task_table_prefix = ( ray.gcs_utils.TablePrefix_RAYLET_TASK_string.encode("ascii")) xray_object_table_prefix = ( ray.gcs_utils.TablePrefix_OBJECT_string.encode("ascii")) task_table_objects = self.state.task_table() driver_id_hex = binary_to_hex(driver_id) driver_task_id_bins = set() for task_id_hex, task_info in task_table_objects.items(): task_table_object = task_info["TaskSpec"] task_driver_id_hex = task_table_object["DriverID"] if driver_id_hex != task_driver_id_hex: # Ignore tasks that aren't from this driver. continue driver_task_id_bins.add(hex_to_binary(task_id_hex)) # Get objects associated with the driver. object_table_objects = self.state.object_table() driver_object_id_bins = set() for object_id, _ in object_table_objects.items(): task_id_bin = ray._raylet.compute_task_id(object_id).binary() if task_id_bin in driver_task_id_bins: driver_object_id_bins.add(object_id.binary()) def to_shard_index(id_bin): return binary_to_object_id(id_bin).redis_shard_hash() % len( self.state.redis_clients) # Form the redis keys to delete. sharded_keys = [[] for _ in range(len(self.state.redis_clients))] for task_id_bin in driver_task_id_bins: sharded_keys[to_shard_index(task_id_bin)].append( xray_task_table_prefix + task_id_bin) for object_id_bin in driver_object_id_bins: sharded_keys[to_shard_index(object_id_bin)].append( xray_object_table_prefix + object_id_bin) # Remove with best effort. for shard_index in range(len(sharded_keys)): keys = sharded_keys[shard_index] if len(keys) == 0: continue redis = self.state.redis_clients[shard_index] num_deleted = redis.delete(*keys) logger.info("Monitor: " "Removed {} dead redis entries of the " "driver from redis shard {}.".format( num_deleted, shard_index)) if num_deleted != len(keys): logger.warning("Monitor: " "Failed to remove {} relevant redis " "entries from redis shard {}.".format( len(keys) - num_deleted, shard_index))
[ "def", "_xray_clean_up_entries_for_driver", "(", "self", ",", "driver_id", ")", ":", "xray_task_table_prefix", "=", "(", "ray", ".", "gcs_utils", ".", "TablePrefix_RAYLET_TASK_string", ".", "encode", "(", "\"ascii\"", ")", ")", "xray_object_table_prefix", "=", "(", "ray", ".", "gcs_utils", ".", "TablePrefix_OBJECT_string", ".", "encode", "(", "\"ascii\"", ")", ")", "task_table_objects", "=", "self", ".", "state", ".", "task_table", "(", ")", "driver_id_hex", "=", "binary_to_hex", "(", "driver_id", ")", "driver_task_id_bins", "=", "set", "(", ")", "for", "task_id_hex", ",", "task_info", "in", "task_table_objects", ".", "items", "(", ")", ":", "task_table_object", "=", "task_info", "[", "\"TaskSpec\"", "]", "task_driver_id_hex", "=", "task_table_object", "[", "\"DriverID\"", "]", "if", "driver_id_hex", "!=", "task_driver_id_hex", ":", "# Ignore tasks that aren't from this driver.", "continue", "driver_task_id_bins", ".", "add", "(", "hex_to_binary", "(", "task_id_hex", ")", ")", "# Get objects associated with the driver.", "object_table_objects", "=", "self", ".", "state", ".", "object_table", "(", ")", "driver_object_id_bins", "=", "set", "(", ")", "for", "object_id", ",", "_", "in", "object_table_objects", ".", "items", "(", ")", ":", "task_id_bin", "=", "ray", ".", "_raylet", ".", "compute_task_id", "(", "object_id", ")", ".", "binary", "(", ")", "if", "task_id_bin", "in", "driver_task_id_bins", ":", "driver_object_id_bins", ".", "add", "(", "object_id", ".", "binary", "(", ")", ")", "def", "to_shard_index", "(", "id_bin", ")", ":", "return", "binary_to_object_id", "(", "id_bin", ")", ".", "redis_shard_hash", "(", ")", "%", "len", "(", "self", ".", "state", ".", "redis_clients", ")", "# Form the redis keys to delete.", "sharded_keys", "=", "[", "[", "]", "for", "_", "in", "range", "(", "len", "(", "self", ".", "state", ".", "redis_clients", ")", ")", "]", "for", "task_id_bin", "in", "driver_task_id_bins", ":", "sharded_keys", "[", "to_shard_index", "(", "task_id_bin", ")", "]", ".", "append", "(", "xray_task_table_prefix", "+", "task_id_bin", ")", "for", "object_id_bin", "in", "driver_object_id_bins", ":", "sharded_keys", "[", "to_shard_index", "(", "object_id_bin", ")", "]", ".", "append", "(", "xray_object_table_prefix", "+", "object_id_bin", ")", "# Remove with best effort.", "for", "shard_index", "in", "range", "(", "len", "(", "sharded_keys", ")", ")", ":", "keys", "=", "sharded_keys", "[", "shard_index", "]", "if", "len", "(", "keys", ")", "==", "0", ":", "continue", "redis", "=", "self", ".", "state", ".", "redis_clients", "[", "shard_index", "]", "num_deleted", "=", "redis", ".", "delete", "(", "*", "keys", ")", "logger", ".", "info", "(", "\"Monitor: \"", "\"Removed {} dead redis entries of the \"", "\"driver from redis shard {}.\"", ".", "format", "(", "num_deleted", ",", "shard_index", ")", ")", "if", "num_deleted", "!=", "len", "(", "keys", ")", ":", "logger", ".", "warning", "(", "\"Monitor: \"", "\"Failed to remove {} relevant redis \"", "\"entries from redis shard {}.\"", ".", "format", "(", "len", "(", "keys", ")", "-", "num_deleted", ",", "shard_index", ")", ")" ]
44.063492
17.68254
def read_causal_pairs(filename, scale=True, **kwargs): """Convert a ChaLearn Cause effect pairs challenge format into numpy.ndarray. :param filename: path of the file to read or DataFrame containing the data :type filename: str or pandas.DataFrame :param scale: Scale the data :type scale: bool :param kwargs: parameters to be passed to pandas.read_csv :return: Dataframe composed of (SampleID, a (numpy.ndarray) , b (numpy.ndarray)) :rtype: pandas.DataFrame """ def convert_row(row, scale): """Convert a CCEPC row into numpy.ndarrays. :param row: :type row: pandas.Series :return: tuple of sample ID and the converted data into numpy.ndarrays :rtype: tuple """ a = row["A"].split(" ") b = row["B"].split(" ") if a[0] == "": a.pop(0) b.pop(0) if a[-1] == "": a.pop(-1) b.pop(-1) a = array([float(i) for i in a]) b = array([float(i) for i in b]) if scale: a = scaler(a) b = scaler(b) return row['SampleID'], a, b if isinstance(filename, str): data = read_csv(filename, **kwargs) elif isinstance(filename, DataFrame): data = filename else: raise TypeError("Type not supported.") conv_data = [] for idx, row in data.iterrows(): conv_data.append(convert_row(row, scale)) df = DataFrame(conv_data, columns=['SampleID', 'A', 'B']) df = df.set_index("SampleID") return df
[ "def", "read_causal_pairs", "(", "filename", ",", "scale", "=", "True", ",", "*", "*", "kwargs", ")", ":", "def", "convert_row", "(", "row", ",", "scale", ")", ":", "\"\"\"Convert a CCEPC row into numpy.ndarrays.\n\n :param row:\n :type row: pandas.Series\n :return: tuple of sample ID and the converted data into numpy.ndarrays\n :rtype: tuple\n \"\"\"", "a", "=", "row", "[", "\"A\"", "]", ".", "split", "(", "\" \"", ")", "b", "=", "row", "[", "\"B\"", "]", ".", "split", "(", "\" \"", ")", "if", "a", "[", "0", "]", "==", "\"\"", ":", "a", ".", "pop", "(", "0", ")", "b", ".", "pop", "(", "0", ")", "if", "a", "[", "-", "1", "]", "==", "\"\"", ":", "a", ".", "pop", "(", "-", "1", ")", "b", ".", "pop", "(", "-", "1", ")", "a", "=", "array", "(", "[", "float", "(", "i", ")", "for", "i", "in", "a", "]", ")", "b", "=", "array", "(", "[", "float", "(", "i", ")", "for", "i", "in", "b", "]", ")", "if", "scale", ":", "a", "=", "scaler", "(", "a", ")", "b", "=", "scaler", "(", "b", ")", "return", "row", "[", "'SampleID'", "]", ",", "a", ",", "b", "if", "isinstance", "(", "filename", ",", "str", ")", ":", "data", "=", "read_csv", "(", "filename", ",", "*", "*", "kwargs", ")", "elif", "isinstance", "(", "filename", ",", "DataFrame", ")", ":", "data", "=", "filename", "else", ":", "raise", "TypeError", "(", "\"Type not supported.\"", ")", "conv_data", "=", "[", "]", "for", "idx", ",", "row", "in", "data", ".", "iterrows", "(", ")", ":", "conv_data", ".", "append", "(", "convert_row", "(", "row", ",", "scale", ")", ")", "df", "=", "DataFrame", "(", "conv_data", ",", "columns", "=", "[", "'SampleID'", ",", "'A'", ",", "'B'", "]", ")", "df", "=", "df", ".", "set_index", "(", "\"SampleID\"", ")", "return", "df" ]
31.458333
16.583333
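The per-row conversion splits each space-separated cell, trims empty edge tokens, and builds numpy arrays. A small pandas sketch of the input shape the function expects (hypothetical sample ID; scaling omitted):

from io import StringIO
import pandas as pd

csv_text = StringIO(
    "SampleID,A,B\n"
    "pair1, 1.0 2.0 3.0, 2.0 4.0 6.0\n"
)
data = pd.read_csv(csv_text)
# Same splitting/trimming as convert_row, minus the scaling step:
cell = data.loc[0, "A"].split(" ")
values = [float(x) for x in cell if x != ""]
print(values)  # [1.0, 2.0, 3.0]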