Dataset preview — 389k rows, 3 columns:
  Unnamed: 0   int64    values 0 to 389k (row index)
  code         string   lengths 26 to 79.6k
  docstring    string   lengths 1 to 46.9k
27,200
def set_mode_cb(self, mode, tf):
    if tf:
        self.canvas.set_draw_mode(mode)
        if mode == 'edit':
            self.edit_select_marks()
    return True
Called when one of the Move/Draw/Edit radio buttons is selected.
27,201
def supports_suggested_actions(channel_id: str, button_cnt: int = 100) -> bool:
    max_actions = {
        Channels.facebook: 10,
        Channels.skype: 10,
        Channels.line: 13,
        Channels.kik: 20,
        Channels.telegram: 100,
        Channels.slack: 100,
        Channels.emulator: 100,
        Channels.direct_line: 100,
        Channels.webchat: 100,
    }
    return button_cnt <= max_actions[channel_id] if channel_id in max_actions else False
Determine if a number of Suggested Actions are supported by a Channel. Args: channel_id (str): The Channel to check for Suggested Actions support. button_cnt (int, optional): Defaults to 100. The number of Suggested Actions to check for the Channel. Returns: bool: True if the Channel supports button_cnt total Suggested Actions, False otherwise.
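A minimal usage sketch (hedged: assumes the botbuilder-style Channels constants referenced in the snippet; values are illustrative):

    supports_suggested_actions(Channels.line, button_cnt=13)      # True: LINE caps at 13
    supports_suggested_actions(Channels.facebook, button_cnt=11)  # False: Facebook caps at 10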
27,202
def is_annual(self):
    return (self.st_month, self.st_day, self.st_hour, self.end_month,
            self.end_day, self.end_hour) == (1, 1, 0, 12, 31, 23)
Check if an analysis period is annual.
27,203
def validate(data, schema=None):
    schema = _load_schema_for_record(data, schema)
    return jsonschema_validate(
        instance=data,
        schema=schema,
        resolver=LocalRefResolver.from_schema(schema),
        format_checker=inspire_format_checker,
    )
Validate the given dictionary against the given schema. Args: data (dict): record to validate. schema (Union[dict, str]): schema to validate against. If it is a string, it is interpreted as the name of the schema to load (e.g. ``authors`` or ``jobs``). If it is ``None``, the schema is taken from ``data['$schema']``. If it is a dictionary, it is used directly. Raises: SchemaNotFound: if the given schema was not found. SchemaKeyNotFound: if ``schema`` is ``None`` and no ``$schema`` key was found in ``data``. jsonschema.SchemaError: if the schema is invalid. jsonschema.ValidationError: if the data is invalid.
27,204
def insert_from_xmldoc(connection, source_xmldoc, preserve_ids=False, verbose=False):
    orig_DBTable_append = dbtables.DBTable.append
    if not preserve_ids:
        try:
            dbtables.idmap_create(connection)
        except sqlite3.OperationalError:
            pass
        dbtables.idmap_sync(connection)
        dbtables.DBTable.append = dbtables.DBTable._remapping_append
    else:
        dbtables.DBTable.append = dbtables.DBTable._append
    try:
        xmldoc = ligolw.Document()
        xmldoc.appendChild(ligolw.LIGO_LW())
        for tbl in source_xmldoc.getElementsByTagName(ligolw.Table.tagName):
            name = tbl.Name
            try:
                cls = dbtables.TableByName[name]
            except KeyError:
                cls = dbtables.DBTable
            dbtbl = xmldoc.childNodes[-1].appendChild(cls(tbl.attributes, connection=connection))
            for elem in tbl.childNodes:
                if elem.tagName == ligolw.Stream.tagName:
                    dbtbl._end_of_columns()
                dbtbl.appendChild(type(elem)(elem.attributes))
            for row in tbl:
                dbtbl.append(row)
            dbtbl._end_of_rows()
        if not preserve_ids:
            update_ids(connection, xmldoc, verbose=verbose)
    finally:
        dbtables.DBTable.append = orig_DBTable_append
    connection.commit()
    xmldoc.unlink()
Insert the tables from an in-ram XML document into the database at the given connection. If preserve_ids is False (default), then row IDs are modified during the insert process to prevent collisions with IDs already in the database. If preserve_ids is True then IDs are not modified; this will result in database consistency violations if any of the IDs of newly-inserted rows collide with row IDs already in the database, and is generally only sensible when inserting a document into an empty database. If verbose is True then progress reports will be printed to stderr.
27,205
def get_success_url(self):
    if self.success_url:
        url = force_text(self.success_url)
    else:
        url = reverse(.format(self.url_namespace))
    return url
Returns redirect URL for valid form submittal. :rtype: str.
27,206
def execute(self):
    if self._cli_arguments[]:
        generate_sample_cfn_module(self.env_root)
    elif self._cli_arguments[]:
        generate_sample_sls_module(self.env_root)
    elif self._cli_arguments[]:
        generate_sample_sls_tsc_module(self.env_root)
    elif self._cli_arguments[]:
        generate_sample_stacker_module(self.env_root)
    elif self._cli_arguments[]:
        generate_sample_tf_module(self.env_root)
    elif self._cli_arguments[]:
        generate_sample_cdk_tsc_module(self.env_root)
    elif self._cli_arguments[]:
        generate_sample_cdk_py_module(self.env_root)
    elif self._cli_arguments[]:
        generate_sample_cdk_cs_module(self.env_root)
Run selected module generator.
27,207
async def self_check(cls):
    async for check in super(Telegram, cls).self_check():
        yield check
    s = cls.settings()
    try:
        assert isinstance(s['token'], str)
    except (KeyError, TypeError, AssertionError):
        yield HealthCheckFail(, , )
    if not hasattr(settings, 'BERNARD_BASE_URL'):
        yield HealthCheckFail(, )
    if not hasattr(settings, ):
        yield HealthCheckFail(, )
Check that the configuration is correct - Presence of "token" in the settings - Presence of "BERNARD_BASE_URL" in the global configuration
27,208
def repeater(call, args=None, kwargs=None, retries=4):
    args = args or ()
    kwargs = kwargs or {}
    t = 1.0
    for x in range(retries):
        try:
            return call(*args, **kwargs)
        except APIError as ex:
            logger.error("query ...")  # message truncated in source
        except Exception as ex:
            log_last_traceback()
            logger.error("query ...")  # message truncated in source
        t *= 2
        time.sleep(t)
repeat call x-times: docker API is just awesome :param call: function :param args: tuple, args for function :param kwargs: dict, kwargs for function :param retries: int, how many times we try? :return: response of the call
27,209
def run_the_target(G, target, settings):
    sprint = settings["sprint"]
    sprint("Running target {}".format(target))
    the_formula = get_the_node_dict(G, target)["formula"]
    run_commands(the_formula, settings)
Wrapper function that sends the commands in a target's 'formula' to run_commands(). Args: G: the graph we are going to build. target: the target to run. settings: the settings dictionary.
27,210
def get_parameter_value(self, parameter, from_cache=True, timeout=10):
    params = {
        'fromCache': from_cache,
        'timeout': int(timeout * 1000),
    }
    parameter = adapt_name_for_rest(parameter)
    url = '/processors/{}/{}/parameters{}'.format(
        self._instance, self._processor, parameter)
    response = self._client.get_proto(url, params=params)
    proto = pvalue_pb2.ParameterValue()
    proto.ParseFromString(response.content)
    if proto.HasField('rawValue') or proto.HasField('engValue'):
        return ParameterValue(proto)
    return None
Retrieve the current value of the specified parameter. :param str parameter: Either a fully-qualified XTCE name or an alias in the format ``NAMESPACE/NAME``. :param bool from_cache: If ``False`` this call will block until a fresh value is received on the processor. If ``True`` the server returns the latest value instead (which may be ``None``). :param float timeout: The amount of seconds to wait for a fresh value. (ignored if ``from_cache=True``). :rtype: .ParameterValue
27,211
def update(self, *iterables):
    _set = self._set
    values = set(chain(*iterables))
    if (4 * len(values)) > len(_set):
        _list = self._list
        _set.update(values)
        _list.clear()
        _list.update(_set)
    else:
        _add = self.add
        for value in values:
            _add(value)
    return self
Update the set, adding elements from all *iterables*.
27,212
def _applyInter(finter0, finter1, conflict="ignore"):
    OPTIONS = ["error", "ignore", "me", "other"]
    assert conflict in OPTIONS, "Invalid value in `conflict`."
    min_int = -2**63
    inter0 = tuple([f.getValue() if f else min_int for f in finter0])
    inter1 = tuple([f.getValue() if f else min_int for f in finter1])
    le00 = inter0[0] <= inter1[0]
    le01 = inter1[1] == min_int or inter0[0] <= inter1[1]
    le11 = inter1[1] == min_int or (inter0[1] != min_int and inter0[1] <= inter1[1])
    ge00 = not le00 or inter0[0] == inter1[0]
    ge10 = inter0[1] == min_int or inter0[1] >= inter1[0]
    if le00 and ge10 and le11:
        return finter1[0], finter0[1]
    elif le00 and ge10 and not le11:
        return finter1
    elif ge00 and le01 and le11:
        return finter0
    elif ge00 and le01 and not le11:
        return finter0[0], finter1[1]
    elif conflict == "me":
        return finter0
    elif conflict == "other":
        return finter1
    elif conflict == "error":
        raise Exception("Disjoint intervals!")
    return None
Return the restriction of the first interval by the second. Args: finter0, finter1 (tuple of Feature): intervals. conflict (str): what to do if a property has incompatible values/constraints: "error": raise an exception; "ignore": return None; "me": return finter0; "other": return finter1. Return (tuple of Feature): the resulting interval.
27,213
def _to_dict(self):
    _dict = {}
    if hasattr(self, 'key') and self.key is not None:
        _dict['key'] = self.key
    if hasattr(self, 'matching_results') and self.matching_results is not None:
        _dict['matching_results'] = self.matching_results
    if hasattr(self, 'aggregations') and self.aggregations is not None:
        _dict['aggregations'] = [x._to_dict() for x in self.aggregations]
    return _dict
Return a json dictionary representing this model.
27,214
def del_password(name):
    cmd = "dscl . -passwd /Users/{0} ''".format(name)
    try:
        salt.utils.mac_utils.execute_return_success(cmd)
    except CommandExecutionError as exc:
        if 'eDSUnknownNodeName' in exc.strerror:
            raise CommandExecutionError('User not found: {0}'.format(name))
        raise CommandExecutionError('Unknown error: {0}'.format(exc.strerror))
    # This is so it looks right in System Preferences
    cmd = "dscl . -create /Users/{0} Password '*'".format(name)
    salt.utils.mac_utils.execute_return_success(cmd)
    return info(name)['passwd'] == '*'
Deletes the account password :param str name: The user name of the account :return: True if successful, otherwise False :rtype: bool :raises: CommandExecutionError on user not found or any other unknown error CLI Example: .. code-block:: bash salt '*' shadow.del_password username
27,215
def searchIndex(self, printData=True):
    backupValue = copy.deepcopy(self.output.printData)
    self.output.printData = printData
    self.data = self.index.search(self.searchString, self.category, self.math,
                                  self.game, self.searchFiles, self.extension)
    self.output.printData = backupValue
    return self.data
Search the index with all the repo's specified parameters
27,216
def format(self, dt, fmt, locale=None):
    if not locale:
        locale = pendulum.get_locale()
    locale = Locale.load(locale)
    result = self._FORMAT_RE.sub(
        lambda m: m.group(1)
        if m.group(1)
        else m.group(2)
        if m.group(2)
        else self._format_token(dt, m.group(3), locale),
        fmt,
    )
    return decode(result)
Formats a DateTime instance with a given format and locale. :param dt: The instance to format :type dt: pendulum.DateTime :param fmt: The format to use :type fmt: str :param locale: The locale to use :type locale: str or Locale or None :rtype: str
27,217
def from_pubkey(cls, pubkey, compressed=False, version=56, prefix=None):
    pubkey = PublicKey(pubkey)
    if compressed:
        pubkey = pubkey.compressed()
    else:
        pubkey = pubkey.uncompressed()
    addressbin = ripemd160(hexlify(hashlib.sha256(unhexlify(pubkey)).digest()))
    return cls(hexlify(addressbin).decode("ascii"))
Derive address using ``RIPEMD160(SHA256(x))``
27,218
def generate_data_type(self, data_type):
    if isinstance(data_type, Struct):
        self.emit()
        self.emit( % data_type.name)
        with self.indent():
            if data_type.doc is not None:
                self.emit(self.format_string(data_type.doc))
            for field in data_type.fields:
                type_repr = self.format_data_type(field.data_type)
                if not field.has_default:
                    self.emit( % (field.name, type_repr))
                else:
                    self.emit( % (field.name, type_repr,
                                  self.format_value(field.default)))
                if field.doc is not None:
                    with self.indent():
                        self.emit(self.format_value(field.doc))
    elif isinstance(data_type, Union):
        self.emit()
        self.emit( % data_type.name)
        with self.indent():
            if data_type.doc is not None:
                self.emit(self.format_string(data_type.doc))
            for field in data_type.fields:
                name = field.name
                if field.catch_all or field is data_type.catch_all_field:
                    name +=
                if isinstance(field.data_type, Void):
                    self.emit( % (name))
                else:
                    type_repr = self.format_data_type(field.data_type)
                    self.emit( % (name, type_repr))
                if field.doc is not None:
                    with self.indent():
                        self.emit(self.format_value(field.doc))
    else:
        ...
Output a data type definition (a struct or union).
27,219
def print_bytes(data):
    bs = bytearray(data)
    symbols_in_one_line = 8
    n = len(bs) // symbols_in_one_line
    for i in range(n):
        print(str(i * symbols_in_one_line) + " | " +
              " ".join("%02X" % b for b in bs[i * symbols_in_one_line:(i + 1) * symbols_in_one_line]))
    # print the remaining bytes (the original indexed from (i+1), which
    # skipped data when len(bs) < 8; n * symbols_in_one_line is correct)
    if len(bs) % symbols_in_one_line != 0:
        print(str(n * symbols_in_one_line) + " | " +
              " ".join("%02X" % b for b in bs[n * symbols_in_one_line:]) + "\n")
Function to visualize byte streams. Split into bytes, print to console. :param data: byte string
27,220
def __store_clustering_results(self, amount_clusters, leaf_blocks):
    self.__clusters = [[] for _ in range(amount_clusters)]
    for block in leaf_blocks:
        index = block.get_cluster()
        if index is not None:
            self.__clusters[index] += block.get_points()
        else:
            self.__noise += block.get_points()
    self.__clusters = [list(set(cluster)) for cluster in self.__clusters]
    self.__noise = list(set(self.__noise))
! @brief Stores clustering results in a convenient way. @param[in] amount_clusters (uint): Amount of clusters that were allocated during processing. @param[in] leaf_blocks (list): Leaf BANG-blocks (the smallest cells).
27,221
def _validate(cls, sign, integer_part, non_repeating_part, repeating_part, base):
    if any(x < 0 or x >= base for x in integer_part):
        return BasesValueError(
            integer_part, "integer_part",
            "values must be between 0 and %s" % base)
    if any(x < 0 or x >= base for x in non_repeating_part):
        return BasesValueError(
            non_repeating_part, "non_repeating_part",
            "values must be between 0 and %s" % base)
    if any(x < 0 or x >= base for x in repeating_part):
        return BasesValueError(
            repeating_part, "repeating_part",
            "values must be between 0 and %s" % base)
    if base < 2:
        return BasesValueError(base, "base", "must be at least 2")
    if sign not in (-1, 0, 1) or sign is True or sign is False:
        return BasesValueError(
            sign, "sign", "must be an int between -1 and 1")
    return None
Check if radix is valid. :param int sign: -1, 0, or 1 as appropriate :param integer_part: the part on the left side of the radix :type integer_part: list of int :param non_repeating_part: non repeating part on left side :type non_repeating_part: list of int :param repeating_part: repeating part :type repeating_part: list of int :param int base: base of the radix, must be at least 2 :returns: BasesValueError if invalid values :rtype: BasesValueError or NoneType Complexity: O(len(integer_part + non_repeating_part + repeating_part))
27,222
def _heappop_max(heap):
    lastelt = heap.pop()  # raises IndexError if heap is empty
    if heap:
        returnitem = heap[0]
        heap[0] = lastelt
        _siftup_max(heap, 0)
        return returnitem
    return lastelt
Maxheap version of a heappop.
27,223
def texts(self: object, fileids: str, plaintext: bool = True):
    for doc in self.docs(fileids):
        if plaintext:
            # strip bracketed citation info, e.g. "<Ach. Tat. 1.1.0>"
            doc = re.sub(r'<.+?>\s', '', doc)
        doc = doc.rstrip()
        yield doc
Returns the text content of a .tess file, i.e. removing the bracketed citation info (e.g. "<Ach. Tat. 1.1.0>")
27,224
def collect_iptable(self, tablename):
    modname = "iptable_" + tablename
    if self.check_ext_prog("grep -q %s /proc/modules" % modname):
        cmd = "iptables -t " + tablename + " -nvL"
        self.add_cmd_output(cmd)
When running the iptables command, it unfortunately auto-loads the modules before trying to get output. Some people explicitly don't want this, so check if the modules are loaded before running the command. If they aren't loaded, there can't possibly be any relevant rules in that table
27,225
def average(sequence, key):
    return sum(map(key, sequence)) / float(len(sequence))
Averages a sequence based on a key.
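A quick usage sketch (hypothetical data):

    points = [{'score': 1}, {'score': 2}, {'score': 6}]
    average(points, key=lambda d: d['score'])  # -> 3.0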
27,226
def put_file(self, in_path, out_path):
    vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
    if not os.path.exists(in_path):
        raise errors.AnsibleFileNotFound("file or module does not exist: %s" % in_path)
    data = file(in_path).read()
    data = base64.b64encode(data)
    data = dict(mode='put', data=data, out_path=out_path)
    data = utils.jsonify(data)
    data = utils.encrypt(self.key, data)
    self.socket.send(data)
    response = self.socket.recv()
    response = utils.decrypt(self.key, response)
    response = utils.parse_json(response)
transfer a file from local to remote
27,227
def lsa_twitter(cased_tokens):
    if cased_tokens is None:
        cased_tokens = ( + ).split()
        cased_tokens += [s + for s in cased_tokens]
        cased_tokens += .split()
    allcase_tokens = cased_tokens + [s.lower() for s in cased_tokens]
    allcase_tokens += [s.title() for s in cased_tokens]
    allcase_tokens += [s.upper() for s in cased_tokens]
    KEEP_TOKENS = allcase_tokens + [ + s for s in allcase_tokens]
    vocab_path = os.path.join(BIGDATA_PATH, )
    if os.path.isfile(vocab_path):
        print(.format(vocab_path))
        vocab = Dictionary.load(vocab_path)
        print(.format(len(vocab.dfs)))
    else:
        tweets_path = os.path.join(BIGDATA_PATH, )
        print(.format(tweets_path))
        tweets = read_csv(tweets_path)
        tweets = pd.np.array(tweets.text.str.split())
        with gzip.open(os.path.join(BIGDATA_PATH, ), ) as f:
            for tokens in tweets:
                f.write((.join(tokens) + ).encode())
        print(.format(len(tweets)))
        vocab = Dictionary(tweets, no_below=NO_BELOW, no_above=NO_ABOVE,
                           keep_tokens=set(KEEP_TOKENS))
        vocab.filter_extremes(no_below=NO_BELOW, no_above=NO_ABOVE,
                              keep_n=KEEP_N, keep_tokens=set(KEEP_TOKENS))
        print(.format(len(vocab.dfs)))
    gc.collect()
    lsa = LsiModel(tfidf[tweets], num_topics=200, id2word=vocab,
                   extra_samples=100, power_iters=2)
    return lsa
Latent Sentiment Analyis on random sampling of twitter search results for words listed in cased_tokens
27,228
def validate(self, data):
    validated = self._validated(data)
    errors = []
    for validator in self.additional_validators:
        if not validator(validated):
            errors.append(
                "%s invalidated by %s" % (validated, _get_repr(validator)))
    if errors:
        raise NotValid(*errors)
    if self.default is UNSPECIFIED:
        return validated
    if self.null_values is not UNSPECIFIED and validated in self.null_values:
        return self.default
    if validated is None:
        return self.default
    return validated
Validate data. Raise NotValid error for invalid data.
27,229
def select_many(self, collection_selector=identity, result_selector=identity):
    if self.closed():
        raise ValueError("Attempt to call select_many() on a closed "
                         "Queryable.")
    if not is_callable(collection_selector):
        raise TypeError("select_many() parameter projector={0} is not "
                        "callable".format(repr(collection_selector)))
    if not is_callable(result_selector):
        raise TypeError("select_many() parameter selector={selector} is "
                        " not callable".format(selector=repr(result_selector)))
    sequences = self.select(collection_selector)
    chained_sequence = itertools.chain.from_iterable(sequences)
    return self._create(chained_sequence).select(result_selector)
Projects each element of a sequence to an intermediate new sequence, flattens the resulting sequences into one sequence and optionally transforms the flattened sequence using a selector function. Note: This method uses deferred execution. Args: collection_selector: A unary function mapping each element of the source iterable into an intermediate sequence. The single argument of the collection_selector is the value of an element from the source sequence. The return value should be an iterable derived from that element value. The default collection_selector, which is the identity function, assumes that each element of the source sequence is itself iterable. result_selector: An optional unary function mapping the elements in the flattened intermediate sequence to corresponding elements of the result sequence. The single argument of the result_selector is the value of an element from the flattened intermediate sequence. The return value should be the corresponding value in the result sequence. The default result_selector is the identity function. Returns: A Queryable over a generated sequence whose elements are the result of applying the one-to-many collection_selector to each element of the source sequence, concatenating the results into an intermediate sequence, and then mapping each of those elements through the result_selector into the result sequence. Raises: ValueError: If this Queryable has been closed. TypeError: If either collection_selector or result_selector are not callable.
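A usage sketch (hedged: assumes an asq-style `query` initiator wrapping this Queryable API; names are illustrative):

    flat = query([[1, 2], [3, 4]]).select_many().to_list()
    # [1, 2, 3, 4] -- the default selectors just flatten
    doubled = query([[1, 2], [3, 4]]).select_many(result_selector=lambda x: x * 2).to_list()
    # [2, 4, 6, 8]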
27,230
def masses(self):
    buf = ffi.new("double[]", self.size)
    ims.spectrum_masses(self.ptr, buf)
    return list(buf)
:returns: peak masses :rtype: list of floats
27,231
def remove_entry_listener(self, registration_id):
    return self._stop_listening(
        registration_id,
        lambda i: replicated_map_remove_entry_listener_codec.encode_request(self.name, i))
Removes the specified entry listener. Returns silently if there is no such listener added before. :param registration_id: (str), id of registered listener. :return: (bool), ``true`` if registration is removed, ``false`` otherwise.
27,232
def _find_utmp():
    result = {}
    for utmp in ('/var/run/utmp', '/run/utmp'):
        try:
            result[os.stat(utmp).st_mtime] = utmp
        except Exception:
            pass
    if result:
        return result[sorted(result).pop()]
    else:
        return False
Figure out which utmp file to use when determining runlevel. Sometimes /var/run/utmp doesn't exist, /run/utmp is the new hotness.
27,233
def registerDisplay(func):
    setup()
    ref = weakref.ref(func)
    if ref not in _displayhooks:
        _displayhooks.append(ref)
Registers a function to the display hook queue to be called on hook. Look at the sys.displayhook documentation for more information. :param func | <callable>
27,234
def upgrade():
    op.create_table(
        ,
        sa.Column(, sa.DateTime(), nullable=False),
        sa.Column(, sa.DateTime(), nullable=False),
        sa.Column(, sqlalchemy_utils.types.uuid.UUIDType(), nullable=False),
        sa.Column(, sqlalchemy_utils.JSONType().with_variant(
            sa.dialects.postgresql.JSON(none_as_null=True), , ),
            nullable=True),
        sa.Column(, sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint()
    )
    op.create_table(
        ,
        sa.Column(, sa.DateTime(), autoincrement=False, nullable=True),
        sa.Column(, sa.DateTime(), autoincrement=False, nullable=True),
        sa.Column(, sqlalchemy_utils.types.uuid.UUIDType(),
                  autoincrement=False, nullable=False),
        sa.Column(, sqlalchemy_utils.JSONType().with_variant(
            sa.dialects.postgresql.JSON(none_as_null=True), , ),
            autoincrement=False, nullable=True),
        sa.Column(, sa.Integer(), autoincrement=False, nullable=True),
        sa.Column(, sa.BigInteger(), autoincrement=False, nullable=False),
        sa.Column(, sa.BigInteger(), nullable=True),
        sa.Column(, sa.SmallInteger(), nullable=False),
        sa.PrimaryKeyConstraint(, )
    )
    op.create_index(op.f(), , [], unique=False)
    op.create_index(op.f(), , [], unique=False)
    op.create_index(op.f(), , [], unique=False)
Upgrade database.
27,235
def pdf_link(self, inv_link_f, y, Y_metadata=None):
    return np.where(y == 1, inv_link_f, 1. - inv_link_f)
Likelihood function given inverse link of f. .. math:: p(y_{i}|\\lambda(f_{i})) = \\lambda(f_{i})^{y_{i}}(1-\\lambda(f_{i}))^{1-y_{i}} :param inv_link_f: latent variables inverse link of f. :type inv_link_f: Nx1 array :param y: data :type y: Nx1 array :param Y_metadata: Y_metadata not used in bernoulli :returns: likelihood evaluated for this point :rtype: float .. Note: Each y_i must be in {0, 1}
27,236
def scheme_host_port_prefix(self, scheme='http', host='localhost', port=None, prefix=None):
    uri = scheme + '://' + host
    if (port and not ((scheme == 'http' and port == 80) or
                      (scheme == 'https' and port == 443))):
        uri += ':' + str(port)
    if prefix:
        uri += '/' + prefix
    return uri
Return URI composed of scheme, server, port, and prefix.
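A usage sketch of the reconstructed helper (the `conn` object and the restored string literals are assumptions, not confirmed source):

    conn.scheme_host_port_prefix('https', 'example.com', 443, 'cimom')  # 'https://example.com/cimom' (default port elided)
    conn.scheme_host_port_prefix('http', 'example.com', 8080)           # 'http://example.com:8080'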
27,237
def yyparse(self, lexfile):
    temp = tempfile.gettempdir()
    self.outfile = temp + + .join(
        random.choice(string.ascii_uppercase + string.digits)
        for _ in range(5)) +
    self._create_automaton_from_regex(lexfile)
    states_num, delta = self._create_delta()
    states = self._create_states(states_num)
    accepted_states = self._read_accept_states()
    if self.alphabet != []:
        alphabet = self.alphabet
    else:
        alphabet = createalphabet()
    mma = DFA(alphabet)
    for state in states:
        if state != 0:
            for char in alphabet:
                nextstate = delta(state, char)
                mma.add_arc(state - 1, nextstate - 1, char)
            if state in accepted_states:
                mma[state - 1].final = True
    if os.path.exists(self.outfile):
        os.remove(self.outfile)
    return mma
Args: lexfile (str): Flex file to be parsed Returns: DFA: A dfa automaton
27,238
def get_new_pid(institute):
    number = '0001'
    prefix = 'p%s' % institute.name.replace(' ', '')[:4]
    found = True
    while found:
        try:
            Project.objects.get(pid=prefix + number)
            number = str(int(number) + 1)
            if len(number) == 1:
                number = '000' + number
            elif len(number) == 2:
                number = '00' + number
            elif len(number) == 3:
                number = '0' + number
        except Project.DoesNotExist:
            found = False
    return prefix + number
Return a new Project ID. Keyword arguments: institute -- Institute the project belongs to
27,239
def response_add(self, request, obj, post_url_continue=POST_URL_CONTINUE):
    if '_addanother' not in request.POST and '_popup' not in request.POST:
        request.POST['_continue'] = 1
    return super(ExportAdmin, self).response_add(request, obj, post_url_continue)
If we're adding, save must be "save and continue editing" Two exceptions to that workflow: * The user has pressed the 'Save and add another' button * We are adding a user in a popup
27,240
def concatenate_variables(scope, variables, container):
    input_names = []
    input_dims = []
    # op_type and attrs are computed in code elided from this row
    for variable in variables:
        if isinstance(variable.type, (Int64TensorType, Int64Type)):
            input_names.append(convert_integer_to_float(scope, variable, container))
        else:
            input_names.append(variable.full_name)
    concatenated_name = scope.get_unique_variable_name()
    container.add_node(op_type, input_names, concatenated_name,
                       op_domain=, **attrs)
    return concatenated_name
Allocates operators to build a float tensor by concatenating all input variables. Note that integer inputs are converted to floats before concatenation.
27,241
def formatter(self):
    formatter_chain = [
        LambdaLogMsgFormatters.colorize_errors,
        JSONMsgFormatter.format_json,
        KeywordHighlighter(self._filter_pattern).highlight_keywords,
    ]
    return LogsFormatter(self.colored, formatter_chain)
Creates and returns a Formatter capable of nicely formatting Lambda function logs Returns ------- LogsFormatter
27,242
def _set_logging(
        logger_name="colin",
        level=logging.INFO,
        handler_class=logging.StreamHandler,
        handler_kwargs=None,
        format=,
        date_format=):
    if level != logging.NOTSET:
        logger = logging.getLogger(logger_name)
        logger.setLevel(level)
        if not [x for x in logger.handlers if isinstance(x, handler_class)]:
            handler_kwargs = handler_kwargs or {}
            handler = handler_class(**handler_kwargs)
            handler.setLevel(level)
            formatter = logging.Formatter(format, date_format)
            handler.setFormatter(formatter)
            logger.addHandler(handler)
Set personal logger for this library. :param logger_name: str, name of the logger :param level: int, see logging.{DEBUG,INFO,ERROR,...}: level of logger and handler :param handler_class: logging.Handler instance, default is StreamHandler (/dev/stderr) :param handler_kwargs: dict, keyword arguments to handler's constructor :param format: str, formatting style :param date_format: str, date style in the logs
27,243
def _add_instruction(self, instruction, value):
    if (instruction == 'LABEL' or instruction == 'ENV') and len(value) == 2:
        new_line = instruction + ' ' + '='.join(map(quote, value)) + '\n'
    else:
        new_line = '{0} {1}\n'.format(instruction, value)
    if new_line:
        lines = self.lines
        if not lines[len(lines) - 1].endswith('\n'):
            new_line = '\n' + new_line
        lines += new_line
        self.lines = lines
:param instruction: instruction name to be added :param value: instruction value
27,244
def json(self) -> dict:
    content = {}
    content['name'] = self.name
    content['callback'] = self.callback
    self.control_json['content'] = content
    return self.control_json
Returns json compatible state of the Button instance. Returns: control_json: Json representation of Button state.
27,245
def filter_data(self, field, filter_value, filter_operator, field_converter=None):
    data = []
    if self._indexes.get(field) is not None:
        data = self._index_filter(
            self._indexes.get(field), filter_value, filter_operator, field_converter)
    return set(data)
Filter the data given the provided. Args: field (string): The field to filter on. filter_value (string | list): The value to match. filter_operator (string): The operator for comparison. field_converter (method): A method used to convert the field before comparison. Returns: (set): List of matching data objects
27,246
def _reload(self, force=False):
    self._config_map = dict()
    self._registered_env_keys = set()
    self.__reload_sources(force)
    self.__load_environment_keys()
    self.verify()
    self._clear_memoization()
Reloads the configuration from the file and environment variables. Useful if using `os.environ` instead of this class' `set_env` method, or if the underlying configuration file is changed externally.
27,247
def parse_port_from_tensorboard_output(tensorboard_output: str) -> int:
    search = re.search("at http://[^:]+:([0-9]+)", tensorboard_output)
    if search is not None:
        port = search.group(1)
        return int(port)
    else:
        raise UnexpectedOutputError(
            tensorboard_output,
            "Address and port where Tensorboard has started,"
            " e.g. TensorBoard 1.8.0 at http://martin-VirtualBox:36869")
Parse tensorboard port from its outputted message. :param tensorboard_output: Output message of Tensorboard in format TensorBoard 1.8.0 at http://martin-VirtualBox:36869 :return: Returns the port TensorBoard is listening on. :raise UnexpectedOutputError
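A worked example against the message format quoted in the docstring:

    output = "TensorBoard 1.8.0 at http://martin-VirtualBox:36869 (Press CTRL+C to quit)"
    parse_port_from_tensorboard_output(output)  # -> 36869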
27,248
def validate(self):
    for name, field in self._fields.items():
        v = getattr(self, name)
        if v is None and not self._values[name].explicit and field.has_default:
            v = field.get_default()
        val = field.validate(v)
        setattr(self, name, val)
Cleans and validates the field values
27,249
def get_alexa_rankings(self, domains):
    api_name = 'alexa_rankings'
    (all_responses, domains) = self._bulk_cache_lookup(api_name, domains)
    responses = self._request_reports(domains)
    for domain, response in zip(domains, responses):
        xml_response = self._extract_response_xml(domain, response)
        if self._cache:
            self._cache.cache_value(api_name, domain, response)
        all_responses[domain] = xml_response
    return all_responses
Retrieves the most recent Alexa rankings for a set of domains. Args: domains: list of string domains. Returns: A dict with the domain as key and the Alexa report as value.
27,250
def ll(self, *args, **kwargs):
    _check_roSet(self, kwargs, 'll')
    lbd = self._lbd(*args, **kwargs)
    return lbd[:, 0]
NAME: ll PURPOSE: return Galactic longitude INPUT: t - (optional) time at which to get ll; obs=[X,Y,Z] - (optional) position of observer (in kpc) (default=Object-wide default) OR Orbit object that corresponds to the orbit of the observer; Y is ignored and always assumed to be zero; ro= distance in kpc corresponding to R=1. (default=Object-wide default) OUTPUT: l(t) HISTORY: 2011-02-23 - Written - Bovy (NYU)
27,251
def erase_screen(self):
    self.vt100_output.erase_screen()
    self.vt100_output.cursor_goto(0, 0)
    self.vt100_output.flush()
Erase output screen.
27,252
def _join(lst, key, sep=";"):
    return sep.join([d[key] for d in lst if d[key]])
Auxiliary function to join the same element of a list of dictionaries, skipping elements whose value is falsy (e.g. None).
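A quick sketch (hypothetical rows):

    rows = [{'gene': 'BRCA1'}, {'gene': None}, {'gene': 'TP53'}]
    _join(rows, 'gene')  # -> 'BRCA1;TP53'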
27,253
def _read_audio_data(self, file_path):
    try:
        self.log(u"Reading audio data...")
        audio_file = AudioFile(
            file_path=file_path,
            file_format=self.OUTPUT_AUDIO_FORMAT,
            rconf=self.rconf,
            logger=self.logger
        )
        audio_file.read_samples_from_file()
        self.log([u"Duration of '%s': %f", file_path, audio_file.audio_length])
        self.log(u"Reading audio data... done")
        return (True, (
            audio_file.audio_length,
            audio_file.audio_sample_rate,
            audio_file.audio_format,
            audio_file.audio_samples
        ))
    except (AudioFileUnsupportedFormatError, OSError) as exc:
        self.log_exc(u"An unexpected error occurred while reading audio data", exc, True, None)
        return (False, None)
Read audio data from file. :rtype: tuple (True, (duration, sample_rate, codec, data)) or (False, None) on exception
27,254
def _makeTags(tagStr, xml, suppress_LT=Suppress("<"), suppress_GT=Suppress(">")):
    if isinstance(tagStr, basestring):
        resname = tagStr
        tagStr = Keyword(tagStr, caseless=not xml)
    else:
        resname = tagStr.name

    tagAttrName = Word(alphas, alphanums + "_-:")
    if xml:
        tagAttrValue = dblQuotedString.copy().setParseAction(removeQuotes)
        openTag = (suppress_LT
                   + tagStr("tag")
                   + Dict(ZeroOrMore(Group(tagAttrName + Suppress("=") + tagAttrValue)))
                   + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/')
                   + suppress_GT)
    else:
        tagAttrValue = quotedString.copy().setParseAction(removeQuotes) | Word(printables, excludeChars=">")
        openTag = (suppress_LT
                   + tagStr("tag")
                   + Dict(ZeroOrMore(Group(tagAttrName.setParseAction(downcaseTokens)
                                           + Optional(Suppress("=") + tagAttrValue))))
                   + Optional("/", default=[False])("empty").setParseAction(lambda s, l, t: t[0] == '/')
                   + suppress_GT)
    closeTag = Combine(_L("</") + tagStr + ">", adjacent=False)

    openTag.setName("<%s>" % resname)
    openTag.addParseAction(lambda t: t.__setitem__(
        "start" + "".join(resname.replace(":", " ").title().split()), t.copy()))
    closeTag = closeTag(
        "end" + "".join(resname.replace(":", " ").title().split())).setName("</%s>" % resname)
    openTag.tag = resname
    closeTag.tag = resname
    openTag.tag_body = SkipTo(closeTag())
    return openTag, closeTag
Internal helper to construct opening and closing tag expressions, given a tag name
27,255
def visit_Num(self, node: ast.Num) -> Union[int, float]:
    result = node.n
    self.recomputed_values[node] = result
    return result
Recompute the value as the number at the node.
27,256
def get_log_lookup_session(self, proxy):
    if not self.supports_log_lookup():
        raise errors.Unimplemented()
    return sessions.LogLookupSession(proxy=proxy, runtime=self._runtime)
Gets the ``OsidSession`` associated with the log lookup service. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.logging.LogLookupSession) - a ``LogLookupSession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_log_lookup()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_log_lookup()`` is ``true``.*
27,257
async def fetch_emoji(self, emoji_id):
    data = await self._state.http.get_custom_emoji(self.id, emoji_id)
    return Emoji(guild=self, state=self._state, data=data)
|coro| Retrieves a custom :class:`Emoji` from the guild. .. note:: This method is an API call. For general usage, consider iterating over :attr:`emojis` instead. Parameters ------------- emoji_id: :class:`int` The emoji's ID. Raises --------- NotFound The emoji requested could not be found. HTTPException An error occurred fetching the emoji. Returns -------- :class:`Emoji` The retrieved emoji.
27,258
def add_tweets(self, url, last_modified, tweets):
    try:
        self.cache[url] = {"last_modified": last_modified, "tweets": tweets}
        self.mark_updated()
        return True
    except TypeError:
        return False
Adds new tweets to the cache.
27,259
def enableEditing(self, editable=True):
    self.editable = editable
    self._columnDtypeModel.setEditable(self.editable)
Sets the DataFrameModel and columnDtypeModel's editable properties. :param editable: bool defaults to True, False disables most editing methods. :return: None
27,260
def minimum_image(self):
    if self.box_vectors is None:
        raise ValueError()
    else:
        self.r_array = minimum_image(self.r_array, self.box_vectors.diagonal())
    return self
Align the system according to the minimum image convention
27,261
def add_property(self, property_):
    property_.set_href_prefix(self.href_prefix)
    self.properties[property_.name] = property_
Add a property to this thing. property_ -- property to add
27,262
def M200(self, Rs, rho0, c):
    return 4 * np.pi * rho0 * Rs ** 3 * (np.log(1. + c) - c / (1. + c))
M(R_200) calculation for NFW profile :param Rs: scale radius :type Rs: float :param rho0: density normalization (characteristic density) :type rho0: float :param c: concentration :type c: float [4,40] :return: M(R_200) density
27,263
def python_like_mod_finder(import_line, alt_path=None, stop_token=None):
    if stop_token and '.' in stop_token:
        stop_token = stop_token.split('.')[-1]
    tokens = re.split(r'\W', import_line)
    if tokens[0] in ['import', 'from']:
        try:
            _, path, _ = imp.find_module(tokens[1])
        except ImportError:
            if alt_path:
                path = osp.join(alt_path, tokens[1])
            else:
                path = None
        if path:
            path = osp.realpath(path)
            if not tokens[1] == stop_token:
                for part in tokens[2:]:
                    if part in ['import', 'cimport', 'as']:
                        break
                    path = osp.join(path, part)
                    if part == stop_token:
                        break
            # from package import module
            if stop_token and stop_token not in path:
                for ext in python_like_exts():
                    fname = '%s%s' % (stop_token, ext)
                    if osp.exists(osp.join(path, fname)):
                        return osp.join(path, fname)
            # from module import name
            for ext in python_like_exts():
                fname = '%s%s' % (path, ext)
                if osp.exists(fname):
                    return fname
            # if it is a file, return it
            if osp.exists(path) and not osp.isdir(path):
                return path
            # default to the package file
            path = osp.join(path, '__init__.py')
            if osp.exists(path):
                return path
Locate a module path based on an import line in a python-like file. import_line is the line of source code containing the import. alt_path specifies an alternate base path for the module. stop_token specifies the desired name to stop on. This is used to find the path to python-like modules (e.g. cython and enaml) for a goto definition.
27,264
def add_perf_task(task, auth, url):
    add_perf_task_url = "/imcrs/perf/task"
    f_url = url + add_perf_task_url
    payload = json.dumps(task)
    response = requests.post(f_url, data=payload, auth=auth, headers=HEADERS)
    try:
        return response.status_code
    except requests.exceptions.RequestException as error:
        return "Error:\n" + str(error) + " add_perf_task: An Error has occured"
Function takes a python dict containing all necessary fields for a performance task, transforms the dict into JSON and issues a RESTFUL call to create the performance task. :param task: dictionary containing all required fields for performance tasks :param auth: requests auth object #usually auth.creds from auth pyhpeimc.auth.class :param url: base url of IMC RS interface #usually auth.url from pyhpeimc.auth.authclass :return: 204 :rtype: str >>> from pyhpeimc.auth import * >>> from pyhpeimc.plat.perf import * >>> auth = IMCAuth("http://", "10.101.0.203", "8080", "admin", "admin") >>> new_task = {'indexDesc': '1.3.6.1.4.1.9.9.13.1.3.1.3', 'indexType': '[index1[0]:ciscoEnvMonTemperatureStatusValue:1:0]', 'itemFunction': '1.3.6.1.4.1.9.9.13.1.3.1.3', 'itemName': 'Cisco_Temperature', 'selectDefaultUnit': '400', 'unit': 'Celsius'} >>> new_perf_task = add_perf_task(new_task, auth.creds, auth.url)
27,265
def get_subtree(self, name):
    if self._validate_node_name(name):
        raise RuntimeError("Argument `name` is not valid")
    self._node_in_tree(name)
    return self._get_subtree(name)
r""" Get all node names in a sub-tree. :param name: Sub-tree root node name :type name: :ref:`NodeName` :rtype: list of :ref:`NodeName` :raises: * RuntimeError (Argument \`name\` is not valid) * RuntimeError (Node *[name]* not in tree) Using the same example tree created in :py:meth:`ptrie.Trie.add_nodes`:: >>> from __future__ import print_function >>> import docs.support.ptrie_example, pprint >>> tobj = docs.support.ptrie_example.create_tree() >>> print(tobj) root ├branch1 (*) │├leaf1 ││└subleaf1 (*) │└leaf2 (*) │ └subleaf2 └branch2 >>> pprint.pprint(tobj.get_subtree('root.branch1')) ['root.branch1', 'root.branch1.leaf1', 'root.branch1.leaf1.subleaf1', 'root.branch1.leaf2', 'root.branch1.leaf2.subleaf2']
27,266
def _unique(list_of_dicts):
    unique_list = []
    for ele in list_of_dicts:
        if ele not in unique_list:
            unique_list.append(ele)
    return unique_list
Returns an unique list of dictionaries given a list that may contain duplicates.
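A quick sketch; order of first appearance is preserved:

    _unique([{'a': 1}, {'b': 2}, {'a': 1}])  # -> [{'a': 1}, {'b': 2}]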
27,267
def generate_np(self, x_val, **kwargs):
    if self.sess is None:
        raise ValueError("Cannot use `generate_np` when no `sess` was"
                         " provided")
    packed = self.construct_variables(kwargs)
    fixed, feedable, _, hash_key = packed
    if hash_key not in self.graphs:
        self.construct_graph(fixed, feedable, x_val, hash_key)
    else:
        # remove the None arguments; they are just left blank
        for k in list(feedable.keys()):
            if feedable[k] is None:
                del feedable[k]
    x, new_kwargs, x_adv = self.graphs[hash_key]
    feed_dict = {x: x_val}
    for name in feedable:
        feed_dict[new_kwargs[name]] = feedable[name]
    return self.sess.run(x_adv, feed_dict)
Generate adversarial examples and return them as a NumPy array. Sub-classes *should not* implement this method unless they must perform special handling of arguments. :param x_val: A NumPy array with the original inputs. :param **kwargs: optional parameters used by child classes. :return: A NumPy array holding the adversarial examples.
27,268
def init_threads(tf_session):
    coord = tf.train.Coordinator()
    threads = list()
    for qr in tf.get_collection(tf.GraphKeys.QUEUE_RUNNERS):
        threads.extend(qr.create_threads(tf_session, coord=coord, daemon=True, start=True))
    return threads, coord
Starts threads running
27,269
def connect(self):
    sock = socket.create_connection((self.host, self.port), self.timeout)
    if not USE_STDLIB_SSL:
        ssl_ctx = configure_pyopenssl_context(self.credentials)
        cxn = OpenSSL.SSL.Connection(ssl_ctx, sock)
        cxn.set_connect_state()
        # loop through the handshake, dealing with non-blocking reads
        while True:
            try:
                cxn.do_handshake()
            except OpenSSL.SSL.WantReadError:
                select.select([sock], [], [])
                continue
            except OpenSSL.SSL.Error as e:
                raise SecurityError('bad handshake - ' + str(e))
            break
        self.sock = RiakWrappedSocket(cxn, sock)
        self.credentials._check_revoked_cert(self.sock)
    else:
        ssl_ctx = configure_ssl_context(self.credentials)
        if self.timeout is not None:
            sock.settimeout(self.timeout)
        self.sock = ssl.SSLSocket(sock=sock,
                                  keyfile=self.credentials.pkey_file,
                                  certfile=self.credentials.cert_file,
                                  cert_reqs=ssl.CERT_REQUIRED,
                                  ca_certs=self.credentials.cacert_file,
                                  ciphers=self.credentials.ciphers,
                                  server_hostname=self.host)
        self.sock.context = ssl_ctx
Connect to a host on a given (SSL) port using PyOpenSSL.
27,270
def decode(self, encoded):
    encoded = super().decode(encoded)
    return self.tokenizer.decode([self.itos[index] for index in encoded])
Decodes a tensor into a sequence. Args: encoded (torch.Tensor): Encoded sequence. Returns: str: Sequence decoded from ``encoded``.
27,271
def execute(self, action):
    self.logger.debug(, action)
    reward = self.wrapped.execute(action)
    if reward:
        self.total_reward += reward
    self.steps += 1
    self.logger.debug(, reward or 0)
    self.logger.debug(, self.total_reward / self.steps)
    return reward
Execute the indicated action within the environment and return the resulting immediate reward dictated by the reward program. Usage: immediate_reward = scenario.execute(selected_action) Arguments: action: The action to be executed within the current situation. Return: A float, the reward received for the action that was executed, or None if no reward is offered.
27,272
def _save_upload_state_to_file(self):
    if os.access(self.file_dir, os.W_OK | os.R_OK | os.X_OK):
        save_file = self.file + '.upload'
        data = {
            'upload_token': self.upload_token,
            'upload_server_ip': self.upload_server_ip
        }
        with open(save_file, 'w') as f:
            json.dump(data, f)
if create and create_file has execute, save upload state to file for next resume upload if current upload process is interrupted.
27,273
def append_dict_key_value(in_dict, keys, value,
                          delimiter=DEFAULT_TARGET_DELIM, ordered_dict=False):
    dict_pointer, last_key = _dict_rpartition(in_dict, keys,
                                              delimiter=delimiter,
                                              ordered_dict=ordered_dict)
    if last_key not in dict_pointer or dict_pointer[last_key] is None:
        dict_pointer[last_key] = []
    try:
        dict_pointer[last_key].append(value)
    except AttributeError:
        raise SaltInvocationError(
            .format(type(dict_pointer[last_key])))
    return in_dict
Ensures that in_dict contains the series of recursive keys defined in keys. Also appends `value` to the list that is at the end of `in_dict` traversed with `keys`. :param dict in_dict: The dictionary to work with :param str keys: The delimited string with one or more keys. :param any value: The value to append to the nested dict-key. :param str delimiter: The delimiter to use in `keys`. Defaults to ':'. :param bool ordered_dict: Create OrderedDicts if keys are missing. Default: create regular dicts. :return dict: Though it updates in_dict in-place.
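A usage sketch (hypothetical nested dict):

    data = {'foo': {'bar': ['baz']}}
    append_dict_key_value(data, 'foo:bar', 'qux')
    # -> {'foo': {'bar': ['baz', 'qux']}}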
27,274
def norm(self, x):
    if self.exponent == 2.0:
        return float(np.sqrt(self.const) * _norm_default(x))
    elif self.exponent == float('inf'):
        return float(self.const * _pnorm_default(x, self.exponent))
    else:
        return float((self.const ** (1 / self.exponent) *
                      _pnorm_default(x, self.exponent)))
Return the weighted norm of ``x``. Parameters ---------- x1 : `NumpyTensor` Tensor whose norm is calculated. Returns ------- norm : float The norm of the tensor.
27,275
def remove_phenotype(self, ind_obj, phenotypes=None):
    if phenotypes is None:
        logger.info("delete all phenotypes related to %s", ind_obj.ind_id)
        self.query(PhenotypeTerm).filter_by(ind_id=ind_obj.id).delete()
    else:
        for term in ind_obj.phenotypes:
            if term.phenotype_id in phenotypes:
                logger.info("delete phenotype: %s from %s",
                            term.phenotype_id, ind_obj.ind_id)
                self.session.delete(term)
    logger.debug()
    self.save()
    for case_obj in ind_obj.cases:
        self.update_hpolist(case_obj)
Remove multiple phenotypes from an individual.
27,276
def _import_plugin(module_name, plugin_path, modnames, modlist):
    if module_name in modnames:
        return
    try:
        # First add a mock module so the plugin can find its locale on import
        mock = _ModuleMock()
        mock.LOCALEPATH = osp.join(plugin_path, module_name, 'locale')
        sys.modules[module_name] = mock
        if osp.isdir(osp.join(plugin_path, module_name)):
            module = _import_module_from_path(module_name, plugin_path)
        else:
            module = None
        # Then restore the actual loaded module instead of the mock
        if module and getattr(module, 'PLUGIN_CLASS', False):
            sys.modules[module_name] = module
            modlist.append(module)
            modnames.append(module_name)
    except Exception:
        sys.stderr.write("ERROR: 3rd party plugin import failed for "
                         "`{0}`\n".format(module_name))
        traceback.print_exc(file=sys.stderr)
Import the plugin `module_name` from `plugin_path`, add it to `modlist` and adds its name to `modnames`.
27,277
def generate_seeds(num, root_seed, secret):
    if num < 0:
        raise HeartbeatError('%s is not a valid number of seeds.' % num)
    if secret is None:
        raise HeartbeatError('secret can not be None.')
    seeds = []
    try:
        tmp_seed = hashlib.sha256(root_seed).digest()
    except TypeError:
        tmp_seed = hashlib.sha256(str(root_seed).encode()).digest()
    for x in range(num):
        seeds.append(tmp_seed)
        h = hashlib.sha256(tmp_seed)
        h.update(secret)
        tmp_seed = h.digest()
    return seeds
Deterministically generate list of seeds from a root seed. :param num: Number of seeds to generate as int :param root_seed: Seed to start off with. :param secret: Secret mixed into each hashing round. :return: seed values as a list of length num
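A usage sketch; the chained-SHA256 construction makes the output deterministic for a given (root_seed, secret) pair:

    seeds = generate_seeds(3, b'root seed', b'secret')
    len(seeds)                                           # 3
    seeds == generate_seeds(3, b'root seed', b'secret')  # True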
27,278
def assert_operations(self, *args):
    if not set(args).issubset(self.allowed_operations):
        raise http.exceptions.Forbidden()
Asserts that the requested operations are allowed in this context.
27,279
def start(self):
    assert self._thread is None
    self._thread = Thread(target=self._start_io_loop)
    self._thread.daemon = True
    self._thread.start()
    self._ready.wait()
Start IOLoop in daemonized thread.
27,280
def _create_header(info, format, encoding, errors):
    parts = [
        stn(info.get("name", ""), 100, encoding, errors),
        itn(info.get("mode", 0) & 0o7777, 8, format),
        itn(info.get("uid", 0), 8, format),
        itn(info.get("gid", 0), 8, format),
        itn(info.get("size", 0), 12, format),
        itn(info.get("mtime", 0), 12, format),
        b"        ",  # checksum field (8 spaces, collapsed in extraction)
        info.get("type", REGTYPE),
        stn(info.get("linkname", ""), 100, encoding, errors),
        info.get("magic", POSIX_MAGIC),
        stn(info.get("uname", ""), 32, encoding, errors),
        stn(info.get("gname", ""), 32, encoding, errors),
        itn(info.get("devmajor", 0), 8, format),
        itn(info.get("devminor", 0), 8, format),
        stn(info.get("prefix", ""), 155, encoding, errors)
    ]
    buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
    chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
    buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:]
    return buf
Return a header block. info is a dictionary with file information, format must be one of the *_FORMAT constants.
27,281
def _render(self, request, template=None, status=200, context={}, headers={},
            prefix_template_path=True):
    format = self._get_format(request)
    ...
    for header, value in headers.items():
        response[header] = value
    return response
Render a HTTP response. :param request: A django.http.HttpRequest instance. :param template: A string describing the path to a template. :param status: An integer describing the HTTP status code to respond with. :param context: A dictionary describing variables to populate the template with. :param headers: A dictionary describing HTTP headers. :param prefix_template_path: A boolean describing whether to prefix the template with the view's template path. Please note that ``template`` must not specify an extension, as one will be appended according to the request format. For example, a value of ``blog/posts/index`` would populate ``blog/posts/index.html`` for requests that query the resource's HTML representation. If no template that matches the request format exists at the given location, or if ``template`` is ``None``, Respite will attempt to serialize the template context automatically. You can change the way your models are serialized by defining ``serialize`` methods that return a dictionary:: class NuclearMissile(models.Model): serial_number = models.IntegerField() is_armed = models.BooleanField() launch_code = models.IntegerField() def serialize(self): return { 'serial_number': self.serial_number, 'is_armed': self.is_armed } If the request format is not supported by the view (as determined by the ``supported_formats`` property or a specific view's ``override_supported_formats`` decorator), this function will yield HTTP 406 Not Acceptable.
27,282
def rename(self, new_name, **kwargs):
    if not isinstance(new_name, string_type):
        raise TypeError("new_name must be an "
                        "instance of %s" % (string_type.__name__,))
    if not new_name or ".." in new_name:
        raise InvalidName("collection names cannot be empty")
    if new_name[0] == "." or new_name[-1] == ".":
        raise InvalidName("collection names must not start or end with '.'")
    if "$" in new_name and not new_name.startswith("oplog.$main"):
        raise InvalidName("collection names must not contain '$'")
    new_name = "%s.%s" % (self.__database.name, new_name)
    cmd = SON([("renameCollection", self.__full_name), ("to", new_name)])
    with self._socket_for_writes() as sock_info:
        if sock_info.max_wire_version >= 5 and self.write_concern:
            cmd['writeConcern'] = self.write_concern.document
        cmd.update(kwargs)
        sock_info.command('admin', cmd, parse_write_concern_error=True)
Rename this collection. If operating in auth mode, client must be authorized as an admin to perform this operation. Raises :class:`TypeError` if `new_name` is not an instance of :class:`basestring` (:class:`str` in python 3). Raises :class:`~pymongo.errors.InvalidName` if `new_name` is not a valid collection name. :Parameters: - `new_name`: new name for this collection - `**kwargs` (optional): additional arguments to the rename command may be passed as keyword arguments to this helper method (i.e. ``dropTarget=True``) .. note:: The :attr:`~pymongo.collection.Collection.write_concern` of this collection is automatically applied to this operation when using MongoDB >= 3.4. .. versionchanged:: 3.4 Apply this collection's write concern automatically to this operation when connected to MongoDB >= 3.4.
27,283
def get_idx_by_name(self, name):
    name = name.lower()
    for key, val in six.iteritems(type(self)._static_entries):
        if val.name() == name:
            return key
    for idx, val in enumerate(self._dynamic_table):
        if val.name() == name:
            return type(self)._static_entries_last_idx + idx + 1
    return None
get_idx_by_name returns the index of a matching registered header. This implementation will prefer returning a static entry index whenever possible. If multiple matching header names are found in the static table, it is guaranteed that the first entry (lowest index number) will be returned. If no matching header is found, this method returns None.
27,284
def get_hub():
    try:
        hub = _local.hub
    except AttributeError:
        assert fibers.current().parent is None
        hub = _local.hub = Hub()
    return hub
Return the instance of the hub.
27,285
def check_title_match(expected_title, pa11y_results, logger):
    if not pa11y_results:
        return
    title_errs = [err for err in pa11y_results if err["context"].startswith("<title")]
    for err in title_errs:
        title_elmt = html.fragment_fromstring(err["context"])
        elided_title = title_elmt.text.strip()
        if elided_title.endswith("..."):
            pa11y_title = elided_title[0:-4].strip()
        else:
            pa11y_title = elided_title
        if pa11y_title not in expected_title:
            msg = (
                ...
            ).format(
                scrapy_title=expected_title,
                elided_title=elided_title,
            )
            logger.error(msg)
Check if Scrapy reports any issue with the HTML <title> element. If so, compare that <title> element to the title that we got in the A11yItem. If they don't match, something is screwy, and pa11y isn't parsing the page that we expect.
27,286
def get_pids_in_revision_chain(client, did):
    def _req(p):
        return d1_common.xml.get_req_val(p)

    def _opt(p, a):
        return d1_common.xml.get_opt_val(p, a)

    sysmeta_pyxb = client.getSystemMetadata(did)
    # walk back to the head of the chain
    while _opt(sysmeta_pyxb, 'obsoletes'):
        sysmeta_pyxb = client.getSystemMetadata(_opt(sysmeta_pyxb, 'obsoletes'))
    chain_pid_list = [_req(sysmeta_pyxb.identifier)]
    # then walk forward, collecting PIDs
    while _opt(sysmeta_pyxb, 'obsoletedBy'):
        sysmeta_pyxb = client.getSystemMetadata(_opt(sysmeta_pyxb, 'obsoletedBy'))
        chain_pid_list.append(_req(sysmeta_pyxb.identifier))
    return chain_pid_list
Args: client: d1_client.cnclient.CoordinatingNodeClient or d1_client.mnclient.MemberNodeClient. did : str SID or a PID of any object in a revision chain. Returns: list of str: All PIDs in the chain. The returned list is in the same order as the chain. The initial PID is typically obtained by resolving a SID. If the given PID is not in a chain, a list containing the single object is returned.
27,287
def GetEntries(self, parser_mediator, top_level=None, **unused_kwargs):
    for entry in top_level:
        datetime_value = entry.get('date', None)
        package_identifiers = entry.get('packageIdentifiers', [])
        if not datetime_value or not package_identifiers:
            continue
        display_name = entry.get('displayName', '<UNKNOWN>')
        display_version = entry.get('displayVersion', '<DISPLAY_VERSION>')
        process_name = entry.get('processName', '<PROCESS_NAME>')
        package_identifiers = ', '.join(package_identifiers)
        event_data = plist_event.PlistTimeEventData()
        event_data.desc = (
            'Installation of [{0:s} {1:s}] using [{2:s}]. Packages: '
            '{3:s}.').format(
                display_name, display_version, process_name,
                package_identifiers)
        event_data.key = ''
        event_data.root = '/item'
        event = time_events.PythonDatetimeEvent(
            datetime_value, definitions.TIME_DESCRIPTION_WRITTEN)
        parser_mediator.ProduceEventWithEventData(event, event_data)
Extracts relevant install history entries. Args: parser_mediator (ParserMediator): mediates interactions between parsers and other components, such as storage and dfvfs. top_level (dict[str, object]): plist top-level key.
27,288
def get_wake_on_modem():
    ret = salt.utils.mac_utils.execute_return_result(
        'systemsetup -getwakeonmodem')
    return salt.utils.mac_utils.validate_enabled(
        salt.utils.mac_utils.parse_return(ret)) == 'on'
Displays whether 'wake on modem' is on or off if supported :return: A string value representing the "wake on modem" settings :rtype: str CLI Example: .. code-block:: bash salt '*' power.get_wake_on_modem
27,289
def show_csrs():
    data = salt.utils.http.query(
        '{0}/csr'.format(_base_url()),
        status=True,
        decode=True,
        decode_type='json',
        header_dict={'X-DC-DEVKEY': _api_key()},
    )
    status = data['status']
    if six.text_type(status).startswith('4') or six.text_type(status).startswith('5'):
        raise CommandExecutionError(
            'There was an API error: {0}'.format(data['error'])
        )
    return data.get('dict', {})
Show certificate requests for this API key CLI Example: .. code-block:: bash salt-run digicert.show_csrs
27,290
def check_instance(function):
    def wrapper(self, *args, **kwargs):
        func_trans = {
            "commit": manager.Manager,
            "compare_config": manager.Manager,
            "commit_check": manager.Manager,
            "device_info": manager.Manager,
            "diff_config": manager.Manager,
            "health_check": manager.Manager,
            "interface_errors": manager.Manager,
            "op_cmd": paramiko.client.SSHClient,
            "shell_cmd": paramiko.client.SSHClient,
            "scp_pull": paramiko.client.SSHClient,
            "scp_push": paramiko.client.SSHClient
        }
        if self.username == "root" and function.__name__ == "op_cmd":
            if not self._session:
                self.conn_type = "paramiko"
                self.connect()
            if not self._shell:
                self.conn_type = "root"
                self.connect()
            self.shell_to_cli()
        if isinstance(self._session, func_trans[function.__name__]):
            if not isinstance(self._scp, SCPClient):
                self.conn_type = "scp"
                self.connect()
        else:
            self.disconnect()
            if function.__name__ == "op_cmd":
                self.conn_type = "paramiko"
            elif function.__name__ in ["scp_pull", "scp_push"]:
                self.conn_type = "scp"
            else:
                self.conn_type = "ncclient"
            self.connect()
        return function(self, *args, **kwargs)
    return wrapper
Wrapper that tests the type of _session. Purpose: This decorator function is used by all functions within | the Jaide class that interact with a device to ensure the | proper session type is in use. If it is not, it will | attempt to migrate _session to that type before moving | to the originally requested function. | > **NOTE:** This function is a decorator, and should not be | > used directly. All other methods in this class that touch | > the Junos device are wrapped by this function to ensure the | > proper connection type is used. @param function: the function that is being wrapped around @type function: function @returns: the originally requested function @rtype: function
27,291
def _append_callback_id(ids, obj, callback_id):
    if obj not in ids:
        ids[obj] = []
    ids[obj].append(callback_id)
Helper function adding a callback ID to the IDs dict. The callback ids dict maps an object to event callback ids. :param ids: dict of callback IDs to update :param obj: one of the keys of REGISTER_FUNCTIONS :param callback_id: the result of _register_callback
27,292
def requires_submit(func):
    @functools.wraps(func)
    def _wrapper(self, *args, **kwargs):
        if self._future is None:
            raise JobError("Job not submitted yet!. You have to .submit() first!")
        return func(self, *args, **kwargs)
    return _wrapper
Decorator to ensure that a submit has been performed before calling the method. Args: func (callable): test function to be decorated. Returns: callable: the decorated function.
27,293
def make_polynomial(degree=3, n_samples=100, bias=0.0, noise=0.0,
                    return_coefs=False, random_state=None):
    generator = check_random_state(random_state)
    coefs = generator.randn(degree + 1)
    pows = np.arange(degree + 1)
    poly = np.vectorize(lambda x: np.sum(coefs * x ** pows))
    X, y = make_regression(poly, n_samples=n_samples, bias=bias,
                           noise=noise, random_state=random_state)
    if return_coefs:
        return X, y, coefs
    return X, y
Generate a noisy polynomial for a regression problem Examples -------- >>> X, y, coefs = make_polynomial(degree=3, n_samples=200, noise=.5, ... return_coefs=True, random_state=1)
27,294
def add_method_model(self, func, name=None, description=None, owner=None):
    if name is None:
        name = func.__name__
    method = MethodModel.from_callable(func, description)
    self._add_field(owner, name, method, func)
    return method
Register a function to be added to the block
27,295
def _filter_vcf(out_file):
    in_file = out_file.replace(".vcf", "-ori.vcf")
    FILTER_line = (...)
    SOMATIC_line = ...
    if not utils.file_exists(in_file):
        shutil.move(out_file, in_file)
        with file_transaction(out_file) as tx_out_file:
            with open(in_file) as in_handle, open(tx_out_file, "w") as out_handle:
                for line in in_handle:
                    if line.startswith("..."):  # header patterns lost in extraction
                        normal_name = line.strip().split("=")[1]
                    if line.startswith("..."):
                        tumor_name = line.strip().split("=")[1]
                    if line.startswith("..."):
                        line = line.replace("Normal", normal_name)
                        line = line.replace("Tumour", tumor_name)
                    if line.startswith("..."):
                        line = line.replace("ID=FS", "ID=RNT")
                    if line.find("FS=") > -1:
                        line = line.replace("FS=", "RNT=")
                    if "5BP" in line:
                        line = sub("5BP[0-9]+", "5BP", line)
                    if line.find("PASS") == -1:
                        line = _set_reject(line)
                    if line.find("PASS") > -1 and line.find("SOMATIC") == -1:
                        line = _set_reject(line)
                    if not _has_ambiguous_ref_allele(line):
                        out_handle.write(line)
                    if line.startswith("..."):
                        out_handle.write("%s" % FILTER_line)
                        FILTER_line = ""
                    if line.startswith("..."):
                        out_handle.write("%s" % SOMATIC_line)
                        SOMATIC_line = ""
    return out_file
Fix sample names, FILTER and FORMAT fields. Remove lines with ambiguous reference.
27,296
def check_parameter_similarity(files_dict):
    try:
        parameter_names = files_dict.itervalues().next().keys()
    except AttributeError:  # no parameters given
        if any(i is not None for i in files_dict.itervalues()):
            return False
        else:
            return True
    if any(parameter_names != i.keys() for i in files_dict.itervalues()):
        return False
    return True
Checks if the parameter names of all files are similar. Takes the dictionary from get_parameter_from_files output as input.
27,297
def get_sitetree(self, alias):
    cache_ = self.cache
    get_cache_entry = cache_.get_entry
    set_cache_entry = cache_.set_entry
    caching_required = False
    if not self.current_app_is_admin():
        alias = self.resolve_tree_i18n_alias(alias)
    sitetree = get_cache_entry(, alias)
    if not sitetree:
        if DYNAMIC_ONLY:
            sitetree = []
        else:
            sitetree = (
                MODEL_TREE_ITEM_CLASS.objects.
                select_related(, ).
                prefetch_related().
                filter(tree__alias__exact=alias).
                order_by(, ))
        sitetree = self.attach_dynamic_tree_items(alias, sitetree)
        set_cache_entry(, alias, sitetree)
        caching_required = True
    parents = get_cache_entry(, alias)
    if not parents:
        parents = defaultdict(list)
        for item in sitetree:
            parent = getattr(item, )
            parents[parent].append(item)
        set_cache_entry(, alias, parents)
    if caching_required:
        cache_update = cache_.update_entry_value
        for item in sitetree:
            cache_update(, alias, {item.id: item})
    url = self.url
    calculate_item_depth = self.calculate_item_depth
    for item in sitetree:
        if caching_required:
            item.has_children = False
            if not hasattr(item, ):
                item.depth = calculate_item_depth(alias, item.id)
            item.depth_range = range(item.depth)
            if item.access_restricted:
                permissions_src = (
                    item.permissions if getattr(item, , False)
                    else item.access_permissions.all())
                item.perms = set(
                    [ % (perm.content_type.app_label, perm.codename)
                     for perm in permissions_src])
        item.url_resolved = url(item)
        item.title_resolved = LazyTitle(item.title) if VARIABLE_TAG_START in item.title else item.title
        item.is_current = False
        item.in_current_branch = False
    self.get_tree_current_item(alias)
    if caching_required:
        cache_.save()
    return alias, sitetree
Gets site tree items from the given site tree. Caches result to dictionary. Returns (tree alias, tree items) tuple. :param str|unicode alias: :rtype: tuple
27,298
def isExpandKeyEvent(self, keyEvent):
    return keyEvent.modifiers() & Qt.ShiftModifier and \
        keyEvent.modifiers() & Qt.AltModifier and \
        keyEvent.key() in (Qt.Key_Left, Qt.Key_Right, Qt.Key_Down, Qt.Key_Up,
                           Qt.Key_PageUp, Qt.Key_PageDown, Qt.Key_Home, Qt.Key_End)
Check if key event should expand rectangular selection
27,299
def imshow(x, y, z, ax, **kwargs):
    if x.ndim != 1 or y.ndim != 1:
        raise ValueError('imshow requires 1D coordinates, try using '
                         'pcolormesh or contour(f)')
    # Centering the pixels; assumes uniform spacing
    try:
        xstep = (x[1] - x[0]) / 2.0
    except IndexError:
        xstep = .1
    try:
        ystep = (y[1] - y[0]) / 2.0
    except IndexError:
        ystep = .1
    left, right = x[0] - xstep, x[-1] + xstep
    bottom, top = y[-1] + ystep, y[0] - ystep
    defaults = {'origin': 'upper', 'interpolation': 'nearest'}
    if not hasattr(ax, 'projection'):
        defaults['aspect'] = 'auto'
    # Allow user to override these defaults
    defaults.update(kwargs)
    if defaults['origin'] == 'upper':
        defaults['extent'] = [left, right, bottom, top]
    else:
        defaults['extent'] = [left, right, top, bottom]
    if z.ndim == 3:
        # RGB data: add an alpha channel if missing and make masked
        # pixels transparent
        if z.shape[-1] == 3:
            alpha = np.ma.ones(z.shape[:2] + (1,), dtype=z.dtype)
            if np.issubdtype(z.dtype, np.integer):
                alpha *= 255
            z = np.ma.concatenate((z, alpha), axis=2)
        else:
            z = z.copy()
            z[np.any(z.mask, axis=-1), -1] = 0
    primitive = ax.imshow(z, **defaults)
    return primitive
Image plot of 2d DataArray using matplotlib.pyplot Wraps :func:`matplotlib:matplotlib.pyplot.imshow` While other plot methods require the DataArray to be strictly two-dimensional, ``imshow`` also accepts a 3D array where some dimension can be interpreted as RGB or RGBA color channels and allows this dimension to be specified via the kwarg ``rgb=``. Unlike matplotlib, Xarray can apply ``vmin`` and ``vmax`` to RGB or RGBA data, by applying a single scaling factor and offset to all bands. Passing ``robust=True`` infers ``vmin`` and ``vmax`` :ref:`in the usual way <robust-plotting>`. .. note:: This function needs uniformly spaced coordinates to properly label the axes. Call DataArray.plot() to check. The pixels are centered on the coordinates values. Ie, if the coordinate value is 3.2 then the pixels for those coordinates will be centered on 3.2.