Column        Type              Min    Max
Unnamed: 0    int64             0      389k
code          string (lengths)  26     79.6k
docstring     string (lengths)  1      46.9k
381,200
def get_introspection_data(cls, tax_benefit_system):
    comments = inspect.getcomments(cls)
    try:
        absolute_file_path = inspect.getsourcefile(cls)
    except TypeError:
        source_file_path = None
    else:
        # the metadata key and replacement literals were lost in extraction;
        # 'location' and '' are assumed here
        source_file_path = absolute_file_path.replace(
            tax_benefit_system.get_package_metadata()['location'], '')
    try:
        source_lines, start_line_number = inspect.getsourcelines(cls)
        source_code = textwrap.dedent(''.join(source_lines))
    except (IOError, TypeError):
        source_code, start_line_number = None, None
    return comments, source_file_path, source_code, start_line_number
Get introspection data about the code of the variable. :returns: (comments, source file path, source code, start line number) :rtype: tuple
381,201
def _extract_object_params(self, name):
    params = self.request.query_params.lists()
    params_map = {}
    prefix = name[:-1]
    offset = len(prefix)
    for name, value in params:
        if name.startswith(prefix):
            # suffix literals lost in extraction; ']' and '][]' are assumed
            # from the slice offsets used below
            if name.endswith(']'):
                name = name[offset:-1]
            elif name.endswith('][]'):
                name = name[offset:-3]
            else:
                raise exceptions.ParseError(
                    'Unparsable query parameter: %s' % name  # message assumed
                )
        else:
            continue
        params_map[name] = value
    return params_map
Extract object params, return as dict
381,202
def required_from_env(key):
    val = os.environ.get(key)
    if not val:
        # the '{}' placeholder was evidently lost from the message
        raise ValueError(
            "Required argument {} not supplied and not found in "
            "environment variables".format(key))
    return val
Retrieve a required variable from the current environment variables. Raises a ValueError if the env variable is not found or has no value.
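A minimal usage sketch for the function above (the variable name API_TOKEN is hypothetical):

    import os

    os.environ["API_TOKEN"] = "s3cr3t"
    print(required_from_env("API_TOKEN"))   # -> "s3cr3t"

    # An unset or empty variable raises, e.g.:
    # ValueError: Required argument MISSING_KEY not supplied and not
    # found in environment variables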
381,203
def system_qos_qos_service_policy_direction(self, **kwargs):
    config = ET.Element("config")
    system_qos = ET.SubElement(config, "system-qos",
                               xmlns="urn:brocade.com:mgmt:brocade-policer")
    qos = ET.SubElement(system_qos, "qos")
    service_policy = ET.SubElement(qos, "service-policy")
    policy_map_name_key = ET.SubElement(service_policy, "policy-map-name")
    # kwargs key literals reconstructed from the element names
    policy_map_name_key.text = kwargs.pop('policy_map_name')
    direction = ET.SubElement(service_policy, "direction")
    direction.text = kwargs.pop('direction')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
381,204
def clique(graph, id):
    clique = [id]
    for n in graph.nodes:
        friend = True
        for id in clique:
            if n.id == id or graph.edge(n.id, id) == None:
                friend = False
                break
        if friend:
            clique.append(n.id)
    return clique
Returns the largest possible clique for the node with given id.
381,205
def status(cwd, opts=None, user=None):
    def _status(cwd):
        # command/option literals and the status-letter map were lost in
        # extraction; reconstructed from `hg status` conventions
        cmd = ['hg', 'status']
        if opts:
            for opt in opts.split():
                cmd.append('--{0}'.format(opt))
        out = __salt__['cmd.run_stdout'](
            cmd, cwd=cwd, runas=user, python_shell=False)
        types = {
            'M': 'modified',
            'A': 'added',
            'R': 'removed',
            'C': 'clean',
            '!': 'missing',
            '?': 'not tracked',
            'I': 'ignored',
            ' ': 'origin of the previous file',
        }
        ret = {}
        for line in out.splitlines():
            t, f = types[line[0]], line[2:]
            if t not in ret:
                ret[t] = []
            ret[t].append(f)
        return ret
    if salt.utils.data.is_iter(cwd):
        return dict((cwd, _status(cwd)) for cwd in cwd)
    else:
        return _status(cwd)
Show changed files of the given repository

cwd
    The path to the Mercurial repository
opts : None
    Any additional options to add to the command line
user : None
    Run hg as a user other than what the minion runs as

CLI Example:

.. code-block:: bash

    salt '*' hg.status /path/to/repo
381,206
def getitem_es(self, key):
    from nefertari_guards.elasticsearch import ACLFilterES
    es = ACLFilterES(self.item_model.__name__)
    # dict key literals were lost in extraction; 'id' and 'request' are
    # assumed from the values they carry and the docstring below
    params = {
        'id': key,
        'request': self.request,
    }
    obj = es.get_item(**params)
    obj.__acl__ = self.item_acl(obj)
    obj.__parent__ = self
    obj.__name__ = key
    return obj
Override to support ACL filtering. To do so: passes `self.request` to `get_item` and uses `ACLFilterES`.
381,207
def set(self, safe_len=False, **kwds):
    if kwds:
        d = self.kwds()
        d.update(kwds)
        self.reset(**d)
    if safe_len and self.item:
        self.leng = _len
Set one or more attributes.
381,208
def real(self):
    # the raw-dict key literals were lost in extraction and are not
    # recoverable from context; '...' below marks each lost key
    result = self.__raw['...'].copy()
    result['...'] = self.__raw['...']['...']
    result['...'] = self.__raw['...']['...']
    result['...'] = self.__raw['...']['...']
    return result
Get realtime data :rtype: dict :returns: see http://goristock.appspot.com/API#apiweight for the field reference
381,209
def get_single(decl_matcher, decls, recursive=True):
    answer = matcher.find(decl_matcher, decls, recursive)
    if len(answer) == 1:
        return answer[0]
    elif not answer:
        raise runtime_errors.declaration_not_found_t(decl_matcher)
    else:
        raise runtime_errors.multiple_declarations_found_t(decl_matcher)
Returns a reference to the declaration that matches the criteria defined by `decl_matcher`. If a unique declaration cannot be found, an appropriate exception is raised. :param decl_matcher: Python callable object that takes one argument - a reference to a declaration :param decls: the search scope, a :class:`declaration_t` object or a list of :class:`declaration_t` objects :param recursive: boolean; if True, the method will run `decl_matcher` on the internal declarations too
381,210
def location(self, tag=None, fromdate=None, todate=None):
    return self.call("GET", "/stats/outbound/clicks/location",
                     tag=tag, fromdate=fromdate, todate=todate)
Gets an overview of which part of the email links were clicked from (HTML or Text). This is only recorded when Link Tracking is enabled for that email.
381,211
def decode_response(status: int, headers: MutableMapping, body: bytes) -> dict:
    data = decode_body(headers, body)
    raise_for_status(status, headers, data)
    raise_for_api_error(headers, data)
    return data
Decode incoming response

Args:
    status: Response status
    headers: Response headers
    body: Response body

Returns:
    Response data
381,212
def update_sql(table, filter, updates):
    # a doctest (preserved in the docstring below) was mangled into the
    # source here; the separator and format-string literals are assumed
    where_keys, where_vals = _split_dict(filter)
    up_keys, up_vals = _split_dict(updates)
    changes = _pairs(up_keys, sep=', ')
    where = _pairs(where_keys, start=len(up_keys) + 1)
    sql = 'UPDATE {} SET {} WHERE {}'.format(table, changes, where)
    return sql, up_vals + where_vals
>>> update_sql('tbl', {'foo': 'a', 'bar': 1}, {'bar': 2, 'baz': 'b'})
('UPDATE tbl SET bar=$1, baz=$2 WHERE bar=$3 AND foo=$4', [2, 'b', 1, 'a'])
381,213
def add_factuality(self, my_fact):
    if self.factuality_layer is None:
        self.factuality_layer = Cfactualities()
        self.root.append(self.factuality_layer.get_node())
    self.factuality_layer.add_factuality(my_fact)
Adds a factuality to the factuality layer @type my_fact: L{Cfactuality} @param my_fact: factuality object
381,214
def convert_identifiers(self, identifiers: Union[Identifier, List[Identifier]]):
    if not identifiers:
        return identifiers

    def _create_or_update(identifier):
        data = self._cache[identifier.key]
        return self.factory.create_or_update(identifier, data)[0]

    if isinstance(identifiers, Identifier):
        return _create_or_update(identifiers)
    elif isinstance(identifiers, list) and isinstance(identifiers[0], Identifier):
        return [_create_or_update(identifier) for identifier in identifiers]
    else:
        # the original error message was lost in extraction; wording assumed
        raise TypeError('expected an Identifier or a list of Identifiers')
Convert an individual :class:`Identifier` to a model instance, or a list of Identifiers to a list of model instances.
381,215
def dump(self):
    with self.lock:
        atexit.unregister(self.dump)
        self.fh.close()
Output all recorded metrics
381,216
def eval_constraints(self, constraints):
    try:
        return all(self.eval_ast(c) for c in constraints)
    except errors.ClaripyZeroDivisionError:
        return False
Returns whether the constraints are satisfied trivially by using the last model.
381,217
def description(self):
    if self._session is None:
        return None
    res = self._session.res_info
    if res:
        return res.description
    else:
        return None
Cursor description, see http://legacy.python.org/dev/peps/pep-0249/#description
381,218
def regular_to_sparse_from_sparse_mappings(regular_to_unmasked_sparse,
                                           unmasked_sparse_to_sparse):
    total_regular_pixels = regular_to_unmasked_sparse.shape[0]
    regular_to_sparse = np.zeros(total_regular_pixels)
    for regular_index in range(total_regular_pixels):
        regular_to_sparse[regular_index] = \
            unmasked_sparse_to_sparse[regular_to_unmasked_sparse[regular_index]]
    return regular_to_sparse
Using the mapping between the regular-grid and unmasked pixelization grid, compute the mapping between each regular pixel and the masked pixelization grid.

Parameters
----------
regular_to_unmasked_sparse : ndarray
    The index mapping between every regular pixel and masked pixelization pixel.
unmasked_sparse_to_sparse : ndarray
    The index mapping between every masked pixelization pixel and unmasked pixelization pixel.
381,219
def get_oembed(self, url):
    # the endpoint path, query key, and HTTP method literals were lost in
    # extraction; the values below are assumptions
    api_url = self.api_url + '/api/oembed'
    parameters = {
        'url': url,
    }
    response = self._request_url(api_url, 'get', params=parameters)
    headers, result = self._parse_and_check(response)
    return result
Return an oEmbed format json dictionary :param url: Image page URL (ex. http://gyazo.com/xxxxx)
381,220
def disease(self, identifier=None, ref_id=None, ref_type=None, name=None,
            acronym=None, description=None, entry_name=None, limit=None,
            as_df=False):
    q = self.session.query(models.Disease)
    model_queries_config = (
        (identifier, models.Disease.identifier),
        (ref_id, models.Disease.ref_id),
        (ref_type, models.Disease.ref_type),
        (name, models.Disease.name),
        (acronym, models.Disease.acronym),
        (description, models.Disease.description)
    )
    q = self.get_model_queries(q, model_queries_config)
    if entry_name:
        q = q.session.query(models.Disease).join(
            models.DiseaseComment).join(models.Entry)
        if isinstance(entry_name, str):
            q = q.filter(models.Entry.name == entry_name)
        elif isinstance(entry_name, Iterable):
            q = q.filter(models.Entry.name.in_(entry_name))
    return self._limit_and_df(q, limit, as_df)
Method to query :class:`.models.Disease` objects in database

:param identifier: disease UniProt identifier(s)
:type identifier: str or tuple(str) or None
:param ref_id: identifier(s) of referenced database
:type ref_id: str or tuple(str) or None
:param ref_type: database name(s)
:type ref_type: str or tuple(str) or None
:param name: disease name(s)
:type name: str or tuple(str) or None
:param acronym: disease acronym(s)
:type acronym: str or tuple(str) or None
:param description: disease description(s)
:type description: str or tuple(str) or None
:param entry_name: name(s) in :class:`.models.Entry`
:type entry_name: str or tuple(str) or None
:param limit:
    - if `isinstance(limit, int) == True` -> limit
    - if `isinstance(limit, tuple) == True` -> format := tuple(page_number, results_per_page)
    - if limit == None -> all results
:type limit: int or tuple(int) or None
:param bool as_df: if `True` results are returned as :class:`pandas.DataFrame`
:return:
    - if `as_df == False` -> list(:class:`.models.Disease`)
    - if `as_df == True` -> :class:`pandas.DataFrame`
:rtype: list(:class:`.models.Disease`) or :class:`pandas.DataFrame`
381,221
def del_option_by_number(self, number):
    for o in list(self._options):
        assert isinstance(o, Option)
        if o.number == number:
            self._options.remove(o)
Delete an option from the message by number :type number: Integer :param number: option number
381,222
def get_form_label(self, request=None, obj=None, model=None, form=None):
    label = form.base_fields[self.field].label
    condition = self.condition_cls(request=request, obj=obj, model=model)
    if condition.check():
        additional_opts = condition.get_additional_options(
            request=request, obj=obj, model=model
        )
        visit_datetime = ""
        if obj:
            visit_datetime = getattr(
                obj, obj.visit_model_attr()
            ).report_datetime.strftime("%B %Y")
        try:
            label = self.custom_label.format(
                appointment=condition.appointment,
                previous_appointment=condition.previous_appointment,
                previous_obj=condition.previous_obj,
                previous_visit=condition.previous_visit,
                visit_datetime=visit_datetime,
                **additional_opts,
            )
        except KeyError as e:
            raise CustomFormLabelError(
                f"Custom label template has invalid keys. See {label}. Got {e}."
            )
    return label
Returns a customized form label, if condition is met, otherwise returns the default form label. * condition is an instance of CustomLabelCondition.
381,223
def cwt(ts, freqs=np.logspace(0, 2), wavelet=cwtmorlet, plot=True):
    orig_ndim = ts.ndim
    if ts.ndim == 1:  # was `is 1`; identity comparison on ints is fragile
        ts = ts[:, np.newaxis]
    channels = ts.shape[1]
    fs = (len(ts) - 1.0) / (1.0*ts.tspan[-1] - ts.tspan[0])
    x = signal.detrend(ts, axis=0)
    dtype = wavelet(fs/freqs[0], fs/freqs[0]).dtype
    coefs = np.zeros((len(ts), len(freqs), channels), dtype)
    for i in range(channels):
        coefs[:, :, i] = roughcwt(x[:, i], cwtmorlet, fs/freqs).T
    if plot:
        _plot_cwt(ts, coefs, freqs)
    if orig_ndim == 1:
        coefs = coefs[:, :, 0]
    return coefs
Continuous wavelet transform

Note the full results can use a huge amount of memory at 64-bit precision

Args:
    ts: Timeseries of m variables, shape (n, m). Assumed constant timestep.
    freqs: list of frequencies (in Hz) to use for the transform.
        (default is 50 frequency bins logarithmic from 1Hz to 100Hz)
    wavelet: the wavelet to use. May be complex. See scipy.signal.wavelets.
    plot: whether to plot time-resolved power spectrum

Returns:
    coefs: Continuous wavelet transform output array, shape (n, len(freqs), m)
381,224
def perimeter(patch, world_size=(60, 60), neighbor_func=get_rook_neighbors_toroidal):
    edge = 0
    patch = set([tuple(i) for i in patch])
    for cell in patch:
        neighbors = neighbor_func(cell, world_size)
        neighbors = [n for n in neighbors if n not in patch]
        edge += len(neighbors)
    return edge
Count cell faces in patch that do not connect to part of patch. This preserves various square geometry features that would not be preserved by merely counting the number of cells that touch an edge.
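A quick sanity check of the function above, assuming the default neighbor function returns the four orthogonal (rook) neighbours on a torus:

    # A 2x2 block exposes two outside faces per cell; a lone cell exposes 4.
    patch = [(0, 0), (0, 1), (1, 0), (1, 1)]
    assert perimeter(patch) == 8
    assert perimeter([(5, 5)]) == 4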
381,225
def unplug(self):
    # a fragment of the docstring had leaked into the function body here
    if not self.__plugged:
        return
    members = set([method for _, method in
                   inspect.getmembers(self, predicate=inspect.ismethod)])
    for message in global_callbacks:
        global_callbacks[message] -= members
    self.__plugged = False
Remove the actor's methods from the callback registry.
381,226
def get_regular_expressions(taxonomy_name, rebuild=False, no_cache=False):
    # Several message literals and branch boundaries were fused together by
    # extraction; the messages and branch placement below are a best-effort
    # reconstruction.
    onto_name, onto_path, onto_url = _get_ontology(taxonomy_name)
    if not onto_path:
        raise TaxonomyError("Unable to locate the taxonomy: %s." % taxonomy_name)
    cache_path = _get_cache_path(onto_name)
    current_app.logger.debug(
        'Taxonomy (use cache: %s): %s, cache: %s'
        % (not no_cache, onto_path, cache_path))
    if os.access(cache_path, os.R_OK):
        if os.access(onto_path, os.R_OK):
            if rebuild or no_cache:
                current_app.logger.debug(
                    "Cache generation was manually forced.")
                return _build_cache(onto_path, skip_cache=no_cache)
            # the code path for a fresh, readable cache was lost in
            # extraction (presumably the cache was loaded and returned here)
        else:
            current_app.logger.warning(
                "The ontology couldn't be read. We cannot read/write into: "
                "%s. Aborting!" % onto_path)
            raise TaxonomyError(
                "Cache %s exists, but is not readable!" % cache_path)
    elif os.access(onto_path, os.R_OK):
        current_app.logger.info(
            "Cache not available. Building it now: %s" % onto_path)
        return _build_cache(onto_path, skip_cache=no_cache)
    else:
        raise TaxonomyError("We miss both source and cache"
                            " of the taxonomy: %s" % taxonomy_name)
Return a list of patterns compiled from the RDF/SKOS ontology. Uses cache if it exists and if the taxonomy hasn't changed.
381,227
def run_segment_operation(outdoc, filenames, segments, use_segment_table,
                          operation, result_name='RESULT', preserve=True):
    # the default result name and the ',' separator were lost in extraction;
    # 'RESULT' is taken from the docstring below
    proc_id = table.get_table(outdoc, lsctables.ProcessTable.tableName)[0].process_id
    if preserve:
        indoc = ligolw_add.ligolw_add(outdoc, filenames)
    else:
        indoc = ligolw_add.ligolw_add(ligolw.Document(), filenames)
    keys = segments.split(',')
    if operation == INTERSECT:
        sgmntlist = pycbc_glue.segments.segmentlist(
            [pycbc_glue.segments.segment(-pycbc_glue.segments.infinity(),
                                         pycbc_glue.segments.infinity())])
        for key in keys:
            sgmntlist &= find_segments(indoc, key, use_segment_table)
    elif operation == UNION:
        sgmntlist = pycbc_glue.segments.segmentlist([])
        for key in keys:
            sgmntlist |= find_segments(indoc, key, use_segment_table)
    elif operation == DIFF:
        sgmntlist = find_segments(indoc, keys[0], use_segment_table)
        for key in keys[1:]:
            sgmntlist -= find_segments(indoc, key, use_segment_table)
    else:
        raise NameError(
            "%s is not a known operation (intersect, union or diff)" % operation)
    seg_def_id = add_to_segment_definer(outdoc, proc_id, '', result_name, 1)
    if use_segment_table:
        add_to_segment(outdoc, proc_id, seg_def_id, sgmntlist)
    else:
        add_to_segment_summary(outdoc, proc_id, seg_def_id, sgmntlist)
    return outdoc, abs(sgmntlist)
Performs an operation (intersect, union, or diff) across a set of segments. That is, given a set of files each with segment definers DMT-FLAG1, DMT-FLAG2 etc. and a list of segments DMT-FLAG1,DMT-FLAG1 this returns

    RESULT = (table 1's DMT-FLAG1 union table 2's DMT-FLAG1 union ...)
             operation
             (table 1's DMT-FLAG2 union table 2's DMT-FLAG2 union ...)
             operation
             etc.
381,228
def addVariant(self, variant):
    if isinstance(variant, Variant):
        self.variants.append(variant)
    else:
        # the original raise statement was garbled; message text assumed
        raise VariantError('%s is not a Variant' % type(variant))
Appends one Variant to variants
381,229
def _throat_props(self):
    # dictionary key literals were lost in extraction; the 'throat.*' and
    # 'pore.*' names below are assumptions based on OpenPNM conventions
    network = self.network
    net_Ts = network.throats(self.name)
    conns = network['throat.conns'][net_Ts]
    p1 = conns[:, 0]
    p2 = conns[:, 1]
    coords = network['pore.coords']
    normals = tr.unit_vector(coords[p2] - coords[p1])
    self['throat.normal'] = normals
    self['throat.centroid'] = (coords[p1] + coords[p2]) / 2
    self['throat.incenter'] = self['throat.centroid']
Helper function to calculate the throat normal vectors
381,230
def translations_lists(self):
    # the lookup key was lost in extraction; 'translations' is assumed
    return (getattr(self.generator, name)
            for name in self.info.get('translations', []))
Iterator over lists of content translations
381,231
def new_process(self, consumer_name):
    # the name template and kwargs key literals were lost in extraction;
    # the names below are assumptions matching the values they carry
    process_name = '%s-%s' % (consumer_name,
                              self.new_process_number(consumer_name))
    kwargs = {
        'config': self.config.application,
        'consumer_name': consumer_name,
        'profile': self.profile,
        'daemon': False,
        'stats_queue': self.stats_queue,
        'logging_config': self.config.logging,
    }
    return process_name, process.Process(name=process_name, kwargs=kwargs)
Create a new consumer instances :param str consumer_name: The name of the consumer :return tuple: (str, process.Process)
381,232
def init(*, threshold_lvl=1, quiet_stdout=False, log_file):
    global _logger, _log_lvl
    _log_lvl = _set_lvl(threshold_lvl)
    _logger = logging.getLogger(PKG_NAME)
    _logger.setLevel(_log_lvl)
    log_h = logging.FileHandler(log_file)
    # the format string was lost in extraction; a conventional one is assumed
    base_fmt = '%(asctime)s - %(levelname)s - %(message)s'
    log_fmt = logging.Formatter(base_fmt)
    log_h.setFormatter(log_fmt)
    _logger.addHandler(log_h)
    if not quiet_stdout:
        global _stdout
        _stdout = True
Initiate the log module :param threshold_lvl: messages under this level won't be issued/logged :param quiet_stdout: if True, do not mirror log messages to stdout :param log_file: path of the log file
381,233
def spawn_shell(self, context_file, tmpdir, rcfile=None, norc=False,
                stdin=False, command=None, env=None, quiet=False,
                pre_command=None, add_rez=True,
                package_commands_sourced_first=None, **Popen_args):
    raise NotImplementedError
Spawn a possibly interactive subshell.

Args:
    context_file: File that must be sourced in the new shell, this configures the Rez environment.
    tmpdir: Tempfiles, if needed, should be created within this path.
    rcfile: Custom startup script.
    norc: Don't run startup scripts. Overrides rcfile.
    stdin: If True, read commands from stdin in a non-interactive shell. If a different non-False value, such as subprocess.PIPE, the same occurs, but stdin is also passed to the resulting subprocess.Popen object.
    command: If not None, execute this command in a non-interactive shell. If an empty string, don't run a command, but don't open an interactive shell either.
    env: Environ dict to execute the shell within; uses the current environment if None.
    quiet: If True, don't show the configuration summary, and suppress any stdout from startup scripts.
    pre_command: Command to inject before the shell command itself. This is for internal use.
    add_rez: If True, assume this shell is being used with rez, and do things such as set the prompt etc.
    package_commands_sourced_first: If True, source the context file before sourcing startup scripts (such as .bashrc). If False, source the context file AFTER. If None, use the configured setting.
    popen_args: args to pass to the shell process object constructor.

Returns:
    A subprocess.Popen object representing the shell process.
381,234
def _deserialize(self, value, attr, data, **kwargs):
    # the error-key literals were lost in extraction; 'invalid' is assumed
    if not value:
        self.fail('invalid')
    try:
        return utils.from_iso_time(value)
    except (AttributeError, TypeError, ValueError):
        self.fail('invalid')
Deserialize an ISO8601-formatted time to a :class:`datetime.time` object.
381,235
def evaluate(self, script):
    if WEBENGINE:
        return self.dom.runJavaScript("{}".format(script))
    else:
        return self.dom.evaluateJavaScript("{}".format(script))
Evaluate script in page frame. :param script: The script to evaluate.
381,236
def camel(theta):
    x, y = theta
    obj = 2 * x ** 2 - 1.05 * x ** 4 + x ** 6 / 6 + x * y + y ** 2
    grad = np.array([
        4 * x - 4.2 * x ** 3 + x ** 5 + y,
        x + 2 * y,
    ])
    return obj, grad
Three-hump camel function
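Because the function returns both the objective value and its gradient, it can be fed directly to scipy.optimize.minimize with jac=True; a minimal sketch (the starting point is arbitrary):

    import numpy as np
    from scipy.optimize import minimize

    res = minimize(camel, x0=np.array([1.0, 1.0]), jac=True)
    print(res.x, res.fun)   # converges to a stationary point; the global
                            # minimum of the three-hump camel is at (0, 0)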
381,237
def path_exists_or_creatable(pathname: str) -> bool:
    try:
        return is_pathname_valid(pathname) and (
            os.path.exists(pathname) or is_path_creatable(pathname))
    except OSError:
        return False
Checks whether the given path exists or is creatable. This function is guaranteed to _never_ raise exceptions.

Returns
-------
`True` if the passed pathname is a valid pathname for the current OS _and_ either currently exists or is hypothetically creatable; `False` otherwise.
381,238
def return_error(self, status, payload=None):
    resp = None
    if payload is not None:
        payload = json.dumps(payload)
        resp = self.make_response(payload, status=status)
    if status in [405]:
        abort(status)
    else:
        abort(status, response=resp)
Error handler called by request handlers when an error occurs and the request should be aborted.

Usage::

    def handle_post_request(self, *args, **kwargs):
        self.request_handler = self.get_request_handler()
        try:
            self.request_handler.process(self.get_data())
        except SomeException as e:
            self.return_error(400, payload=self.request_handler.errors)
        return self.return_create_response()
381,239
def from_file(proto_file):
    # suffix literals were lost in extraction; the standard protobuf
    # conventions ('.proto', '_pb2.py') are assumed
    if not proto_file.endswith('.proto'):
        raise BadProtobuf()
    dest = tempfile.mkdtemp()
    full_path = os.path.abspath(proto_file)
    _compile_proto(full_path, dest)
    filename = os.path.split(full_path)[-1]
    name = re.search(r'^(.*)\.proto$', filename).group(1)
    target = os.path.join(dest, name + '_pb2.py')
    return _load_module(target)
Take a filename `proto_file`, compile it via the Protobuf compiler, and import the module. Return the module if successfully compiled; otherwise raise either a ProtocNotFound or BadProtobuf exception.
381,240
def mkp(*args, **kwargs):
    mk = kwargs.pop('mk', False)   # key name recovered from the docstring
    path = os.sep.join(list(args))
    if mk:
        # `sep2` (presumably a doubled separator, os.sep * 2) is defined
        # elsewhere in the original module
        while sep2 in path:
            path = path.replace(sep2, os.sep)
        try:
            os.makedirs(path)
        except FileExistsError:
            pass
    return path
Generate a directory path, and create it if requested.

.. code-block:: Python

    filepath = mkp('base', 'folder', 'file')
    dirpath = mkp('root', 'path', 'folder', mk=True)

Args:
    \*args: File or directory path segments to be concatenated
    mk (bool): Make the directory (if it doesn't exist)

Returns:
    path (str): File or directory path
381,241
def flush_cache(self):
    # the log-message literal was lost in extraction; wording assumed
    logger.debug('Flushing cache {}'.format(self.db_path))
    with self.db:
        for rec in self._tups:
            self.db.execute(
                "replace into history(agent_id, t_step, key, value) "
                "values (?, ?, ?, ?)",
                (rec.agent_id, rec.t_step, rec.key, rec.value))
    self._tups = list()
Use a cache to save state changes to avoid opening a session for every change. The cache will be flushed at the end of the simulation, and when history is accessed.
381,242
def peep_hash(argv):
    # the usage/description strings and print prefix were lost in
    # extraction; wording below is assumed
    parser = OptionParser(
        usage='usage: %prog hash file [file ...]',
        description='Print a hash line for one or more files.')
    _, paths = parser.parse_args(args=argv)
    if paths:
        for path in paths:
            print('# sha256:', hash_of_file(path))
        return ITS_FINE_ITS_FINE
    else:
        parser.print_usage()
        return COMMAND_LINE_ERROR
Return the peep hash of one or more files, returning a shell status code or raising a PipException. :arg argv: The commandline args, starting after the subcommand
381,243
def parse_report(self, lines, table_names):
    report = dict()
    lines = (l for l in lines)
    for line in lines:
        line = line.rstrip()
        if line in table_names.keys():
            report[table_names[line]] = self.parse_gatk_report_table(lines)
    return report
Parse a GATK report

https://software.broadinstitute.org/gatk/documentation/article.php?id=1244

Only GATKTable entries are parsed. Tables are returned as a dict of tables. Each table is a dict of arrays, where names correspond to column names, and arrays correspond to column values.

Args:
    lines (file handle): an iterable over the lines of a GATK report.
    table_names (dict): a dict with keys that are GATK report table names (e.g. "#:GATKTable:Quantized:Quality quantization map"), and values that are the keys in the returned dict.

Returns:
    {
        table_1: {
            col_1: [val_1, val_2, ...],
            col_2: [val_1, val_2, ...],
            ...
        },
        table_2: ...
    }
381,244
def get_partition(url, headers, source_id, container, partition):
    accepted_formats = list(serializer.format_registry.keys())
    accepted_compression = list(serializer.compression_registry.keys())
    # action name, URL suffix, and message keys were lost in extraction;
    # the names used below are assumptions
    payload = dict(action='read',
                   source_id=source_id,
                   accepted_formats=accepted_formats,
                   accepted_compression=accepted_compression)
    if partition is not None:
        payload['partition'] = partition
    resp = None  # ensure `resp` is bound if the POST itself raises
    try:
        resp = requests.post(urljoin(url, 'source'),
                             data=msgpack.packb(payload, use_bin_type=True),
                             **headers)
        if resp.status_code != 200:
            raise Exception('Server error: %d' % resp.status_code)
        msg = msgpack.unpackb(resp.content, **unpack_kwargs)
        format = msg['format']
        compression = msg['compression']
        compressor = serializer.compression_registry[compression]
        encoder = serializer.format_registry[format]
        chunk = encoder.decode(compressor.decompress(msg['data']), container)
        return chunk
    finally:
        if resp is not None:
            resp.close()
Serializable function for fetching a data source partition

Parameters
----------
url: str
    Server address
headers: dict
    HTTP header parameters
source_id: str
    ID of the source in the server's cache (unique per user)
container: str
    Type of data, like "dataframe"; one of ``intake.container.container_map``
partition: serializable
    Part of data to fetch, e.g., an integer for a dataframe.
381,245
def _set_interface_vlan_ospf_conf(self, v, load=False):
    # Several literal arguments (extensions, namespace, YANG type, and the
    # error strings) were lost in extraction; the values below are assumed
    # from the container type named in the docstring.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=interface_vlan_ospf_conf.interface_vlan_ospf_conf,
            is_container='container',
            presence=False,
            yang_name="interface-vlan-ospf-conf",
            rest_name="",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions={u'tailf-common': {u'cli-drop-node-name': None,
                                          u'callpoint': u'...'}},  # lost
            namespace='urn:brocade.com:mgmt:brocade-ospf',
            defining_module='brocade-ospf',
            yang_type='container',
            is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': 'interface_vlan_ospf_conf must be of a type '
                            'compatible with container',
            'defined-type': "container",
            'generated-type': 'YANGDynClass(...)',
        })
    self.__interface_vlan_ospf_conf = t
    if hasattr(self, '_set'):
        self._set()
Setter method for interface_vlan_ospf_conf, mapped from YANG variable /routing_system/interface/ve/ip/interface_vlan_ospf_conf (container) If this variable is read-only (config: false) in the source YANG file, then _set_interface_vlan_ospf_conf is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_interface_vlan_ospf_conf() directly.
381,246
def write(self, data):
    cnpj, data = data
    # filename pattern, file mode, and encoding literals were lost in
    # extraction; the values below are assumed
    path = os.path.join(self.output, '%s.json' % cnpj)
    with open(path, 'w') as f:
        json.dump(data, f, encoding='utf-8')
Writes json data to the output directory.
381,247
def _get_unique_ch(text, all_common_encodes):
    # Most string literals in this function were lost in extraction; the
    # punctuation list and the 'utf-8' encoding below are assumptions.
    unique_chars = ''
    if isinstance(text, str):
        text = text.split("\n")
    elif isinstance(text, (list, tuple)):
        pass
    special_chars = ['.', ',', ';', ':', '!', '?', '(', ')', '-']
    for line in text:
        for word in line.split():
            if not PYTHON3:
                word = word.decode('utf-8')
            for ch in all_common_encodes:
                if ch in word:
                    word = word.replace(ch, '')
            if not word:
                continue
            for ch in word:
                if ch.isdigit() or ch in special_chars:
                    word = word.replace(ch, '')
                    continue
            return word
    return unique_chars
text : encoded sample strings. Returns unique words/characters found in the input text's encoded strings.
381,248
def update_alarm(self, alarm, criteria=None, disabled=False, label=None,
                 name=None, metadata=None):
    return self._alarm_manager.update(alarm, criteria=criteria,
                                      disabled=disabled, label=label,
                                      name=name, metadata=metadata)
Updates an existing alarm on this entity.
381,249
def get_paths(self, key):
    final_paths = []
    if key in self.__cli:
        paths = self.__cli[key] or []
        from_conf = False
    else:
        paths = self.__config.get(key) or []
        from_conf = True
    for path in flatten_list(paths):
        final_path = self.__abspath(path, from_conf)
        if final_path:
            final_paths.append(final_path)
    return final_paths
Same as `ConfigParser.get_path` for a list of paths. Args: key: str, the key to lookup the paths with Returns: list: The paths.
381,250
def is_alive(self):
    null = chr(0)
    try:
        if self.device is None:
            return {"is_alive": False}
        else:
            self._send_command(null)
    except (socket.error, EOFError):
        return {"is_alive": False}
    return {"is_alive": self.device.remote_conn.transport.is_active()}
Returns a flag with the state of the SSH connection.
381,251
def _create_prefix_notification(outgoing_msg, rpc_session):
    assert outgoing_msg
    path = outgoing_msg.path
    assert path
    vpn_nlri = path.nlri
    assert path.source is not None
    if path.source != VRF_TABLE:
        params = [{ROUTE_DISTINGUISHER: outgoing_msg.route_dist,
                   PREFIX: vpn_nlri.prefix,
                   NEXT_HOP: path.nexthop,
                   VRF_RF: VrfConf.rf_2_vrf_rf(path.route_family)}]
        if path.nlri.ROUTE_FAMILY.safi not in (subaddr_family.IP_FLOWSPEC,
                                               subaddr_family.VPN_FLOWSPEC):
            # index the one-element list; the flattened original indexed
            # `params` directly, which would raise TypeError
            params[0][VPN_LABEL] = path.label_list[0]
        if not path.is_withdraw:
            rpc_msg = rpc_session.create_notification(
                NOTIFICATION_ADD_REMOTE_PREFIX, params)
        else:
            rpc_msg = rpc_session.create_notification(
                NOTIFICATION_DELETE_REMOTE_PREFIX, params)
    else:
        params = [{ROUTE_DISTINGUISHER: outgoing_msg.route_dist,
                   PREFIX: vpn_nlri.prefix,
                   NEXT_HOP: path.nexthop,
                   VRF_RF: VrfConf.rf_2_vrf_rf(path.route_family),
                   ORIGIN_RD: path.origin_rd}]
        if not path.is_withdraw:
            rpc_msg = rpc_session.create_notification(
                NOTIFICATION_ADD_LOCAL_PREFIX, params)
        else:
            rpc_msg = rpc_session.create_notification(
                NOTIFICATION_DELETE_LOCAL_PREFIX, params)
    return rpc_msg
Constructs prefix notification with data from given outgoing message. Given RPC session is used to create RPC notification message.
381,252
def to_match(self):
    self.validate()
    mark_name, _ = self.fold_scope_location.get_location_name()
    validate_safe_string(mark_name)
    # the template literal and its key were lost in extraction; the names
    # below are assumptions consistent with the '%' substitution call
    template = u'$%(mark_name)s'
    template_data = {
        'mark_name': mark_name,
    }
    return template % template_data
Return a unicode object with the MATCH representation of this expression.
381,253
def flush(self):
    if self._wbuf:
        buffer = ''.join(self._wbuf)
        self._wbuf = []
        self.write(buffer)
Write all data from buffer to socket and reset write buffer.
381,254
def createModel(self, model, context, owner='', includeReferences=True):
    # the statement key and log-message literals were lost in extraction;
    # the values below are assumptions
    CREATE = self.statement('CREATE')
    sql, data = CREATE(model, includeReferences=includeReferences, owner=owner)
    if not sql:
        log.error('Failed to create {0}'.format(model.schema().dbname()))
        return False
    else:
        if context.dryRun:
            print sql % data   # Python 2 source, as in the original
        else:
            self.execute(sql, data, writeAccess=True)
        log.info('Created {0}'.format(model.schema().dbname()))
        return True
Creates a new table in the database based off the inputted schema information. If the dryRun flag is specified, then the SQL will only be logged to the current logger, and not actually executed in the database. :param model | <orb.Model> context | <orb.Context> :return <bool> success
381,255
def find_pareto_front(metrics, metadata, columns, depth=1, epsilon=None,
                      progress=None):
    # Several literals (the direction sentinel, dtypes, the index-column
    # name, and the axis argument) were lost in extraction; the values
    # below are assumptions.
    if len(metrics) == 0:
        return metrics

    import pareto

    indices_from_cols = lambda xs: [metrics.columns.get_loc(x) for x in xs]
    percentile = lambda x, q: metrics[x].quantile(q/100)
    epsilons = [
        (epsilon or 1e-7) * abs(percentile(x, 90) - percentile(x, 10)) / (90 - 10)
        for x in columns
    ]
    maximize = [x for x in columns if metadata[x].direction == 'max']
    maximize_indices = indices_from_cols(maximize)
    column_indices = indices_from_cols(columns)

    def boxify(df):
        boxed_df = pd.DataFrame()
        for col, eps in zip(columns, epsilons):
            boxed_df[col] = np.floor(df[col] / eps)
        return boxed_df

    mask = np.zeros(len(metrics), dtype=bool)
    too_close = np.zeros(len(metrics), dtype=bool)
    all_boxes = boxify(metrics)
    labeled_metrics = metrics.copy()
    labeled_metrics['_id'] = range(len(metrics))
    id = labeled_metrics.columns.get_loc('_id')

    for i in range(depth):
        if epsilon is None:
            candidates = [labeled_metrics[~mask]]
        else:
            front_boxes = boxify(metrics[mask])
            for j, (_, row) in enumerate(front_boxes.iterrows()):
                if progress:
                    progress(i+1, depth, j+1, len(front_boxes))
                too_close |= all_boxes.apply(
                    lambda x: (x == row).all(), axis='columns')
            candidates = [labeled_metrics[too_close == False]]

        front = pareto.eps_sort(
            candidates, column_indices, epsilons, maximize=maximize_indices)

        for row in front:
            assert not mask[row[id]]
            mask[row[id]] = True

    return metrics[mask]
Return the subset of the given metrics that are Pareto optimal with respect to the given columns.

Arguments
=========
metrics: DataFrame
    A dataframe where each row is a different model or design and each column is a different score metric.

metadata: dict
    Extra information about each score metric, in particular whether or not bigger values are considered better or worse. You can get this data structure from structures.load().

columns: list
    The score metrics to consider when calculating the Pareto front.

depth: int
    The number of Pareto fronts to return. In other words, if depth=2, the Pareto front will be calculated, then those points (and any within epsilon of them) will be set aside, then the Pareto front of the remaining points will be calculated, then the union of both fronts will be returned.

epsilon: float
    How close two points can be (in all the dimensions considered) before they are considered the same and one is excluded from the Pareto front (even if it is non-dominated). This is roughly in units of percent of the range of the points. By default this is small enough that you can basically assume that no two points will be considered the same.

progress: func
    A function that will be called in the innermost loop as follows: `progress(curr_depth, tot_depth, curr_hit, tot_hits)`. This is primarily intended to allow the caller to present a progress bar, since increasing the depth can dramatically increase the amount of time this function takes.

Returns
=======
front: DataFrame
    The subset of the given metrics that is Pareto optimal with respect to the given score metrics.

There are several ways to tune the number of models that are returned by this function. These are important to know, because this function is used to filter models between rounds of design, and there are always practical constraints on the number of models that can be carried on:

- Columns: This is only mentioned for completeness, because you should pick your score metrics based on which scores you think are informative, not based on how many models you need. But increasing the number of score metrics increases the number of models that are selected, sometimes dramatically.

- Depth: Increasing the depth increases the number of models that are selected by including models that are just slightly behind the Pareto front.

- Epsilon: Increasing the epsilon decreases the number of models that are selected by discarding the models in the Pareto front that are too similar to each other.

In short, tune depth to get more models and epsilon to get fewer. You can also tune both at once to get a large but diverse set of models.
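A hedged usage sketch; the Meta class and the 'max' direction string are assumptions standing in for whatever metadata objects structures.load() actually returns (the reconstruction above compares metadata[x].direction against 'max'):

    import pandas as pd

    class Meta:
        def __init__(self, direction):
            self.direction = direction

    metrics = pd.DataFrame({'score': [0.9, 0.5, 0.7],
                            'rmsd':  [2.0, 1.0, 1.5]})
    metadata = {'score': Meta('max'), 'rmsd': Meta('min')}
    front = find_pareto_front(metrics, metadata, ['score', 'rmsd'])
    # all three rows are mutually non-dominated here, so all three survive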
381,256
def import_profile(self):
    file_name_dialog = QFileDialog(self)
    file_name_dialog.setAcceptMode(QFileDialog.AcceptOpen)
    # the filter and suffix literals were lost in extraction; JSON values
    # are assumed from the docstring
    file_name_dialog.setNameFilter(self.tr('JSON files (*.json)'))
    file_name_dialog.setDefaultSuffix('json')
    path_name = resources_path()
    file_name_dialog.setDirectory(path_name)
    if file_name_dialog.exec_():
        file_name = file_name_dialog.selectedFiles()[0]
    else:
        return -1
    if self.minimum_needs.read_from_file(file_name) == -1:
        return -1
    self.clear_resource_list()
    self.populate_resource_list()
    self.switch_context(self.profile_edit_page)
Import minimum needs from an existing json file. The minimum needs are loaded from a file into the table. This state is only saved if the form is accepted.
381,257
def iter_shortcuts():
    # the config section name was lost in extraction; 'shortcuts' is assumed
    for context_name, keystr in CONF.items('shortcuts'):
        context, name = context_name.split("/", 1)
        yield context, name, keystr
Iterate over keyboard shortcuts.
381,258
def _init(self):
    self.provider._count_get_resource_inst_init += 1
    tableName, primKey = self.provider._split_path(self.path)
    display_type = "Unknown"
    displayTypeComment = ""
    contentType = "text/html"
    if tableName is None:
        display_type = "Database"
    elif primKey is None:
        display_type = "Database Table"
    else:
        contentType = "text/csv"
        if primKey == "_ENTIRE_CONTENTS":
            display_type = "Database Table Contents"
            displayTypeComment = "CSV Representation of Table Contents"
        else:
            display_type = "Database Record"
            displayTypeComment = "Attributes available as properties"
    is_collection = primKey is None
    self._cache = {
        "content_length": None,
        "contentType": contentType,
        "created": time.time(),
        "display_name": self.name,
        # md5().update(...) returns None; hash the path directly instead
        "etag": hashlib.md5(self.path.encode()).hexdigest(),
        "modified": None,
        "support_ranges": False,
        "display_info": {"type": display_type,
                         "typeComment": displayTypeComment},
    }
    if not is_collection:
        self._cache["modified"] = time.time()
    _logger.debug("---> _init, nc=%s" % self.provider._count_initConnection)
Read resource information into self._cache, for cached access. See DAVResource._init()
381,259
def inline(self) -> str:
    # the separator literal was lost in extraction; ',' is assumed
    return "{0}:{1}".format(self.index,
                            ','.join([str(p) for p in self.parameters]))
Return inline string format of the instance :return:
381,260
def plot(message, duration=1, ax=None):
    lst_bin = _encode_binary(message)
    x, y = _create_x_y(lst_bin, duration)
    ax = _create_ax(ax)
    ax.plot(x, y, linewidth=2.0)
    delta_y = 0.1
    ax.set_ylim(-delta_y, 1 + delta_y)
    ax.set_yticks([0, 1])
    delta_x = 0.5 * duration
    ax.set_xlim(-delta_x, len(lst_bin) * duration + delta_x)
    return ax
Plot a message Returns: ax a Matplotlib Axe
381,261
def _set_get_vnetwork_dvs(self, v, load=False):
    # Several literal arguments (extensions, namespace, YANG type, and the
    # error strings) were lost in extraction; the values below are assumed
    # from the rpc type named in the docstring.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=get_vnetwork_dvs.get_vnetwork_dvs,
            is_leaf=True,
            yang_name="get-vnetwork-dvs",
            rest_name="get-vnetwork-dvs",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=False,
            extensions={u'tailf-common': {u'hidden': u'rpccmd',
                                          u'actionpoint': u'...'}},  # lost
            namespace='urn:brocade.com:mgmt:brocade-vswitch',
            defining_module='brocade-vswitch',
            yang_type='rpc',
            is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': 'get_vnetwork_dvs must be of a type compatible '
                            'with rpc',
            'defined-type': "rpc",
            'generated-type': 'YANGDynClass(...)',
        })
    self.__get_vnetwork_dvs = t
    if hasattr(self, '_set'):
        self._set()
Setter method for get_vnetwork_dvs, mapped from YANG variable /brocade_vswitch_rpc/get_vnetwork_dvs (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_get_vnetwork_dvs is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_get_vnetwork_dvs() directly. YANG Description: Shows discovered Distributed Virtual Switches
381,262
def driver_for_path(path, drivers=None):
    ext = (os.path.splitext(path)[1][1:] or path).lower()
    drivers = drivers or ImageDriver.registry if ext else {}
    for name, meta in drivers.items():
        # the metadata key was lost in extraction; GDAL's 'DMD_EXTENSION'
        # is assumed
        if ext == meta.get('DMD_EXTENSION', '').lower():
            return ImageDriver(name)
    return None
Returns the gdal.Driver for a path or None based on the file extension. Arguments: path -- file path as str with a GDAL supported file extension
381,263
def transit_generate_hmac(self, name, hmac_input, key_version=None,
                          algorithm=None, mount_point='transit'):
    # URL templates and parameter keys were lost in extraction; they are
    # reconstructed here following the Vault transit HMAC API
    if algorithm is not None:
        url = '/v1/{0}/hmac/{1}/{2}'.format(mount_point, name, algorithm)
    else:
        url = '/v1/{0}/hmac/{1}'.format(mount_point, name)
    params = {
        'input': hmac_input,
    }
    if key_version is not None:
        params['key_version'] = key_version
    return self._adapter.post(url, json=params).json()
POST /<mount_point>/hmac/<name>(/<algorithm>)

:param name:
:type name:
:param hmac_input:
:type hmac_input:
:param key_version:
:type key_version:
:param algorithm:
:type algorithm:
:param mount_point:
:type mount_point:
:return:
:rtype:
381,264
def calculate_columns(self):
    self.cols = int(math.ceil(len(self.elements) / float(self.rows)))
Assuming the number of rows is constant, work out the best number of columns to use.
381,265
def attachment(attachment: Attachment, text: str = None, speak: str = None,
               input_hint: Union[InputHints, str] = None):
    return attachment_activity(AttachmentLayoutTypes.list, [attachment],
                               text, speak, input_hint)
Returns a single message activity containing an attachment.

:Example:

    message = MessageFactory.attachment(
        CardFactory.hero_card(
            HeroCard(title='White T-Shirt',
                     images=[CardImage(url='https://example.com/whiteShirt.jpg')],
                     buttons=[CardAction(title='buy')])))
    await context.send_activity(message)

:param attachment:
:param text:
:param speak:
:param input_hint:
:return:
381,266
def _load_source_model(self):
    if hasattr(self, "source_model"):
        return self.source_model
    module = self.get_python_module()
    if hasattr(module, self.model):
        self.source_model = getattr(module, self.model)
        self.source_model.meta__verbose_name = \
            self.source_model._meta.verbose_name
        self.source_model.meta__verbose_name_plural = \
            self.source_model._meta.verbose_name_plural
        return self.source_model
    # the format string had one placeholder for two arguments
    raise ValueError(u"Model {0} does not exist in module {1}".format(
        self.model, self.module))
Loads and gets the source model of the FieldTranslation as a dynamic attribute. It is used only when deleting orphan translations (translations without a parent object associated).
381,267
def from_e164(text, origin=public_enum_domain):
    parts = [d for d in text if d.isdigit()]
    parts.reverse()
    return dns.name.from_text('.'.join(parts), origin=origin)
Convert an E.164 number in textual form into a Name object whose value is the ENUM domain name for that number. @param text: an E.164 number in textual form. @type text: str @param origin: The domain in which the number should be constructed. The default is e164.arpa. @type: dns.name.Name object or None @rtype: dns.name.Name object
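A usage sketch (requires dnspython, which supplies dns.name; the default origin is the e164.arpa domain per the docstring):

    name = from_e164("+1 650 555 1212")
    print(name)   # 2.1.2.1.5.5.5.0.5.6.1.e164.arpa.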
381,268
def init_sentry(self):
    dsn = self.config.get("SENTRY_DSN")
    if not dsn:
        return
    try:
        import sentry_sdk
    except ImportError:
        # part of the message was lost in extraction; wording assumed
        logger.error(
            "SENTRY_DSN is set but the 'sentry_sdk' package is not installed."
        )
        return
    from sentry_sdk.integrations.flask import FlaskIntegration
    sentry_sdk.init(dsn=dsn, integrations=[FlaskIntegration()])
Install Sentry handler if config defines 'SENTRY_DSN'.
381,269
def get_collection_rules_gpg(self, collection_rules):
    sig_text = self.fetch_gpg()
    sig_response = NamedTemporaryFile(suffix=".asc")
    sig_response.write(sig_text.encode())
    sig_response.file.flush()
    self.validate_gpg_sig(collection_rules.name, sig_response.name)
    self.write_collection_data(self.collection_rules_file + ".asc", sig_text)
Download the collection rules gpg signature
381,270
def _find_multiple(self, match_class, **keywds):
    # log-message literals and the scope keyword name were lost in
    # extraction; the reconstructions below are assumptions
    self._logger.debug('find all query - started')
    start_time = timeit.default_timer()
    norm_keywds = self.__normalize_args(**keywds)
    decl_matcher = self.__create_matcher(match_class, **norm_keywds)
    dtype = self.__findout_decl_type(match_class, **norm_keywds)
    recursive_ = self.__findout_recursive(**norm_keywds)
    allow_empty = self.__findout_allow_empty(**norm_keywds)
    decls = self.__findout_range(norm_keywds['decls'], dtype, recursive_)
    found = matcher.find(decl_matcher, decls, False)
    mfound = mdecl_wrapper.mdecl_wrapper_t(found)
    self._logger.debug('find all query - returned %d declarations',
                       len(mfound))
    self._logger.debug('find all query - took %f seconds',
                       timeit.default_timer() - start_time)
    if not mfound and not allow_empty:
        raise RuntimeError(
            "Multi declaration query returned 0 declarations.")
    return mfound
implementation details
381,271
def map(lst, serialize_func):
    if not isinstance(lst, list):
        return lst
    return [serialize_func(e) for e in lst]
Applies serialize_func to every element in lst
381,272
def create_identity(self, user, sp_mapping, **extra_config):
    return {
        out_attr: getattr(user, user_attr)
        for user_attr, out_attr in sp_mapping.items()
        if hasattr(user, user_attr)
    }
Generate an identity dictionary of the user based on the given mapping of desired user attributes by the SP
381,273
def flexifunction_directory_ack_encode(self, target_system, target_component,
                                       directory_type, start_index, count,
                                       result):
    return MAVLink_flexifunction_directory_ack_message(
        target_system, target_component, directory_type, start_index,
        count, result)
Acknowledge success or failure of a flexifunction command

target_system    : System ID (uint8_t)
target_component : Component ID (uint8_t)
directory_type   : 0=inputs, 1=outputs (uint8_t)
start_index      : index of first directory entry to write (uint8_t)
count            : count of directory entries to write (uint8_t)
result           : result of acknowledge, 0=fail, 1=good (uint16_t)
381,274
def _convert_xml_to_queues(response):
    if response is None or response.body is None:
        return None
    queues = _list()
    list_element = ETree.fromstring(response.body)
    # element-name literals recovered from the sample XML in the docstring
    next_marker = list_element.findtext('NextMarker') or None
    setattr(queues, 'next_marker', next_marker)
    queues_element = list_element.find('Queues')
    for queue_element in queues_element.findall('Queue'):
        queue = Queue()
        queue.name = queue_element.findtext('Name')
        metadata_root_element = queue_element.find('Metadata')
        if metadata_root_element is not None:
            queue.metadata = dict()
            for metadata_element in metadata_root_element:
                queue.metadata[metadata_element.tag] = metadata_element.text
        queues.append(queue)
    return queues
<?xml version="1.0" encoding="utf-8"?>
<EnumerationResults ServiceEndpoint="https://myaccount.queue.core.windows.net/">
  <Prefix>string-value</Prefix>
  <Marker>string-value</Marker>
  <MaxResults>int-value</MaxResults>
  <Queues>
    <Queue>
      <Name>string-value</Name>
      <Metadata>
        <metadata-name>value</metadata-name>
      </Metadata>
    </Queue>
  </Queues>
  <NextMarker />
</EnumerationResults>
381,275
def run_solr_on(solrInstance, category, id, fields):
    query = solrInstance.value + "select?q=*:*&fq=document_category:\"" + \
        category.value + "\"&fq=id:\"" + id + "\"&fl=" + fields + \
        "&wt=json&indent=on"
    response = requests.get(query)
    # response keys follow Solr's standard JSON layout
    return response.json()['response']['docs'][0]
Return the result of a solr query on the given solrInstance (Enum ESOLR), for a certain document_category (ESOLRDoc) and id
381,276
def _encode_json(obj):
    def _dump_obj(obj):
        if isinstance(obj, dict):
            return obj
        d = dict()
        for k in dir(obj):
            if not k.startswith('_'):   # prefix literal assumed
                d[k] = getattr(obj, k)
        return d
    return json.dumps(obj, default=_dump_obj)
Encode object as json str.
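A small sketch of the encoder above: plain dicts pass straight through to json.dumps, while arbitrary objects are reduced to their public attributes (key order may vary):

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    print(_encode_json({'a': 1}))      # {"a": 1}
    print(_encode_json(Point(1, 2)))   # {"x": 1, "y": 2}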
381,277
def warning(self, message, print_location=True):
    msg = "Warning"
    if print_location and (exshared.location != None):
        wline = lineno(exshared.location, exshared.text)
        wcol = col(exshared.location, exshared.text)
        wtext = line(exshared.location, exshared.text)
        msg += " at line %d, col %d" % (wline, wcol)
    msg += ": %s" % message
    if print_location and (exshared.location != None):
        msg += "\n%s" % wtext
    print(msg)
Displays warning message. Uses exshared for current location of parsing
381,278
def rand_crop(*args, padding_mode='reflection', p: float = 1.):
    "Randomized version of `crop_pad`."
    # the default padding-mode literal was lost in extraction; 'reflection'
    # is assumed. `rand_pos` is a module-level dict of randomized positions.
    return crop_pad(*args, **rand_pos, padding_mode=padding_mode, p=p)
Randomized version of `crop_pad`.
381,279
def burst_run(self):
    get_and_call_next_op = self.get_and_call_next_op
    for __ in range(self.outer_burst_op_count):
        for __ in range(self.inner_burst_op_count):
            get_and_call_next_op()
        self.call_sync_callbacks()
Run CPU as fast as Python can...
381,280
def remove_header(self, name):
    # '{0}' placeholders were evidently lost from the log messages
    if name in self.info_dict:
        self.info_dict.pop(name)
        logger.info("Removed {0} from INFO".format(name))
    if name in self.filter_dict:
        self.filter_dict.pop(name)
        logger.info("Removed {0} from FILTER".format(name))
    if name in self.format_dict:
        self.format_dict.pop(name)
        logger.info("Removed {0} from FORMAT".format(name))
    if name in self.contig_dict:
        self.contig_dict.pop(name)
        logger.info("Removed {0} from CONTIG".format(name))
    if name in self.alt_dict:
        self.alt_dict.pop(name)
        logger.info("Removed {0} from ALT".format(name))
    if name in self.other_dict:
        self.other_dict.pop(name)
        logger.info("Removed {0} from OTHER".format(name))
    return
Remove a field from the header
381,281
def name(self):
    # session key literals were lost in extraction; reconstructed from the
    # docstring ("user's name or user's email or user_id")
    if 'name' in self.session:
        return self.session['name']
    elif 'email' in self.session:
        return self.session['email']
    elif 'user_id' in self.session:
        return self.session['user_id']
    else:
        return ''
Name returns user's name or user's email or user_id :return: best guess of name to use to greet user
381,282
def start(self):
    self.running = True
    while self.running:
        self.update_prompt()
        try:
            self.cmdloop()
        except KeyboardInterrupt:
            print()
        except botocore.exceptions.BotoCoreError as e:
            print(e)
        except ParseException as e:
            print(self.engine.pformat_exc(e))
        except Exception:
            traceback.print_exc()
        self.engine.reset()
Start running the interactive session (blocking)
381,283
def _maybe_replace_path(self, match):
    path = match.group(0)
    if self._should_replace(path):
        return self._replace_path(path)
    else:
        return path
Regex replacement method that will sub paths when needed
381,284
def _collect_output(self, process, result, writer=None, stdin=None):
    stderr = codecs.getreader(self._encoding)(process.stderr)
    rr = threading.Thread(target=self._read_response, args=(stderr, result))
    rr.setDaemon(True)
    log.debug('stderr reader: %r', rr)   # log messages assumed
    rr.start()
    stdout = process.stdout
    dr = threading.Thread(target=self._read_data, args=(stdout, result))
    dr.setDaemon(True)
    log.debug('stdout reader: %r', dr)
    dr.start()
    dr.join()
    rr.join()
    if writer is not None:
        writer.join()
    process.wait()
    if stdin is not None:
        try:
            stdin.close()
        except IOError:
            pass
    stderr.close()
    stdout.close()
Drain the subprocess's output streams, writing the collected output to the result. If a writer thread (writing to the subprocess) is given, make sure it's joined before returning. If a stdin stream is given, close it before returning.
381,285
def expand_path(s):
    # On Windows, protect a literal '$\\' from expandvars; the replacement
    # token literal was lost in extraction, 'IPYTHON_TEMP' is assumed from
    # the IPython implementation this resembles
    if os.name == 'nt':
        s = s.replace('$\\', 'IPYTHON_TEMP')
    s = os.path.expandvars(os.path.expanduser(s))
    if os.name == 'nt':
        s = s.replace('IPYTHON_TEMP', '$\\')
    return s
Expand $VARS and ~names in a string, like a shell

:Examples:

    In [2]: os.environ['FOO'] = 'test'

    In [3]: expand_path('variable FOO is $FOO')
    Out[3]: 'variable FOO is test'
381,286
def list_hosts_by_cluster(kwargs=None, call=None):
    # several string literals were lost in extraction; the 'function',
    # 'cluster', 'name', 'object', and 'hosts_by_cluster' keys below are
    # reconstructed from salt-cloud conventions and the docstring
    if call != 'function':
        raise SaltCloudSystemExit(
            'The list_hosts_by_cluster function must be called with '
            '-f or --function.')
    ret = {}
    cluster_name = kwargs.get('cluster') if kwargs and 'cluster' in kwargs else None
    cluster_properties = ["name"]
    cluster_list = salt.utils.vmware.get_mors_with_properties(
        _get_si(), vim.ClusterComputeResource, cluster_properties)
    for cluster in cluster_list:
        ret[cluster['name']] = []
        for host in cluster['object'].host:
            if isinstance(host, vim.HostSystem):
                ret[cluster['name']].append(host.name)
        if cluster_name and cluster_name == cluster['name']:
            return {'hosts_by_cluster': {cluster_name: ret[cluster_name]}}
    return {'hosts_by_cluster': ret}
List hosts for each cluster; or hosts for a specified cluster in this VMware environment

To list hosts for each cluster:

CLI Example:

.. code-block:: bash

    salt-cloud -f list_hosts_by_cluster my-vmware-config

To list hosts for a specified cluster:

CLI Example:

.. code-block:: bash

    salt-cloud -f list_hosts_by_cluster my-vmware-config cluster="clusterName"
381,287
def start(self, on_done):
    # The file name, log messages, settings keys, and error strings were
    # lost in extraction; the literals below are assumptions consistent
    # with the surrounding code.
    genesis_file = os.path.join(self._data_dir, 'genesis.batch')
    try:
        with open(genesis_file, 'rb') as batch_file:
            genesis_data = genesis_pb2.GenesisData()
            genesis_data.ParseFromString(batch_file.read())
        LOGGER.info('Producing genesis block from %s', genesis_file)
    except IOError:
        raise InvalidGenesisStateError(
            "Genesis File {} specified, but unreadable".format(genesis_file))
    initial_state_root = self._context_manager.get_first_root()
    genesis_batches = [batch for batch in genesis_data.batches]
    if genesis_batches:
        scheduler = SerialScheduler(
            self._context_manager.get_squash_handler(),
            initial_state_root,
            always_persist=True)
        LOGGER.debug('Adding %s batches', len(genesis_data.batches))
        for batch in genesis_data.batches:
            scheduler.add_batch(batch)
        self._transaction_executor.execute(scheduler)
        scheduler.finalize()
        scheduler.complete(block=True)
    txn_receipts = []
    state_hash = initial_state_root
    for batch in genesis_batches:
        result = scheduler.get_batch_execution_result(batch.header_signature)
        if result is None or not result.is_valid:
            raise InvalidGenesisStateError(
                'Unable to create genesis block; batch {} is not '
                'valid'.format(batch.header_signature))
        if result.state_hash is not None:
            state_hash = result.state_hash
        txn_results = scheduler.get_transaction_execution_results(
            batch.header_signature)
        txn_receipts += self._make_receipts(txn_results)
    settings_view = SettingsView(
        self._state_view_factory.create_view(state_hash))
    name = settings_view.get_setting('sawtooth.consensus.algorithm.name')
    version = settings_view.get_setting('sawtooth.consensus.algorithm.version')
    if name is None or version is None:
        raise LocalConfigurationError(
            'Unable to start validator; consensus algorithm name and '
            'version must be set in the genesis block.')
    LOGGER.debug('Produced state hash %s for genesis block.', state_hash)
    block_builder = self._generate_genesis_block()
    block_builder.add_batches(genesis_batches)
    block_builder.set_state_hash(state_hash)
    block_publisher = self._get_block_publisher(initial_state_root)
    if not block_publisher.initialize_block(block_builder.block_header):
        LOGGER.error('Consensus refused to initialize the genesis block.')
        raise InvalidGenesisConsensusError(
            'Consensus refused to initialize the genesis block.')
    if not block_publisher.finalize_block(block_builder.block_header):
        LOGGER.error('Consensus refused to finalize the genesis block.')
        raise InvalidGenesisConsensusError(
            'Consensus refused to finalize the genesis block.')
    self._sign_block(block_builder)
    block = block_builder.build_block()
    blkw = BlockWrapper(block=block)
    LOGGER.info('Genesis block created: %s', blkw)
    self._block_manager.put([blkw.block])
    self._block_manager.persist(blkw.identifier, "commit_store")
    self._txn_receipt_store.chain_update(block, txn_receipts)
    self._chain_id_manager.save_block_chain_id(block.header_signature)
    LOGGER.debug('Deleting genesis data.')
    os.remove(genesis_file)
    if on_done is not None:
        on_done()
Starts the genesis block creation process. Will call the given `on_done` callback on successful completion.

Args:
    on_done (function): a function called on completion

Raises:
    InvalidGenesisStateError: raises this error if a genesis block is unable to be produced, or the resulting block-chain-id saved.
381,288
def _path_hash(path, transform, kwargs):
    sortedargs = ["%s:%r:%s" % (key, value, type(value))
                  for key, value in sorted(iteritems(kwargs))]
    srcinfo = "{path}:{transform}:{{{kwargs}}}".format(
        path=os.path.abspath(path),
        transform=transform,
        kwargs=",".join(sortedargs))
    return digest_string(srcinfo)
Generate a hash of source file path + transform + args
381,289
def fit(self, features, classes):
    self.ensemble.fit(features, classes)
    unique_rows = list(set([tuple(row) for row in features]))
    for row in unique_rows:
        self.feature_map[row] = self.ensemble.predict([row])[0]
Constructs the MDR ensemble from the provided training data

Parameters
----------
features: array-like {n_samples, n_features}
    Feature matrix
classes: array-like {n_samples}
    List of class labels for prediction

Returns
-------
None
381,290
def connect(receiver, signal=Any, sender=Any, weak=True):
    if signal is None:
        # the message literal was lost in extraction; wording assumed
        raise errors.DispatcherTypeError(
            'Signal cannot be None (receiver=%r sender=%r)' % (receiver, sender))
    if weak:
        receiver = saferef.safeRef(receiver, onDelete=_removeReceiver)
    senderkey = id(sender)
    if senderkey in connections:
        signals = connections[senderkey]
    else:
        connections[senderkey] = signals = {}
    if sender not in (None, Anonymous, Any):
        def remove(object, senderkey=senderkey):
            _removeSender(senderkey=senderkey)
        try:
            weakSender = weakref.ref(sender, remove)
            senders[senderkey] = weakSender
        except:
            pass
    receiverID = id(receiver)
    if signal in signals:
        receivers = signals[signal]
        _removeOldBackRefs(senderkey, signal, receiver, receivers)
    else:
        receivers = signals[signal] = []
    try:
        current = sendersBack.get(receiverID)
        if current is None:
            sendersBack[receiverID] = current = []
        if senderkey not in current:
            current.append(senderkey)
    except:
        pass
    receivers.append(receiver)
Connect receiver to sender for signal

receiver -- a callable Python object which is to receive messages/signals/events. Receivers must be hashable objects.

    If weak is True, then receiver must be weak-referencable (more precisely saferef.safeRef() must be able to create a reference to the receiver). Receivers are fairly flexible in their specification, as the machinery in the robustApply module takes care of most of the details regarding figuring out appropriate subsets of the sent arguments to apply to a given receiver.

    Note: if receiver is itself a weak reference (a callable), it will be de-referenced by the system's machinery, so *generally* weak references are not suitable as receivers, though some use might be found for the facility whereby a higher-level library passes in pre-weakrefed receiver references.

signal -- the signal to which the receiver should respond.

    If Any, receiver will receive any signal from the indicated sender (which might also be Any, but is not necessarily Any). Otherwise must be a hashable Python object other than None (DispatcherError raised on None).

sender -- the sender to which the receiver should respond.

    If Any, receiver will receive the indicated signals from any sender. If Anonymous, receiver will only receive indicated signals from send/sendExact which do not specify a sender, or specify Anonymous explicitly as the sender. Otherwise can be any python object.

weak -- whether to use weak references to the receiver.

    By default, the module will attempt to use weak references to the receiver objects. If this parameter is false, then strong references will be used.

Returns None; may raise DispatcherTypeError.
381,291
def reuse_variables(method):
    initialized_variable_scopes_eager = set()
    initialized_variable_scopes_graph = weakref.WeakKeyDictionary()

    arg_spec = inspect.getargspec(method)
    is_method = arg_spec.args and arg_spec.args[0] == "self"
    if not is_method:
        raise TypeError("reuse_variables can only be used with methods.")

    @wrapt.decorator
    def eager_test(method, obj, args, kwargs):
        if obj is None:
            obj = args[0]
        if tf.executing_eagerly() and not hasattr(obj, "_template"):
            raise ValueError(
                "reuse_variables is not supported in eager mode except in "
                "Sonnet modules.")
        return method(*args, **kwargs)

    @wrapt.decorator
    def call_method(method, obj, args, kwargs):
        if obj is None:
            obj = args[0]

        def default_context_manager(reuse=None):
            variable_scope = obj.variable_scope
            return tf.variable_scope(variable_scope, reuse=reuse)

        variable_scope_context_manager = getattr(
            obj, "_enter_variable_scope", default_context_manager)

        # NOTE: the body that entered the variable scope (choosing reuse
        # based on the initialized_variable_scopes_* sets above), opened the
        # method's tf.name_scope, and produced `out_ops` and `scope` was
        # lost in extraction; only the tail below survived.
        try:
            with tf.init_scope():
                obj._is_connected = True
                if not tf.executing_eagerly():
                    obj._add_connected_subgraph(
                        method, out_ops, scope, args, kwargs)
        except AttributeError:
            pass
        return out_ops

    return eager_test(call_method(method))
Wraps an arbitrary method so it does variable sharing.

This decorator creates variables the first time it calls `method`, and reuses them for subsequent calls. The object that calls `method` provides a `tf.VariableScope`, either as a `variable_scope` attribute or as the return value of an `_enter_variable_scope()` method.

The first time the wrapped method is invoked, it enters the caller's `tf.VariableScope` with `reuse=False`. On all subsequent calls it enters the same variable scope with `reuse=True`.

Variables are created in the context of the `tf.VariableScope` provided by the caller object. Ops are created with an additional `tf.name_scope()`, which adds a scope for the wrapped method name. For example:

```python
class MyClass(object):

  def __init__(self, name):
    with tf.variable_scope(None, default_name=name) as variable_scope:
      self.variable_scope = variable_scope

  @snt.reuse_variables
  def add_x(self, tensor):
    x = tf.get_variable("x", shape=tensor.get_shape())
    return tensor + x

module = MyClass("my_module_name")
input_tensor = tf.zeros(shape=(5,))

# This creates the variable "my_module_name/x"
# and op "my_module_name/add_x/add"
output = module.add_x(input_tensor)
```

For performance when executing eagerly it may be desirable to additionally annotate these methods using `defun`, such that they are encapsulated as graph functions. This is not recommended if your method returns a variable since the output of `defun` would be an op that returned the variable's value when evaluated (rather than the variable instance).

```python
class FooModule(snt.AbstractModule):
  def _build(self, inputs):
    return complex_math(inputs)

  @tfe.defun
  @snt.reuse_variables
  def more_complex_stuff(self, inputs):
    return more_complex_math(inputs)
```

Args:
  method: The method to wrap.

Returns:
  The wrapped method.
381,292
def add_callback(self, func):
    if func is None:
        return
    func_list = to_list(func)
    # Lazily create the callback list on first registration, then extend it.
    if not hasattr(self, 'callback_list'):
        self.callback_list = func_list
    else:
        self.callback_list.extend(func_list)
Registers a callback function (or a list of callback functions).
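A small usage sketch; the `to_list` helper is not shown in the source, so the definition below is an assumption that wraps a single value in a list:

```python
def to_list(x):
    # Assumed helper: pass lists through, wrap anything else.
    return x if isinstance(x, list) else [x]

class Hooks(object):
    add_callback = add_callback  # reuse the method defined above

hooks = Hooks()
hooks.add_callback(lambda: print("first"))
hooks.add_callback([lambda: print("second"), lambda: print("third")])
for cb in hooks.callback_list:
    cb()
```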
381,293
def _make_stream_reader(cls, stream): endian = cls._detect_endian(stream) return StreamReader(stream, endian)
Return a |StreamReader| instance wrapping *stream* and having "endian-ness" determined by the 'MM' or 'II' indicator in the TIFF stream header.
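The `_detect_endian` helper is not shown; a plausible sketch of the rule it implies, returning the `struct` module's byte-order codes (the return convention is an assumption):

```python
from io import BytesIO

def detect_endian(stream):
    # A TIFF header starts with b'II' (little-endian) or b'MM' (big-endian);
    # '<' and '>' are the struct module's corresponding byte-order codes.
    stream.seek(0)
    return '<' if stream.read(2) == b'II' else '>'

print(detect_endian(BytesIO(b'II*\x00')))  # '<'
print(detect_endian(BytesIO(b'MM\x00*')))  # '>'
```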
381,294
def _process_validation_function_s(validation_func, auto_and_wrapper=True):
    # NOTE: the error-message texts below are reconstructions; the original
    # string literals were stripped by extraction.
    if validation_func is None:
        raise ValueError('mandatory validation_func is None')
    elif not isinstance(validation_func, list):
        # a single element: wrap in a list for uniform processing
        validation_func = [validation_func]
    elif len(validation_func) == 0:
        raise ValueError('provided validation_func list is empty')

    final_list = []
    for v in validation_func:
        # mini-lambda expressions are closed and converted to plain functions
        v = as_function(v)
        if isinstance(v, tuple):
            # a (callable, help_msg) or (callable, failure_type) tuple becomes
            # an implicit _failure_raiser
            if len(v) == 2:
                if isinstance(v[1], str):
                    final_list.append(_failure_raiser(v[0], help_msg=v[1]))
                elif isinstance(v[1], type) and issubclass(v[1], WrappingFailure):
                    final_list.append(_failure_raiser(v[0], failure_type=v[1]))
                else:
                    raise TypeError(
                        'base validation function(s) not compliant with the '
                        'allowed syntax {}. Found {}.'.format(supported_syntax,
                                                              str(v)))
            else:
                raise TypeError(
                    'base validation function(s) not compliant with the '
                    'allowed syntax {}. Found {}.'.format(supported_syntax,
                                                          str(v)))
        elif callable(v):
            final_list.append(v)
        elif isinstance(v, list):
            # a nested list is an implicit and_
            final_list.append(and_(*v))
        else:
            raise TypeError(
                'base validation function(s) not compliant with the allowed '
                'syntax {}. Found {}.'.format(supported_syntax, str(v)))

    if auto_and_wrapper:
        return and_(*final_list)
    else:
        return final_list
This function handles the various ways that users may enter 'validation functions', so as to output a single callable method. Setting "auto_and_wrapper" to False allows callers to get a list of callables instead.

valid8 supports the following expressions for 'validation functions':
 * <ValidationFunc>
 * List[<ValidationFunc>(s)]. The list must not be empty.

<ValidationFunc> may either be
 * a callable or a mini-lambda expression (instance of LambdaExpression - in which case it is automatically 'closed').
 * a Tuple[callable or mini-lambda expression ; failure_type]. Where failure_type should be a subclass of valid8.Failure. In which case the tuple will be replaced with a _failure_raiser(callable, failure_type)

When the provided contents do not match the above, this function raises a ValueError. Otherwise it produces a list of callables that will typically be turned into an `and_` in the nominal case, except when this is called inside `or_` or `xor_`.

:param validation_func: the base validation function or list of base validation functions to use. A callable, a tuple(callable, help_msg_str), a tuple(callable, failure_type), or a list of several such elements. Nested lists are supported and indicate an implicit `and_`. Tuples indicate an implicit `_failure_raiser`. [mini_lambda](https://smarie.github.io/python-mini-lambda/) expressions can be used instead of callables, they will be transformed to functions automatically.
:param auto_and_wrapper: if True (default), this function returns a single callable that is an and_() of all functions. Otherwise a list is returned.
:return: a single callable (an implicit `and_` of all functions) if auto_and_wrapper is True, else the list of callables.
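A sketch of the accepted input shapes, using hypothetical checkers and assuming the resulting `and_` raises on the first failing leg:

```python
def is_positive(x):
    return x > 0

def is_even(x):
    return x % 2 == 0

# A list mixes a plain callable with a (callable, help_msg) tuple; the list
# is an implicit and_, the tuple an implicit _failure_raiser.
checker = _process_validation_function_s(
    [is_positive, (is_even, 'x must be even')])
checker(4)  # passes: both conditions hold
checker(3)  # raises: the is_even leg fails with help_msg 'x must be even'
```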
381,295
def numberOfDistalSegments(self, cells=None): if cells is None: cells = xrange(self.numberOfCells()) n = 0 for cell in cells: if self.internalDistalPermanences.nNonZerosOnRow(cell) > 0: n += 1 for permanences in self.distalPermanences: if permanences.nNonZerosOnRow(cell) > 0: n += 1 return n
Returns the total number of distal segments for these cells. A segment "exists" if its row in the matrix has any permanence values > 0. Parameters: ---------------------------- @param cells (iterable) Indices of the cells
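The counting rule can be illustrated with scipy.sparse; the library swap is an assumption, with `getnnz`/`nnz` standing in for nupic's `SparseMatrix.nNonZerosOnRow`:

```python
import scipy.sparse as sp

# Rows are cells, columns are synapses; a cell has a segment iff its row
# contains any permanence value > 0.
perms = sp.csr_matrix([[0.0, 0.2],
                       [0.0, 0.0],
                       [0.5, 0.1]])
n_segments = sum(1 for cell in range(perms.shape[0])
                 if perms.getrow(cell).nnz > 0)
print(n_segments)  # 2
```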
381,296
def post(method, hmc, uri, uri_parms, body, logon_required,
         wait_for_completion):
    assert wait_for_completion is True  # synchronous operation
    cpc_oid = uri_parms[0]
    try:
        cpc = hmc.cpcs.lookup_by_oid(cpc_oid)
    except KeyError:
        raise InvalidResourceError(method, uri)
    # Property names follow the HMC 'Set CPC Power Capping' operation; the
    # exact names are reconstructions of the stripped string literals.
    check_required_fields(method, uri, body, ['power-capping-state'])
    power_capping_state = body['power-capping-state']
    power_cap_current = body.get('power-cap-current', None)
    if power_capping_state not in ['disabled', 'enabled', 'custom']:
        raise BadRequestError(method, uri, reason=7,
                              message="Invalid power-capping-state value: "
                              "%r" % power_capping_state)
    if power_capping_state == 'enabled' and power_cap_current is None:
        raise BadRequestError(method, uri, reason=7,
                              message="Power-cap-current must be provided "
                              "when enabling power capping")
    # The cpc-/zcpc- property-name prefixes are assumed from the HMC data
    # model; both pairs are kept in sync.
    cpc.properties['cpc-power-capping-state'] = power_capping_state
    cpc.properties['cpc-power-cap-current'] = power_cap_current
    cpc.properties['zcpc-power-capping-state'] = power_capping_state
    cpc.properties['zcpc-power-cap-current'] = power_cap_current
Operation: Set CPC Power Capping (any CPC mode).
381,297
def handle_event(self, packet):
    # Acknowledge command responses. The 'ok', 'id' and 'command' key names
    # are assumptions -- the stripped literals depend on the gateway protocol.
    if packet.get('ok'):
        task = self.send_command_ack(packet['id'], packet['command'])
        self.loop.create_task(task)
Handle incoming packet from rflink gateway.
381,298
def form_sent(request, slug, template="forms/form_sent.html"): published = Form.objects.published(for_user=request.user) context = {"form": get_object_or_404(published, slug=slug)} return render_to_response(template, context, RequestContext(request))
Show the response message.
381,299
def trigger_methods(instance, args):
    for name in sorted(args):
        value = args[name]
        target = instance

        # Arguments prefixed with 'response_' or 'reply_' are delegated to
        # the response object, when present.
        if name.startswith('response_') or name.startswith('reply_'):
            name = name.replace('response_', '').replace('reply_', '')
            if hasattr(instance, '_response'):
                target = instance._response

        # Retrieve the class member for inspection.
        member = getattr(target, name, None)
        isattr = name in dir(target)
        iscallable = ismethod(member) and not isfunction(member)

        if not iscallable and not isattr:
            raise PookInvalidArgument('Unsupported argument: {}'.format(name))

        # Trigger the method, or set the attribute.
        if iscallable:
            member(value)
        else:
            setattr(target, name, value)
Triggers specific class methods using a simple reflection mechanism based on the given input dictionary params. Arguments: instance (object): target instance to dynamically trigger methods on. args (iterable): input arguments used to trigger methods or set attributes on the target. Returns: None
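A usage sketch with a hypothetical target object; note that plain attributes must already exist on the target (be visible in `dir`) or a PookInvalidArgument is raised:

```python
class Target(object):
    label = None                 # existing attribute: set via setattr

    def __init__(self):
        self.times = 0

    def times_called(self, n):   # bound method: called with the value
        self.times = n

t = Target()
trigger_methods(t, {'times_called': 3, 'label': 'smoke-test'})
print(t.times)  # 3
print(t.label)  # 'smoke-test'
```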