Dataset columns: Unnamed: 0 (int64, values 0 – 389k), code (string, lengths 26 – 79.6k), docstring (string, lengths 1 – 46.9k)
def polygonize(layer): output_layer_name = polygonize_steps[] output_layer_name = output_layer_name % layer.keywords[] gdal_layer_name = polygonize_steps[] if layer.keywords.get() == : output_field = exposure_type_field else: output_field = hazard_value_field input_raster = gdal.Open(layer.source(), gdal.GA_ReadOnly) srs = osr.SpatialReference() srs.ImportFromWkt(input_raster.GetProjectionRef()) temporary_dir = temp_dir(sub_dir=) out_shapefile = unique_filename( suffix= % output_layer_name, dir=temporary_dir) driver = ogr.GetDriverByName("ESRI Shapefile") destination = driver.CreateDataSource(out_shapefile) output_layer = destination.CreateLayer(gdal_layer_name, srs) field_name = output_field[][0:10] fd = ogr.FieldDefn(field_name, ogr.OFTInteger) output_layer.CreateField(fd) active_band = layer.keywords.get(, 1) input_band = input_raster.GetRasterBand(active_band) gdal.Polygonize(input_band, None, output_layer, 0, [], callback=None) destination.Destroy() vector_layer = QgsVectorLayer(out_shapefile, output_layer_name, ) } check_layer(vector_layer) return vector_layer
Polygonize a raster layer into a vector layer using GDAL. Issue https://github.com/inasafe/inasafe/issues/3183 :param layer: The raster layer to polygonize. :type layer: QgsRasterLayer :return: Polygonized vector layer. :rtype: QgsVectorLayer .. versionadded:: 4.0
9,601
def _clamp_value(value, minimum, maximum):
    if maximum < minimum:
        raise ValueError
    if value < minimum:
        return minimum
    elif value > maximum:
        return maximum
    else:
        return value
Clamp a value to fit between a minimum and a maximum. * If ``value`` is between ``minimum`` and ``maximum``, return ``value`` * If ``value`` is below ``minimum``, return ``minimum`` * If ``value`` is above ``maximum``, return ``maximum`` Args: value (float or int): The number to clamp minimum (float or int): The lowest allowed return value maximum (float or int): The highest allowed return value Returns: float or int: the clamped value Raises: ValueError: if maximum < minimum Example: >>> _clamp_value(3, 5, 10) 5 >>> _clamp_value(11, 5, 10) 10 >>> _clamp_value(8, 5, 10) 8
9,602
def task_collection_thread_handler(self, results_queue): while self.tasks_to_add and not self.errors: max_tasks = self._max_tasks_per_request chunk_tasks_to_add = [] with self._pending_queue_lock: while len(chunk_tasks_to_add) < max_tasks and self.tasks_to_add: chunk_tasks_to_add.append(self.tasks_to_add.pop()) if chunk_tasks_to_add: self._bulk_add_tasks(results_queue, chunk_tasks_to_add)
Main method for the worker to run. Pops a chunk of tasks off the collection of pending tasks to be added and submits them to be added. :param collections.deque results_queue: Queue for worker to output results to
9,603
def read_shakemap(self, haz_sitecol, assetcol): oq = self.oqparam E = oq.number_of_ground_motion_fields oq.risk_imtls = oq.imtls or self.datastore.parent[].imtls extra = self.riskmodel.get_extra_imts(oq.risk_imtls) if extra: logging.warning( % extra) logging.info() with self.monitor(): smap = oq.shakemap_id if oq.shakemap_id else numpy.load( oq.inputs[]) sitecol, shakemap, discarded = get_sitecol_shakemap( smap, oq.imtls, haz_sitecol, oq.asset_hazard_distance[], oq.discard_assets) if len(discarded): self.datastore[] = discarded assetcol = assetcol.reduce_also(sitecol) logging.info() with self.monitor(): imts, gmfs = to_gmfs( shakemap, oq.spatial_correlation, oq.cross_correlation, oq.site_effects, oq.truncation_level, E, oq.random_seed, oq.imtls) save_gmf_data(self.datastore, sitecol, gmfs, imts) return sitecol, assetcol
Enabled only if there is a shakemap_id parameter in the job.ini. Download, unzip, parse USGS shakemap files and build a corresponding set of GMFs which are then filtered with the hazard site collection and stored in the datastore.
9,604
def convert_out(self, obj): newobj = super(ProcessedImageProduct, self).convert_out(obj) if newobj: hdulist = newobj.open() hdr = hdulist[0].header if not in hdr: hdr[] = str(uuid.uuid1()) return newobj
Write EMIRUUID header on reduction
9,605
def delete(self): if not self._created: return try: node1 = self._nodes[0]["node"] adapter_number1 = self._nodes[0]["adapter_number"] port_number1 = self._nodes[0]["port_number"] except IndexError: return try: yield from node1.delete("/adapters/{adapter_number}/ports/{port_number}/nio".format(adapter_number=adapter_number1, port_number=port_number1), timeout=120) except aiohttp.web.HTTPNotFound: pass try: node2 = self._nodes[1]["node"] adapter_number2 = self._nodes[1]["adapter_number"] port_number2 = self._nodes[1]["port_number"] except IndexError: return try: yield from node2.delete("/adapters/{adapter_number}/ports/{port_number}/nio".format(adapter_number=adapter_number2, port_number=port_number2), timeout=120) except aiohttp.web.HTTPNotFound: pass yield from super().delete()
Delete the link and free the resources
9,606
def _action_allowed(self, action): if getattr(self.layer.participation_settings, .format(action)) is False: return False else: return getattr(self.participation_settings, .format(action))
participation actions can be disabled on layer level, or disabled on a per node basis
9,607
def _expr2bddnode(expr):
    if expr.is_zero():
        return BDDNODEZERO
    elif expr.is_one():
        return BDDNODEONE
    else:
        top = expr.top
        # register the top variable with the BDD variable table
        _ = bddvar(top.names, top.indices)
        root = top.uniqid
        lo = _expr2bddnode(expr.restrict({top: 0}))
        hi = _expr2bddnode(expr.restrict({top: 1}))
        return _bddnode(root, lo, hi)
Convert an expression into a BDD node.
9,608
def get_select_sql(self): return .format( self.field.get_select_sql(), self.name.upper(), self.get_field_identifier(), self.get_over(), )
Calculate the difference between this record's value and the lag/lead record's value
9,609
def tiered_alignment(in_bam, tier_num, multi_mappers, extra_args, genome_build, pair_stats, work_dir, dirs, config): nomap_fq1, nomap_fq2 = select_unaligned_read_pairs(in_bam, "tier{}".format(tier_num), work_dir, config) if nomap_fq1 is not None: base_name = "{}-tier{}out".format(os.path.splitext(os.path.basename(in_bam))[0], tier_num) config = copy.deepcopy(config) dirs = copy.deepcopy(dirs) config["algorithm"]["bam_sort"] = "queryname" config["algorithm"]["multiple_mappers"] = multi_mappers config["algorithm"]["extra_align_args"] = ["-i", int(pair_stats["mean"]), int(pair_stats["std"])] + extra_args out_bam, ref_file = align_to_sort_bam(nomap_fq1, nomap_fq2, lane.rg_names(base_name, base_name, config), genome_build, "novoalign", dirs, config, dir_ext=os.path.join("hydra", os.path.split(nomap_fq1)[0])) return out_bam else: return None
Perform the alignment of non-mapped reads from previous tier.
9,610
def put(self, storagemodel:object, modeldefinition = None) -> StorageQueueModel: try: message = modeldefinition[].put_message(storagemodel._queuename, storagemodel.getmessage()) storagemodel.mergemessage(message) except Exception as e: storagemodel = None msg = .format(storagemodel._queuename, storagemodel.content, e) raise AzureStorageWrapException(msg=msg) finally: return storagemodel
insert queue message into storage
9,611
def filter_accept_reftrack(self, reftrack):
    if reftrack.status() in self._forbidden_status:
        return False
    if reftrack.get_typ() in self._forbidden_types:
        return False
    if reftrack.uptodate() in self._forbidden_uptodate:
        return False
    if reftrack.alien() in self._forbidden_alien:
        return False
    return True
Return True, if the filter accepts the given reftrack :param reftrack: the reftrack to filter :type reftrack: :class:`jukeboxcore.reftrack.Reftrack` :returns: True, if the filter accepts the reftrack :rtype: :class:`bool` :raises: None
9,612
def p_for_sentence_start(p): gl.LOOPS.append((, p[2])) p[0] = None if p[4] is None or p[6] is None or p[7] is None: return if is_number(p[4], p[6], p[7]): if p[4].value != p[6].value and p[7].value == 0: warning(p.lineno(5), ) if p[4].value > p[6].value and p[7].value > 0: warning(p.lineno(5), ) if p[4].value < p[6].value and p[7].value < 0: warning(p.lineno(2), ) id_type = common_type(common_type(p[4], p[6]), p[7]) variable = SYMBOL_TABLE.access_var(p[2], p.lineno(2), default_type=id_type) if variable is None: return variable.accessed = True expr1 = make_typecast(variable.type_, p[4], p.lineno(3)) expr2 = make_typecast(variable.type_, p[6], p.lineno(5)) expr3 = make_typecast(variable.type_, p[7], p.lexer.lineno) p[0] = make_sentence(, variable, expr1, expr2, expr3)
for_start : FOR ID EQ expr TO expr step
9,613
def get_name_type_dict(self):
    attrs = self.get_attributes()
    types = self.get_types()
    d = dict()
    for i, a in enumerate(attrs):
        d[a] = types[i]
    return d
Returns a dictionary of the type {'column_name': data_type, ...} :return: dict
9,614
def _make_txn_selector(self): if self._transaction_id is not None: return TransactionSelector(id=self._transaction_id) if self._read_timestamp: key = "read_timestamp" value = _datetime_to_pb_timestamp(self._read_timestamp) elif self._min_read_timestamp: key = "min_read_timestamp" value = _datetime_to_pb_timestamp(self._min_read_timestamp) elif self._max_staleness: key = "max_staleness" value = _timedelta_to_duration_pb(self._max_staleness) elif self._exact_staleness: key = "exact_staleness" value = _timedelta_to_duration_pb(self._exact_staleness) else: key = "strong" value = True options = TransactionOptions( read_only=TransactionOptions.ReadOnly(**{key: value}) ) if self._multi_use: return TransactionSelector(begin=options) else: return TransactionSelector(single_use=options)
Helper for :meth:`read`.
9,615
async def remove(self, *instances, using_db=None) -> None: db = using_db if using_db else self.model._meta.db if not instances: raise OperationalError("remove() called on no instances") through_table = Table(self.field.through) if len(instances) == 1: condition = (getattr(through_table, self.field.forward_key) == instances[0].id) & ( getattr(through_table, self.field.backward_key) == self.instance.id ) else: condition = (getattr(through_table, self.field.backward_key) == self.instance.id) & ( getattr(through_table, self.field.forward_key).isin([i.id for i in instances]) ) query = db.query_class.from_(through_table).where(condition).delete() await db.execute_query(str(query))
Removes one or more of ``instances`` from the relation.
9,616
def list_engines_by_priority(engines=None):
    if engines is None:
        engines = ENGINES
    return sorted(engines, key=operator.methodcaller("priority"))
Return a list of engines supported sorted by each priority.
9,617
def platform_cache_dir(): if WIN32: dpath_ = elif LINUX: dpath_ = elif DARWIN: dpath_ = else: raise NotImplementedError( % (sys.platform,)) dpath = normpath(expanduser(dpath_)) return dpath
Returns a directory which should be writable for any application. This should be used for temporary deletable data.
9,618
def _spinboxValueChanged(self, index, spinBox=None): if spinBox is None: spinBox = self.sender() assert spinBox, "spinBox not defined and not the sender" logger.debug("{} sigContentsChanged signal (spinBox)" .format("Blocked" if self.signalsBlocked() else "Emitting")) self.sigContentsChanged.emit(UpdateReason.COLLECTOR_SPIN_BOX)
Is called when a spin box value was changed. Updates the spin boxes and sets other combo boxes having the same index to the fake dimension of length 1.
9,619
def creep_data(data_set=): if not data_available(data_set): download_data(data_set) path = os.path.join(data_path, data_set) tar_file = os.path.join(path, ) tar = tarfile.open(tar_file) print() tar.extractall(path=path) tar.close() all_data = np.loadtxt(os.path.join(data_path, data_set, )) y = all_data[:, 1:2].copy() features = [0] features.extend(range(2, 31)) X = all_data[:, features].copy() return data_details_return({: X, : y}, data_set)
Brun and Yoshida's metal creep rupture data.
9,620
def consume_token(self, tokens, index, tokens_len): del tokens_len consumption_ended = False q_type = self.quote_type begin_literal_type = getattr(TokenType, "Begin{0}QuotedLiteral".format(q_type)) end_literal_type = getattr(TokenType, "End{0}QuotedLiteral".format(q_type)) if (index != self.begin and tokens[index].type == begin_literal_type): assert _RE_QUOTE_TYPE.match(tokens[index].content[0]) line_tokens = _scan_for_tokens(tokens[index].content[1:]) end_type = getattr(TokenType, "End{0}QuotedLiteral".format(q_type)) replacement = [Token(type=end_type, content=tokens[index].content[0], line=tokens[index].line, col=tokens[index].col)] for after in line_tokens: replacement.append(Token(type=after.type, content=after.content, line=(tokens[index].line + after.line - 1), col=(tokens[index].col + after.col - 1))) tokens = _replace_token_range(tokens, index, index + 1, replacement) consumption_ended = True if tokens[index].type == end_literal_type: consumption_ended = True if consumption_ended: end = index + 1 pasted = "" for i in range(self.begin, end): pasted += tokens[i].content tokens = _replace_token_range(tokens, self.begin, end, [Token(type=TokenType.QuotedLiteral, content=pasted, line=tokens[self.begin].line, col=tokens[self.begin].col)]) return (self.begin, len(tokens), tokens)
Consume a token. Returns tuple of (tokens, tokens_len, index) when consumption is completed and tokens have been merged together.
9,621
def train(self): self.stamp_start = time.time() for iteration, batch in tqdm.tqdm(enumerate(self.iter_train), desc=, total=self.max_iter, ncols=80): self.epoch = self.iter_train.epoch self.iteration = iteration if self.interval_validate and \ self.iteration % self.interval_validate == 0: self.validate() batch = map(datasets.transform_lsvrc2012_vgg16, batch) in_vars = utils.batch_to_vars(batch, device=self.device) self.model.zerograds() loss = self.model(*in_vars) if loss is not None: loss.backward() self.optimizer.update() lbl_true = zip(*batch)[1] lbl_pred = chainer.functions.argmax(self.model.score, axis=1) lbl_pred = chainer.cuda.to_cpu(lbl_pred.data) acc = utils.label_accuracy_score( lbl_true, lbl_pred, self.model.n_class) self._write_log(**{ : self.epoch, : self.iteration, : time.time() - self.stamp_start, : float(loss.data), : acc[0], : acc[1], : acc[2], : acc[3], }) if iteration >= self.max_iter: self._save_model() break
Train the network using the training dataset. Parameters ---------- None Returns ------- None
9,622
def evalrepr(self): if self.is_model(): return self.get_fullname() else: return self.parent.evalrepr + "." + self.name
Evaluable repr
9,623
def _put_key(file_path, dest_key=None, overwrite=True): ret = api("POST /3/PutKey?destination_key={}&overwrite={}".format(dest_key if dest_key else , overwrite), filename=file_path) return ret["destination_key"]
Upload given file into DKV and save it under given key as raw object. :param dest_key: name of destination key in DKV :param file_path: path to file to upload :return: key name if object was uploaded successfully
9,624
def copy(self):
    copied = Trimesh()
    # copy data, visuals and metadata, but leave caches empty
    copied._data.data = copy.deepcopy(self._data.data)
    copied.visual = self.visual.copy()
    copied.metadata = copy.deepcopy(self.metadata)
    if self._center_mass is not None:
        copied.center_mass = self.center_mass
    copied._density = self._density
    copied._cache.clear()
    return copied
Safely get a copy of the current mesh. Copied objects will have emptied caches to avoid memory issues and so may be slow on initial operations until caches are regenerated. Current object will *not* have its cache cleared. Returns --------- copied : trimesh.Trimesh Copy of current mesh
9,625
def update(self):
    der = self.subpars
    self(numpy.clip(1. - der.c1 - der.c3, 0., 1.))
Update |C2| based on :math:`c_2 = 1 - c_1 - c_3`. Examples: The following examples show that the calculated value of |C2| is clipped when too low or too high: >>> from hydpy.models.hstream import * >>> parameterstep('1d') >>> derived.c1 = 0.6 >>> derived.c3 = 0.1 >>> derived.c2.update() >>> derived.c2 c2(0.3) >>> derived.c1 = 1.6 >>> derived.c2.update() >>> derived.c2 c2(0.0) >>> derived.c1 = -1.6 >>> derived.c2.update() >>> derived.c2 c2(1.0)
9,626
def info(): try: platform_info = { : platform.system(), : platform.release(), } except IOError: platform_info = { : , : , } implementation_info = _implementation() urllib3_info = {: urllib3.__version__} chardet_info = {: chardet.__version__} pyopenssl_info = { : None, : , } if OpenSSL: pyopenssl_info = { : OpenSSL.__version__, : % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, } cryptography_info = { : getattr(cryptography, , ), } idna_info = { : getattr(idna, , ), } system_ssl = ssl.OPENSSL_VERSION_NUMBER system_ssl_info = { : % system_ssl if system_ssl is not None else } return { : platform_info, : implementation_info, : system_ssl_info, : pyopenssl is not None, : pyopenssl_info, : urllib3_info, : chardet_info, : cryptography_info, : idna_info, : { : requests_version, }, }
Generate information for a bug report.
9,627
def alias_composition(self, composition_id, alias_id): self._alias_id(primary_id=composition_id, equivalent_id=alias_id)
Adds an ``Id`` to a ``Composition`` for the purpose of creating compatibility. The primary ``Id`` of the ``Composition`` is determined by the provider. The new ``Id`` is an alias to the primary ``Id``. If the alias is a pointer to another composition, it is reassigned to the given composition ``Id``. arg: composition_id (osid.id.Id): the ``Id`` of a ``Composition`` arg: alias_id (osid.id.Id): the alias ``Id`` raise: AlreadyExists - ``alias_id`` is in use as a primary ``Id`` raise: NotFound - ``composition_id`` not found raise: NullArgument - ``composition_id`` or ``alias_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
9,628
def parse_xml_node(self, node): self.sequence = int(node.getAttributeNS(RTS_NS, )) c = node.getElementsByTagNameNS(RTS_NS, ) if c.length != 1: raise InvalidParticipantNodeError self.target_component = TargetExecutionContext().parse_xml_node(c[0]) for c in get_direct_child_elements_xml(node, prefix=RTS_EXT_NS, local_name=): name, value = parse_properties_xml(c) self._properties[name] = value return self
Parse an xml.dom Node object representing a condition into this object.
9,629
def average(numbers, numtype='float'):
    # use Decimal arithmetic when requested, otherwise plain floats
    if numtype == 'decimal':
        return Decimal(sum(numbers)) / len(numbers)
    else:
        return float(sum(numbers)) / len(numbers)
Calculates the average or mean of a list of numbers. Args: numbers: a list of integers or floating point numbers. numtype: string, 'decimal' or 'float'; the type of number to return. Returns: The average (mean) of the numbers as a floating point number or a Decimal object. Requires: The decimal module
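A short usage sketch of the function above (illustrative only; it assumes Decimal has been imported from the standard-library decimal module, as the function body requires):

>>> average([1, 2, 3, 4])
2.5
>>> average([1, 2, 3, 4], numtype='decimal')
Decimal('2.5')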
9,630
def find(self, name): collectors = self.get_collectors() for collector in collectors: if name.lower() == collector[].lower(): self.collector_id = collector[] return collector return {: }
Returns a dict of collector's details if found. Args: name (str): name of collector searching for
9,631
def run_command(self, codeobj):
    try:
        value, stdout = yield from self.attempt_exec(codeobj, self.namespace)
    except Exception:
        yield from self.send_exception()
        return
    else:
        yield from self.send_output(value, stdout)
Execute a compiled code object, and write the output back to the client.
9,632
def clients(self, protocol=None, groups=None):
    groups = self.__group_replace_eval_by_genuine__(groups)
    groups = self.check_parameters_for_validity(groups, "group", self.client_types())
    q = self.query(Client)
    if groups:
        q = q.filter(Client.stype.in_(groups))
    q = q.order_by(Client.id)
    return list(q)
Returns a list of :py:class:`.Client` for the specific query by the user. Keyword Parameters: protocol Ignored. groups The groups (types) to which the clients belong, from ('Genuine', 'Impostor'). Note that 'eval' is an alias for 'Genuine'. If no groups are specified, then both genuine clients and impostors are listed. Returns: A list containing all the clients which have the given properties.
9,633
def play_sync(self):
    self.play()
    logger.info("Playing synchronously")
    try:
        time.sleep(0.05)
        logger.debug("Wait for playing to start")
        while self.is_playing():
            time.sleep(0.05)
    except DBusException:
        logger.error(
            "Cannot play synchronously any longer as DBus calls timed out."
        )
Play the video and block whilst the video is playing
9,634
def err_exit(msg, rc=1):
    print(msg, file=sys.stderr)
    sys.exit(rc)
Print msg to stderr and exit with rc.
9,635
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    first = datetime.datetime(year, month, 1, hour, minute)
    # shift to the first requested weekday of the month
    weekdayone = first.replace(day=((dayofweek - first.isoweekday()) % 7) + 1)
    wd = weekdayone + ((whichweek - 1) * ONEWEEK)
    if (wd.month != month):
        # requested occurrence overflows the month, fall back one week
        wd -= ONEWEEK
    return wd
dayofweek == 0 means Sunday, whichweek 5 means last instance
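An illustrative call of the function above (assuming the module's datetime import and a one-week ONEWEEK timedelta): the second Sunday of March 2021 at 02:00, i.e. the US DST start, is obtained with dayofweek=0 (Sunday) and whichweek=2:

>>> picknthweekday(2021, 3, 0, 2, 0, 2)
datetime.datetime(2021, 3, 14, 2, 0)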
9,636
def most_visited_venues_card(num=10): if spectator_apps.is_enabled(): object_list = most_visited_venues(num=num) object_list = chartify(object_list, , cutoff=1) return { : , : , : object_list, }
Displays a card showing the Venues that have the most Events. In spectator_core tags, rather than spectator_events so it can still be used on core pages, even if spectator_events isn't installed.
9,637
def items(self) -> Iterable[Tuple[str, Any]]: return [(opt.name, opt.value()) for name, opt in self._options.items()]
An iterable of (name, value) pairs. .. versionadded:: 3.1
9,638
def restore(self, state): selector = DataStreamSelector.FromString(state.get(u)) if selector != self.selector: raise ArgumentError("Attempted to restore a BufferedStreamWalker with a different selector", selector=self.selector, serialized_data=state) self.seek(state.get(u), target="offset")
Restore a previous state of this stream walker. Raises: ArgumentError: If the state refers to a different selector or the offset is invalid.
9,639
def get_default_config(self): config = super(NumaCollector, self).get_default_config() config.update({ : , : self.find_binary(), }) return config
Returns the default collector settings
9,640
def _set_mode(self, discover_mode, connect_mode): payload = struct.pack("<BB", discover_mode, connect_mode) response = self._send_command(6, 1, payload) result, = unpack("<H", response.payload) if result != 0: return False, {: , : result} return True, None
Set the mode of the BLED112, used to enable and disable advertising. To enable advertising, use 4, 2. To disable advertising use 0, 0. Args: discover_mode (int): The discoverability mode, 0 for off, 4 for on (user data) connect_mode (int): The connectability mode, 0 for off, 2 for undirected connectable
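A hypothetical usage sketch of the call above (the name dongle stands in for whatever object exposes _set_mode; it is not taken from the source):

success, info = dongle._set_mode(4, 2)   # start undirected connectable advertising with user data
success, info = dongle._set_mode(0, 0)   # stop advertising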
9,641
def iter_package_families(paths=None):
    for path in (paths or config.packages_path):
        repo = package_repository_manager.get_repository(path)
        for resource in repo.iter_package_families():
            yield PackageFamily(resource)
Iterate over package families, in no particular order. Note that multiple package families with the same name can be returned. Unlike packages, families later in the searchpath are not hidden by earlier families. Args: paths (list of str, optional): paths to search for package families, defaults to `config.packages_path`. Returns: `PackageFamily` iterator.
9,642
def close(self): self.time_to_quit = True self.close_window.release() if self.child.is_alive(): self.child.join(1) self.child.terminate() self.mavlink_message_queue_handler.join() self.event_queue_lock.acquire() self.event_queue.put(MissionEditorEvent(me_event.MEE_TIME_TO_QUIT)); self.event_queue_lock.release()
close the Mission Editor window
9,643
def send_command(self, *args, **kwargs): if len(args) >= 2: expect_string = args[1] else: expect_string = kwargs.get("expect_string") if expect_string is None: expect_string = r"(OK|ERROR|Command not recognized\.)" expect_string = self.RETURN + expect_string + self.RETURN kwargs.setdefault("expect_string", expect_string) output = super(CiscoSSHConnection, self).send_command(*args, **kwargs) return output
Send command to network device retrieve output until router_prompt or expect_string By default this method will keep waiting to receive data until the network device prompt is detected. The current network device prompt will be determined automatically. command_string = command to execute expect_string = pattern to search for uses re.search (use raw strings) delay_factor = decrease the initial delay before we start looking for data max_loops = number of iterations before we give up and raise an exception strip_prompt = strip the trailing prompt from the output strip_command = strip the leading command from the output
9,644
def build_listen(self, listen_node): proxy_name = listen_node.listen_header.proxy_name.text service_address_node = listen_node.listen_header.service_address config_block_lines = self.__build_config_block( listen_node.config_block) host, port = , if isinstance(service_address_node, pegnode.ServiceAddress): host = service_address_node.host.text port = service_address_node.port.text else: for line in config_block_lines: if isinstance(line, config.Bind): host, port = line.host, line.port break else: raise Exception( ) return config.Listen( name=proxy_name, host=host, port=port, config_block=config_block_lines)
parse `listen` sections, and return a config.Listen Args: listen_node (TreeNode): Description Returns: config.Listen: an object
9,645
def _get_bgp_route_attr(self, destination, vrf, next_hop, ip_version=4): CMD_SHIBNV = search_re_dict = { "aspath": { "re": r"AS-Path: ([\d\(\)]([\d\(\) ])*)", "group": 1, "default": "", }, "bgpnh": { "re": r"[^|\\n][ ]{4}(" + IP_ADDR_REGEX + r")", "group": 1, "default": "", }, "bgpfrom": { "re": r"from (" + IP_ADDR_REGEX + r")", "group": 1, "default": "", }, "bgpcomm": { "re": r" Community: ([\w\d\-\: ]+)", "group": 1, "default": "", }, "bgplp": {"re": r"localpref (\d+)", "group": 1, "default": ""}, "bgpie": {"re": r"^: (\w+),", "group": 1, "default": ""}, "vrfimp": { "re": r"Imported from [\S]+ \(VRF (\S+)\)", "group": 1, "default": "", }, } bgp_attr = {} outbgp = self._send_command() matchbgpattr = RE_BGP_PROTO_TAG.match(outbgp) if not matchbgpattr: return bgp_attr bgpas = matchbgpattr.group(1) if ip_version == 4: bgpcmd = "show ip bgp vrf {vrf} {destination}".format( vrf=vrf, destination=destination ) outbgp = self._send_command(bgpcmd) outbgpsec = outbgp.split("Path type") if len(outbgpsec) == 1: return bgp_attr for bgppath in outbgpsec[1:]: if "is best path" not in bgppath: continue for key in search_re_dict: matchre = re.search(search_re_dict[key]["re"], bgppath) if matchre: groupnr = int(search_re_dict[key]["group"]) search_re_dict[key]["result"] = matchre.group(groupnr) else: search_re_dict[key]["result"] = search_re_dict[key]["default"] bgpnh = search_re_dict["bgpnh"]["result"] if ( not (search_re_dict["bgpie"]["result"] in ["redist", "local"]) ) and (bgpnh != next_hop): continue bgpcmd = CMD_SHIBNV.format(vrf=vrf, neigh=bgpnh) outbgpnei = self._send_command(bgpcmd) matchbgpras = RE_BGP_REMOTE_AS.search(outbgpnei) if matchbgpras: bgpras = matchbgpras.group(1) else: bgpie = search_re_dict["bgpie"]["result"] if bgpie == "external": bgpras = bgpie.split(" ")[0].replace("(", "") elif bgpie == "internal": bgpras = bgpas else: bgpras = "" bothcomm = [] extcomm = [] stdcomm = search_re_dict["bgpcomm"]["result"].split() commsplit = bgppath.split("Extcommunity:") if len(commsplit) == 2: for line in commsplit[1].split("\n")[1:]: matchcommun = RE_BGP_COMMUN.match(line) if matchcommun: extcomm.append(matchcommun.group(1)) else: break bothcomm = stdcomm + extcomm bgp_attr = { "as_path": search_re_dict["aspath"]["result"].strip(), "remote_address": search_re_dict["bgpfrom"]["result"], "local_preference": int(search_re_dict["bgplp"]["result"]), "communities": bothcomm, "local_as": helpers.as_number(bgpas), } if bgpras: bgp_attr["remote_as"] = helpers.as_number(bgpras) else: bgp_attr["remote_as"] = 0 return bgp_attr
BGP protocol attributes for get_route_to. Only IPv4 supported
9,646
def register(linter):
    linter.register_checker(TypeChecker(linter))
    linter.register_checker(IterableChecker(linter))
required method to auto register this checker
9,647
def modify( login, password=None, password_hashed=False, domain=None, profile=None, script=None, drive=None, homedir=None, fullname=None, account_desc=None, account_control=None, machine_sid=None, user_sid=None, reset_login_hours=False, reset_bad_password_count=False, ): *Inara Serra**V:\\\\serenity\\jane\\profile* ret = flags = { : , : , : , : , : , : , : , : , : , : , } provided = { : domain, : fullname, : account_desc, : homedir, : drive, : profile, : script, : account_control, : user_sid, : machine_sid, } if password: ret = create(login, password, password_hashed)[login] if ret not in [, , ]: return {login: ret} elif login not in list_users(False): return {login: } current = get_user(login, hashes=True) changes = {} for key, val in provided.items(): if key in [, ]: if val is not None and key in current and not current[key].endswith(six.text_type(val)): changes[key] = six.text_type(val) elif key in []: if val is not None: if val.startswith(): val = val[1:-1] new = [] for f in val.upper(): if f not in [, , , , ]: logmsg = .format(f) log.warning(logmsg) else: new.append(f) changes[key] = "[{flags}]".format(flags="".join(new)) else: if val is not None and key in current and current[key] != val: changes[key] = val if changes or reset_login_hours or reset_bad_password_count: cmds = [] for change in changes: cmds.append(.format( flag=flags[change], value=_quote_args(changes[change]), )) if reset_login_hours: cmds.append() if reset_bad_password_count: cmds.append() res = __salt__[]( .format( login=_quote_args(login), changes=" ".join(cmds), ), ) if res[] > 0: return {login: res[] if in res else res[]} if ret != : ret = return {login: ret}
Modify user account login : string login name password : string password password_hashed : boolean set if password is a nt hash instead of plain text domain : string users domain profile : string profile path script : string logon script drive : string home drive homedir : string home directory fullname : string full name account_desc : string account description machine_sid : string specify the machines new primary group SID or rid user_sid : string specify the users new primary group SID or rid account_control : string specify user account control properties .. note:: Only the following can be set: - N: No password required - D: Account disabled - H: Home directory required - L: Automatic Locking - X: Password does not expire reset_login_hours : boolean reset the users allowed logon hours reset_bad_password_count : boolean reset the stored bad login counter .. note:: if user is absent and password is provided, the user will be created CLI Example: .. code-block:: bash salt '*' pdbedit.modify inara fullname='Inara Serra' salt '*' pdbedit.modify simon password=r1v3r salt '*' pdbedit.modify jane drive='V:' homedir='\\\\serenity\\jane\\profile' salt '*' pdbedit.modify mal account_control=NX
9,648
def advance_for_next_slice(self, recovery_slice=False):
    self.slice_start_time = None
    self.slice_request_id = None
    self.slice_retries = 0
    self.acquired_once = False
    if recovery_slice:
        self.slice_id += 2
    else:
        self.slice_id += 1
Advance self for next slice. Args: recovery_slice: True if this slice is running recovery logic. See handlers.MapperWorkerCallbackHandler._attempt_slice_recovery for more info.
9,649
def Hide(self, waitTime: float = OPERATION_WAIT_TIME) -> bool: return self.ShowWindow(SW.Hide, waitTime)
Call native `ShowWindow(SW.Hide)`. waitTime: float Return bool, True if succeed otherwise False.
9,650
def delete_datapoints_in_time_range(self, start_dt=None, end_dt=None): start_dt = to_none_or_dt(validate_type(start_dt, datetime.datetime, type(None))) end_dt = to_none_or_dt(validate_type(end_dt, datetime.datetime, type(None))) params = {} if start_dt is not None: params[] = isoformat(start_dt) if end_dt is not None: params[] = isoformat(end_dt) self._conn.delete("/ws/DataPoint/{stream_id}{querystring}".format( stream_id=self.get_stream_id(), querystring="?" + urllib.parse.urlencode(params) if params else "", ))
Delete datapoints from this stream between the provided start and end times If neither a start or end time is specified, all data points in the stream will be deleted. :param start_dt: The datetime after which data points should be deleted or None if all data points from the beginning of time should be deleted. :param end_dt: The datetime before which data points should be deleted or None if all data points until the current time should be deleted. :raises devicecloud.DeviceCloudHttpException: in the case of an unexpected http error
9,651
def _update_doc(self, func_doc):
    deprecated_doc = "Deprecated"
    if self.tip_info:
        deprecated_doc = "{}: {}".format(deprecated_doc, self.tip_info)
    if func_doc:
        func_doc = "{}\n{}".format(deprecated_doc, func_doc)
    return func_doc
Update the docstring: merge and re-format the original documentation so that the first line is deprecated_doc ("Deprecated: tip_info") and the following line is the original func_doc.
9,652
def push(cpu, value, size): assert size in (8, 16, cpu.address_bit_size) cpu.STACK = cpu.STACK - size // 8 base, _, _ = cpu.get_descriptor(cpu.read_register()) address = cpu.STACK + base cpu.write_int(address, value, size)
Writes a value in the stack. :param value: the value to put in the stack. :param size: the size of the value.
9,653
def savvyize(self, input_string, recursive=False, stemma=False): input_string = os.path.abspath(input_string) tasks = [] restricted = [ symbol for symbol in self.settings[] ] if self.settings[] else [] if os.path.isdir(input_string): if recursive: for root, dirs, files in os.walk(input_string): to_filter = [] for dir in dirs: dir = u(dir) for rs in restricted: if dir.startswith(rs) or dir.endswith(rs): to_filter.append(dir) break dirs[:] = [x for x in dirs if x not in to_filter] for filename in files: filename = u(filename) if restricted: for rs in restricted: if filename.startswith(rs) or filename.endswith(rs): break else: tasks.append(root + os.sep + filename) else: tasks.append(root + os.sep + filename) else: for filename in os.listdir(input_string): filename = u(filename) if os.path.isfile(input_string + os.sep + filename): if restricted: for rs in restricted: if filename.startswith(rs) or filename.endswith(rs): break else: tasks.append(input_string + os.sep + filename) else: tasks.append(input_string + os.sep + filename) elif os.path.isfile(input_string): tasks.append(input_string) else: if stemma: parent = os.path.dirname(input_string) for filename in os.listdir(parent): filename = u(filename) if input_string in parent + os.sep + filename and not os.path.isdir(parent + os.sep + filename): if restricted: for rs in restricted: if filename.startswith(rs) or filename.endswith(rs): break else: tasks.append(parent + os.sep + filename) else: tasks.append(parent + os.sep + filename) return tasks
Determines which files should be processed. NB: this is the PUBLIC method. @returns filenames_list
9,654
def _get_connection(self): try: if self._ncc_connection and self._ncc_connection.connected: return self._ncc_connection else: self._ncc_connection = manager.connect( host=self._host_ip, port=self._host_ssh_port, username=self._username, password=self._password, device_params={: "csr"}, timeout=self._timeout) if not self._itfcs_enabled: self._itfcs_enabled = self._enable_itfcs( self._ncc_connection) return self._ncc_connection except Exception as e: conn_params = {: self._host_ip, : self._host_ssh_port, : self._username, : self._timeout, : e.message} raise cfg_exc.ConnectionException(**conn_params)
Make SSH connection to the IOS XE device. The external ncclient library is used for creating this connection. This method keeps state of any existing connections and reuses them if already connected. Also interfaces (except management) are typically disabled by default when it is booted. So if connecting for the first time, driver will enable all other interfaces and keep that status in the `_itfcs_enabled` flag.
9,655
def _wrap(value): if is_primitive(value): return value if isinstance(value, (dict, set)) or is_namedtuple(value): return value elif isinstance(value, collections.Iterable): try: if type(value).__name__ == : import pandas if isinstance(value, pandas.DataFrame): return Sequence(value.values) except ImportError: pass return Sequence(value) else: return value
Wraps the passed value in a Sequence if it is not a primitive. If it is a string argument it is expanded to a list of characters. >>> _wrap(1) 1 >>> _wrap("abc") ['a', 'b', 'c'] >>> type(_wrap([1, 2])) functional.pipeline.Sequence :param value: value to wrap :return: wrapped or not wrapped value
9,656
def requeue(self, message_id, timeout=0, backoff=True):
    self.send(nsq.requeue(message_id, timeout))
    self.finish_inflight()
    self.on_requeue.send(
        self, message_id=message_id, timeout=timeout, backoff=backoff
    )
Re-queue a message (indicate failure to process).
9,657
def start_greedy_ensemble_search(automated_run, session, path): module = functions.import_string_code_as_module(automated_run.source) assert module.metric_to_optimize in automated_run.base_learner_origin.metric_generators best_ensemble = [] secondary_learner = automated_run.base_learner_origin.return_estimator() secondary_learner.set_params(**module.secondary_learner_hyperparameters) for i in range(module.max_num_base_learners): best_score = -float() current_ensemble = best_ensemble[:] for base_learner in session.query(models.BaseLearner).filter_by(job_status=).all(): if base_learner in current_ensemble: ) session.add(stacked_ensemble) session.commit() eval_stacked_ensemble(stacked_ensemble, session, path) score = stacked_ensemble.individual_score[module.metric_to_optimize] score = -score if module.invert_metric else score if best_score < score: best_score = score best_ensemble = current_ensemble[:] current_ensemble.pop()
Starts an automated ensemble search using greedy forward model selection. The steps for this search are adapted from "Ensemble Selection from Libraries of Models" by Caruana. 1. Start with the empty ensemble 2. Add to the ensemble the model in the library that maximizes the ensemble's performance on the error metric. 3. Repeat step 2 for a fixed number of iterations or until all models have been used. Args: automated_run (xcessiv.models.AutomatedRun): Automated run object session: Valid SQLAlchemy session path (str, unicode): Path to project folder
9,658
def stopMessage(self, apiMsgId): content = self.parseRest(self.request( + apiMsgId, {}, {}, )) return { : content[].encode(), : content[].encode(), : self.getStatus(content[]) }
See parent method for documentation
9,659
def _delete_resource(name, name_param, desc, res_type, wait=0, status_param=None, status_gone=, region=None, key=None, keyid=None, profile=None, **args): try: wait = int(wait) except Exception: raise SaltInvocationError("Bad value () passed for param - must be an " "int or boolean.".format(wait)) conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if name_param in args: log.info( " param being overridden by explicitly provided ", name, name_param, args[name_param] ) name = args[name_param] else: args[name_param] = name args = dict([(k, v) for k, v in args.items() if not k.startswith()]) try: func = +res_type f = getattr(conn, func) if wait: func = +res_type+ s = globals()[func] except (AttributeError, KeyError) as e: raise SaltInvocationError("No function found: {1}".format(func, e.message)) try: f(**args) if not wait: log.info(, desc.title(), name) return True log.info(, wait, desc, name) orig_wait = wait while wait > 0: r = s(name=name, conn=conn) if not r or r[0].get(status_param) == status_gone: log.info(, desc.title(), name) return True sleep = wait if wait % 60 == wait else 60 log.info(, sleep, desc, name) time.sleep(sleep) wait -= sleep log.error(, desc.title(), name, orig_wait) return False except botocore.exceptions.ClientError as e: log.error(, desc, name, e) return False
Delete a generic Elasticache resource.
9,660
def asset_create_task(self, *args, **kwargs):
    if not self.cur_asset:
        return
    task = self.create_task(element=self.cur_asset)
    if task:
        taskdata = djitemdata.TaskItemData(task)
        treemodel.TreeItem(taskdata, self.asset_task_model.root)
Create a new task :returns: None :rtype: None :raises: None
9,661
def _refresh_outlineexplorer(self, index=None, update=True, clear=False): oe = self.outlineexplorer if oe is None: return if index is None: index = self.get_stack_index() if self.data: finfo = self.data[index] oe.setEnabled(True) if finfo.editor.oe_proxy is None: finfo.editor.oe_proxy = OutlineExplorerProxyEditor( finfo.editor, finfo.filename) oe.set_current_editor(finfo.editor.oe_proxy, update=update, clear=clear) if index != self.get_stack_index(): self._refresh_outlineexplorer(update=False) return self._sync_outlineexplorer_file_order()
Refresh outline explorer panel
9,662
def map_port(protocol, public_port, private_port, lifetime=3600, gateway_ip=None,
             retry=9, use_exception=True):
    if protocol not in [NATPMP_PROTOCOL_UDP, NATPMP_PROTOCOL_TCP]:
        raise ValueError("Must be either NATPMP_PROTOCOL_UDP or "
                         "NATPMP_PROTOCOL_TCP")
    if gateway_ip is None:
        gateway_ip = get_gateway_addr()
    response = None
    port_mapping_request = PortMapRequest(protocol, private_port, public_port,
                                          lifetime)
    port_mapping_response = \
        send_request_with_retry(gateway_ip, port_mapping_request,
                                response_data_class=PortMapResponse,
                                retry=retry)
    if port_mapping_response.result != 0 and use_exception:
        raise NATPMPResultError(port_mapping_response.result,
                                error_str(port_mapping_response.result),
                                port_mapping_response)
    return port_mapping_response
A function to map public_port to private_port of protocol. Returns the complete response on success. protocol - NATPMP_PROTOCOL_UDP or NATPMP_PROTOCOL_TCP public_port - the public port of the mapping requested private_port - the private port of the mapping requested lifetime - the duration of the mapping in seconds. Defaults to 3600, per specification. gateway_ip - the IP to the NAT-PMP compatible gateway. Defaults to using auto-detection function get_gateway_addr() retry - the number of times to retry the request if unsuccessful. Defaults to 9 as per specification. use_exception - throw an exception if an error result is received from the gateway. Defaults to True.
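A hypothetical usage sketch of the function above (port numbers are illustrative, not from the source): request a TCP mapping of public port 60010 to local port 8080 on the auto-detected gateway for one hour.

resp = map_port(NATPMP_PROTOCOL_TCP, 60010, 8080, lifetime=3600)
# resp.result == 0 on success; with use_exception=True a non-zero result raises NATPMPResultError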
9,663
def customize_compiler_for_nvcc(compiler, nvcc_settings): compiler.src_extensions.append() default_compiler_so = compiler.compiler_so default_compile = compiler._compile default_compile(obj, src, ext, cc_args, extra_postargs, pp_opts) compiler.compiler_so = default_compiler_so compiler._compile = _compile
inject deep into distutils to customize gcc/nvcc dispatch
9,664
def jinja_loader(self):
    loaders = self._jinja_loaders
    del self._jinja_loaders
    loaders.append(Flask.jinja_loader.func(self))
    loaders.reverse()
    return jinja2.ChoiceLoader(loaders)
Search templates in custom app templates dir (default Flask behaviour), fallback on abilian templates.
9,665
def get_ppis(self, ppi_df): logger.info() for ix, row in ppi_df.iterrows(): agA = self._make_agent(row[]) agB = self._make_agent(row[]) stmt = Complex([agA, agB], evidence=ev_list) self.statements.append(stmt)
Generate Complex Statements from the HPRD PPI data. Parameters ---------- ppi_df : pandas.DataFrame DataFrame loaded from the BINARY_PROTEIN_PROTEIN_INTERACTIONS.txt file.
9,666
def document_frequencies(self, hashes): result = {} for (k, v) in self.client.get(HASH_FREQUENCY_TABLE, *[(h,) for h in hashes]): if v is None: v = 0 result[k[0]] = v return result
Get document frequencies for a list of hashes. This will return all zeros unless the index was written with `hash_frequencies` set. If :data:`DOCUMENT_HASH_KEY` is included in `hashes`, that value will be returned with the total number of documents indexed. If you are looking for documents with that hash, pass :data:`DOCUMENT_HASH_KEY_REPLACEMENT` instead. :param hashes: hashes to query :paramtype hashes: list of :class:`int` :return: map from hash to document frequency
9,667
def create_account(self, body, **kwargs): kwargs[] = True if kwargs.get(): return self.create_account_with_http_info(body, **kwargs) else: (data) = self.create_account_with_http_info(body, **kwargs) return data
Create a new account. # noqa: E501 An endpoint for creating a new account. **Example usage:** `curl -X POST https://api.us-east-1.mbedcloud.com/v3/accounts -d '{\"display_name\": \"MyAccount1\", \"admin_name\": \"accountAdmin1\", \"email\": \"[email protected]\"}' -H 'content-type: application/json' -H 'Authorization: Bearer API_KEY'` # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.create_account(body, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param AccountCreationReq body: Details of the account to be created. (required) :param str action: Action, either 'create' or 'enroll'. <ul><li>'create' creates the account where its admin user has ACTIVE status if admin_password was defined in the request, or RESET status if no admin_password was defined. If the user already exists, its status is not modified. </li><li>'enroll' creates the account where its admin user has ENROLLING status. If the user already exists, its status is not modified. Email to finish the enrollment or to notify the existing user about the new account is sent to the admin_email defined in the request. </li></ul> :return: AccountCreationResp If the method is called asynchronously, returns the request thread.
9,668
def TargetDirectory(ID, season, relative=False, **kwargs): if season is None: return None if relative: path = else: path = EVEREST_DAT return os.path.join(path, , % season, ( % ID)[:4] + , ( % ID)[4:])
Returns the location of the :py:mod:`everest` data on disk for a given target. :param ID: The target ID :param int season: The target season number :param bool relative: Relative path? Default :py:obj:`False`
9,669
def section_tortuosity(section):
    pts = section.points
    return 1 if len(pts) < 2 else mm.section_length(pts) / mm.point_dist(pts[-1], pts[0])
Tortuosity of a section. The tortuosity is defined as the ratio of the path length of a section and the euclidean distance between its end points. The path length is the sum of distances between consecutive points. If the section contains fewer than 2 points, the value 1 is returned.
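A small worked check of the definition (illustrative numbers only, not the library's own API): a section through the points (0, 0), (1, 0) and (1, 1) has path length 2 and end-to-end distance sqrt(2), so its tortuosity is about 1.414.

>>> import math
>>> path_length = 1 + 1                 # (0,0) -> (1,0) -> (1,1)
>>> end_to_end = math.hypot(1, 1)       # straight-line distance between end points
>>> round(path_length / end_to_end, 3)
1.414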
9,670
def prepare_url(hostname, path, params=None): url = hostname + path if params: url = url + + urlencode(params) if not url.startswith((, )): url = "http://" + url return url.encode()
Prepare Elasticsearch request url. :param hostname: host name :param path: request path :param params: optional url params :return:
9,671
def _parse_date_rfc822(dateString): data = dateString.split() if data[0][-1] in (, ) or data[0].lower() in _daynames: del data[0] if len(data) == 4: s = data[3] s = s.split(, 1) if len(s) == 2: data[3:] = s else: data.append() dateString = " ".join(data) if len(data) < 5: dateString += return email.utils.parsedate_tz(dateString)
Parse an RFC822, RFC1123, RFC2822, or asctime-style date
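For reference, the standard-library call the parser above ultimately relies on behaves as follows (an illustrative doctest, not part of the source):

>>> import email.utils
>>> email.utils.parsedate_tz("Mon, 20 Nov 1995 19:12:08 -0500")
(1995, 11, 20, 19, 12, 8, 0, 1, -1, -18000)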
9,672
def ack(self, device_uuid, ack_keys): attributes = {: ack_keys} status_code, _, _ = self.http_client.post(, body=attributes, headers=self.build_headers(device_uuid)) return status_code == 202
Acknowledge received data. Send acknowledgement keys to let the Sync service know which data you have. As you fetch new data, you need to send acknowledgement keys. :calls: ``post /sync/ack`` :param string device_uuid: Device's UUID for which to perform synchronization. :param list ack_keys: List of acknowledgement keys. :return: True if the operation succeeded. :rtype: bool
9,673
def do_symbols_matching(self): self._clear_decorations() current_block = self.editor.textCursor().block() data = get_block_symbol_data(self.editor, current_block) pos = self.editor.textCursor().block().position() for symbol in [PAREN, SQUARE, BRACE]: self._match(symbol, data, pos)
Performs symbols matching.
9,674
def add_contacts( self, contacts: List["pyrogram.InputPhoneContact"] ): imported_contacts = self.send( functions.contacts.ImportContacts( contacts=contacts ) ) return imported_contacts
Use this method to add contacts to your Telegram address book. Args: contacts (List of :obj:`InputPhoneContact <pyrogram.InputPhoneContact>`): The contact list to be added Returns: On success, the added contacts are returned. Raises: :class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
9,675
def make_signature(name, params, common_params, common_param_values): tokens = [] seen = set() def tokens_append(key, value): if isinstance(value, str): value = "".format(value) tokens.append(.format(key, value)) for key in common_params: seen.add(key) try: value = params[key] except KeyError: value = common_param_values[key] tokens_append(key, value) for key in (set(params) - seen): tokens_append(key, params[key]) s1 = name + s2 = .join(tokens) + line_width = 78 - len(s1) indent_spaces = * (len(s1) + 4) newline_and_space = + indent_spaces s2_lines = wrap(s2, width=line_width) return s1 + newline_and_space.join(s2_lines)
Create a signature for a geom or stat. Gets the DEFAULT_PARAMS (params) and creates a comma-separated list of the `name=value` pairs. The common_params come first in the list, and they take their values from either the params-dict or the common_param_values-dict.
9,676
def copy_files(filelist, destdir):
    for filename in filelist:
        destfile = os.path.join(destdir, filename)
        assert destfile.startswith(destdir + os.path.sep)
        destfiledir = os.path.dirname(destfile)
        if not os.path.isdir(destfiledir):
            os.makedirs(destfiledir)
        if os.path.isdir(filename):
            os.mkdir(destfile)
        else:
            shutil.copy2(filename, destfile)
Copy a list of files to destdir, preserving directory structure. File names should be relative to the current working directory.
9,677
def ts_func(f):
    def wrap_func(df, *args):
        return Chromatogram(f(df.values, *args), df.index, df.columns)
    return wrap_func
This wraps a function that would normally only accept an array and allows it to operate on a DataFrame. Useful for applying numpy functions to DataFrames.
9,678
def __voronoi_finite_polygons_2d(vor, radius=None): if vor.points.shape[1] != 2: raise ValueError("Requires 2D input") new_regions = [] new_vertices = vor.vertices.tolist() center = vor.points.mean(axis=0) if radius is None: radius = vor.points.ptp().max() all_ridges = {} for (p1, p2), (v1, v2) in zip(vor.ridge_points, vor.ridge_vertices): all_ridges.setdefault(p1, []).append((p2, v1, v2)) all_ridges.setdefault(p2, []).append((p1, v1, v2)) for p1, region in enumerate(vor.point_region): vertices = vor.regions[region] if all(v >= 0 for v in vertices): new_regions.append(vertices) continue if p1 not in all_ridges: continue ridges = all_ridges[p1] new_region = [v for v in vertices if v >= 0] for p2, v1, v2 in ridges: if v2 < 0: v1, v2 = v2, v1 if v1 >= 0: continue t = vor.points[p2] - vor.points[p1] t /= np.linalg.norm(t) n = np.array([-t[1], t[0]]) midpoint = vor.points[[p1, p2]].mean(axis=0) direction = np.sign(np.dot(midpoint - center, n)) * n far_point = vor.vertices[v2] + direction * radius new_region.append(len(new_vertices)) new_vertices.append(far_point.tolist()) vs = np.asarray([new_vertices[v] for v in new_region]) c = vs.mean(axis=0) angles = np.arctan2(vs[:,1] - c[1], vs[:,0] - c[0]) new_region = np.array(new_region)[np.argsort(angles)] new_regions.append(new_region.tolist()) return new_regions, np.asarray(new_vertices)
Reconstruct infinite voronoi regions in a 2D diagram to finite regions. Parameters ---------- vor : Voronoi Input diagram radius : float, optional Distance to 'points at infinity'. Returns ------- regions : list of tuples Indices of vertices in each revised Voronoi region. vertices : list of tuples Coordinates for revised Voronoi vertices. Same as coordinates of input vertices, with 'points at infinity' appended to the end.
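A minimal usage sketch for the helper above, assuming scipy is available and the helper is defined at module level as the def suggests (the point data and variable names are illustrative, not from the source):

import numpy as np
from scipy.spatial import Voronoi

points = np.random.rand(20, 2)
vor = Voronoi(points)
regions, vertices = __voronoi_finite_polygons_2d(vor)
# each entry of regions indexes into vertices and describes one closed, finite cell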
9,679
def create_shield_layer(shield, hashcode): return pgnreader.parse_pagan_file(( % (PACKAGE_DIR, os.sep, os.sep)) + shield + , hashcode, sym=False, invert=False)
Creates the layer for shields.
9,680
def push_dcp(event, callback, position=): ctx = stack.top ctx.app.extensions.get().push_dcp(event, callback, position)
Push a callable for :class:`~flask_pluginkit.PluginManager`, :func:`push_dcp`. Example usage:: push_dcp('demo', lambda:'Hello dcp') .. versionadded:: 2.1.0
9,681
def make_index(gff_file):
    import gffutils
    db_file = gff_file + ".db"
    if need_update(gff_file, db_file):
        if op.exists(db_file):
            os.remove(db_file)
        logging.debug("Indexing `{0}`".format(gff_file))
        gffutils.create_db(gff_file, db_file, merge_strategy="create_unique")
    else:
        logging.debug("Load index `{0}`".format(gff_file))
    return gffutils.FeatureDB(db_file)
Make a sqlite database for fast retrieval of features.
9,682
def add_section(self, name=None, anchor=None, description=, comment=, helptext=, plot=, content=, autoformat=True, autoformat_type=): if anchor is None: if name is not None: nid = name.lower().strip().replace(,) anchor = .format(self.anchor, nid) else: sl = len(self.sections) + 1 anchor = .format(self.anchor, sl) if anchor in config.remove_sections: logger.debug("Skipping section because specified in user config".format(anchor)) return anchor = report.save_htmlid(anchor) if anchor in config.section_comments: comment = config.section_comments[anchor] if autoformat: if len(description) > 0: description = textwrap.dedent(description) if autoformat_type == : description = markdown.markdown(description) if len(comment) > 0: comment = textwrap.dedent(comment) if autoformat_type == : comment = markdown.markdown(comment) if len(helptext) > 0: helptext = textwrap.dedent(helptext) if autoformat_type == : helptext = markdown.markdown(helptext) description = description.strip() comment = comment.strip() helptext = helptext.strip() self.sections.append({ : name, : anchor, : description, : comment, : helptext, : plot, : content, : any([ n is not None and len(n) > 0 for n in [description, comment, helptext, plot, content] ]) })
Add a section to the module report output
9,683
def monthly(usaf, year, field=): m = [] lastm = 1 usafdata = Data(usaf, year) t = 0 for r in usafdata: r[] = r[] r[] = r[] r[] = r[] if r[].month != lastm: m.append(t/1000.) t = 0 lastm = r[].month t += float(r[field]) return m
monthly insolation
9,684
def parse_napp(napp_id): regex = r compiled_regex = re.compile(regex) matched = compiled_regex.fullmatch(napp_id) if not matched: msg = raise KytosException(msg.format(napp_id)) return matched.groups()
Convert a napp_id in tuple with username, napp name and version. Args: napp_id: String with the form 'username/napp[:version]' (version is optional). If no version is found, it will be None. Returns: tuple: A tuple with (username, napp, version) Raises: KytosException: If a NApp has not the form _username/name_.
9,685
def constraints(self):
    if self._parent is not None:
        return tuple(self._constraints) + self._parent.constraints
    return tuple(self._constraints)
:rtype tuple :return: All constraints represented by this and parent sets.
9,686
def intersect_arc(self, arc):
    intersections = self.intersect_circle(arc.center, arc.radius)
    isections = [pt for pt in intersections
                 if arc.contains_angle_degrees(arc.point_as_angle(pt))]
    return isections
Given an arc, finds the intersection point(s) of this arc with that arc. Returns a list of 2x1 numpy arrays. The list has length 0, 1 or 2, depending on how many intersection points there are. Points are ordered along the arc. Intersection with the arc along the same circle (which means infinitely many points usually) is reported as no intersection at all. >>> a = Arc((0, 0), 1, -90, 90, True) >>> a.intersect_arc(Arc((1, 0), 1, 90, 270, True)) [array([ 0.5 , -0.866...]), array([ 0.5 , 0.866...])] >>> a.intersect_arc(Arc((1, 0), 1, 90, 180, True)) [array([ 0.5 , 0.866...])] >>> a.intersect_arc(Arc((1, 0), 1, 121, 239, True)) [] >>> a.intersect_arc(Arc((1, 0), 1, 120-tol, 240+tol, True)) # Without -tol and +tol the results differ on different architectures due to rounding (see Debian #813782). [array([ 0.5 , -0.866...]), array([ 0.5 , 0.866...])]
9,687
def convert_multiPointSource(self, node): geom = node.multiPointGeometry lons, lats = zip(*split_coords_2d(~geom.posList)) msr = valid.SCALEREL[~node.magScaleRel]() return source.MultiPointSource( source_id=node[], name=node[], tectonic_region_type=node.attrib.get(), mfd=self.convert_mfdist(node), magnitude_scaling_relationship=msr, rupture_aspect_ratio=~node.ruptAspectRatio, upper_seismogenic_depth=~geom.upperSeismoDepth, lower_seismogenic_depth=~geom.lowerSeismoDepth, nodal_plane_distribution=self.convert_npdist(node), hypocenter_distribution=self.convert_hpdist(node), mesh=geo.Mesh(F32(lons), F32(lats)), temporal_occurrence_model=self.get_tom(node))
Convert the given node into a MultiPointSource object. :param node: a node with tag multiPointGeometry :returns: a :class:`openquake.hazardlib.source.MultiPointSource`
9,688
def git_log_iterator(path): N = 10 count = 0 while True: lines = _run_git_command_lines([, , , str(N), , str(count), , ], cwd=path) for line in lines: sha = line.split(, 1)[0] count += 1 yield sha if len(lines) < N: break
yield commits using git log -- <dir>
9,689
def clear_lock(self, key):
    lock_path = self._get_lock_path(key)
    os.remove(lock_path)
Remove the lock file.
9,690
def is_all_field_none(self):
    if self._share_detail is not None:
        return False
    if self._start_date is not None:
        return False
    if self._end_date is not None:
        return False
    return True
:rtype: bool
9,691
def _temporary_keychain():
    # derive a one-time filename and password from random bytes
    random_bytes = os.urandom(40)
    filename = base64.b16encode(random_bytes[:8]).decode()
    password = base64.b16encode(random_bytes[8:])
    tempdirectory = tempfile.mkdtemp()
    keychain_path = os.path.join(tempdirectory, filename).encode()
    keychain = Security.SecKeychainRef()
    status = Security.SecKeychainCreate(
        keychain_path,
        len(password),
        password,
        False,
        None,
        ctypes.byref(keychain)
    )
    _assert_no_error(status)
    return keychain, tempdirectory
This function creates a temporary Mac keychain that we can use to work with credentials. This keychain uses a one-time password and a temporary file to store the data. We expect to have one keychain per socket. The returned SecKeychainRef must be freed by the caller, including calling SecKeychainDelete. Returns a tuple of the SecKeychainRef and the path to the temporary directory that contains it.
9,692
def setdefault(self, key, *args):
    assert isinstance(key, basestring)
    return dict.setdefault(self, key.lower(), *args)
Set lowercase key value and return.
9,693
def get_step_f(step_f, lR2, lS2):
    mu, tau = 10, 2
    if lR2 > mu*lS2:
        return step_f * tau
    elif lS2 > mu*lR2:
        return step_f / tau
    return step_f
Update the stepsize given the primal and dual errors. See Boyd (2011), section 3.4.1
9,694
def wgan(cls, data:DataBunch, generator:nn.Module, critic:nn.Module, switcher:Callback=None,
         clip:float=0.01, **learn_kwargs):
    "Create a WGAN from `data`, `generator` and `critic`."
    return cls(data, generator, critic, NoopLoss(), WassersteinLoss(), switcher=switcher,
               clip=clip, **learn_kwargs)
Create a WGAN from `data`, `generator` and `critic`.
9,695
def from_coords(cls, coords, sort=True):
    coords = list(coords)
    if sort:
        coords.sort()
    if len(coords[0]) == 2:  # 2D coordinates
        lons, lats = zip(*coords)
        depths = None
    else:  # 3D coordinates
        lons, lats, depths = zip(*coords)
        depths = numpy.array(depths)
    return cls(numpy.array(lons), numpy.array(lats), depths)
Create a mesh object from a list of 3D coordinates (by sorting them) :param coords: list of coordinates :param sort: flag (default True) :returns: a :class:`Mesh` instance
9,696
def change(self, inpt, hashfun=DEFAULT_HASHFUN): self.img = self.__create_image(inpt, hashfun)
Change the avatar by providing a new input. Uses the standard hash function if no one is given.
9,697
def join_path(a, *p): path = a for b in p: if len(b) == 0: continue if b.startswith(): path += b[1:] elif path == or path.endswith(): path += b else: path += + b return path
Join path tokens together similar to osp.join, but always use '/' instead of possibly '\' on windows.
9,698
def sudo_yield_file_lines(file_path=): r sudo_cat_cmd = .format(file_path) process = subprocess.Popen(sudo_cat_cmd, stdout=subprocess.PIPE, shell=True) for line in iter(process.stdout.readline, ): yield line
Cat a file iterating/yielding one line at a time; the shell will execute `sudo cat $file_path`, so if your shell doesn't have sudo or cat, no joy. Input: file_path(str): glob stars are fine >>> for line in sudo_yield_file_lines('/etc/NetworkManager/system-connections/*')
9,699
def get_symbols_list(self): slist = [] rc, nSymb, nElem = gdxcc.gdxSystemInfo(self.gdx_handle) assert rc, % self.filename self.number_symbols = nSymb self.number_elements = nElem slist = [None]*(nSymb+1) for j in range(0,nSymb+1): sinfo = self.get_sid_info(j) if j==0: sinfo[] = slist[j] = GdxSymb(self,sinfo) return slist
Return a list of GdxSymb found in the GdxFile.