Dataset columns: "Unnamed: 0" (int64, 0–389k), "code" (string, lengths 26–79.6k), "docstring" (string, lengths 1–46.9k).
388,500
def adjust_prior(self, index, prior): self.latent_variables.adjust_prior(index=index, prior=prior)
Adjusts priors for the latent variables

Parameters
----------
index : int or list[int]
    Which latent variable index/indices to be altered
prior : Prior object
    Which prior distribution? E.g. Normal(0,1)

Returns
----------
None (changes priors in LatentVariables object)
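A short usage sketch for the row above; the model class and `Normal` prior are assumptions based on the pyflux-style API this snippet appears to come from:

import pyflux as pf  # assumed library; adjust_prior lives on its model classes

model = pf.ARIMA(data=series, ar=2, ma=2)    # hypothetical model and data
model.adjust_prior(0, pf.Normal(0, 1))       # Normal(0,1) prior on latent variable 0
model.adjust_prior([1, 2], pf.Normal(0, 3))  # a list alters several indices at once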
388,501
def resize_volume(self, volumeObj, sizeInGb, bsize=1000):
    current_vol = self.get_volume_by_id(volumeObj.id)
    if current_vol.size_kb > (sizeInGb * bsize * bsize):
        raise RuntimeError(
            "resize_volume() - New size needs to be bigger than: %d KBs" % current_vol.size_kb)
    # ScaleIO expects the new size (in GB) as a string payload
    resizeDict = {'sizeInGB': str(sizeInGb)}
    response = self.conn.connection._do_post("{}/{}{}/{}".format(
        self.conn.connection._api_url, "instances/Volume::", volumeObj.id,
        'action/setVolumeSize'), json=resizeDict)
    return response
Resize a volume to a new GB size; must be larger than the original.

:param volumeObj: ScaleIO Volume Object
:param sizeInGb: New size in GB (has to be larger than the original)
:param bsize: Block-size multiplier used to convert GB to KB (default: 1000)
:return: POST request response
:rtype: Requests POST response object
388,502
def run_on(*, event: str):
    def decorator(callback):
        @functools.wraps(callback)
        def decorator_wrapper():
            RTMClient.on(event=event, callback=callback)
        return decorator_wrapper()
    return decorator
A decorator to store and link a callback to an event.
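Usage sketch: because the inner wrapper is invoked at decoration time, decorating a function registers it with `RTMClient` immediately (the `'message'` event name and payload shape are illustrative, per the slackclient conventions this appears to follow):

@run_on(event='message')
def handle_message(**payload):
    # Invoked by RTMClient for every 'message' event.
    print(payload['data'].get('text', ''))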
388,503
def _get_parser(extra_args): parser = argparse.ArgumentParser( formatter_class=argparse.ArgumentDefaultsHelpFormatter, ) dirs = appdirs.AppDirs(, ) default_token_path = os.path.join(dirs.user_cache_dir, ) parser.add_argument( , default=default_token_path, help= ) parser.add_argument( , , action=, help= ) for extra_arg in extra_args: parser.add_argument(extra_arg, required=True) return parser
Return ArgumentParser with any extra arguments.
388,504
def get_codemirror_parameters(self, name):
    config = self.get_config(name)
    return {k: config[k] for k in config if k not in self._internal_only}
Return CodeMirror parameters for given configuration name. This is a reduced configuration from internal parameters. Arguments: name (string): Config name from available ones in ``settings.CODEMIRROR_SETTINGS``. Returns: dict: Parameters.
388,505
def show_driver(devname):
    try:
        module = ethtool.get_module(devname)
    except IOError:
        log.error('Driver information not implemented on %s', devname)
        return
    try:
        businfo = ethtool.get_businfo(devname)
    except IOError:
        log.error('Bus information not available on %s', devname)
        return
    ret = {
        'driver': module,
        'bus_info': businfo,
    }
    return ret
Queries the specified network device for associated driver information

CLI Example:

.. code-block:: bash

    salt '*' ethtool.show_driver <devname>
388,506
def unorm(data, ord=None, axis=None, keepdims=False):
    norm = np.linalg.norm(data, ord=ord, axis=axis, keepdims=keepdims)
    if norm.shape == ():
        return unyt_quantity(norm, data.units)
    return unyt_array(norm, data.units)
Matrix or vector norm that preserves units This is a wrapper around np.linalg.norm that preserves units. See the documentation for that function for descriptions of the keyword arguments. Examples -------- >>> from unyt import km >>> data = [1, 2, 3]*km >>> print(unorm(data)) 3.7416573867739413 km
388,507
def prepare_adiabatic_limit(slh, k=None):
    if k is None:
        k = symbols('k', positive=True)
    Ld = slh.L.dag()
    LdL = (Ld * slh.L)[0, 0]
    K = (-LdL / 2 + I * slh.H).expand().simplify_scalar()
    N = slh.S.dag()
    B, A, Y = K.series_expand(k, 0, 2)
    G, F = Ld.series_expand(k, 0, 1)
    return Y, A, B, F, G, N
Prepare the adiabatic elimination on an SLH object Args: slh: The SLH object to take the limit for k: The scaling parameter $k \rightarrow \infty$. The default is a positive symbol 'k' Returns: tuple: The objects ``Y, A, B, F, G, N`` necessary to compute the limiting system.
388,508
def request(self, url, method=u"get", data=None, headers=None, **kwargs):
    url, method, data, headers, kwargs = self._pre_request(url, method=method,
                                                           data=data, headers=headers, **kwargs)
    response = self._request(url, method=method, data=data, headers=headers, **kwargs)
    response = self._post_request(response)
    response = self._handle_response(response)
    return response
Public method for performing the live request.
388,509
def _parse_typed_parameter_typed_value(values):
    type_, value = _expand_one_key_dictionary(values)
    _current_parameter_value.type = type_
    if _is_simple_type(value):
        arg = Argument(value)
        _current_parameter_value.add_argument(arg)
    elif isinstance(value, list):
        for idx in value:
            arg = Argument(idx)
            _current_parameter_value.add_argument(arg)
Creates Arguments in a TypedParameterValue.
388,510
def msg(cls, error=None, debug=True, trace=True):
    if debug and error is not None:
        print(error)
    if trace:
        print(traceback.format_exc())
Prints the error message.

:param error: the error message
:param debug: only prints the message if debug is set to True
:param trace: if True, prints the traceback
:return: None
388,511
def src2ast(src: str) -> Expression:
    try:
        # mode='eval' makes ast.parse return an ast.Expression node
        return ast.parse(src, mode='eval')
    except SyntaxError:
        raise ValueError("Not a valid expression.") from None
Return ast.Expression created from source code given in `src`.
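Quick check of the behaviour: in 'eval' mode ast.parse yields an ast.Expression, which can be compiled and evaluated.

tree = src2ast("1 + 2 * 3")
print(type(tree).__name__)                   # Expression
print(eval(compile(tree, "<src>", "eval")))  # 7
src2ast("x = 1")                             # raises ValueError: not an expression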
388,512
def take_snapshot(obj, store=True, **kw):
    logger.debug("📷 Take new snapshot for {}".format(repr(obj)))
    snapshot = get_object_data(obj)
    metadata = get_object_metadata(obj, **kw)
    snapshot["__metadata__"] = metadata
    data = json.dumps(snapshot)
    if not store:
        return snapshot
    storage = get_storage(obj)
    storage.append(data)
    alsoProvides(obj, IAuditable)
    return snapshot
Takes a snapshot of the passed-in object

:param obj: Content object
:param store: Whether to persist the snapshot to storage
:returns: New snapshot
388,513
async def _set_rev_reg(self, rr_id: str, rr_size: int) -> None:
    LOGGER.debug('_set_rev_reg >>> rr_id: %s, rr_size: %s', rr_id, rr_size)
    assert self.rrbx
    dir_hopper_rr_id = join(self.rrb.dir_tails_hopper, rr_id)
    while Tails.linked(dir_hopper_rr_id, rr_id) is None:
        await asyncio.sleep(1)
    await self._send_rev_reg_def(rr_id)
    cd_id = rev_reg_id2cred_def_id(rr_id)
    (next_tag, rr_size_suggested) = Tails.next_tag(self.dir_tails, cd_id)
    rr_id = rev_reg_id(cd_id, next_tag)
    self.rrb.mark_in_progress(rr_id, rr_size or rr_size_suggested)
    LOGGER.debug('_set_rev_reg <<<')
Move precomputed revocation registry data from hopper into place within tails directory. :param rr_id: revocation registry identifier :param rr_size: revocation registry size, in case creation required
388,514
def do_state_tomography(preparation_program, nsamples, cxn, qubits=None, use_run=False):
    return tomography._do_tomography(preparation_program, nsamples, cxn, qubits,
                                     tomography.MAX_QUBITS_STATE_TOMO,
                                     StateTomography, state_tomography_programs,
                                     DEFAULT_STATE_TOMO_SETTINGS, use_run=use_run)
Method to perform both a QPU and QVM state tomography, and use the latter as a reference to calculate the fidelity of the former.

:param Program preparation_program: Program to execute.
:param int nsamples: Number of samples to take for the program.
:param QVMConnection|QPUConnection cxn: Connection on which to run the program.
:param list qubits: List of qubits for the program to use in the tomography analysis.
:param bool use_run: If ``True``, append measurements on all qubits and use ``cxn.run`` instead of ``cxn.run_and_measure``.
:return: The state tomogram.
:rtype: StateTomography
388,515
def fix_chrome_mac_platform(platform):
    ver = platform.split('OS X ')[1]
    build_range = range(*MACOSX_CHROME_BUILD_RANGE[ver])
    build = choice(build_range)
    mac_ver = ver.replace('.', '_') + '_' + str(build)
    return 'Macintosh; Intel Mac OS X %s' % mac_ver
Chrome on Mac OS adds minor version number and uses underscores instead of dots. E.g. platform for Firefox will be: 'Intel Mac OS X 10.11' but for Chrome it will be 'Intel Mac OS X 10_11_6'. :param platform: - string like "Macintosh; Intel Mac OS X 10.8" :return: platform with version number including minor number and formatted with underscores, e.g. "Macintosh; Intel Mac OS X 10_8_2"
388,516
def sub_template(template, template_tag, substitution):
    template = template.replace(template_tag, substitution)
    return template
make a substitution for a template_tag in a template
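Minimal example; the {{TITLE}} placeholder convention is illustrative, any literal substring works as the tag:

html = sub_template("<h1>{{TITLE}}</h1>", "{{TITLE}}", "Hello")
print(html)  # <h1>Hello</h1>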
388,517
def BuildCampaignOperations(batch_job_helper, budget_operations, number_of_campaigns=1):
    budget_id = budget_operations[0]['operand']['budgetId']
    campaign_operations = [
        {
            'xsi_type': 'CampaignOperation',
            'operand': {
                'name': 'Batch Campaign #%s' % uuid.uuid4(),
                'budget': {'budgetId': budget_id},
            },
            'operator': 'ADD',
        }
        for _ in range(number_of_campaigns)]
    return campaign_operations
Builds the operations needed to create a new Campaign. Note: When the Campaigns are created, they will have a different Id than those generated here as a temporary Id. This is just used to identify them in the BatchJobService. Args: batch_job_helper: a BatchJobHelper instance. budget_operations: a list containing the operation that will add the budget used by these Campaigns. number_of_campaigns: an int number defining the number of campaigns to be created. Returns: a list containing the operations to create the desired number of Campaigns.
388,518
def get_interface_detail_output_interface_if_state(self, **kwargs):
    config = ET.Element("config")
    get_interface_detail = ET.Element("get_interface_detail")
    config = get_interface_detail
    output = ET.SubElement(get_interface_detail, "output")
    interface = ET.SubElement(output, "interface")
    interface_type_key = ET.SubElement(interface, "interface-type")
    interface_type_key.text = kwargs.pop('interface_type')
    interface_name_key = ET.SubElement(interface, "interface-name")
    interface_name_key.text = kwargs.pop('interface_name')
    if_state = ET.SubElement(interface, "if-state")
    if_state.text = kwargs.pop('if_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
388,519
def expand_path(path):
    paths = []
    path = os.path.expanduser(path)
    path = os.path.expandvars(path)
    if os.path.isdir(path):
        for (dir, dirs, files) in os.walk(path):
            for file in files:
                paths.append(os.path.join(dir, file))
    else:
        paths.extend(glob(path))
    return paths
Expands directories and globs in given path.
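Usage sketch: a directory argument expands to every file beneath it; anything else is treated as a glob pattern after `~` and environment variables are expanded.

print(expand_path("~/projects"))   # all files under the directory, recursively
print(expand_path("$HOME/*.txt"))  # env var expanded, then globbed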
388,520
def render(self, *args, **kwargs):
    r = []
    if self.doctype:
        r.append(self.doctype)
        r.append('\n')
    r.append(super(document, self).render(*args, **kwargs))
    return u''.join(r)
Creates a <title> tag if not present and renders the DOCTYPE and tag tree.
388,521
def callback(self, provider):
    provider = self.get_provider(provider)
    try:
        return provider.authorized_handler(self.login)(provider=provider)
    except OAuthException as ex:
        logging.error("Data: %s", ex.data)
        raise
Handles a 3rd-party callback and processes its data
388,522
def get(self, sid):
    return SigningKeyContext(self._version, account_sid=self._solution['account_sid'], sid=sid)
Constructs a SigningKeyContext :param sid: The sid :returns: twilio.rest.api.v2010.account.signing_key.SigningKeyContext :rtype: twilio.rest.api.v2010.account.signing_key.SigningKeyContext
388,523
def extract_concepts(self, sentences=None, ids=None, composite_phrase=4, filename=None, file_format=, allow_acronym_variants=False, word_sense_disambiguation=False, allow_large_n=False, strict_model=False, relaxed_model=False, allow_overmatches=False, allow_concept_gaps=False, term_processing=False, no_derivational_variants=False, derivational_variants=False, ignore_word_order=False, unique_acronym_variants=False, prefer_multiple_concepts=False, ignore_stop_phrases=False, compute_all_mappings=False, mm_data_version=False, exclude_sources=[], restrict_to_sources=[], restrict_to_sts=[], exclude_sts=[]): if allow_acronym_variants and unique_acronym_variants: raise ValueError("You cansldisldiIDr{0!r}|{1!r}\nutf8{0!r}\nutf8-N-QBaseUSAbaseNLM-V-y-A-C-l-o-g-z-d-D-i-a-u-Y-K-b-e,-R,-J,-k,sldiID--sldiID--sldiERROR' in stdout: metamap_process.terminate() error = stdout.rstrip() output = str(output_file.read()) finally: if sentences is not None: os.remove(input_file.name) else: input_file.close() os.remove(output_file.name) concepts = Corpus.load(output.splitlines()) return (concepts, error)
extract_concepts takes a list of sentences and ids(optional) then returns a list of Concept objects extracted via MetaMap. Supported Options: Composite Phrase -Q Word Sense Disambiguation -y use strict model -A use relaxed model -C allow large N -l allow overmatches -o allow concept gaps -g term processing -z No Derivational Variants -d All Derivational Variants -D Ignore Word Order -i Allow Acronym Variants -a Unique Acronym Variants -u Prefer Multiple Concepts -Y Ignore Stop Phrases -K Compute All Mappings -b MM Data Version -V Exclude Sources -e Restrict to Sources -R Restrict to Semantic Types -J Exclude Semantic Types -k For information about the available options visit http://metamap.nlm.nih.gov/. Note: If an error is encountered the process will be closed and whatever was processed, if anything, will be returned along with the error found.
388,524
def update_data(self):
    time_elapsed = datetime.datetime.now() - self.start_time
    crntTemp = self.roaster.current_temp
    targetTemp = self.roaster.target_temp
    heaterLevel = self.roaster.heater_level
    self.file.write(
        "%4.6f,%d,%d,%d\n" % (time_elapsed.total_seconds(),
                              crntTemp, targetTemp, heaterLevel))
This is a method that will be called every time a packet is opened from the roaster.
388,525
def _as_published_topic(self): oop = self.get_operator_output_port() if not hasattr(oop, ): return export = oop.export if export[] != : return seen_export_type = False topic = None for p in export[]: if p[] != : continue if p[] == : if p[] == []: seen_export_type = True else: return if p[] == : topic = p[][0] if seen_export_type and topic is not None: schema = None if hasattr(oop, ): ta_url = oop.tupleAttributes ta_resp = self.rest_client.make_request(ta_url) schema = streamsx.topology.schema.StreamSchema(ta_resp[]) return PublishedTopic(topic[1:-1], schema) return
Return this stream as a PublishedTopic if it is published, otherwise None.
388,526
def _vmomentsurfaceIntegrand(vz, vR, vT, R, z, df, sigmaR1, gamma, sigmaz1, n, m, o):
    return vR**n * vT**m * vz**o * df(R, vR*sigmaR1, vT*sigmaR1*gamma, z, vz*sigmaz1,
                                      use_physical=False)
Internal function that is the integrand for the vmomentsurface mass integration
388,527
def main():
    r = Random(42)
    startSerializationTime = time.time()
    for i in xrange(_SERIALIZATION_LOOPS):
        builderProto = RandomProto.new_message()
        r.write(builderProto)
    elapsedSerializationTime = time.time() - startSerializationTime
    builderBytes = builderProto.to_bytes()
    startDeserializationTime = time.time()
    deserializationCount = 0
    while deserializationCount < _DESERIALIZATION_LOOPS:
        readerProto = RandomProto.from_bytes(
            builderBytes,
            traversal_limit_in_words=_TRAVERSAL_LIMIT_IN_WORDS,
            nesting_limit=_NESTING_LIMIT)
        numReads = min(_DESERIALIZATION_LOOPS - deserializationCount,
                       _MAX_DESERIALIZATION_LOOPS_PER_READER)
        for _ in xrange(numReads):
            r.read(readerProto)
        deserializationCount += numReads
    elapsedDeserializationTime = time.time() - startDeserializationTime
    print _SERIALIZATION_LOOPS, "Serialization loops in", \
        elapsedSerializationTime, "seconds."
    print "\t", elapsedSerializationTime / _SERIALIZATION_LOOPS, "seconds per loop."
    print deserializationCount, "Deserialization loops in", \
        elapsedDeserializationTime, "seconds."
    print "\t", elapsedDeserializationTime / deserializationCount, "seconds per loop."
Measure capnp serialization performance of Random
388,528
def delete(queue, items): with _conn(commit=True) as cur: if isinstance(items, dict): cmd = str().format( queue, salt.utils.json.dumps(items)) log.debug(, cmd) cur.execute(cmd) return True if isinstance(items, list): items = [(salt.utils.json.dumps(el),) for el in items] cmd = .format(queue) log.debug(, cmd) cur.executemany(cmd, items) return True
Delete an item or items from a queue
388,529
def load_config(): configfile = get_configfile() if not os.path.exists(configfile): data = { : } save_config(data) with open(configfile, ) as f: return json.load(f)
Load configuration file containing the API key and other settings.

:rtype: dict
388,530
def insertFromMimeData(self, source): lines = projex.text.nativestring(source.text()).splitlines() for i in range(1, len(lines)): if not lines[i].startswith(): lines[i] = + lines[i] if len(lines) > 1: lines.append() self.insertPlainText(.join(lines))
Inserts the information from the input source.

:param source | <QMimeData>
388,531
def _get_mapping_for_table(self, table):
    for mapping in self.mappings.values():
        if mapping["table"] == table:
            return mapping
Returns the first mapping for a table name
388,532
def __focus(self, item): cols = self.__get_display_columns() for col in cols: self.__event_info =(col,item) self.event_generate() if col in self._inplace_widgets: w = self._inplace_widgets[col] w.bind(, lambda e: w.tk_focusNext().focus_set()) w.bind(, lambda e: w.tk_focusPrev().focus_set())
Called when focus item has changed
388,533
def print_(*objects, **kwargs):
    sep = kwargs.get("sep")
    sep = sep if sep is not None else " "
    end = kwargs.get("end")
    end = end if end is not None else "\n"
    file = kwargs.get("file")
    file = file if file is not None else sys.stdout
    flush = bool(kwargs.get("flush", False))
    if is_win:
        _print_windows(objects, sep, end, file, flush)
    else:
        _print_unix(objects, sep, end, file, flush)
print_(*objects, sep=None, end=None, file=None, flush=False) Args: objects (object): zero or more objects to print sep (str): Object separator to use, defaults to ``" "`` end (str): Trailing string to use, defaults to ``"\\n"``. If end is ``"\\n"`` then `os.linesep` is used. file (object): A file-like object, defaults to `sys.stdout` flush (bool): If the file stream should be flushed Raises: EnvironmentError Like print(), but: * Supports printing filenames under Unix + Python 3 and Windows + Python 2 * Emulates ANSI escape sequence support under Windows * Never fails due to encoding/decoding errors. Tries hard to get everything on screen as is, but will fall back to "?" if all fails. This does not conflict with ``colorama``, but will not use it on Windows.
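Usage mirrors the built-in print:

print_("status:", "ok", sep=" ", end="\n", flush=True)  # -> status: ok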
388,534
def _merge_outfile_fname(out_file, bam_files, work_dir, batch):
    if out_file is None:
        out_file = os.path.join(work_dir, os.path.basename(sorted(bam_files)[0]))
        if batch is not None:
            base, ext = os.path.splitext(out_file)
            out_file = "%s-b%s%s" % (base, batch, ext)
    return out_file
Derive correct name of BAM file based on batching.
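A worked example of the naming rule: the alphabetically first BAM basename is reused under work_dir, and a -b<batch> suffix is inserted before the extension.

out = _merge_outfile_fname(None, ["/data/s2.bam", "/data/s1.bam"], "/work", batch=3)
print(out)  # /work/s1-b3.bam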
388,535
def extractValue(self, model, item): certPath = super(CertificateColumn, self).extractValue(model, item) return certPath.path.decode(, )
Get the path referenced by this column's attribute. @param model: Either a TabularDataModel or a ScrollableView, depending on what this column is part of. @param item: A port item instance (as defined by L{xmantissa.port}). @rtype: C{unicode}
388,536
def Refresh(): try: GdbCache.DICT = gdb.lookup_type().pointer() GdbCache.TYPE = gdb.lookup_type().pointer() except gdb.error as err: pass interp_head_name = GdbCache.FuzzySymbolLookup() if interp_head_name: GdbCache.INTERP_HEAD = gdb.parse_and_eval(interp_head_name) else: GdbCache.INTERP_HEAD = gdb.parse_and_eval() GdbCache.PENDINGBUSY = GdbCache.FuzzySymbolLookup() GdbCache.PENDINGCALLS_TO_DO = GdbCache.FuzzySymbolLookup()
Looks up symbols within the inferior and caches their names/values. If debugging information is only partial, this method does its best to find as much information as it can; validation can be done using IsSymbolFileSane.
388,537
def __mark(self, element, mark_set):
    try:
        original = self.__elements.pop(element)
        mark_set.add(original)
    except KeyError:
        return False
    else:
        if not self.__elements:
            self.__call()
        return True
Marks an element :param element: The element to mark :param mark_set: The set corresponding to the mark :return: True if the element was known
388,538
def data_transforms_mnist(args, mnist_mean=None, mnist_std=None):
    if mnist_mean is None:
        mnist_mean = [0.5]
    if mnist_std is None:
        mnist_std = [0.5]
    train_transform = transforms.Compose([
        transforms.RandomCrop(28, padding=4),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        transforms.Normalize(mnist_mean, mnist_std),
    ])
    if args.cutout:
        train_transform.transforms.append(Cutout(args.cutout_length))
    valid_transform = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mnist_mean, mnist_std),
    ])
    return train_transform, valid_transform
data_transforms for mnist dataset
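Usage sketch with torchvision; args only needs cutout/cutout_length attributes, mocked here with a namespace:

from argparse import Namespace
from torchvision import datasets

args = Namespace(cutout=False, cutout_length=8)
train_tf, valid_tf = data_transforms_mnist(args)
train_set = datasets.MNIST("./data", train=True, download=True, transform=train_tf)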
388,539
def sign_statement(self, statement, node_name, key_file, node_id, id_attr):
    if isinstance(statement, SamlBase):
        statement = str(statement)
    _, fil = make_temp(
        statement, suffix='.xml', decode=False, delete=self._xmlsec_delete_tmpfiles,
    )
    com_list = [
        self.xmlsec,
        '--sign',
        '--privkey-pem', key_file,
        '--id-attr:{id_attr_name}'.format(id_attr_name=id_attr),
        node_name,
    ]
    if node_id:
        com_list.extend(['--node-id', node_id])
    try:
        (stdout, stderr, output) = self._run_xmlsec(com_list, [fil])
    except XmlsecError as e:
        raise SignatureError(com_list)
    if output:
        return output.decode("utf-8")
    if stdout:
        return stdout.decode("utf-8")
    raise SignatureError(stderr)
Sign an XML statement. :param statement: The statement to be signed :param node_name: string like 'urn:oasis:names:...:Assertion' :param key_file: The file where the key can be found :param node_id: :param id_attr: The attribute name for the identifier, normally one of 'id','Id' or 'ID' :return: The signed statement
388,540
def get_core_name_without_suffix(file_path):
    if '\\' in file_path:
        file_path = file_path.replace('\\', '/')
    file_name = os.path.basename(file_path)
    core_names = file_name.split('.')
    if len(core_names) > 1:
        core_names = core_names[:-1]
    if isinstance(core_names, list):
        return str('.'.join(core_names))
    else:
        return str(core_names)
Return core file name without suffix. Examples: >>> FileClass.get_core_name_without_suffix(r'/home/zhulj/1990.01.30/test.01.tif') 'test.01' >>> FileClass.get_core_name_without_suffix(r'C:\zhulj\igsnrr\lreis.txt') 'lreis' >>> FileClass.get_core_name_without_suffix(r'C:\\zhulj\\igsnrr\\lreis.txt') 'lreis' >>> FileClass.get_core_name_without_suffix(r'C:/zhulj/igsnrr/lreis.txt') 'lreis' >>> FileClass.get_core_name_without_suffix(r'/home/zhulj/dta/taudem/aread8') 'aread8' >>> FileClass.get_core_name_without_suffix('singlename') 'singlename' >>> FileClass.get_core_name_without_suffix('singlename.txt') 'singlename'
388,541
def concatenate(ctx, *text):
    result = ''
    for arg in text:
        result += conversions.to_string(arg, ctx)
    return result
Joins text strings into one text string
388,542
def encode_string(data, encoding='hex'):
    if six.PY2:
        return data.encode(encoding)
    else:
        if isinstance(data, str):
            data = bytes(data, 'utf-8')
        return codecs.encode(data, encoding).decode()
Encode string :param data: string to encode :param encoding: encoding to use (default: 'hex') :return: encoded string
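Quick check of the default hex codec (Python 3 path):

print(encode_string("abc"))            # 616263
print(encode_string("abc", "base64"))  # YWJj (with trailing newline)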
388,543
def find(self, nameFilter=None, typeFilter=None, bindingModeFilter=None, boundFilter=None):
    queryParms = {}
    if nameFilter:
        queryParms["name"] = nameFilter
    if typeFilter:
        queryParms["type"] = typeFilter
    if bindingModeFilter:
        queryParms["bindingMode"] = bindingModeFilter
    if boundFilter:
        queryParms["bound"] = boundFilter
    return IterableServiceBindingsList(self._apiClient, filters=queryParms)
Gets the list of services that the Watson IoT Platform can connect to. The list can include a mixture of services that are either bound or unbound. Parameters: - nameFilter(string) - Filter the results by the specified name - typeFilter(string) - Filter the results by the specified type, Available values : cloudant, eventstreams - bindingModeFilter(string) - Filter the results by the specified binding mode, Available values : automatic, manual - boundFilter(boolean) - Filter the results by the bound flag Throws APIException on failure.
388,544
def create(cls, cash_register_id, tab_uuid, description, monetary_account_id=None, ean_code=None, avatar_attachment_uuid=None, tab_attachment=None, quantity=None, amount=None, custom_headers=None): if custom_headers is None: custom_headers = {} request_map = { cls.FIELD_DESCRIPTION: description, cls.FIELD_EAN_CODE: ean_code, cls.FIELD_AVATAR_ATTACHMENT_UUID: avatar_attachment_uuid, cls.FIELD_TAB_ATTACHMENT: tab_attachment, cls.FIELD_QUANTITY: quantity, cls.FIELD_AMOUNT: amount } request_map_string = converter.class_to_json(request_map) request_map_string = cls._remove_field_for_request(request_map_string) api_client = client.ApiClient(cls._get_api_context()) request_bytes = request_map_string.encode() endpoint_url = cls._ENDPOINT_URL_CREATE.format(cls._determine_user_id(), cls._determine_monetary_account_id( monetary_account_id), cash_register_id, tab_uuid) response_raw = api_client.post(endpoint_url, request_bytes, custom_headers) return BunqResponseInt.cast_from_bunq_response( cls._process_for_id(response_raw) )
Create a new TabItem for a given Tab. :type user_id: int :type monetary_account_id: int :type cash_register_id: int :type tab_uuid: str :param description: The TabItem's brief description. Can't be empty and must be no longer than 100 characters :type description: str :param ean_code: The TabItem's EAN code. :type ean_code: str :param avatar_attachment_uuid: An AttachmentPublic UUID that used as an avatar for the TabItem. :type avatar_attachment_uuid: str :param tab_attachment: A list of AttachmentTab attached to the TabItem. :type tab_attachment: list[int] :param quantity: The quantity of the TabItem. Formatted as a number containing up to 15 digits, up to 15 decimals and using a dot. :type quantity: str :param amount: The money amount of the TabItem. Will not change the value of the corresponding Tab. :type amount: object_.Amount :type custom_headers: dict[str, str]|None :rtype: BunqResponseInt
388,545
def hazeDriver():
    try:
        (command, args) = findSubCommand(sys.argv)
        if os.path.basename(command) == "haze":
            print "Could not find a subcommand for %s" % " ".join(sys.argv)
            sys.exit(1)
    except StandardError:
        print "Could not find a subcommand for %s" % " ".join(sys.argv)
        sys.exit(1)
    check_call([command] + args)
Process the command line arguments and run the appropriate haze subcommand. We want to be able to do git-style handoffs to subcommands where if we do `haze aws foo bar` and the executable haze-aws-foo exists, we'll call it with the argument bar. We deliberately don't do anything with the arguments other than hand them off to the haze subcommand. Subcommands are responsible for their own argument parsing.
388,546
def bump_version(version, bump='patch'):
    try:
        parts = list(map(int, version.split('.')))
    except ValueError:
        fail('Current version is not numeric')
    if bump == 'patch':
        parts[2] += 1
    elif bump == 'minor':
        parts[1] += 1
        parts[2] = 0
    elif bump == 'major':
        parts[0] += 1
        parts[1] = 0
        parts[2] = 0
    return '.'.join(map(str, parts))
Bump a dotted version string; ``bump`` selects the component to increment: 'patch' (default), 'minor', or 'major'.
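Worked examples of the three bump kinds, matching the code above:

print(bump_version("1.2.3"))           # 1.2.4  (patch is the default)
print(bump_version("1.2.3", "minor"))  # 1.3.0
print(bump_version("1.2.3", "major"))  # 2.0.0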
388,547
def get_glibc_version():
    try:
        # 'ldd --version' is an assumption; the original command was stripped
        out = subprocess.Popen(['ldd', '--version'],
                               stdout=subprocess.PIPE).communicate()[0]
    except OSError:
        return
    match = re.search(br'(\d+)\.(\d+)', out)
    try:
        return tuple(map(int, match.groups()))
    except AttributeError:
        return
Returns: Version as a pair of ints (major, minor) or None
388,548
def refresh(self, module=None):
    module = module._mdl if module is not None else ffi.NULL
    lib.EnvRefreshAgenda(self._env, module)
Recompute the salience values of the Activations on the Agenda and then reorder the agenda. The Python equivalent of the CLIPS refresh-agenda command. If no Module is specified, the current one is used.
388,549
def commit_manually(using=None):
    def entering(using):
        enter_transaction_management(using=using)

    def exiting(exc_value, using):
        leave_transaction_management(using=using)

    return _transaction_func(entering, exiting, using)
Decorator that activates manual transaction control. It just disables automatic transaction control and doesn't do any commit/rollback of its own -- it's up to the user to call the commit and rollback functions themselves.
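Usage sketch in the style of the old (pre-1.6) Django transaction API this decorator belongs to; transaction.commit()/rollback() are that API's module-level functions:

@commit_manually
def update_balances(request):
    try:
        # ... create/modify model instances ...
        transaction.commit()
    except Exception:
        transaction.rollback()
        raise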
388,550
def UpdateStatus(self, is_complete):
    status = 'complete' if is_complete else 'active'
    transferred = os.path.getsize(self.props[TRANSFER_IFACE]['Filename'])
    self.props[TRANSFER_IFACE]['Status'] = status
    self.props[TRANSFER_IFACE]['Transferred'] = dbus.UInt64(transferred, variant_level=1)
    self.EmitSignal(dbus.PROPERTIES_IFACE, 'PropertiesChanged', 'sa{sv}as', [
        TRANSFER_IFACE,
        {
            'Status': dbus.String(status, variant_level=1),
            'Transferred': dbus.UInt64(transferred, variant_level=1),
        },
        [],
    ])
Mock method to update the transfer status.

If is_complete is False, this marks the transfer as active; otherwise it marks the transfer as complete. It is an error to call this method again after calling it with is_complete as True.

In both cases, it updates the number of bytes transferred to be the current size of the transfer file (whose filename was emitted in the TransferCreated signal).
388,551
def alter_object(self, obj):
    for attname, field, replacer in self.replacers:
        currentval = getattr(obj, attname)
        replacement = replacer(self, obj, field, currentval)
        setattr(obj, attname, replacement)
Alters all the attributes in an individual object. If it returns False, the object will not be saved
388,552
def check(self): if not self.is_valid: raise PolyaxonDeploymentConfigError( .format(self.deployment_type)) check = False if self.is_kubernetes: check = self.check_for_kubernetes() elif self.is_docker_compose: check = self.check_for_docker_compose() elif self.is_docker: check = self.check_for_docker() elif self.is_heroku: check = self.check_for_heroku() if not check: raise PolyaxonDeploymentConfigError( .format(self.deployment_type))
Add platform specific checks
388,553
def invalidate(self, name):
    with self.__instances_lock:
        try:
            stored_instance = self.__instances[name]
        except KeyError:
            raise ValueError("Unknown component instance '{0}'".format(name))
        else:
            stored_instance.invalidate(True)
Invalidates the given component :param name: Name of the component to invalidate :raise ValueError: Invalid component name
388,554
def gather(self, cmd):
    if not isinstance(cmd, list):
        cmd_list = shlex.split(cmd)
    else:
        cmd_list = cmd
    cwd = pushd.Dir.getcwd()
    cmd_info = '[cwd={}]: {}'.format(cwd, cmd_list)
    self.logger.debug("Executing:gather {}".format(cmd_info))
    proc = subprocess.Popen(
        cmd_list, cwd=cwd,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    rc = proc.returncode
    self.logger.debug(
        "Process {}: exited with: {}\nstdout>>{}<<\nstderr>>{}<<\n".
        format(cmd_info, rc, out, err))
    return rc, out, err
Runs a command and returns rc,stdout,stderr as a tuple. If called while the `Dir` context manager is in effect, guarantees that the process is executed in that directory, even if it is no longer the current directory of the process (i.e. it is thread-safe). :param cmd: The command and arguments to execute :return: (rc,stdout,stderr)
388,555
def identifyApplication(self, unProcessId, pchAppKey):
    fn = self.function_table.identifyApplication
    result = fn(unProcessId, pchAppKey)
    return result
Identifies a running application. OpenVR can't always tell which process started in response to a URL. This function allows a URL handler (or the process itself) to identify the app key for the now running application. Passing a process ID of 0 identifies the calling process. The application must be one that's known to the system via a call to AddApplicationManifest.
388,556
def _create_filter(self): self._product_filter = {} for chip in itertools.chain(iter(self._family.targets(self._tile.short_name)), iter([self._family.platform_independent_target()])): for key, prods in chip.property(, {}).items(): name, _, _ = key.partition() for prod in prods: if prod not in self._product_filter: self._product_filter[prod] = set() self._product_filter[prod].add(name)
Create a filter of all of the dependency products that we have selected.
388,557
def Serialize(self, writer):
    writer.WriteVarBytes(self.Script)
    writer.WriteVarBytes(self.ParameterList)
    writer.WriteByte(self.ReturnType)
Serialize full object. Args: writer (neo.IO.BinaryWriter):
388,558
def _generate_typevars(self):
    self.emit("T = TypeVar('T', bound=bb.AnnotationType)")
    self.emit("U = TypeVar('U')")
    self.import_tracker._register_typing_import()
    self.emit()
Creates type variables that are used by the type signatures for _process_custom_annotations.
388,559
def run_samtools(align_bams, items, ref_file, assoc_files, region=None, out_file=None):
    return shared_variantcall(_call_variants_samtools, "samtools", align_bams, ref_file,
                              items, assoc_files, region, out_file)
Detect SNPs and indels with samtools mpileup and bcftools.
388,560
def project_decrease_permissions(object_id, input_params={}, always_retry=True, **kwargs):
    return DXHTTPRequest('/%s/decreasePermissions' % object_id, input_params,
                         always_retry=always_retry, **kwargs)
Invokes the /project-xxxx/decreasePermissions API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Project-Permissions-and-Sharing#API-method%3A-%2Fproject-xxxx%2FdecreasePermissions
388,561
def _add_reference(self, obj, ident=0):
    # The log template literal was stripped in extraction; reconstructed from its arguments
    log_debug(
        "New reference handle 0x{0:X}: {1} -> {2}".format(
            len(self.references) + self.BASE_REFERENCE_IDX,
            type(obj).__name__,
            repr(obj),
        ),
        ident,
    )
    self.references.append(obj)
Adds a read reference to the marshaler storage :param obj: Reference to add :param ident: Log indentation level
388,562
def _process_message(self, message: amqp.Message) -> None:
    if self.shutdown_pending.is_set():
        return
    try:
        if isinstance(message.body, bytes):
            message.body = message.body.decode()
        description = json.loads(message.body)
    except Exception:
        logger.error("Cannot decode message. Dropping. Message: %r", message.body)
        traceback.print_exc()
        message.channel.basic_reject(message.delivery_tag, requeue=False)
    else:
        logger.info("Processing task: %r", description)
        self._process_description(message, description)
Processes the message received from the queue.
388,563
def change_wavelength(self, wavelength):
    for name, slab in self.slabs.items():
        const_args = slab._const_args
        mat_args = slab._mat_params
        const_args[8] = wavelength
        s = Slab(*const_args)
        for mat_arg in mat_args:
            s.add_material(*mat_arg)
        self.slabs[name] = s
    self._wl = wavelength
Changes the wavelength of the structure. This will affect the mode solver and potentially the refractive indices used (provided functions were provided as refractive indices). Args: wavelength (float): The new wavelength.
388,564
def set_perms(path, grant_perms=None, deny_perms=None, inheritance=True, reset=False):
    return __utils__['dacl.set_perms'](obj_name=path,
                                       obj_type='file',
                                       grant_perms=grant_perms,
                                       deny_perms=deny_perms,
                                       inheritance=inheritance,
                                       reset=reset)
Set permissions for the given path Args: path (str): The full path to the directory. grant_perms (dict): A dictionary containing the user/group and the basic permissions to grant, ie: ``{'user': {'perms': 'basic_permission'}}``. You can also set the ``applies_to`` setting here. The default for ``applise_to`` is ``this_folder_subfolders_files``. Specify another ``applies_to`` setting like this: .. code-block:: yaml {'user': {'perms': 'full_control', 'applies_to': 'this_folder'}} To set advanced permissions use a list for the ``perms`` parameter, ie: .. code-block:: yaml {'user': {'perms': ['read_attributes', 'read_ea'], 'applies_to': 'this_folder'}} To see a list of available attributes and applies to settings see the documentation for salt.utils.win_dacl. A value of ``None`` will make no changes to the ``grant`` portion of the DACL. Default is ``None``. deny_perms (dict): A dictionary containing the user/group and permissions to deny along with the ``applies_to`` setting. Use the same format used for the ``grant_perms`` parameter. Remember, deny permissions supersede grant permissions. A value of ``None`` will make no changes to the ``deny`` portion of the DACL. Default is ``None``. inheritance (bool): If ``True`` the object will inherit permissions from the parent, if ``False``, inheritance will be disabled. Inheritance setting will not apply to parent directories if they must be created. Default is ``False``. reset (bool): If ``True`` the existing DCL will be cleared and replaced with the settings defined in this function. If ``False``, new entries will be appended to the existing DACL. Default is ``False``. .. versionadded:: 2018.3.0 Returns: bool: True if successful Raises: CommandExecutionError: If unsuccessful CLI Example: .. code-block:: bash # To grant the 'Users' group 'read & execute' permissions. salt '*' file.set_perms C:\\Temp\\ "{'Users': {'perms': 'read_execute'}}" # Locally using salt call salt-call file.set_perms C:\\Temp\\ "{'Users': {'perms': 'read_execute', 'applies_to': 'this_folder_only'}}" # Specify advanced attributes with a list salt '*' file.set_perms C:\\Temp\\ "{'jsnuffy': {'perms': ['read_attributes', 'read_ea'], 'applies_to': 'this_folder_only'}}"
388,565
def group_by_month(self):
    data_by_month = OrderedDict()
    for d in range(1, 13):
        data_by_month[d] = []
    for v, dt in zip(self._values, self.datetimes):
        data_by_month[dt.month].append(v)
    return data_by_month
Return a dictionary of this collection's values grouped by each month. Key values are between 1-12.
388,566
def require_iterable_of(objs, types, name=None, type_name=None, truncate_at=80):
    try:
        if all(isinstance(obj, types) for obj in objs):
            return
    except TypeError:
        require_instance(objs, list, name, "iterable", truncate_at)
Raise an exception if objs is not an iterable with each element an instance of one of the specified types. See `require_instance` for descriptions of the other parameters.
388,567
def delete_attachment(self, attachment): parent = None if attachment.getLinkedRequests(): parent = attachment.getRequest() elif attachment.getLinkedAnalyses(): parent = attachment.getAnalysis() if parent is None: logger.warn( "Attachment {} is nowhere assigned. This should never happen!" .format(repr(attachment))) return False attachments = parent.getAttachment() if attachment in attachments: attachments.remove(attachment) parent.setAttachment(attachments) retain = False if attachment.getLinkedAnalyses(): holder = attachment.getAnalysis() logger.info("Attachment {} referenced by {} -> RETAIN" .format(repr(attachment), repr(holder))) retain = True if attachment.getLinkedRequests(): holder = attachment.getRequest() logger.info("Attachment {} referenced by {} -> RETAIN" .format(repr(attachment), repr(holder))) retain = True if retain is False: client = api.get_parent(attachment) client.manage_delObjects([attachment.getId(), ])
Delete attachment from the AR or Analysis The attachment will be only deleted if it is not further referenced by another AR/Analysis.
388,568
def change_port_speed(self, instance_id, public, speed): if public: return self.client.call(, , speed, id=instance_id) else: return self.client.call(, , speed, id=instance_id)
Allows you to change the port speed of a virtual server's NICs. Example:: #change the Public interface to 10Mbps on instance 12345 result = mgr.change_port_speed(instance_id=12345, public=True, speed=10) # result will be True or an Exception :param int instance_id: The ID of the VS :param bool public: Flag to indicate which interface to change. True (default) means the public interface. False indicates the private interface. :param int speed: The port speed to set. .. warning:: A port speed of 0 will disable the interface.
388,569
def _add_fold_decoration(self, block, region): deco = TextDecoration(block) deco.signals.clicked.connect(self._on_fold_deco_clicked) deco.tooltip = region.text(max_lines=25) deco.draw_order = 1 deco.block = block deco.select_line() deco.set_outline(drift_color( self._get_scope_highlight_color(), 110)) deco.set_background(self._get_scope_highlight_color()) deco.set_foreground(QtGui.QColor()) self._block_decos.append(deco) self.editor.decorations.append(deco)
Add fold decorations (boxes arround a folded block in the editor widget).
388,570
def zoom_to(self, zoomlevel, no_reset=False): scale_x, scale_y = self.zoom.calc_scale(zoomlevel) self._scale_to(scale_x, scale_y, no_reset=no_reset)
Set zoom level in a channel. This only changes the relevant settings; The image is not modified. Also see :meth:`scale_to`. .. note:: In addition to the given zoom level, other zoom settings are defined for the channel in preferences. Parameters ---------- zoomlevel : int The zoom level to zoom the image. Negative value to zoom out; positive to zoom in. no_reset : bool Do not reset ``autozoom`` setting.
388,571
def import_device(self, directory): with open(os.path.join(directory, "device.json"), "r") as f: ddata = json.load(f) d = self[ddata["name"]] dname = ddata["name"] del ddata["name"] if dname == "meta": return elif dname == "user": d.set(ddata) elif d.exists(): raise ValueError("The device " + d.name + " already exists") else: d.create(**ddata) for name in os.listdir(directory): sdir = os.path.join(directory, name) if os.path.isdir(sdir): d.import_stream(sdir)
Imports a device from the given directory. You export the device by using device.export() There are two special cases: user and meta devices. If the device name is meta, import_device will not do anything. If the device name is "user", import_device will overwrite the user device even if it exists already.
388,572
def deployed_devices(self):
    if self._deployed_devices is None:
        self._deployed_devices = DeployedDevices(self)
    return self._deployed_devices
:returns: Version deployed_devices of preview :rtype: twilio.rest.preview.deployed_devices.DeployedDevices
388,573
def get_distribution(self, name):
    result = None
    name = name.lower()
    if not self._cache_enabled:
        for dist in self._yield_distributions():
            if dist.key == name:
                result = dist
                break
    else:
        self._generate_cache()
        if name in self._cache.name:
            result = self._cache.name[name][0]
        elif self._include_egg and name in self._cache_egg.name:
            result = self._cache_egg.name[name][0]
    return result
Looks for a named distribution on the path. This function only returns the first result found, as no more than one value is expected. If nothing is found, ``None`` is returned. :rtype: :class:`InstalledDistribution`, :class:`EggInfoDistribution` or ``None``
388,574
def get_root_modules(): ip = get_ipython() if in ip.db: return ip.db[] t = time() store = False modules = list(sys.builtin_module_names) for path in sys.path: modules += module_list(path) if time() - t >= TIMEOUT_STORAGE and not store: store = True print("\nCaching the list of root modules, please wait!") print("(This will only be done once - type to " "reset cache!)\n") sys.stdout.flush() if time() - t > TIMEOUT_GIVEUP: print("This is taking too long, we give up.\n") ip.db[] = [] return [] modules = set(modules) if in modules: modules.remove() modules = list(modules) if store: ip.db[] = modules return modules
Returns a list containing the names of all the modules available in the folders of the pythonpath.
388,575
def match_criterion(self, tag):
    return tag.name == self.reference_tag_name and \
        tag.attrs.get('kind', '') == self.reference_tag_kind
Override. Determine if a tag has the desired name and kind attribute value. Args: tag: A BeautifulSoup Tag. Returns: True if tag has the desired name and kind, otherwise False.
388,576
def dbRestore(self, db_value, context=None):
    if db_value is not None:
        return yaml.load(projex.text.nativestring(db_value))
    else:
        return db_value
Converts a stored database value to Python.

:param db_value: <variant>
:param context: <orb.Context>

:return: <variant>
388,577
def _append_value(self, v_values, next_value, v_idx=None, n_vals=1): for _ in range(n_vals): if v_idx: try: v_i = next(v_idx) except StopIteration: if next_value is not None: warnings.warn( .format(next_value)) break v_s = [self.default_start_index if idx is None else idx for idx in v_idx.first] if not self.row_major: v_i = v_i[::-1] v_s = v_s[::-1] if not self.sparse_arrays: pad_array(v_values, list(zip(v_i, v_s))) v_subval = v_values for (i_v, i_s) in zip(v_i[:-1], v_s[:-1]): try: v_subval = v_subval[i_v - i_s] except IndexError: size = len(v_subval) v_subval.extend([] for _ in range(size, i_v - i_s + 1)) v_subval = v_subval[i_v - i_s] i_v, i_s = v_i[-1], v_s[-1] try: v_subval[i_v - i_s] = next_value except IndexError: size = len(v_subval) v_subval.extend(None for _ in range(size, i_v - i_s + 1)) v_subval[i_v - i_s] = next_value else: v_values.append(next_value)
Update a list of parsed values with a new value.
388,578
def _get_queue(self):
    if self._queue is None:
        self._links = []
        queue, depth = self._resolve_queue(self.queue, links=self._links)
        if queue is None and depth > 0:
            raise QueueLinkBroken
        self._queue = queue
    return self._queue
Gets the actual location of the queue, or None.
388,579
def _expectation(p, rbf_kern, feat1, lin_kern, feat2, nghp=None): if rbf_kern.on_separate_dims(lin_kern) and isinstance(p, DiagonalGaussian): eKxz1 = expectation(p, (rbf_kern, feat1)) eKxz2 = expectation(p, (lin_kern, feat2)) return eKxz1[:, :, None] * eKxz2[:, None, :] if feat1 != feat2: raise NotImplementedError("Features have to be the same for both kernels.") if rbf_kern.active_dims != lin_kern.active_dims: raise NotImplementedError("active_dims have to be the same for both kernels.") with params_as_tensors_for(rbf_kern, lin_kern, feat1, feat2): Xcov = rbf_kern._slice_cov(tf.matrix_diag(p.cov) if isinstance(p, DiagonalGaussian) else p.cov) Z, Xmu = rbf_kern._slice(feat1.Z, p.mu) N = tf.shape(Xmu)[0] D = tf.shape(Xmu)[1] lin_kern_variances = lin_kern.variance if lin_kern.ARD \ else tf.zeros((D,), dtype=settings.float_type) + lin_kern.variance rbf_kern_lengthscales = rbf_kern.lengthscales if rbf_kern.ARD \ else tf.zeros((D,), dtype=settings.float_type) + rbf_kern.lengthscales chol_L_plus_Xcov = tf.cholesky(tf.matrix_diag(rbf_kern_lengthscales ** 2) + Xcov) Z_transpose = tf.transpose(Z) all_diffs = Z_transpose - tf.expand_dims(Xmu, 2) exponent_mahalanobis = tf.matrix_triangular_solve(chol_L_plus_Xcov, all_diffs, lower=True) exponent_mahalanobis = tf.reduce_sum(tf.square(exponent_mahalanobis), 1) exponent_mahalanobis = tf.exp(-0.5 * exponent_mahalanobis) sqrt_det_L = tf.reduce_prod(rbf_kern_lengthscales) sqrt_det_L_plus_Xcov = tf.exp(tf.reduce_sum(tf.log(tf.matrix_diag_part(chol_L_plus_Xcov)), axis=1)) determinants = sqrt_det_L / sqrt_det_L_plus_Xcov eKxz_rbf = rbf_kern.variance * (determinants[:, None] * exponent_mahalanobis) tiled_Z = tf.tile(tf.expand_dims(Z_transpose, 0), (N, 1, 1)) z_L_inv_Xcov = tf.matmul(tiled_Z, Xcov / rbf_kern_lengthscales[:, None] ** 2., transpose_a=True) cross_eKzxKxz = tf.cholesky_solve( chol_L_plus_Xcov, (lin_kern_variances * rbf_kern_lengthscales ** 2.)[..., None] * tiled_Z) cross_eKzxKxz = tf.matmul((z_L_inv_Xcov + Xmu[:, None, :]) * eKxz_rbf[..., None], cross_eKzxKxz) return cross_eKzxKxz
Compute the expectation: expectation[n] = <Ka_{Z1, x_n} Kb_{x_n, Z2}>_p(x_n) - K_lin_{.,.} :: RBF kernel - K_rbf_{.,.} :: Linear kernel Different Z1 and Z2 are handled if p is diagonal and K_lin and K_rbf have disjoint active_dims, in which case the joint expectations simplify into a product of expectations :return: NxM1xM2
388,580
def getNameFromPath(filePath):
    if len(filePath) == 0:
        raise ValueError("Cannot have empty path for name")
    fileName = os.path.split(os.path.normpath(filePath))[1]
    ret = fileName.split(".")[0]
    assert ret != ""
    return ret
Returns the filename of the specified path without its extensions. This is usually how we derive the default name for a given object.
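Note that only the first dot-separated token survives, so multi-extension files lose everything after the first dot:

print(getNameFromPath("/data/sample.fasta"))  # sample
print(getNameFromPath("genome.fa.gz"))        # genome
getNameFromPath("")                           # raises ValueError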
388,581
def _execute(self, *args): command = ( [ , % self.config_filename, ] + self.get_configuration_override_args() + [six.text_type(arg) for arg in args] ) for i in range(len(command)): if isinstance(command[i], six.text_type): command[i] = ( taskw.utils.clean_ctrl_chars(command[i].encode())) try: proc = subprocess.Popen( command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) stdout, stderr = proc.communicate() except OSError as e: if e.errno == errno.ENOENT: raise OSError("Unable to find the command-line tool.") raise if proc.returncode != 0: raise TaskwarriorError(command, stderr, stdout, proc.returncode) try: stdout = stdout.decode(self.config.get(, )) except UnicodeDecodeError as e: stdout = kitchen.text.converters.to_unicode(stdout) try: stderr = stderr.decode(self.config.get(, )) except UnicodeDecodeError as e: stderr = kitchen.text.converters.to_unicode(stderr) for c in (, , , ): stdout = stdout.replace(c, ) stderr = stderr.replace(c, ) return stdout, stderr
Execute a given taskwarrior command with arguments Returns a 2-tuple of stdout and stderr (respectively).
388,582
def set_ylabel(self, s, delay_draw=False):
    "set plot ylabel"
    self.conf.relabel(ylabel=s, delay_draw=delay_draw)
set plot ylabel
388,583
def autoconfig_url_from_preferences():
    if not ON_DARWIN:
        raise NotDarwinError()
    try:
        config = SystemConfiguration.SCDynamicStoreCopyProxies(None)
    except AttributeError:
        return
    if all(('ProxyAutoConfigEnable' in config,
            'ProxyAutoConfigURLString' in config,
            not config.get('ProxyAutoDiscoveryEnable', 0))):
        return str(config['ProxyAutoConfigURLString'])
Get the PAC ``AutoConfigURL`` value from the macOS System Preferences. This setting is visible as the "URL" field in System Preferences > Network > Advanced... > Proxies > Automatic Proxy Configuration. :return: The value from the registry, or None if the value isn't configured or available. Note that it may be local filesystem path instead of a URL. :rtype: str|None :raises NotDarwinError: If called on a non-macOS/OSX platform.
388,584
def getPositionFromState(pState):
    result = dict()
    for (varName, value) in pState['varStates'].iteritems():
        result[varName] = value['position']
    return result
Return the position of a particle given its state dict. Parameters: -------------------------------------------------------------- retval: dict() of particle position, keys are the variable names, values are their positions
388,585
def _normalised_numpy(self):
    dx = (self.screen.width / float(len(self.points)))
    oy = (self.screen.height)
    points = np.array(self.points) - self.minimum
    points = points * 4.0 / self.extents * self.size.y
    for x, y in enumerate(points):
        yield Point((
            dx * x,
            min(oy, oy - y),
        ))
Normalised data points using numpy.
388,586
def instantiateSong(fileName):
    ext = detectFormat(fileName)
    if ext == "mp3":
        return pd.AudioSegment.from_mp3(fileName)
    elif ext == "wav":
        return pd.AudioSegment.from_wav(fileName)
    elif ext == "ogg":
        return pd.AudioSegment.from_ogg(fileName)
    elif ext == "flv":
        return pd.AudioSegment.from_flv(fileName)
    elif ext == "m4a":
        return pd.AudioSegment.from_file(fileName, "mp4")
    else:
        return pd.AudioSegment.from_file(fileName, ext)
Create an AudioSegment with the data from the given file
388,587
def CheckFile(self, path): print(.format(path)) definitions_registry = registry.DataTypeDefinitionsRegistry() definitions_reader = reader.YAMLDataTypeDefinitionsFileReader() result = False try: definitions_reader.ReadFile(definitions_registry, path) result = True except KeyError as exception: logging.warning(( ).format(path, exception)) except errors.FormatError as exception: logging.warning( .format( path, exception)) return result
Validates the definition in a file. Args: path (str): path of the definition file. Returns: bool: True if the file contains valid definitions.
388,588
def county_state_alerts(self, county, state):
    samecode = self.geo.lookup_samecode(county, state)
    return self.samecode_alerts(samecode)
Given a county and state, return alerts
388,589
def return_letters_from_string(text):
    out = ""
    for letter in text:
        if letter.isalpha():
            out += letter
    return out
Get letters from string only.
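Quick example:

print(return_letters_from_string("a1b2c3!"))  # abc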
388,590
def cubic_lattice( a, b, c, spacing ): grid = np.array( list( range( 1, a * b * c + 1 ) ) ).reshape( a, b, c, order= ) it = np.nditer( grid, flags=[ ] ) sites = [] while not it.finished: x, y, z = it.multi_index r = np.array( [ x, y, z ] ) * spacing neighbours = [ np.roll( grid, +1, axis=0 )[x,y,z], np.roll( grid, -1, axis=0 )[x,y,z], np.roll( grid, +1, axis=1 )[x,y,z], np.roll( grid, -1, axis=1 )[x,y,z], np.roll( grid, +1, axis=2 )[x,y,z], np.roll( grid, -1, axis=2 )[x,y,z] ] sites.append( lattice_site.Site( int( it[0] ), r, neighbours, 0.0, ) ) it.iternext() return lattice.Lattice( sites, cell_lengths = np.array( [ a, b, c ] ) * spacing )
Generate a cubic lattice. Args: a (Int): Number of lattice repeat units along x. b (Int): Number of lattice repeat units along y. c (Int): Number of lattice repeat units along z. spacing (Float): Distance between lattice sites. Returns: (Lattice): The new lattice
388,591
async def stream(self, version="1.1", keep_alive=False, keep_alive_timeout=None):
    headers = self.get_headers(
        version,
        keep_alive=keep_alive,
        keep_alive_timeout=keep_alive_timeout,
    )
    self.protocol.push_data(headers)
    await self.protocol.drain()
    await self.streaming_fn(self)
    self.protocol.push_data(b"0\r\n\r\n")
Streams headers, runs the `streaming_fn` callback that writes content to the response body, then finalizes the response body.
388,592
def _viewport_default(self):
    viewport = Viewport(component=self.canvas, enable_zoom=True)
    viewport.tools.append(ViewportPanTool(viewport))
    return viewport
Trait initialiser
388,593
def command(name): for args, kwargs in argparse_args_list: command.add_argument_tuple((args, kwargs)) return command return wrapper
Create a command, using the wrapped function as the handler. Args ---- name : str Name given to the created Command instance. Returns ------- Command A new instance of Command, with handler set to the wrapped function.
388,594
def _port_action_vlan(self, port, segment, func, vni): if not self._is_valid_segment(segment): return device_id = self._get_port_uuid(port) if nexus_help.is_baremetal(port): host_id = port.get() else: host_id = port.get(bc.portbindings.HOST_ID) vlan_id = segment.get(api.SEGMENTATION_ID) is_provider = nxos_db.is_provider_vlan(vlan_id) settings = {"vlan_id": vlan_id, "device_id": device_id, "host_id": host_id} missing_fields = [field for field, value in settings.items() if (field != and not value)] if not missing_fields: func(port, vlan_id, device_id, host_id, vni, is_provider) else: raise excep.NexusMissingRequiredFields( fields=.join(missing_fields))
Verify configuration and then process event.
388,595
def channel_is_settled(
    self,
    participant1: Address,
    participant2: Address,
    block_identifier: BlockSpecification,
    channel_identifier: ChannelID,
) -> bool:
    try:
        channel_state = self._get_channel_state(
            participant1=participant1,
            participant2=participant2,
            block_identifier=block_identifier,
            channel_identifier=channel_identifier,
        )
    except RaidenRecoverableError:
        return False
    return channel_state >= ChannelState.SETTLED
Returns true if the channel is in a settled state, false otherwise.
388,596
def info(self, **kwargs):
    path = self._get_path('info')
    kwargs.update({'session_id': self.session_id})
    response = self._GET(path, kwargs)
    self.id = response['id']
    self._set_attrs_to_values(response)
    return response
Get the basic information for an account. Call this method first, before calling other Account methods.

Returns:
    A dict representation of the JSON returned from the API.
388,597
def command(cls, command, stdin=None, shell=False):
    if not shell and isinstance(command, str):
        command = cls.shlex.split(command)
    collate_original = None
    try:
        # 'LC_ALL' is an assumption; the exact locale variable name was stripped
        collate_original = cls.os.environ['LC_ALL']
    except KeyError:
        pass
    cls.os.environ['LC_ALL'] = "C"
    try:
        process = cls.subprocess.Popen(command,
                                       stdout=cls.subprocess.PIPE,
                                       stderr=cls.subprocess.PIPE,
                                       stdin=cls.subprocess.PIPE,
                                       shell=shell)
        (stdout, stderr) = process.communicate(stdin)
    finally:
        if collate_original:
            cls.os.environ['LC_ALL'] = collate_original
        else:
            del cls.os.environ['LC_ALL']
    return cls(stdout, stderr, stdin, process.returncode, command)
Runs the specified command. The command can be fed data on stdin with the ``stdin`` parameter, and can be treated as a shell command with the ``shell`` parameter. Refer to subprocess.Popen for how this works.

:returns: Run() instance with the resulting data
388,598
def set_memcached_backend(self, config): config[] = if is_importable(config[]): return if config.get() and is_importable(): config[] = return
Select the most suitable Memcached backend based on the config and on what's installed
388,599
def view_umatrix(self, figsize=None, colormap=cm.Spectral_r, colorbar=False,
                 bestmatches=False, bestmatchcolors=None, labels=None,
                 zoom=None, filename=None):
    if self.umatrix is None:
        raise Exception("The U-matrix is not available. Either train a map"
                        " or load a U-matrix from a file")
    return self._view_matrix(self.umatrix, figsize, colormap, colorbar,
                             bestmatches, bestmatchcolors, labels, zoom, filename)
Plot the U-matrix of the trained map. :param figsize: Optional parameter to specify the size of the figure. :type figsize: (int, int) :param colormap: Optional parameter to specify the color map to be used. :type colormap: matplotlib.colors.Colormap :param colorbar: Optional parameter to include a colormap as legend. :type colorbar: bool. :param bestmatches: Optional parameter to plot best matching units. :type bestmatches: bool. :param bestmatchcolors: Optional parameter to specify the color of each best matching unit. :type bestmatchcolors: list of int. :param labels: Optional parameter to specify the label of each point. :type labels: list of str. :param zoom: Optional parameter to zoom into a region on the map. The first two coordinates of the tuple are the row limits, the second tuple contains the column limits. :type zoom: ((int, int), (int, int)) :param filename: If specified, the plot will not be shown but saved to this file. :type filename: str.