379,600
def autozoom(self, points):
    points = np.asarray(points)
    extraoff = 0.01
    abc = np.array([self.a, self.b, self.c])
    old_geom_center = points.sum(axis=0) / len(points)
    points = points.copy() + self.position
    geom_center = points.sum(axis=0) / len(points)
    self.position += self.a * np.dot(geom_center, self.a)
    self.position += self.b * np.dot(geom_center, self.b)
    self.pivot = old_geom_center
    bound_radius = np.sqrt(((points - geom_center) * (points - geom_center)).sum(axis=1).max())
    fov_topbottom = self.fov * np.pi / 180.0
    dist = (bound_radius + self.z_near) / np.tan(fov_topbottom * 0.5)
    self.position = self.pivot.copy()
    self.position -= self.c * (dist * (1 + extraoff))
Fit the current view to the correct zoom level to display all *points*.

The camera viewing direction and rotation pivot match the geometric center of the points, and the distance from that point is calculated so that all points fit in the field of view. This is currently used to provide optimal visualization for molecules and systems.

**Parameters**

points: np.ndarray((N, 3))
    Array of points.
379,601
def prebinned_hist(counts, binlims, ax=None, *args, **kwargs):
    ax = get_ax(ax)
    x = bincenters(binlims)
    weights = counts
    return ax.hist(x, bins=binlims, weights=weights, *args, **kwargs)
Plot a histogram with counts and binlims already given.

Example
=======

>>> gaus = np.random.normal(size=100)
>>> counts, binlims = np.histogram(gaus, bins='auto')
>>> prebinned_hist(counts, binlims)
379,602
def entails(self, other):
    other = BoolCell.coerce(other)
    return other.is_entailed_by(self)
Inverse of ``is_entailed_by``.
379,603
def irregular_sampling(T, N, rseed=None):
    sampling_period = T / float(N)
    N = int(N)
    np.random.seed(rseed)
    t = np.linspace(0, T, num=5 * N)
    # Perturb the linearly spaced points, keeping the end points fixed
    t[1:-1] += sampling_period * 0.5 * np.random.randn(5 * N - 2)
    # Keep a random subset of N samples and sort them
    P = np.random.permutation(5 * N)
    t_irr = np.sort(t[P[:N]])
    return t_irr
Generates an irregularly sampled time vector by perturbing a linearly spaced vector and later deleting a certain number of points.

Parameters
----------
T: float
    Time span of the vector, i.e. how long it is in time
N: positive integer
    Number of samples of the resulting time vector
rseed:
    Random seed to feed the random number generator

Returns
-------
t_irr: ndarray
    An irregularly sampled time vector
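A minimal usage sketch for the function above (illustrative only; assumes numpy is imported as np and irregular_sampling is defined as shown):

import numpy as np

t = irregular_sampling(T=10.0, N=100, rseed=42)
# The result is a sorted, length-N vector of jittered sample times.
assert len(t) == 100
assert np.all(np.diff(t) >= 0)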
379,604
def fit(self, X, y=None):
    if self.normalize:
        X = normalize(X)
    self._check_force_weights()
    random_state = check_random_state(self.random_state)
    X = self._check_fit_data(X)
    (
        self.cluster_centers_,
        self.labels_,
        self.inertia_,
        self.weights_,
        self.concentrations_,
        self.posterior_,
    ) = movMF(
        X,
        self.n_clusters,
        posterior_type=self.posterior_type,
        force_weights=self.force_weights,
        n_init=self.n_init,
        n_jobs=self.n_jobs,
        max_iter=self.max_iter,
        verbose=self.verbose,
        init=self.init,
        random_state=random_state,
        tol=self.tol,
        copy_x=self.copy_x,
    )
    return self
Compute mixture of von Mises Fisher clustering.

Parameters
----------
X : array-like or sparse matrix, shape=(n_samples, n_features)
379,605
def _fingerprint_target_specs(self, specs):
    assert self._build_graph is not None, (
        .format(specs)
    )
    hasher = sha1()
    for spec in sorted(specs):
        for target in sorted(self._build_graph.resolve(spec)):
            pass
    return hasher.hexdigest()
Returns a fingerprint of the targets resolved from given target specs.
379,606
def parse_option(self, option, block_name, *values):
    if option.endswith():
        status = values[0]
        if status not in self.VALID_STATUSES:
            raise ValueError(u.format(status))
        if len(values) > 2:
            raise TypeError
        if option == :
            option = + option
        key = option.split(, 1)[0]
        self.statuses[key] = values[:2]
    elif option == :
        if len(values) != 2:
            raise TypeError
        name, msg = values
        self.messages[name] = msg
Parse status, end_status, timer_status and status_msg options.
379,607
def from_config(cls, cp, model, nprocesses=1, use_mpi=False):
    section = "sampler"
    assert cp.get(section, "name") == cls.name, (
        "name in section [sampler] must match mine")
    nwalkers = int(cp.get(section, "nwalkers"))
    obj.set_burn_in_from_config(cp)
    obj.set_thin_interval_from_config(cp, section)
    return obj
Loads the sampler from the given config file.
379,608
def get_cgi_parameter_str_or_none(form: cgi.FieldStorage,
                                  key: str) -> Optional[str]:
    s = get_cgi_parameter_str(form, key)
    if s is None or len(s) == 0:
        return None
    return s
Extracts a string parameter from a CGI form, or ``None`` if the key doesn't exist or the string is zero-length.
379,609
def sort(self, column, order=Qt.AscendingOrder):
    ascending = order == Qt.AscendingOrder
    self.model.sort(self.COLUMN_INDEX, order=ascending)
    return True
Overriding sort method.
379,610
def push(self, line):
    if transforms.FROM_EXPERIMENTAL.match(line):
        transforms.add_transformers(line)
        self.buffer.append("\n")
    else:
        self.buffer.append(line)
    add_pass = False
    if line.rstrip().endswith(":"):
        add_pass = True
    source = "\n".join(self.buffer)
    if add_pass:
        source += "pass"
    source = transforms.transform(source)
    if add_pass:
        source = source.rstrip()
        if source.endswith("pass"):
            source = source[:-4]
    if not self.buffer[-1]:
        source += "\n"
    try:
        more = self.runsource(source, self.filename)
    except SystemExit:
        os._exit(1)
    if not more:
        self.resetbuffer()
    return more
Transform and push a line to the interpreter. The line should not have a trailing newline; it may have internal newlines. The line is appended to a buffer and the interpreter's runsource() method is called with the concatenated contents of the buffer as source. If this indicates that the command was executed or invalid, the buffer is reset; otherwise, the command is incomplete, and the buffer is left as it was after the line was appended. The return value is 1 if more input is required, 0 if the line was dealt with in some way (this is the same as runsource()).
379,611
def _FormatMessage(self, event):
    message, _ = self._output_mediator.GetFormattedMessages(event)
    if message is None:
        data_type = getattr(event, 'data_type', 'UNKNOWN')
        raise errors.NoFormatterFound(
            .format(data_type))
    return message
Formats the message.

Args:
    event (EventObject): event.

Returns:
    str: message field.

Raises:
    NoFormatterFound: if no event formatter can be found to match the data type in the event.
379,612
def getSingle(self, type_uri, default=None):
    values = self.data.get(type_uri)
    if not values:
        return default
    elif len(values) == 1:
        return values[0]
    else:
        raise AXError( % (type_uri,))
Get a single value for an attribute. If no value was sent for this attribute, use the supplied default. If there is more than one value for this attribute, this method will fail.

@type type_uri: str
@param type_uri: The URI for the attribute
@param default: The value to return if the attribute was not sent in the fetch_response.
@returns: The value of the attribute in the fetch_response message, or the default supplied
@rtype: unicode or NoneType
@raises ValueError: If there is more than one value for this parameter in the fetch_response message.
@raises KeyError: If the attribute was not sent in this response
379,613
async def eat(self, philosopher):
    loop = philosopher._loop
    while True:
        forks = self.forks
        if forks:
            if len(forks) == 2:
                self.thinking = 0
                self.eaten += 1
                philosopher.logger.info("eating... So far %s times", self.eaten)
                eat_time = 2 * self.cfg.eating_period * random.random()
                await sleep(eat_time)
                await self.release_forks(philosopher)
            elif len(forks) == 1:
                waiting_period = 2 * self.cfg.waiting_period * random.random()
                if self.started_waiting == 0:
                    self.started_waiting = loop.time()
                elif loop.time() - self.started_waiting > waiting_period:
                    philosopher.logger.debug("tired of waiting")
                    await self.release_forks(philosopher)
            elif len(forks) > 2:
                philosopher.logger.critical()
                await self.release_forks(philosopher)
        else:
            if not self.thinking:
                philosopher.logger.warning()
            self.thinking += 1
        await self.pickup_fork(philosopher)
The ``philosopher`` performs one of these two actions:

* eat, if he has both forks, and then :meth:`release_forks`.
* try to :meth:`pickup_fork`, if he has fewer than 2 forks.
379,614
def _hm_read_address(self):
    response = self._hm_send_address(self.address, 0, 0, 0)
    lookup = self.config['lookup']
    offset = self.config['offset']
    keydata = {}
    for i in lookup:
        try:
            kdata = lookup[i]
            ddata = response[i + offset]
            keydata[i] = {'label': kdata, 'value': ddata}
        except IndexError:
            logging.info("Finished processing at %d", i)
    return keydata
Reads from the DCB and maps to yaml config file.
379,615
def internal_name(self):
    unq = + super().internal_name()
    if self.tparams is not None:
        unq += "_" + "_".join(self.tparams)
    if self.tret is not None:
        unq += "_" + self.tret
    return unq
Return the unique internal name
379,616
def stream(self, date_created_from=values.unset, date_created_to=values.unset,
           limit=None, page_size=None):
    limits = self._version.read_limits(limit, page_size)
    page = self.page(
        date_created_from=date_created_from,
        date_created_to=date_created_to,
        page_size=limits['page_size'],
    )
    return self._version.stream(page, limits['limit'], limits['page_limit'])
Streams ExecutionInstance records from the API as a generator stream. This operation lazily loads records as efficiently as possible until the limit is reached. The results are returned as a generator, so this operation is memory efficient.

:param datetime date_created_from: Only show Executions that started on or after this ISO8601 date-time.
:param datetime date_created_to: Only show Executions that started before this ISO8601 date-time.
:param int limit: Upper limit for the number of records to return. stream() guarantees to never return more than limit. Default is no limit.
:param int page_size: Number of records to fetch per request, when not set will use the default value of 50 records. If no page_size is defined but a limit is defined, stream() will attempt to read the limit with the most efficient page size, i.e. min(limit, 1000)

:returns: Generator that will yield up to limit results
:rtype: list[twilio.rest.studio.v1.flow.execution.ExecutionInstance]
379,617
async def _send_loop(self):
    while self._user_connected and not self._reconnecting:
        if self._pending_ack:
            ack = RequestState(MsgsAck(list(self._pending_ack)), self._loop)
            self._send_queue.append(ack)
            self._last_acks.append(ack)
            self._pending_ack.clear()
        self._log.debug()
This loop is responsible for popping items off the send queue, encrypting them, and sending them over the network. Besides `connect`, only this method ever sends data.
379,618
def fix_lines(source_lines, options, filename=''):
    original_newline = find_newline(source_lines)
    tmp_source = ''.join(normalize_line_endings(source_lines, '\n'))
    previous_hashes = set()
    if options.line_range:
        fixed_source = tmp_source
    else:
        fixed_source = apply_global_fixes(tmp_source, options, filename=filename)
    passes = 0
    long_line_ignore_cache = set()
    while hash(fixed_source) not in previous_hashes:
        if options.pep8_passes >= 0 and passes > options.pep8_passes:
            break
        passes += 1
        previous_hashes.add(hash(fixed_source))
        tmp_source = copy.copy(fixed_source)
        fix = FixPEP8(
            filename,
            options,
            contents=tmp_source,
            long_line_ignore_cache=long_line_ignore_cache)
        fixed_source = fix.fix()
    sio = io.StringIO(fixed_source)
    return ''.join(normalize_line_endings(sio.readlines(), original_newline))
Return fixed source code.
379,619
def __gen_token_anno_file(self, top_level_layer):
    base_paula_id = .format(self.corpus_name, self.name)
    paula_id = .format(top_level_layer, self.corpus_name, self.name)
    E, tree = gen_paula_etree(paula_id)
    mflist = E(, {XMLBASE: base_paula_id + })
    for token_id in self.dg.tokens:
        mfeat = E(, {XLINKHREF: .format(token_id)})
        token_dict = self.dg.node[token_id]
        for feature in token_dict:
            if feature not in IGNORED_TOKEN_ATTRIBS \
                    and feature.startswith(top_level_layer):
                mfeat.append(E(, {: feature, : token_dict[feature]}))
        if self.human_readable:
            mfeat.append(Comment(token_dict[self.dg.ns + ]))
        mflist.append(mfeat)
    tree.append(mflist)
    self.files[paula_id] = tree
    self.file2dtd[paula_id] = PaulaDTDs.multifeat
    return paula_id
Creates an etree representation of a <multiFeat> file that describes all the annotations that only span one token (e.g. POS, lemma etc.).

Note: discoursegraphs will create one token annotation file for each top level layer (e.g. conano, tiger etc.).
379,620
def process_form(self, instance, field, form, empty_marker=None,
                 emptyReturnsMarker=False, validating=True):
    service_uids = form.get("uids", [])
    return service_uids, {}
Return UIDs of the selected services
379,621
def _extra_trust_root_validation(self):
    store = None
    cert_chain_context_pointer = None
    try:
        store = crypt32.CertOpenStore(
            Crypt32Const.CERT_STORE_PROV_MEMORY,
            Crypt32Const.X509_ASN_ENCODING,
            null(),
            0,
            null()
        )
        if is_null(store):
            handle_crypt32_error(0)
        cert_hashes = set()
        for cert in self._session._extra_trust_roots:
            cert_data = cert.dump()
            result = crypt32.CertAddEncodedCertificateToStore(
                store,
                Crypt32Const.X509_ASN_ENCODING,
                cert_data,
                len(cert_data),
                Crypt32Const.CERT_STORE_ADD_USE_EXISTING,
                null()
            )
            if not result:
                handle_crypt32_error(0)
            cert_hashes.add(cert.sha256)
        cert_context_pointer_pointer = new(crypt32, )
        result = secur32.QueryContextAttributesW(
            self._context_handle_pointer,
            Secur32Const.SECPKG_ATTR_REMOTE_CERT_CONTEXT,
            cert_context_pointer_pointer
        )
        handle_error(result)
        cert_context_pointer = unwrap(cert_context_pointer_pointer)
        cert_context_pointer = cast(crypt32, , cert_context_pointer)
        orig_now_pointer = new(kernel32, )
        kernel32.GetSystemTimeAsFileTime(orig_now_pointer)
        now_pointer = cast(crypt32, , orig_now_pointer)
        usage_identifiers = new(crypt32, )
        usage_identifiers[0] = cast(crypt32, , Crypt32Const.PKIX_KP_SERVER_AUTH)
        usage_identifiers[1] = cast(crypt32, , Crypt32Const.SERVER_GATED_CRYPTO)
        usage_identifiers[2] = cast(crypt32, , Crypt32Const.SGC_NETSCAPE)
        cert_enhkey_usage_pointer = struct(crypt32, )
        cert_enhkey_usage = unwrap(cert_enhkey_usage_pointer)
        cert_enhkey_usage.cUsageIdentifier = 3
        cert_enhkey_usage.rgpszUsageIdentifier = cast(crypt32, , usage_identifiers)
        cert_usage_match_pointer = struct(crypt32, )
        cert_usage_match = unwrap(cert_usage_match_pointer)
        cert_usage_match.dwType = Crypt32Const.USAGE_MATCH_TYPE_OR
        cert_usage_match.Usage = cert_enhkey_usage
        cert_chain_para_pointer = struct(crypt32, )
        cert_chain_para = unwrap(cert_chain_para_pointer)
        cert_chain_para.RequestedUsage = cert_usage_match
        cert_chain_para_size = sizeof(crypt32, cert_chain_para)
        cert_chain_para.cbSize = cert_chain_para_size
        cert_chain_context_pointer_pointer = new(crypt32, )
        result = crypt32.CertGetCertificateChain(
            null(),
            cert_context_pointer,
            now_pointer,
            store,
            cert_chain_para_pointer,
            Crypt32Const.CERT_CHAIN_CACHE_END_CERT | Crypt32Const.CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY,
            null(),
            cert_chain_context_pointer_pointer
        )
        handle_crypt32_error(result)
        cert_chain_policy_para_flags = Crypt32Const.CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS
        cert_chain_context_pointer = unwrap(cert_chain_context_pointer_pointer)
        cert_chain_context = unwrap(cert_chain_context_pointer)
        num_chains = native(int, cert_chain_context.cChain)
        if num_chains == 1:
            first_simple_chain_pointer = unwrap(cert_chain_context.rgpChain)
            first_simple_chain = unwrap(first_simple_chain_pointer)
            num_elements = native(int, first_simple_chain.cElement)
            last_element_pointer = first_simple_chain.rgpElement[num_elements - 1]
            last_element = unwrap(last_element_pointer)
            last_element_cert = unwrap(last_element.pCertContext)
            last_element_cert_data = bytes_from_buffer(
                last_element_cert.pbCertEncoded,
                native(int, last_element_cert.cbCertEncoded)
            )
            last_cert = x509.Certificate.load(last_element_cert_data)
            if last_cert.sha256 in cert_hashes:
                cert_chain_policy_para_flags |= Crypt32Const.CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG
        ssl_extra_cert_chain_policy_para_pointer = struct(crypt32, )
        ssl_extra_cert_chain_policy_para = unwrap(ssl_extra_cert_chain_policy_para_pointer)
        ssl_extra_cert_chain_policy_para.cbSize = sizeof(crypt32, ssl_extra_cert_chain_policy_para)
        ssl_extra_cert_chain_policy_para.dwAuthType = Crypt32Const.AUTHTYPE_SERVER
        ssl_extra_cert_chain_policy_para.fdwChecks = 0
        ssl_extra_cert_chain_policy_para.pwszServerName = cast(
            crypt32, , buffer_from_unicode(self._hostname)
        )
        cert_chain_policy_para_pointer = struct(crypt32, )
        cert_chain_policy_para = unwrap(cert_chain_policy_para_pointer)
        cert_chain_policy_para.cbSize = sizeof(crypt32, cert_chain_policy_para)
        cert_chain_policy_para.dwFlags = cert_chain_policy_para_flags
        cert_chain_policy_para.pvExtraPolicyPara = cast(crypt32, , ssl_extra_cert_chain_policy_para_pointer)
        cert_chain_policy_status_pointer = struct(crypt32, )
        cert_chain_policy_status = unwrap(cert_chain_policy_status_pointer)
        cert_chain_policy_status.cbSize = sizeof(crypt32, cert_chain_policy_status)
        result = crypt32.CertVerifyCertificateChainPolicy(
            Crypt32Const.CERT_CHAIN_POLICY_SSL,
            cert_chain_context_pointer,
            cert_chain_policy_para_pointer,
            cert_chain_policy_status_pointer
        )
        handle_crypt32_error(result)
        cert_context = unwrap(cert_context_pointer)
        cert_data = bytes_from_buffer(cert_context.pbCertEncoded,
                                      native(int, cert_context.cbCertEncoded))
        cert = x509.Certificate.load(cert_data)
        error = cert_chain_policy_status.dwError
        if error:
            if error == Crypt32Const.CERT_E_EXPIRED:
                raise_expired_not_yet_valid(cert)
            if error == Crypt32Const.CERT_E_UNTRUSTEDROOT:
                oscrypto_cert = load_certificate(cert)
                if oscrypto_cert.self_signed:
                    raise_self_signed(cert)
                else:
                    raise_no_issuer(cert)
            if error == Crypt32Const.CERT_E_CN_NO_MATCH:
                raise_hostname(cert, self._hostname)
            if error == Crypt32Const.TRUST_E_CERT_SIGNATURE:
                raise_weak_signature(cert)
            if error == Crypt32Const.CRYPT_E_REVOKED:
                raise_revoked(cert)
            raise_verification(cert)
        if cert.hash_algo in set([, ]):
            raise_weak_signature(cert)
    finally:
        if store:
            crypt32.CertCloseStore(store, 0)
        if cert_chain_context_pointer:
            crypt32.CertFreeCertificateChain(cert_chain_context_pointer)
Manually invoked Windows certificate chain building and verification step, used when there are extra trust roots to include in the search process.
379,622
def _get_init_args(self):
    args = {}
    for rop in self.ro_properties:
        if rop in self.properties:
            args[rop] = self.properties[rop]
    return args
Creates dict with properties marked as readonly
379,623
def process_flagged_blocks(self, content: str) -> str:
    def _sub(flagged_block):
        options = self.get_options(flagged_block.group())
        required_flags = {
            flag.lower()
            for flag in re.split(self._flag_delimiters, options.get(, ))
            if flag
        } | {
            f
            for target in re.split(self._flag_delimiters, options.get(, ))
            if target
        } | {
            f
            for backend in re.split(self._flag_delimiters, options.get(, ))
            if backend
        }
        env_flags = {
            flag.lower()
            for flag in re.split(self._flag_delimiters, getenv(self._flags_envvar, ))
            if flag
        }
        config_flags = {flag.lower() for flag in self.options[]}
        set_flags = env_flags | config_flags | {f, f}
        kind = options.get(, )
        if (kind == and required_flags <= set_flags) \
                or (kind == and required_flags & set_flags) \
                or (kind == and not required_flags & set_flags):
            return flagged_block.group().strip()
        else:
            return
    return self.pattern.sub(_sub, content)
Replace flagged blocks either with their contents or nothing, depending on the value of the ``FOLIANT_FLAGS`` environment variable and the ``flags`` config value.

:param content: Markdown content
:returns: Markdown content without flagged blocks
379,624
def getProjectAreas(self, archived=False, returned_properties=None):
    return self._getProjectAreas(archived=archived,
                                 returned_properties=returned_properties)
Get all :class:`rtcclient.project_area.ProjectArea` objects

If no :class:`rtcclient.project_area.ProjectArea` objects are retrieved, `None` is returned.

:param archived: (default is False) whether the project area is archived
:param returned_properties: the returned properties that you want. Refer to :class:`rtcclient.client.RTCClient` for more explanations
:return: A :class:`list` that contains all the :class:`rtcclient.project_area.ProjectArea` objects
:rtype: list
379,625
def names(self):
    if is_term(self.terms):
        return frozenset([self.terms.name])
    return frozenset(term.name for term in com.flatten(self.terms))
Get the names in an expression
379,626
def path_wrapper(func):
    @functools.wraps(func)
    def wrapped(node, context=None, _func=func, **kwargs):
        if context is None:
            context = contextmod.InferenceContext()
        if context.push(node):
            return None
        yielded = set()
        generator = _func(node, context, **kwargs)
        try:
            while True:
                res = next(generator)
                if res.__class__.__name__ == "Instance":
                    ares = res._proxied
                else:
                    ares = res
                if ares not in yielded:
                    yield res
                    yielded.add(ares)
        except StopIteration as error:
            if error.args:
                return error.args[0]
            return None
    return wrapped
Return the given infer function wrapped to handle the path.

Used to stop inference if the node has already been looked at for a given `InferenceContext`, to prevent infinite recursion.
379,627
def days(self):
    monday = self.day(0)
    return [monday + timedelta(days=i) for i in range(7)]
Return the 7 days of the week as a list (of datetime.date objects)
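For reference, the underlying construction can be reproduced standalone (illustrative sketch; in the real method, monday comes from self.day(0) rather than a hard-coded date):

from datetime import date, timedelta

monday = date(2024, 1, 1)  # a known Monday
week = [monday + timedelta(days=i) for i in range(7)]
assert len(week) == 7 and week[-1] - week[0] == timedelta(days=6)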
379,628
def _press_special_key(self, key, down):
    key_code = special_key_translate_table[key]
    ev = NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_(
        NSSystemDefined,
        (0, 0),
        0xa00 if down else 0xb00,
        0,
        0,
        0,
        8,
        (key_code << 16) | ((0xa if down else 0xb) << 8),
        -1
    )
    Quartz.CGEventPost(0, ev.CGEvent())
Helper method for special keys. Source: http://stackoverflow.com/questions/11045814/emulate-media-key-press-on-mac
379,629
def _validate_alias_command(alias_command):
    if not alias_command:
        raise CLIError(EMPTY_ALIAS_ERROR)
    split_command = shlex.split(alias_command)
    boundary_index = len(split_command)
    for i, subcommand in enumerate(split_command):
        if not re.match(, subcommand.lower()) or i > COLLISION_CHECK_LEVEL_DEPTH:
            boundary_index = i
            break
    command_to_validate = .join(split_command[:boundary_index]).lower()
    for command in azext_alias.cached_reserved_commands:
        if re.match(r.format(command_to_validate), command):
            return
    _validate_positional_arguments(shlex.split(alias_command))
Check if the alias command is valid.

Args:
    alias_command: The command to validate.
379,630
def insert(self):
    if not self.curs:
        raise LIGOLwDBError, "Database connection not initialized"
    if len(self.table) == 0:
        raise LIGOLwDBError,
    for tab in self.table.keys():
        generate = []
        missingcols = [k for k in self.ldb.uniqueids[tab]
                       if k not in self.table[tab][]]
        for m in missingcols:
            generate.append()
            self.table[tab][].append(m)
        self.table[tab][] = .join(
            [, tab, , .join(self.table[tab][]),
             , .join([ for x in self.table[tab][]]),
             .join(generate), ])
    for tabtup in self.ldb.tables:
        tab = tabtup[0].lower()
        try:
            try:
                self.curs.executemany(self.table[tab][], self.table[tab][])
                rowcount = self.curs.rowcount
            except DB2.Error, e:
                self.curs.execute()
                msg = e[2]
                msg += self.xml() +
                msg += str(self.table[tab][]) +
                msg += str(self.table[tab][]) +
                raise LIGOLwDBError, msg
            except DB2.Warning, e:
                self.curs.execute()
                raise LIGOLwDBError, e[2]
        except KeyError:
            pass
    self.curs.execute()
    return rowcount
Insert the object into the database
379,631
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0):
    action = ord('c')  # blink(1) "fade to RGB" command byte
    fade_time = int(fade_milliseconds / 10)
    th = (fade_time & 0xff00) >> 8
    tl = fade_time & 0x00ff
    buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0]
    self.write(buf)
Command blink(1) to fade to RGB color, no color correction applied.
379,632
def predict(self):
    if self.w_ is not None:
        sigmoid = lambda t: 1. / (1. + np.exp(-t))
        return sigmoid(np.dot(self.centers, self.w_[:-1]) + self.w_[-1])
    else:
        pass
Returns
-------
proba : ndarray, shape=(n_clusters, )
    The probability of given cluster being label 1.
379,633
def temp_output_file(prefix="tmp", suffix="", dir=None, make_parents=False,
                     always_clean=False):
    return _temp_output(False, prefix=prefix, suffix=suffix, dir=dir,
                        make_parents=make_parents, always_clean=always_clean)
A context manager for convenience in creating a temporary file, which is deleted when exiting the context.

Usage::

    with temp_output_file() as (fd, path):
        ...
379,634
def get_families_by_ids(self, *args, **kwargs):
    catalogs = self._get_provider_session().get_families_by_ids(*args, **kwargs)
    cat_list = []
    for cat in catalogs:
        cat_list.append(Family(self._provider_manager, cat, self._runtime, self._proxy))
    return FamilyList(cat_list)
Pass through to provider FamilyLookupSession.get_families_by_ids
379,635
def set_proxy(self, proxy, update=True):
    update_web_driver = False
    if self.current_proxy != proxy:
        update_web_driver = True
    self.current_proxy = proxy
    if proxy is None:
        self.driver_args['service_args'] = self.default_service_args
    else:
        proxy_parts = cutil.get_proxy_parts(proxy)
        self.driver_args['service_args'].extend([
            .format(**proxy_parts),
            .format(**proxy_parts),
        ])
        if proxy_parts.get() is not None:
            self.driver_args['service_args'].append(.format(**proxy_parts))
    if update is True and update_web_driver is True:
        self._update()
Set proxy for requests session
379,636
def debug(self):
    debug = False
    if os.path.isfile(os.path.join(self.tcex.args.tc_temp_path, )):
        debug = True
    return debug
Return debug setting
379,637
def _apply_over_vars_with_dim(func, self, dim=None, **kwargs):
    ds = type(self)(coords=self.coords, attrs=self.attrs)
    for name, var in self.data_vars.items():
        if dim in var.dims:
            ds[name] = func(var, dim=dim, **kwargs)
        else:
            ds[name] = var
    return ds
wrapper for datasets
379,638
def mode_yubikey_otp(self, private_uid, aes_key):
    if not self.capabilities.have_yubico_OTP():
        raise yubikey_base.YubiKeyVersionError(
             % (self.capabilities.model, self.ykver[0], self.ykver[1]))
    if private_uid.startswith(b'h:'):
        private_uid = binascii.unhexlify(private_uid[2:])
    if len(private_uid) != yubikey_defs.UID_SIZE:
        raise yubico_exception.InputError( % (yubikey_defs.UID_SIZE))
    self._change_mode(, major=0, minor=9)
    self.uid = private_uid
    self.aes_key(aes_key)
Set the YubiKey up for standard OTP validation.
379,639
def commit(self):
    if not self.connection:
        import boto
        self.connection = boto.connect_route53()
    return self.connection.change_rrsets(self.hosted_zone_id, self.to_xml())
Commit this change
379,640
def check_content(content, **kwargs):
    try:
        tree = fragment_fromstring(content)
        processed = False
        if isinstance(kwargs[], bool):
            if not kwargs[]:
                kwargs[] =
            else:
                raise ImproperlyConfigured()
        elif kwargs[] is None:
            kwargs[] =
        if kwargs[].lower() in (, , ):
            urls = tree.xpath()
            for url in urls:
                if check_active(url, url, **kwargs):
                    processed = True
        else:
            elements = tree.xpath(.format(parent_tag=kwargs[]))
            for element in elements:
                urls = element.xpath()
                for url in urls:
                    if check_active(url, element, **kwargs):
                        processed = True
                        break
        if processed:
            return tostring(tree, encoding=)
    except ParserError:
        raise ImproperlyConfigured()
    return content
check content for "active" urls
379,641
def _parse_function_return_types_from_doc(cls, doc):
    data = dict(name=, col_types=[], col_names=[], _type=None)
    if doc:
        return_doc = __doc__.split()[-1].strip()
        data[] = return_doc.split()[0]
        if data[].startswith():
            if data[].endswith():
                data[] =
            for row in return_doc.split()[3:]:
                index, col_type, col_name = row.split(None, 2)
                assert (index == str(index))
                data[].append(col_type)
                data[].append(col_name.split()[0])
    return data
This will extract the return type for list of lists so that the repr can display the header.

:param doc: str of the function doc
:return: dict of {func.__name__: {'api_type': 'type', 'col_name': [], 'col_type': [], 'repr_type': None}}
379,642
def get_http_authentication(private_key: RsaKey, private_key_id: str) -> HTTPSignatureHeaderAuth:
    key = private_key.exportKey()
    return HTTPSignatureHeaderAuth(
        headers=["(request-target)", "user-agent", "host", "date"],
        algorithm="rsa-sha256",
        key=key,
        key_id=private_key_id,
    )
Get HTTP signature authentication for a request.
379,643
def nii_ones_like(in_file, value, dtype, newpath=None):
    import os
    import numpy as np
    import nibabel as nb

    nii = nb.load(in_file)
    data = np.ones(nii.shape, dtype=float) * value
    out_file = os.path.join(newpath or os.getcwd(), "filled.nii.gz")
    nii = nb.Nifti1Image(data, nii.affine, nii.header)
    nii.set_data_dtype(dtype)
    nii.to_filename(out_file)
    return out_file
Create a NIfTI file filled with ``value``, matching properties of ``in_file``
379,644
def enum_check(*args, func=None):
    func = func or inspect.stack()[2][3]
    for var in args:
        if not isinstance(var, (enum.EnumMeta, aenum.EnumMeta)):
            name = type(var).__name__
            raise EnumError(f)
Check if arguments are of enum type.
379,645
def _query(self, path, args=None, skip_cache=False, skip_sleep=False):
    if args is None:
        args = {}

    def _cacheable(r):
        return not ("no-cache" in r
                    and lxml.etree.XML(r).xpath("//meta/@content='no-cache'"))

    url = url_base + path
    defining_args = dict(list(self.default_args.items()) + list(args.items()))
    full_args = dict(list(self._ident_args.items()) + list(defining_args.items()))
    cache_key = hashlib.md5(pickle.dumps((url, sorted(defining_args.items())))).hexdigest()
    sqas = .join([k + + str(v) for k, v in sorted(args.items())])
    full_args_str = .join([k + + str(v) for k, v in sorted(full_args.items())])
    logging.debug("CACHE:" + str(skip_cache) + "//" + str(self._cache))
    if not skip_cache and self._cache:
        try:
            v = self._cache[cache_key]
            _logger.debug(.format(cache_key=cache_key, url=url, sqas=sqas))
            return v
        except KeyError:
            _logger.debug(.format(cache_key=cache_key, url=url, sqas=sqas))
    if self.api_key:
        url += .format(self=self)
    if not skip_sleep:
        req_int = self.request_interval
        sleep_time = req_int - (time.clock() - self._last_request_clock)
        if sleep_time > 0:
            _logger.debug(.format(sleep_time=sleep_time))
            time.sleep(sleep_time)
    r = requests.post(url, full_args)
    self._last_request_clock = time.clock()
    _logger.debug(.format(url=url, fas=full_args_str, r=r, len=len(r.text)))
    if not r.ok:
        if r.headers["Content-Type"] == "application/json":
            json = r.json()
            raise EutilsRequestError(.format(r=r, error=json["error"]))
        try:
            xml = lxml.etree.fromstring(r.text.encode())
            raise EutilsRequestError(.format(r=r, error=xml.find().text))
        except Exception as ex:
            raise EutilsNCBIError(.format(ex))
    if any(bad_word in r.text for bad_word in [, ]):
        if r.text is not None:
            try:
                xml = lxml.etree.fromstring(r.text.encode())
                raise EutilsRequestError(.format(r=r, error=xml.find().text))
            except Exception as ex:
                raise EutilsNCBIError(.format(ex))
    if in r.text:
        raise EutilsRequestError(.format(url=url))
    if self._cache and _cacheable(r.text):
        self._cache[cache_key] = r.content
        _logger.info(.format(cache_key=cache_key, url=url, sqas=sqas))
    return r.content
Return results for an NCBI query, possibly from the cache.

:param: path: relative query path (e.g., 'einfo.fcgi')
:param: args: dictionary of query args
:param: skip_cache: whether to bypass the cache on reading
:param: skip_sleep: whether to bypass query throttling
:rtype: xml string

The args are joined with args required by NCBI (tool and email address) and with the default args declared when instantiating the client.
379,646
def exit_standby(name, instance_ids, should_decrement_desired_capacity=False,
                 region=None, key=None, keyid=None, profile=None):
    conn = _get_conn_autoscaling_boto3(
        region=region, key=key, keyid=keyid, profile=profile)
    try:
        response = conn.exit_standby(
            InstanceIds=instance_ids,
            AutoScalingGroupName=name)
    except ClientError as e:
        err = __utils__['boto3.get_error'](e)
        if e.response.get('Error', {}).get('Code') == :
            return {: False}
        return {: err}
    return all(activity[] != for activity in response[])
Exit desired instances from StandBy mode

.. versionadded:: 2016.11.0

CLI example::

    salt-call boto_asg.exit_standby my_autoscale_group_name '["i-xxxxxx"]'
379,647
def get_all_handleable_leaves(self):
    nodes = self.get_device_tree()
    return [node.device
            for node in sorted(nodes.values(), key=DevNode._sort_key)
            if not node.ignored and node.device
            and all(child.ignored for child in node.children)]
Get list of all handleable devices, return only those that represent leaf nodes within the filtered device tree.
379,648
def multidict(D):
    keys = list(D.keys())
    if len(keys) == 0:
        return [[]]
    try:
        N = len(D[keys[0]])
        islist = True
    except:
        N = 1
        islist = False
    dlist = [dict() for d in range(N)]
    for k in keys:
        if islist:
            for i in range(N):
                dlist[i][k] = D[k][i]
        else:
            dlist[0][k] = D[k]
    return [keys] + dlist
creates a multidictionary
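An illustrative sketch of the transformation the function above performs (assuming multidict is defined as shown; the return layout is [keys, dict_0, ..., dict_N-1]):

D = {"a": [1, 2], "b": [3, 4]}
keys, first, second = multidict(D)
# keys   == ["a", "b"]
# first  == {"a": 1, "b": 3}
# second == {"a": 2, "b": 4}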
379,649
def create_from_hdu(cls, hdu, ebins):
    hpx = HPX.create_from_hdu(hdu, ebins)
    colnames = hdu.columns.names
    cnames = []
    if hpx.conv.convname == :
        pixs = hdu.data.field()
        chans = hdu.data.field()
        keys = chans * hpx.npix + pixs
        vals = hdu.data.field()
        nebin = len(ebins)
        data = np.zeros((nebin, hpx.npix))
        data.flat[keys] = vals
    else:
        for c in colnames:
            if c.find(hpx.conv.colstring) == 0:
                cnames.append(c)
        nebin = len(cnames)
        data = np.ndarray((nebin, hpx.npix))
        for i, cname in enumerate(cnames):
            data[i, 0:] = hdu.data.field(cname)
    return cls(data, hpx)
Creates and returns an HpxMap object from a FITS HDU.

hdu   : The FITS HDU
ebins : Energy bin edges [optional]
379,650
def print_item_callback(item):
    print(.format(
        item.get(, ),
        item.get(, ),
        item.get(, )))
Print an item callback, used by &listen.
379,651
def update_stats(self, stats, value, _type, sample_rate=1):
    stats = self.format(stats, value, _type, self.prefix)
    self.send(self.sample(stats, sample_rate), self.addr)
Pipeline function that formats data, samples it and passes to send() >>> client = StatsdClient() >>> client.update_stats('example.update_stats', 73, "c", 0.9)
379,652
def _macs2_cmd(method="chip"):
    if method.lower() == "chip":
        cmd = ("{macs2} callpeak -t {chip_bam} -c {input_bam} {paired} "
               " {genome_size} -n {name} -B {options}")
    elif method.lower() == "atac":
        cmd = ("{macs2} callpeak -t {chip_bam} --nomodel "
               " {paired} {genome_size} -n {name} -B {options}"
               " --nolambda --keep-dup all")
    else:
        raise ValueError("chip_method should be chip or atac.")
    return cmd
Main command for macs2 tool.
379,653
def RgbToHsl(r, g, b):
    minVal = min(r, g, b)
    maxVal = max(r, g, b)
    l = (maxVal + minVal) / 2.0
    if minVal == maxVal:
        return (0.0, 0.0, l)
    d = maxVal - minVal
    if l < 0.5:
        s = d / (maxVal + minVal)
    else:
        s = d / (2.0 - maxVal - minVal)
    dr, dg, db = [(maxVal - val) / d for val in (r, g, b)]
    if r == maxVal:
        h = db - dg
    elif g == maxVal:
        h = 2.0 + dr - db
    else:
        h = 4.0 + dg - dr
    h = (h * 60.0) % 360.0
    return (h, s, l)
Convert the color from RGB coordinates to HSL.

Parameters:
:r: The Red component value [0...1]
:g: The Green component value [0...1]
:b: The Blue component value [0...1]

Returns:
The color as an (h, s, l) tuple in the range:
h[0...360], s[0...1], l[0...1]

>>> Color.RgbToHsl(1, 0.5, 0)
(30.0, 1.0, 0.5)
379,654
def strip_rts_retries(self, idx):
    rts_retries, = struct.unpack_from('B', self._rtap, idx)
    return idx + 1, rts_retries
strip (1 byte) rts_retries

:idx: int
:return: int idx
:return: int
379,655
def fetch_items(self, category, **kwargs):
    from_date = kwargs['from_date']
    to_date = kwargs['to_date']
    branches = kwargs['branches']
    latest_items = kwargs['latest_items']
    no_update = kwargs['no_update']

    ncommits = 0
    try:
        if os.path.isfile(self.gitpath):
            commits = self.__fetch_from_log()
        else:
            commits = self.__fetch_from_repo(from_date, to_date, branches,
                                             latest_items, no_update)
        for commit in commits:
            yield commit
            ncommits += 1
    except EmptyRepositoryError:
        pass
    logger.info("Fetch process completed: %s commits fetched", ncommits)
Fetch the commits

:param category: the category of items to fetch
:param kwargs: backend arguments
:returns: a generator of items
379,656
def generate_dummy_graph(network):
    graph = pypsa.descriptors.OrderedGraph()
    graph.add_nodes_from([bus for bus in network.buses.index
                          if bus not in buses_to_split])
    for node in graph.nodes():
        graph.node[node]["pos"] = np.array(network.buses.loc[node, ["x", "y"]], dtype=float)
    return graph
Generate a dummy graph to feed to the FIAS libraries. It adds the "pos" attribute and removes the 380 kV duplicate buses when the buses have been split, so that all load and generation is attached to the 220kV bus.
379,657
def reset(self):
    self.__mem.reset()
    self.__cpu.reset()
    self.__tainter.reset()
    self.__instr_handler_pre = None, None
    self.__instr_handler_post = None, None
    self.__set_default_handlers()
Reset emulator. All registers and memory are reset.
379,658
def diff_lines(self):
    start_lines = self._build_file_source_lines[:]
    end_lines = self.build_file_lines()
    diff_generator = unified_diff(start_lines,
                                  end_lines,
                                  fromfile=self.build_file.relpath,
                                  tofile=self.build_file.relpath,
                                  lineterm='')
    return list(diff_generator)
A diff between the original BUILD file and the resulting BUILD file.
379,659
def parse(self, sentence):
    words = np.zeros((len(sentence) + 1, 1), np.int32)
    tags = np.zeros((len(sentence) + 1, 1), np.int32)
    words[0, 0] = ParserVocabulary.ROOT
    tags[0, 0] = ParserVocabulary.ROOT
    vocab = self._vocab
    for i, (word, tag) in enumerate(sentence):
        words[i + 1, 0], tags[i + 1, 0] = vocab.word2id(word.lower()), vocab.tag2id(tag)
    with mx.Context(mxnet_prefer_gpu()):
        outputs = self._parser.forward(words, tags)
    words = []
    for arc, rel, (word, tag) in zip(outputs[0][0], outputs[0][1], sentence):
        words.append(ConllWord(id=len(words) + 1, form=word, pos=tag, head=arc,
                               relation=vocab.id2rel(rel)))
    return ConllSentence(words)
Parse raw sentence into ConllSentence

Parameters
----------
sentence : list
    a list of (word, tag) tuples

Returns
-------
ConllSentence
    ConllSentence object
379,660
def get_members(cls, member_class=None, is_member=None, sort_key=None, _parameter=None):
    if member_class is None and is_member is None:
        raise TypeError("get_members either needs a member_class parameter "
                        "or an is_member check function (or both)")
    members = OrderedDict()
    for base in cls.__bases__:
        if _parameter is None:
            inherited_members = get_members(base, member_class=member_class,
                                            is_member=is_member, sort_key=sort_key)
        else:
            inherited_members = get_declared(base, _parameter)
        members.update(inherited_members)

    def generate_member_bindings():
        for name in cls.__dict__:
            if name.startswith():
                continue
            obj = getattr(cls, name)
            if member_class is not None and isinstance(obj, member_class):
                yield name, obj
            elif is_member is not None and is_member(obj):
                yield name, obj
            elif type(obj) is tuple and len(obj) == 1 and isinstance(obj[0], member_class):
                raise TypeError(
                    "%s is a one-tuple containing what we are looking for. "
                    "Trailing comma much? Dont." % name)

    bindings = generate_member_bindings()
    if sort_key is not None:
        try:
            sorted_bindings = sorted(bindings, key=lambda x: sort_key(x[1]))
        except AttributeError:
            if sort_key is default_sort_key:
                raise TypeError()
            else:
                raise
        members.update(sorted_bindings)
    else:
        members.update(bindings)
    return members
Collect all class level attributes matching the given criteria.

:param class member_class: Class(es) to collect
:param is_member: Function to determine if an object should be collected
:param sort_key: Function to invoke on members to obtain ordering (Default is to use ordering from `creation_ordered`)
:type is_member: (object) -> bool
:type sort_key: (object) -> object
379,661
def set_permissions(filename, uid=None, gid=None, mode=0775):
    if uid is None:
        uid = get_ftp_uid()
    if gid is None:
        gid = -1
    os.chown(filename, uid, gid)
    os.chmod(filename, mode)
Set permissions for given `filename`.

Args:
    filename (str): name of the file/directory
    uid (int, default proftpd): user ID - if not set, user ID of `proftpd` is used
    gid (int): group ID, if not set, it is not changed
    mode (int, default 0775): unix access mode
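A hypothetical call (sketch only; the path and uid are made up, chown requires sufficient privileges, and the Python 3 octal spelling 0o664 is used rather than the Python 2 literal above):

# give the file to user 1000, keep its group, make it group-writable
set_permissions("/var/ftp/upload.txt", uid=1000, mode=0o664)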
379,662
def dump(data, abspath, indent_format=False, float_precision=None,
         ensure_ascii=True, overwrite=False, enable_verbose=True):
    prt("\nDump to %s..." % abspath, enable_verbose)
    abspath = lower_ext(str(abspath))
    is_json = is_json_file(abspath)
    if os.path.exists(abspath):
        if not overwrite:
            prt("    Stop! File exists and overwrite is not allowed",
                enable_verbose)
            return
    if float_precision is not None:
        encoder.FLOAT_REPR = lambda x: format(x, ".%sf" % float_precision)
        indent_format = True
    else:
        encoder.FLOAT_REPR = repr
    if indent_format:
        sort_keys = True
        indent = 4
    else:
        sort_keys = False
        indent = None
    st = time.clock()
    js = json.dumps(data, sort_keys=sort_keys, indent=indent,
                    ensure_ascii=ensure_ascii)
    content = js.encode("utf-8")
    if is_json:
        textfile.writebytes(content, abspath)
    else:
        compress.write_gzip(content, abspath)
    prt("    Complete! Elapse %.6f sec." % (time.clock() - st), enable_verbose)
Dump a Json-serializable object to file. Provides multiple choices to customize the behavior.

:param data: Serializable python object.
:type data: dict or list

:param abspath: ``save as`` path, file extension has to be ``.json`` or ``.gz`` (for compressed Json)
:type abspath: string

:param indent_format: default ``False``. If ``True``, dump to a human readable format, but it's slower and the file is larger.
:type indent_format: boolean

:param float_precision: default ``None``, limit floats to N decimal points.
:type float_precision: integer

:param overwrite: default ``False``. If ``True``, dumping to an existing file silently overwrites it. If ``False``, an alert message is shown. The default ``False`` prevents overwriting files by mistake.
:type overwrite: boolean

:param enable_verbose: default ``True``, help-message-display trigger.
:type enable_verbose: boolean

Usage::

    >>> from dataIO import js
    >>> data = {"a": 1, "b": 2}
    >>> dump(data, "test.json", overwrite=True)
    Dumping to 'test.json'...
        Complete! Elapse 0.002432 sec

**Chinese documentation (translated)**

Writes serializable Python dicts, lists, and combinations thereof to a file using the Json encoding.

:param js: a Json-serializable Python object
:type js: ``dict`` or ``list``

:param abspath: absolute path of the Json file; the extension must be ``.json`` or ``.gz``, where ``.gz`` denotes a compressed Json file
:type abspath: ``string``

:param indent_format: default ``False``. When ``True``, Json encoding sorts the keys and formats with indentation, but writing is slower and the file is larger.
:type indent_format: boolean

:param overwrite: default ``False``. When ``True``, an existing file at the target path is overwritten automatically; when ``False``, a warning is printed to prevent accidentally overwriting the original file.
:type overwrite: boolean

:param float_precision: default ``None``; when set to an integer, N decimal places are kept.
:type float_precision: integer

:param enable_verbose: default ``True``, toggle for informational messages; recommended off for batch processing.
:type enable_verbose: boolean
379,663
def _get_choices(self, gandi):
    packages = super(CertificatePackageType, self)._get_choices(gandi)
    return list(set([pack.split()[1] for pack in packages]))
Internal method to get choices list
379,664
def benchmark_setup(self):
    def f():
        self._setup()
        self.mod_ext.synchronize(**self.ext_kwargs)
    f()
    self.setup_stat = self._calc_benchmark_stat(f)
Benchmark setup execution.
379,665
def p_gate_op_5(self, program):
    program[0] = node.Barrier([program[2]])
    self.verify_bit_list(program[2])
    self.verify_distinct([program[2]])
gate_op : BARRIER id_list ';'
379,666
def get_devicecore_api(self):
    from devicecloud.devicecore import DeviceCoreAPI
    return DeviceCoreAPI(self._conn, self.get_sci_api())
Returns a :class:`.DeviceCoreAPI` bound to this device cloud instance

This provides access to the same API as :attr:`.DeviceCloud.devicecore` but will create a new object (with a new cache) each time called.

:return: devicecore API object bound to this device cloud account
:rtype: :class:`.DeviceCoreAPI`
379,667
def _ConvertValueMessage(value, message):
    if isinstance(value, dict):
        _ConvertStructMessage(value, message.struct_value)
    elif isinstance(value, list):
        _ConvertListValueMessage(value, message.list_value)
    elif value is None:
        message.null_value = 0
    elif isinstance(value, bool):
        message.bool_value = value
    elif isinstance(value, six.string_types):
        message.string_value = value
    elif isinstance(value, _INT_OR_FLOAT):
        message.number_value = value
    else:
        raise ParseError()
Convert a JSON representation into Value message.
379,668
def stem_singular_word(self, word):
    context = Context(word, self.dictionary, self.visitor_provider)
    context.execute()
    return context.result
Stem a singular word to its common stem form.
379,669
def to_array_with_default(value, default_value):
    result = ArrayConverter.to_nullable_array(value)
    return result if result is not None else default_value
Converts value into array object with specified default. Single values are converted into arrays with a single element.

:param value: the value to convert.
:param default_value: default array object.
:return: array object or default array when value is None.
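Expected behavior, as a sketch (this assumes the ArrayConverter used above wraps scalars into one-element lists and returns None for unconvertible input, per the docstring):

assert to_array_with_default("abc", []) == ["abc"]    # scalar -> single-element array
assert to_array_with_default(None, [1, 2]) == [1, 2]  # falls back to the default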
379,670
async def delete_shade_from_scene(self, shade_id, scene_id):
    return await self.request.delete(
        self._base_path,
        params={ATTR_SCENE_ID: scene_id, ATTR_SHADE_ID: shade_id}
    )
Delete a shade from a scene.
379,671
def next(self, times=1):
    return Range(copy(self.end), self.end + self.elapse, tz=self.start.tz)
Returns a new instance of self. The ``times`` argument is not supported yet.
379,672
def randkey(bits, keyspace=string.ascii_letters + string.digits + , rng=None):
    return "".join(char for char in iter_random_chars(bits, keyspace, rng))
Returns a cryptographically secure random key of desired @bits of entropy within @keyspace using :class:random.SystemRandom

@bits: (#int) minimum bits of entropy
@keyspace: (#str) or iterable allowed output chars
@rng: the random number generator to use. Defaults to :class:random.SystemRandom. Must have a |choice| method

-> (#str) random key

..
    from vital.security import randkey
    randkey(24)  # -> '9qaX'
    randkey(48)  # -> 'iPJ5YWs9'
    randkey(64)  # -> 'C..VJ.KLdxg'
    randkey(64, keyspace="abc", rng=random)  # -> 'aabcccbabcaacaccccabcaabbabcacabacbbbaaab'
..
379,673
def local_check(self):
    log.debug(LOG_CHECK, "Checking %s", unicode(self))
    assert not self.extern[1],
    log.debug(LOG_CHECK, "checking connection")
    try:
        self.check_connection()
        self.set_content_type()
        self.add_size_info()
        self.aggregate.plugin_manager.run_connection_plugins(self)
    except tuple(ExcList) as exc:
        value = self.handle_exception()
        if isinstance(exc, socket.error) and exc.args[0] == -2:
            value = _()
        elif isinstance(exc, UnicodeError):
            value = _() % {: self.host, : str(value)}
        self.set_result(unicode_safe(value), valid=False)
Local check function can be overridden in subclasses.
379,674
def discard(self, element):
    try:
        i = int(element)
        set.discard(self, i)
    except ValueError:
        pass
Remove element from the RangeSet if it is a member. If the element is not a member, do nothing.
379,675
def for_category(self, category, live_only=False):
    filters = {: category.tag}
    if live_only:
        filters.update({: True})
    return self.filter(**filters)
Returns queryset of EntryTag instances for specified category.

:param category: the Category instance.
:param live_only: flag to include only "live" entries.
:rtype: django.db.models.query.QuerySet.
379,676
def binary(self):
    if isinstance(self.value, bytes):
        length = len(self.value)
        if length > 4294967295:
            raise OutputException()
        elif self.bits != 8:
            return (
                b_chr(_TAG_BIT_BINARY_EXT) +
                struct.pack(b'>I', length) +
                b_chr(self.bits) +
                self.value
            )
        else:
            return (
                b_chr(_TAG_BINARY_EXT) +
                struct.pack(b'>I', length) +
                self.value
            )
    else:
        raise OutputException()
return encoded representation
379,677
def _free(self, ptr):
    raise NotImplementedError("%s not implemented for %s"
                              % (self._free.__func__.__name__,
                                 self.__class__.__name__))
Handler for any libc `free` SimProcedure call. If the heap has faithful support for `free`, it ought to be implemented in a `free` function (as opposed to the `_free` function).

:param ptr: the location in memory to be freed
379,678
def getDocFactory(self, fragmentName, default=None):
    themes = self._preferredThemes()
    for t in themes:
        fact = t.getDocFactory(fragmentName, None)
        if fact is not None:
            return fact
    return default
Retrieve a Nevow document factory for the given name.

@param fragmentName: a short string that names a fragment template.
@param default: value to be returned if the named template is not found.
379,679
def send_mail(
    subject,
    sender,
    to,
    message,
    html_message=None,
    cc=None,
    bcc=None,
    attachments=None,
    host=None,
    port=None,
    auth_user=None,
    auth_password=None,
    use_tls=False,
    fail_silently=False,
):
    if message is None and html_message is None:
        raise ValueError("Either message or html_message must be provided")
    if message is None:
        message = strip_tags(html_message)
    connection = SMTPConnection(
        host=host,
        port=port,
        username=auth_user,
        password=auth_password,
        use_tls=use_tls,
        fail_silently=fail_silently,
    )
    if isinstance(to, six.string_types):
        to = [to]
    if html_message is None:
        email = EmailMessage(
            subject=subject,
            body=message,
            sender=sender,
            to=to,
            cc=cc,
            bcc=bcc,
            attachments=attachments,
            connection=connection,
        )
    else:
        email = EmailMultiAlternatives(
            subject=subject,
            body=message,
            sender=sender,
            to=to,
            cc=cc,
            bcc=bcc,
            attachments=attachments,
            connection=connection,
        )
        email.attach_alternative(html_message, "text/html")
    return email.send()
Send a single email to a recipient list. All members of the recipient list will see the other recipients in the 'To' field.

Note: The API for this method is frozen. New code wanting to extend the functionality should use the EmailMessage class directly.
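A hypothetical invocation (sketch only; the host and addresses are placeholders, and the helper classes come from the surrounding module):

sent = send_mail(
    subject="Hello",
    sender="noreply@example.com",
    to=["alice@example.com"],
    message="Plain-text body",
    html_message="<p>HTML body</p>",
    host="smtp.example.com",
    port=587,
    use_tls=True,
)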
379,680
def _ProcessTask(self, task):
    logger.debug(.format(task.identifier))
    if self._tasks_profiler:
        self._tasks_profiler.Sample(task, )
    self._task = task
    storage_writer = self._storage_writer.CreateTaskStorage(task)
    if self._serializers_profiler:
        storage_writer.SetSerializersProfiler(self._serializers_profiler)
    storage_writer.Open()
    self._parser_mediator.SetStorageWriter(storage_writer)
    storage_writer.WriteTaskStart()
    try:
        self._ProcessPathSpec(
            self._extraction_worker, self._parser_mediator, task.path_spec)
        self._number_of_consumed_sources += 1
        if self._guppy_memory_profiler:
            self._guppy_memory_profiler.Sample()
    finally:
        storage_writer.WriteTaskCompletion(aborted=self._abort)
        self._parser_mediator.SetStorageWriter(None)
        storage_writer.Close()
        try:
            self._storage_writer.FinalizeTaskStorage(task)
        except IOError:
            pass
    self._task = None
    if self._tasks_profiler:
        self._tasks_profiler.Sample(task, )
    logger.debug(.format(task.identifier))
Processes a task. Args: task (Task): task.
379,681
def report_numbers2marc(self, key, value):
    def _get_mangled_source(source):
        if source == :
            return
        return source

    source = _get_mangled_source(value.get('source'))

    if value.get():
        return {
            : source,
            : value.get(),
        }
    return {
        : source,
        : value.get(),
    }
Populate the ``037`` MARC field.
379,682
def read(self, line, f, data):
    data["energy"] = float(f.readline().split()[1])
    N = len(data["symbols"])
    gradient = data.get("gradient")
    if gradient is None:
        gradient = np.zeros((N, 3), float)
        data["gradient"] = gradient
    for i in range(N):
        words = f.readline().split()
        gradient[i, 0] = float(words[2])
        gradient[i, 1] = float(words[3])
        gradient[i, 2] = float(words[4])
See :meth:`PunchParser.read`
379,683
def is_valid(self):
    is_valid = super(GAErrorReportingMixin, self).is_valid()
    if self.is_bound and not is_valid:
        try:
            self.report_errors_to_ga(self.errors)
        except:
            logger.exception()
    return is_valid
Error reporting is triggered when a form is checked for validity
379,684
def power_spectrum(self, input_filepath):
    effect_args = ['channels', '1', 'stat', '-freq']
    _, _, stat_output = self.build(
        input_filepath, None, extra_args=effect_args, return_output=True
    )
    power_spectrum = []
    lines = stat_output.split('\n')
    for line in lines:
        split_line = line.split()
        if len(split_line) != 2:
            continue
        freq, amp = split_line
        power_spectrum.append([float(freq), float(amp)])
    return power_spectrum
Calculates the power spectrum (4096 point DFT). This method internally invokes the stat command with the -freq option.

Note: The file is downmixed to mono prior to computation.

Parameters
----------
input_filepath : str
    Path to input file to compute stats on.

Returns
-------
power_spectrum : list
    List of frequency (Hz), amplitude pairs.

See Also
--------
stat, stats, sox.file_info
379,685
def getLocation(self):
    method =
    try:
        data = _doget(method, photo_id=self.id)
    except FlickrError:
        return None
    loc = data.rsp.photo.location
    return [loc.latitude, loc.longitude]
Return the latitude+longitude of the picture. Returns None if no location is given for this pic.
379,686
def cli(env):
    manager = CapacityManager(env.client)
    result = manager.list()
    table = formatting.Table(
        ["ID", "Name", "Capacity", "Flavor", "Location", "Created"],
        title="Reserved Capacity"
    )
    for r_c in result:
        occupied_string = "#" * int(r_c.get(, 0))
        available_string = "-" * int(r_c.get(, 0))
        try:
            flavor = r_c[][0][][]
        except KeyError:
            flavor = "Unknown Billing Item"
        location = r_c[][]
        capacity = "%s%s" % (occupied_string, available_string)
        table.add_row([r_c[], r_c[], capacity, flavor, location, r_c[]])
    env.fout(table)
List Reserved Capacity groups.
379,687
def properties_strict(instance):
    if instance['type'] not in enums.TYPES:
        return

    defined_props = enums.PROPERTIES.get(instance['type'], [])
    for prop in instance.keys():
        if prop not in defined_props:
            yield JSONError("Property '%s' is not one of those defined in the"
                            " specification." % prop, instance['id'])

    if has_cyber_observable_data(instance):
        for key, obj in instance['objects'].items():
            type_ = obj.get('type', '')
            if type_ not in enums.OBSERVABLE_PROPERTIES:
                continue
            observable_props = enums.OBSERVABLE_PROPERTIES.get(type_, [])
            embedded_props = enums.OBSERVABLE_EMBEDDED_PROPERTIES.get(type_, {})
            extensions = enums.OBSERVABLE_EXTENSIONS.get(type_, [])
            for prop in obj.keys():
                if prop not in observable_props:
                    yield JSONError("Property '%s' is not one of those defined in the"
                                    " specification for %s objects." % (prop, type_),
                                    instance['id'])
                elif prop in embedded_props:
                    embedded_prop_keys = embedded_props.get(prop, [])
                    for embedded_key in obj[prop]:
                        if isinstance(embedded_key, dict):
                            for embedded in embedded_key:
                                if embedded not in embedded_prop_keys:
                                    yield JSONError(
                                        "Property '%s' is not one of those defined in the"
                                        " specification for the %s property in %s objects."
                                        % (embedded, prop, type_), instance['id'])
                        elif embedded_key not in embedded_prop_keys:
                            yield JSONError(
                                "Property '%s' is not one of those defined in the"
                                " specification for the %s property in %s objects."
                                % (embedded_key, prop, type_), instance['id'])
            for ext_key in obj.get('extensions', {}):
                if ext_key not in extensions:
                    continue
Ensure that no custom properties are used, but only the official ones from the specification.
379,688
def reduce_min(attrs, inputs, proto_obj):
    new_attrs = translation_utils._fix_attribute_names(attrs, {'axes': 'axis'})
    return 'min', new_attrs, inputs
Reduce the array along a given axis by minimum value
379,689
def get_task_fs(self, courseid, taskid):
    if not id_checker(courseid):
        raise InvalidNameException("Course with invalid name: " + courseid)
    if not id_checker(taskid):
        raise InvalidNameException("Task with invalid name: " + taskid)
    return self._filesystem.from_subfolder(courseid).from_subfolder(taskid)
:param courseid: the course id of the course
:param taskid: the task id of the task
:raise InvalidNameException
:return: A FileSystemProvider to the folder containing the task files
379,690
def update(self, max_norm=None):
    if max_norm is not None:
        self._clip_by_global_norm(max_norm)
    self._module.update()
Updates parameters according to the installed optimizer and the gradients computed in the previous forward-backward batch. Gradients are clipped by their global norm if `max_norm` is set.

Parameters
----------
max_norm: float, optional
    If set, all gradients are rescaled so that their global norm does not exceed this value.
379,691
def get(self):
    try:
        cluster = self.get_argument_cluster()
        role = self.get_argument_role()
        environ = self.get_argument_environ()
        topology_name = self.get_argument_topology()
        component = self.get_argument_component()
        metric_names = self.get_required_arguments_metricnames()
        start_time = self.get_argument_starttime()
        end_time = self.get_argument_endtime()
        self.validateInterval(start_time, end_time)
        instances = self.get_arguments(constants.PARAM_INSTANCE)
        topology = self.tracker.getTopologyByClusterRoleEnvironAndName(
            cluster, role, environ, topology_name)
        metrics = yield tornado.gen.Task(
            metricstimeline.getMetricsTimeline,
            topology.tmaster, component, metric_names,
            instances, int(start_time), int(end_time))
        self.write_success_response(metrics)
    except Exception as e:
        Log.debug(traceback.format_exc())
        self.write_error_response(e)
get method
379,692
def incr(self, key, to_add=1):
    if key not in self.value:
        self.value[key] = CountMetric()
    self.value[key].incr(to_add)
Increments the value of a given key by ``to_add``
379,693
def summary(self, solution=None, threshold=1E-06, fva=None, names=False,
            floatfmt='.3g'):
    from cobra.flux_analysis.summary import model_summary
    return model_summary(self, solution=solution, threshold=threshold,
                         fva=fva, names=names, floatfmt=floatfmt)
Print a summary of the input and output fluxes of the model.

Parameters
----------
solution : cobra.Solution, optional
    A previously solved model solution to use for generating the summary. If none provided (default), the summary method will resolve the model. Note that the solution object must match the model, i.e., changes to the model such as changed bounds, added or removed reactions are not taken into account by this method.
threshold : float, optional
    Threshold below which fluxes are not reported.
fva : pandas.DataFrame, float or None, optional
    Whether or not to include flux variability analysis in the output. If given, fva should either be a previous FVA solution matching the model or a float between 0 and 1 representing the fraction of the optimum objective to be searched.
names : bool, optional
    Emit reaction and metabolite names rather than identifiers (default False).
floatfmt : string, optional
    Format string for floats (default '.3g').
379,694
def install_payment_instruction(self, instruction, token_type="Unrestricted",
                                transaction_id=None):
    if transaction_id is None:
        transaction_id = uuid.uuid4()
    params = {}
    params['PaymentInstruction'] = instruction
    params['TokenType'] = token_type
    params['CallerReference'] = transaction_id
    response = self.make_request("InstallPaymentInstruction", params)
    return response
InstallPaymentInstruction

instruction: The PaymentInstruction to send, for example:

    MyRole=='Caller' orSay 'Roles do not match';

token_type: Defaults to "Unrestricted"
transaction_id: Defaults to a new ID
379,695
def get_mass(chebi_id):
    if len(__MASSES) == 0:
        __parse_chemical_data()
    return __MASSES[chebi_id] if chebi_id in __MASSES else float('NaN')
Returns mass
379,696
def execute(self):
    if self.args and self.argument(0) == "help":
        self.error(self.usage() + "\n\n" + self.help())
        return False
    return True
Execute the command. Intercepts the help subsubcommand to show the help text.
379,697
def update_view(self, table, view):
    body = {
        'tableReference': {
            'projectId': table.project_id,
            'datasetId': table.dataset_id,
            'tableId': table.table_id
        },
        'view': {
            'query': view
        }
    }
    if self.table_exists(table):
        self.client.tables().update(projectId=table.project_id,
                                    datasetId=table.dataset_id,
                                    tableId=table.table_id,
                                    body=body).execute()
    else:
        self.client.tables().insert(projectId=table.project_id,
                                    datasetId=table.dataset_id,
                                    body=body).execute()
Updates the SQL query for a view.

If the output table exists, it is replaced with the supplied view query. Otherwise a new table is created with this view.

:param table: The table to contain the view.
:type table: BQTable
:param view: The SQL query for the view.
:type view: str
379,698
def identifiers(self, identifiers):
    if (isinstance(identifiers, subject_abcs.IdentifierCollection)
            or identifiers is None):
        self._identifiers = identifiers
    else:
        raise ValueError()
:type identifiers: subject_abcs.IdentifierCollection
379,699
def deploy_snmp(snmp, host=None, admin_username=None,
                admin_password=None, module=None):
    return __execute_cmd(.format(snmp),
                         host=host,
                         admin_username=admin_username,
                         admin_password=admin_password,
                         module=module)
Change the QuickDeploy SNMP community string, used for switches as well.

CLI Example:

.. code-block:: bash

    salt dell dracr.deploy_snmp SNMP_STRING host=<remote DRAC or CMC> admin_username=<DRAC user> admin_password=<DRAC PW>
    salt dell dracr.deploy_password diana secret