Dataset schema (sample rows below):
- Unnamed: 0 — int64 row index, 0 to ~389k
- code — string, lengths 26 to 79.6k characters
- docstring — string, lengths 1 to 46.9k characters
378,600
def __readimzmlmeta(self):
    d = {}
    # XPath format strings and the warning text below were stripped during
    # extraction; reconstructed from the pyimzML source (assumption).
    scan_settings_list_elem = self.root.find('%sscanSettingsList' % self.sl)
    instrument_config_list_elem = self.root.find('%sinstrumentConfigurationList' % self.sl)
    supportedparams1 = [("max count of pixels x", int), ("max count of pixels y", int),
                        ("max dimension x", int), ("max dimension y", int),
                        ("pixel size x", float), ("pixel size y", float),
                        ("matrix solution concentration", float)]
    supportedparams2 = [("wavelength", float), ("focus diameter x", float),
                        ("focus diameter y", float), ("pulse energy", float),
                        ("pulse duration", float), ("attenuation", float)]
    supportedaccessions1 = [("IMS:1000042", "value"), ("IMS:1000043", "value"),
                            ("IMS:1000044", "value"), ("IMS:1000045", "value"),
                            ("IMS:1000046", "value"), ("IMS:1000047", "value"),
                            ("MS:1000835", "value")]
    supportedaccessions2 = [("MS:1000843", "value"), ("MS:1000844", "value"),
                            ("MS:1000845", "value"), ("MS:1000846", "value"),
                            ("MS:1000847", "value"), ("MS:1000848", "value")]
    for i in range(len(supportedparams1)):
        acc, attr = supportedaccessions1[i]
        elem = scan_settings_list_elem.find('.//%scvParam[@accession="%s"]' % (self.sl, acc))
        if elem is None:
            break
        name, T = supportedparams1[i]
        try:
            d[name] = T(elem.attrib[attr])
        except ValueError:
            warn(Warning('Wrong data type in XML file. Skipped attribute "%s"' % name))
    for i in range(len(supportedparams2)):
        acc, attr = supportedaccessions2[i]
        elem = instrument_config_list_elem.find('.//%scvParam[@accession="%s"]' % (self.sl, acc))
        if elem is None:
            break
        name, T = supportedparams2[i]
        try:
            d[name] = T(elem.attrib[attr])
        except ValueError:
            warn(Warning('Wrong data type in XML file. Skipped attribute "%s"' % name))
    return d
This method should only be called by __init__. Initializes the imzmldict with frequently used metadata from the .imzML file. This method reads only a subset of the available meta information and may be extended in the future. The keys are named similarly to the imzML names. Currently supported keys: "max dimension x", "max dimension y", "pixel size x", "pixel size y", "matrix solution concentration", "wavelength", "focus diameter x", "focus diameter y", "pulse energy", "pulse duration", "attenuation". If a key is not found in the XML tree, it will not be in the dict either. :return d: dict containing above mentioned meta data :rtype: dict :raises Warning: if an xml attribute has a number format different from the imzML specification
378,601
def extract_tree(self, labels, without, suppress_unifurcations=True): if not isinstance(suppress_unifurcations, bool): raise TypeError("suppress_unifurcations must be a bool") if labels is not None and not isinstance(labels, set): try: labels = set(labels) except: raise TypeError("labels must be iterable") label_to_leaf = dict(); keep = set() for node in self.traverse_leaves(): label_to_leaf[str(node)] = node if labels is None or (without and str(node) not in labels) or (not without and str(node) in labels): keep.add(node) for node in list(keep): for a in node.traverse_ancestors(include_self=False): keep.add(a) out = Tree(); out.root.label = self.root.label; out.root.edge_length = self.root.edge_length q_old = deque(); q_old.append(self.root) q_new = deque(); q_new.append(out.root) while len(q_old) != 0: n_old = q_old.popleft(); n_new = q_new.popleft() for c_old in n_old.children: if c_old in keep: c_new = Node(label=str(c_old), edge_length=c_old.edge_length); n_new.add_child(c_new) q_old.append(c_old); q_new.append(c_new) if suppress_unifurcations: out.suppress_unifurcations() return out
Helper function for ``extract_tree_*`` functions
378,602
def rehearse(self, docs, sgd=None, losses=None, config=None): if len(docs) == 0: return if sgd is None: if self._optimizer is None: self._optimizer = create_default_optimizer(Model.ops) sgd = self._optimizer docs = list(docs) for i, doc in enumerate(docs): if isinstance(doc, basestring_): docs[i] = self.make_doc(doc) pipes = list(self.pipeline) random.shuffle(pipes) if config is None: config = {} grads = {} def get_grads(W, dW, key=None): grads[key] = (W, dW) get_grads.alpha = sgd.alpha get_grads.b1 = sgd.b1 get_grads.b2 = sgd.b2 for name, proc in pipes: if not hasattr(proc, "rehearse"): continue grads = {} proc.rehearse(docs, sgd=get_grads, losses=losses, **config.get(name, {})) for key, (W, dW) in grads.items(): sgd(W, dW, key=key) return losses
Make a "rehearsal" update to the models in the pipeline, to prevent forgetting. Rehearsal updates run an initial copy of the model over some data, and update the model so its current predictions are more like the initial ones. This is useful for keeping a pre-trained model on-track, even if you're updating it with a smaller set of examples. docs (iterable): A batch of `Doc` objects. drop (float): The dropout rate. sgd (callable): An optimizer. RETURNS (dict): Results from the update. EXAMPLE: >>> raw_text_batches = minibatch(raw_texts) >>> for labelled_batch in minibatch(zip(train_docs, train_golds)): >>> docs, golds = zip(*train_docs) >>> nlp.update(docs, golds) >>> raw_batch = [nlp.make_doc(text) for text in next(raw_text_batches)] >>> nlp.rehearse(raw_batch)
378,603
def results(cls, function, group=None): return numpy.array(cls._results[group][function])
Returns a numpy ndarray representing the benchmark results of a function in a group.
378,604
def van_dec_2d(x, skip_connections, output_shape, first_depth, hparams=None):
    # The scope name and padding strings were stripped during extraction;
    # padding='same' is implied by the stride-2 upsampling, and the scope
    # name is an assumption.
    with tf.variable_scope('van_dec'):
        dec = tf.layers.conv2d_transpose(
            x, first_depth * 4, 3, padding='same', activation=tf.nn.relu, strides=2)
        dec = tf.nn.dropout(dec, hparams.van_keep_prob)
        dec = tf.contrib.layers.layer_norm(dec)
        dec = tf.layers.conv2d_transpose(
            dec, first_depth * 4, 3, padding='same', activation=tf.nn.relu, strides=1)
        dec = tf.nn.dropout(dec, hparams.van_keep_prob)
        dec = tf.layers.conv2d_transpose(
            dec, first_depth * 2, 3, padding='same', activation=tf.nn.relu, strides=1)
        dec = tf.nn.dropout(dec, hparams.van_keep_prob)
        dec = tf.contrib.layers.layer_norm(dec)
        dec = tf.layers.conv2d_transpose(
            dec, first_depth * 2, 3, padding='same', activation=tf.nn.relu, strides=2)
        dec = tf.nn.dropout(dec, hparams.van_keep_prob)
        dec = tf.layers.conv2d_transpose(
            dec, first_depth, 3, padding='same', activation=tf.nn.relu, strides=1)
        dec = tf.nn.dropout(dec, hparams.van_keep_prob)
        dec = tf.contrib.layers.layer_norm(dec)
        dec = tf.layers.conv2d_transpose(
            dec, output_shape[3] + 1, 3, padding='same', activation=tf.nn.relu, strides=2)
        dec = tf.nn.dropout(dec, hparams.van_keep_prob)
        out_mask = tf.layers.conv2d_transpose(
            dec, output_shape[3] + 1, 3, strides=1, padding='same', activation=None)
        mask = tf.nn.sigmoid(out_mask[:, :, :, 3:4])
        out = out_mask[:, :, :, :3]
        return out * mask + skip_connections[0] * (1 - mask)
The VAN decoder. Args: x: The analogy information to decode. skip_connections: The encoder layers which can be used as skip connections. output_shape: The shape of the desired output image. first_depth: The depth of the first layer of the van image encoder. hparams: The python hparams. Returns: The decoded image prediction.
378,605
def process_request(self, request): if not self.is_resource_protected(request): return if self.deny_access_condition(request): return self.deny_access(request)
The actual middleware method, called on all incoming requests. This default implementation will ignore the middleware (return None) if the conditions specified in is_resource_protected aren't met. If they are, it then tests to see if the user should be denied access via the deny_access_condition method, and calls deny_access (which implements failure behaviour) if so.
378,606
def critical_angle(pressure, u, v, heights, stormu, stormv):
    # unit strings were stripped during extraction; 'm/s', 'meter', 'radian'
    # and 'degrees' are reconstructions (assumption).
    u = u.to('m/s')
    v = v.to('m/s')
    stormu = stormu.to('m/s')
    stormv = stormv.to('m/s')
    sort_inds = np.argsort(pressure[::-1])
    pressure = pressure[sort_inds]
    heights = heights[sort_inds]
    u = u[sort_inds]
    v = v[sort_inds]
    shr5 = bulk_shear(pressure, u, v, heights=heights, depth=500 * units('meter'))
    umn = stormu - u[0]
    vmn = stormv - v[0]
    vshr = np.asarray([shr5[0].magnitude, shr5[1].magnitude])
    vsm = np.asarray([umn.magnitude, vmn.magnitude])
    angle_c = np.dot(vshr, vsm) / (np.linalg.norm(vshr) * np.linalg.norm(vsm))
    critical_angle = np.arccos(angle_c) * units('radian')
    return critical_angle.to('degrees')
r"""Calculate the critical angle. The critical angle is the angle between the 10m storm-relative inflow vector and the 10m-500m shear vector. A critical angle near 90 degrees indicates that a storm in this environment on the indicated storm motion vector is likely ingesting purely streamwise vorticity into its updraft, and [Esterheld2008]_ showed that significantly tornadic supercells tend to occur in environments with critical angles near 90 degrees. Parameters ---------- pressure : `pint.Quantity` Pressures from sounding. u : `pint.Quantity` U-component of sounding winds. v : `pint.Quantity` V-component of sounding winds. heights : `pint.Quantity` Heights from sounding. stormu : `pint.Quantity` U-component of storm motion. stormv : `pint.Quantity` V-component of storm motion. Returns ------- `pint.Quantity` critical angle in degrees
378,607
def ccmod_setcoef(k): mp_Zf[k] = sl.rfftn(mp_Z_Y[k], mp_cri.Nv, mp_cri.axisN) mp_ZSf[k] = np.conj(mp_Zf[k]) * mp_Sf[k]
Set the coefficient maps for the ccmod stage. The only parameter is the slice index `k` and there are no return values; all inputs and outputs are from and to global variables.
378,608
def usage(asked_for=0): exit = fsq.const() if asked_for else\ fsq.const() f = sys.stdout if asked_for else sys.stderr shout(.format( os.path.basename(_PROG)), f) if asked_for: shout(\ \ .format(os.path.basename(_PROG)), f) shout(\ \ .format(os.path.basename(_PROG)), f) shout(, f) return exit
Exit with a usage string, used for bad argument or with -h
378,609
def ack(self, message, subscription_id=None, **kwargs): if isinstance(message, dict): message_id = message.get("message-id") if not subscription_id: subscription_id = message.get("subscription") else: message_id = message if not message_id: raise workflows.Error("Cannot acknowledge message without " + "message ID") if not subscription_id: raise workflows.Error( "Cannot acknowledge message without " + "subscription ID" ) self.log.debug( "Acknowledging message %s on subscription %s", message_id, subscription_id ) self._ack(message_id, subscription_id, **kwargs)
Acknowledge receipt of a message. This only makes sense when the 'acknowledgement' flag was set for the relevant subscription. :param message: ID of the message to be acknowledged, OR a dictionary containing a field 'message-id'. :param subscription_id: ID of the associated subscription. Optional when a dictionary is passed as first parameter and that dictionary contains field 'subscription'. :param **kwargs: Further parameters for the transport layer. For example transaction: Transaction ID if acknowledgement should be part of a transaction
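A minimal usage sketch of the two accepted calling forms (hypothetical names; `transport` stands in for any connected transport instance):
header = {"message-id": "msg-001", "subscription": "sub-7"}
transport.ack(header)  # IDs taken from the header dict
transport.ack("msg-001", subscription_id="sub-7")  # IDs passed explicitly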
378,610
def get_md5(string):
    try:
        hasher = hashlib.md5()
    except BaseException:
        # fallback for FIPS-enabled builds; the algorithm name literal was
        # stripped during extraction, 'md5' is the obvious reconstruction
        hasher = hashlib.new('md5', usedForSecurity=False)
    hasher.update(string)
    return hasher.hexdigest()
Get a string's MD5
378,611
def copy_images(images, source, target):
    # the directory name and log message literals were stripped during
    # extraction; 'images' and the message text are reconstructions (assumption)
    image_err = False
    if len(images) > 0:
        images_dir = os.path.join(target, 'images')
        os.makedirs(images_dir)
        for image in images:
            if os.path.isabs(image):
                old_image_file = image
            else:
                old_image_file = os.path.join(source, image)
            new_image_file = os.path.join(images_dir, os.path.basename(image))
            if os.path.isfile(os.path.abspath(old_image_file)):
                shutil.copy(old_image_file, new_image_file)
            else:
                image_err = True
                logging.error('Could not find image: %s' % old_image_file)
    return image_err
Copy images to converted topology :param images: Images to copy :param source: Old Topology Directory :param target: Target topology files directory :return: True when an image cannot be found, otherwise false :rtype: bool
378,612
def refresh(self, item): client = self._clients[item].get_client() self[item] = val = self.item_class(client) return val
Forces a refresh of a cached item. :param item: Client name. :type item: unicode | str :return: The refreshed item. :rtype: DockerHostItemCache.item_class
378,613
def on_resize(width, height): glViewport(0, 0, width, height) glMatrixMode(GL_PROJECTION) glLoadIdentity() gluPerspective(30, 1.0*width/height, 0.1, 1000.0) glMatrixMode(GL_MODELVIEW) glLoadIdentity()
Setup 3D projection
378,614
def _get_arguments_for_execution(self, function_name, serialized_args): arguments = [] for (i, arg) in enumerate(serialized_args): if isinstance(arg, ObjectID): argument = self.get_object([arg])[0] if isinstance(argument, RayError): raise argument else: argument = arg arguments.append(argument) return arguments
Retrieve the arguments for the remote function. This retrieves the values for the arguments to the remote function that were passed in as object IDs. Arguments that were passed by value are not changed. This is called by the worker that is executing the remote function. Args: function_name (str): The name of the remote function whose arguments are being retrieved. serialized_args (List): The arguments to the function. These are either strings representing serialized objects passed by value or they are ray.ObjectIDs. Returns: The retrieved arguments in addition to the arguments that were passed by value. Raises: RayError: This exception is raised if a task that created one of the arguments failed.
378,615
def spline_base1d(length, nr_knots = 20, spline_order = 5, marginal = None): if marginal is None: knots = augknt(np.linspace(0,length+1, nr_knots), spline_order) else: knots = knots_from_marginal(marginal, nr_knots, spline_order) x_eval = np.arange(1,length+1).astype(float) Bsplines = spcol(x_eval,knots,spline_order) return Bsplines, knots
Computes a 1D spline basis Input: length: int length of each basis nr_knots: int Number of knots, i.e. number of basis functions. spline_order: int Order of the splines. marginal: array, optional Estimate of the marginal distribution of the input to be fitted. If given, it is used to determine the positioning of knots, each knot will cover the same amount of probability mass. If not given, knots are equally spaced.
378,616
def add(self, url: str, anything: Any) -> None:
    # the separator and the ':*' / ':' variable prefixes were stripped during
    # extraction; reconstructed from the library's documented pattern syntax
    # (assumption)
    url = normalize_url(url)
    parts = url.split('/')
    curr_partial_routes = self._routes
    curr_key_parts = []
    for part in parts:
        if part.startswith(':*'):
            curr_key_parts.append(part[2:])
            part = self._VAR_ANY_NODE
            self._max_depth = self._max_depth_custom
        elif part.startswith(':'):
            curr_key_parts.append(part[1:])
            part = self._VAR_NODE
        curr_partial_routes = (curr_partial_routes
                               .setdefault(part, {}))
    curr_partial_routes[self._ROUTE_NODE] = _Route(
        key_parts=curr_key_parts, anything=anything)
    self._max_depth = max(self._max_depth, depth_of(parts))
Register a URL pattern into the routes for later matching. It's possible to attach any kind of object to the pattern for later retrieving. A dict with methods and callbacks, for example. Anything really. Registration order does not matter. Adding a URL first or last makes no difference. :param url: URL :param anything: Literally anything.
378,617
def write_stream (stream, holders, defaultsection=None):
    # the 'section' field name and the '[%s]' / '%s = %s' ini formatting
    # strings were stripped during extraction; reconstructed (assumption)
    anybefore = False
    for h in holders:
        if anybefore:
            print ('', file=stream)
        s = h.get ('section', defaultsection)
        if s is None:
            raise ValueError ('cannot determine section name for item %r' % h)
        print ('[%s]' % s, file=stream)
        for k in sorted (x for x in six.iterkeys (h.__dict__) if x != 'section'):
            v = h.get (k)
            if v is None:
                continue
            print ('%s = %s' % (k, v), file=stream)
        anybefore = True
Very simple writing in ini format. The simple stringification of each value in each Holder is printed, and no escaping is performed. (This is most relevant for multiline values or ones containing pound signs.) `None` values are skipped. Arguments: stream A text stream to write to. holders An iterable of objects to write. Their fields will be written as sections. defaultsection=None Section name to use if a holder doesn't contain a `section` field.
378,618
def precess_coordinates(ra, dec, epoch_one, epoch_two,
                        jd=None, mu_ra=0.0, mu_dec=0.0,
                        outscalar=False):
    raproc, decproc = np.radians(ra), np.radians(dec)
    if ((mu_ra != 0.0) and (mu_dec != 0.0) and jd):
        jd_epoch_one = JD2000 + (epoch_one - epoch_two)*365.25
        raproc = (
            raproc +
            (jd - jd_epoch_one)*mu_ra*MAS_P_YR_TO_RAD_P_DAY/np.cos(decproc)
        )
        decproc = decproc + (jd - jd_epoch_one)*mu_dec*MAS_P_YR_TO_RAD_P_DAY
    ca = np.cos(raproc)
    cd = np.cos(decproc)
    sa = np.sin(raproc)
    sd = np.sin(decproc)
    if epoch_one != epoch_two:
        t1 = 1.0e-3 * (epoch_two - epoch_one)
        t2 = 1.0e-3 * (epoch_one - 2000.0)
        a = (
            t1*ARCSEC_TO_RADIANS * (23062.181 + t2*(139.656 + 0.0139*t2) +
                                    t1*(30.188 - 0.344*t2 + 17.998*t1))
        )
        b = t1*t1*ARCSEC_TO_RADIANS*(79.280 + 0.410*t2 + 0.205*t1) + a
        # final factor restored to t1, giving the standard cubic precession
        # term -41.833 t^3 (the dump read t2, likely an extraction error)
        c = (
            ARCSEC_TO_RADIANS*t1*(20043.109 - t2*(85.33 + 0.217*t2) +
                                  t1*(-42.665 - 0.217*t2 - 41.833*t1))
        )
        sina, sinb, sinc = np.sin(a), np.sin(b), np.sin(c)
        cosa, cosb, cosc = np.cos(a), np.cos(b), np.cos(c)
        precmatrix = np.matrix([[cosa*cosb*cosc - sina*sinb,
                                 sina*cosb + cosa*sinb*cosc,
                                 cosa*sinc],
                                [-cosa*sinb - sina*cosb*cosc,
                                 cosa*cosb - sina*sinb*cosc,
                                 -sina*sinc],
                                [-cosb*sinc, -sinb*sinc, cosc]])
        precmatrix = precmatrix.transpose()
        x = (np.matrix([cd*ca, cd*sa, sd])).transpose()
        x2 = precmatrix * x
        outra = np.arctan2(x2[1], x2[0])
        outdec = np.arcsin(x2[2])
        outradeg = np.rad2deg(outra)
        outdecdeg = np.rad2deg(outdec)
        if outradeg < 0.0:
            outradeg = outradeg + 360.0
        if outscalar:
            return float(outradeg), float(outdecdeg)
        else:
            return outradeg, outdecdeg
    else:
        return np.degrees(raproc), np.degrees(decproc)
Precesses target coordinates `ra`, `dec` from `epoch_one` to `epoch_two`. This takes into account the jd of the observations, as well as the proper motion of the target mu_ra, mu_dec. Adapted from J. D. Hartman's VARTOOLS/converttime.c [coordprecess]. Parameters ---------- ra,dec : float The equatorial coordinates of the object at `epoch_one` to precess in decimal degrees. epoch_one : float Origin epoch to precess from to target epoch. This is a float, like: 1985.0, 2000.0, etc. epoch_two : float Target epoch to precess from origin epoch. This is a float, like: 2000.0, 2018.0, etc. jd : float The full Julian date to use along with the propermotions in `mu_ra`, and `mu_dec` to handle proper motion along with the coordinate frame precession. If one of `jd`, `mu_ra`, or `mu_dec` is missing, the proper motion will not be used to calculate the final precessed coordinates. mu_ra,mu_dec : float The proper motion in mas/yr in right ascension and declination. If these are provided along with `jd`, the total proper motion of the object will be taken into account to calculate the final precessed coordinates. outscalar : bool If True, converts the output coordinates from one-element np.arrays to scalars. Returns ------- precessed_ra, precessed_dec : float A tuple of precessed equatorial coordinates in decimal degrees at `epoch_two` taking into account proper motion if `jd`, `mu_ra`, and `mu_dec` are provided.
378,619
def rr_history(self, ips): api_name = fmt_url_path = u return self._multi_get(api_name, fmt_url_path, ips)
Get the domains related to input ips. Args: ips: an enumerable of strings as ips Returns: An enumerable of resource records and features
378,620
def install(zone, nodataset=False, brand_opts=None):
    # the command template and result-dict keys were stripped during
    # extraction; reconstructed from the Salt zoneadm module (assumption)
    ret = {'status': True}
    res = __salt__['cmd.run_all']('zoneadm -z {zone} install {nodataset} {brand_opts}'.format(
        zone=zone,
        nodataset='-x nodataset' if nodataset else '',
        brand_opts='{0}'.format(brand_opts) if brand_opts else '',
    ))
    ret['status'] = res['retcode'] == 0
    ret['message'] = res['stdout'] if ret['status'] else res['stderr']
    ret['message'] = ret['message'].replace('zoneadm: ', '')
    if ret['message'] == '':
        del ret['message']
    return ret
Install the specified zone on the system. zone : string name of the zone nodataset : boolean do not create a ZFS file system brand_opts : string brand specific options to pass CLI Example: .. code-block:: bash salt '*' zoneadm.install dolores salt '*' zoneadm.install teddy True
378,621
def user_choice(prompt, choices=("yes", "no"), default=None):
    # the separator and prompt punctuation literals were stripped during
    # extraction; '/' and the parentheses are reconstructions (assumption)
    assert default is None or default in choices
    choice_list = '/'.join(choice.title() if choice == default else choice
                           for choice in choices)
    response = None
    while response not in choices:
        response = input(prompt + ' (' + choice_list + ') ')
        response = response.lower() if response else default
    return response
Prompts the user for confirmation. The default value, if any, is capitalized. :param prompt: Information to display to the user. :param choices: an iterable of possible choices. :param default: default choice :return: the user's choice
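A brief usage sketch (hedged; assumes the function above is in scope and uses the separators reconstructed there):
answer = user_choice("Overwrite existing file?", choices=("yes", "no"), default="no")
# Prompt shown: "Overwrite existing file? (yes/No)" -- pressing Enter returns "no".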
378,622
def execute_migrations(self, show_traceback=True): all_migrations = get_pending_migrations(self.path, self.databases) if not len(all_migrations): sys.stdout.write("There are no migrations to apply.\n") for db, migrations in all_migrations.iteritems(): connection = connections[db] cursor = connection.cursor() cursor.close() for migration in migrations: migration_path = self._get_migration_path(db, migration) with Transactional(): sys.stdout.write( "Executing migration %r on %r...." % (migration, db) ) created_models = self._execute_migration( db, migration_path, show_traceback=show_traceback ) emit_post_sync_signal( created_models=created_models, verbosity=self.verbosity, interactive=self.interactive, db=db, ) if self.load_initial_data: sys.stdout.write( "Running loaddata for initial_data fixtures on %r.\n" % db ) call_command( "loaddata", "initial_data", verbosity=self.verbosity, database=db, )
Executes all pending migrations across all capable databases
378,623
def import_module(path):
    mod = __import__(path, locals={}, globals={})
    for item in path.split('.')[1:]:
        try:
            mod = getattr(mod, item)
        except AttributeError:
            # error message text is an assumption; the original literal was
            # stripped during extraction
            raise ImportError('No module named %s' % path)
    return mod
Import a module given a dotted *path* in the form of ``.name(.name)*``, and returns the last module (unlike ``__import__`` which just returns the first module). :param path: The dotted path to the module.
378,624
def all_cities(): cities = [] fname = pkg_resources.resource_filename(__name__, ) with open(fname, ) as csvfile: reader = csv.reader(csvfile, delimiter = ) for row in reader: cities.append(row[0]) cities.sort() return cities
Get a list of all Backpage city names. Returns: list of city names as Strings
378,625
def sector_shift(self): header = self.source.header return header.mini_sector_shift if self._is_mini \ else header.sector_shift
Property with the current sector size shift. The actual sector size is 2 ** sector_shift.
378,626
def _extend_str(class_node, rvalue): code = dedent( ) code = code.format(rvalue=rvalue) fake = AstroidBuilder(MANAGER).string_build(code)["whatever"] for method in fake.mymethods(): method.parent = class_node method.lineno = None method.col_offset = None if "__class__" in method.locals: method.locals["__class__"] = [class_node] class_node.locals[method.name] = [method] method.parent = class_node
function to extend builtin str/unicode class
378,627
def addcomment(accountable, body): r = accountable.issue_add_comment(body) headers = sorted([, , ]) rows = [[v for k, v in sorted(r.items()) if k in headers]] rows.insert(0, headers) print_table(SingleTable(rows))
Add a comment to the given issue key. Accepts a body argument to be used as the comment's body.
378,628
def insert_child(self, child_pid, index=-1): self._check_child_limits(child_pid) if index is None: index = -1 try: with db.session.begin_nested(): if not isinstance(child_pid, PersistentIdentifier): child_pid = resolve_pid(child_pid) child_relations = self._resolved_pid.child_relations.filter( PIDRelation.relation_type == self.relation_type.id ).order_by(PIDRelation.index).all() relation_obj = PIDRelation.create( self._resolved_pid, child_pid, self.relation_type.id, None) if index == -1: child_relations.append(relation_obj) else: child_relations.insert(index, relation_obj) for idx, c in enumerate(child_relations): c.index = idx except IntegrityError: raise PIDRelationConsistencyError("PID Relation already exists.")
Insert a new child into a PID concept. Argument 'index' can take the following values: 0,1,2,... - insert child PID at the specified position -1 - insert the child PID at the last position None - insert child without order (no re-ordering is done) NOTE: If 'index' is specified, all sibling relations should have PIDRelation.index information.
378,629
def uri(self):
    # the 'mongodb://' prefixes and ssl query string were stripped during
    # extraction; reconstructed (assumption)
    if self._uds_path:
        uri = 'mongodb://%s' % (quote_plus(self._uds_path),)
    else:
        uri = 'mongodb://%s' % (format_addr(self._address),)
    return uri + '/?ssl=true' if self._ssl else uri
Connection string to pass to `~pymongo.mongo_client.MongoClient`.
378,630
def get_authors(self, language): return self.gettext(language, self._author) if self._author else ""
Return the list of this task's authors
378,631
def get_last_scene_id(self, refresh=False): if refresh: self.refresh_complex_value() self.refresh_complex_value() val = self.get_complex_value() or self.get_complex_value() return val
Get last scene id. Refresh data from Vera if refresh is True, otherwise use local cache. Refresh is only needed if you're not using subscriptions.
378,632
def copyVarStatesFrom(self, particleState, varNames):
    # the loop header was lost in extraction; iterating over varNames is the
    # minimal reconstruction that makes the surviving body well-formed (the
    # original likely also copied state out of particleState)
    for varName in varNames:
        self.permuteVars[varName].resetVelocity(self._rng)
Copy specific variables from particleState into this particle. Parameters: -------------------------------------------------------------- particleState: dict produced by a particle's getState() method varNames: which variables to copy
378,633
def byte_to_channels(self, byte): assert isinstance(byte, int) assert byte >= 0 assert byte < 256 result = [] for offset in range(0, 8): if byte & (1 << offset): result.append(offset + 1) return result
:return: list(int)
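A worked example of the bit test (assuming `dev` is an instance of the enclosing class):
# 0b00100101 has bits 0, 2 and 5 set, so channels 1, 3 and 6 are active.
assert dev.byte_to_channels(0b00100101) == [1, 3, 6]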
378,634
def index(path=None): payload = { "username": "soandso", "message": "Hello bot", "vars": { "name": "Soandso", } } return Response(r.format(json.dumps(payload)), mimetype="text/plain")
On all other routes, just return an example `curl` command.
378,635
def result_to_dict(raw_result):
    # dictionary key strings were stripped during extraction; the names
    # 'id', 'name', 'movies', 'title', 'start_time', 'end_time' and
    # 'is_movie' below are plausible reconstructions (assumption)
    result = {}
    for channel_index, channel in enumerate(raw_result):
        channel_id, channel_name = channel[0], channel[1]
        channel_result = {
            'id': channel_id,
            'name': channel_name,
            'movies': [],
        }
        for movie in channel[2]:
            channel_result['movies'].append({
                'title': movie[1],
                'start_time': datetime.fromtimestamp(movie[2]),
                'end_time': datetime.fromtimestamp(movie[2] + movie[3]),
                'is_movie': True if movie[3] else False,
            })
        result[channel_id] = channel_result
    return result
Parse raw result from fetcher into readable dictionary Args: raw_result (dict) - raw data from `fetcher` Returns: dict - readable dictionary
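A sketch of the expected input and output shapes, using the hypothetical key names reconstructed above:
raw = [(7, "Channel 7", [(0, "Evening Movie", 1546300800, 5400)])]
parsed = result_to_dict(raw)
# parsed[7]["name"] == "Channel 7"; the single entry runs for 90 minutes,
# so end_time - start_time == timedelta(seconds=5400).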
378,636
def run(self) -> None: if self.loop is None: return create_server = asyncio.ensure_future(self._run(), loop=self.loop) try: self.loop.run_until_complete(create_server) self.loop.run_until_complete(self._check_alive()) finally: self.loop.run_until_complete(self.loop.shutdown_asyncgens()) self.loop.close()
Run callback invoked once the sock has been created.
378,637
def sbar(Ss): if type(Ss) == list: Ss = np.array(Ss) npts = Ss.shape[0] Ss = Ss.transpose() avd, avs = [], [] D = np.array([Ss[0], Ss[1], Ss[2], Ss[3] + 0.5 * (Ss[0] + Ss[1]), Ss[4] + 0.5 * (Ss[1] + Ss[2]), Ss[5] + 0.5 * (Ss[0] + Ss[2])]) for j in range(6): avd.append(np.average(D[j])) avs.append(np.average(Ss[j])) D = D.transpose() nf = (npts - 1) * 6 s0 = 0 Dels = (D - avd)**2 s0 = np.sum(Dels) sigma = np.sqrt(s0/float(nf)) return nf, sigma, avs
Calculate the average s and sigma from a list of "s" values.
378,638
def error_asymptotes(pca,**kwargs): ax = kwargs.pop("ax",current_axes()) lon,lat = pca.plane_errors(, n=1000) ax.plot(lon,lat,) lon,lat = pca.plane_errors(, n=1000) ax.plot(lon,lat,) ax.plane(*pca.strike_dip())
Plots asymptotic error bounds for hyperbola on a stereonet.
378,639
def __get_constants(self):
    helper = ConstantClass(self._constants_class_name, self._io)
    helper.reload()
    constants = helper.constants()
    for name, value in constants.items():
        self._add_replace_pair(name, value, True)
    # message text reconstructed; the original literal was stripped during
    # extraction (assumption)
    self._io.text('Read {0} constants from {1}.'.format(len(constants), helper.file_name()))
Gets the constants from the class that acts like a namespace for constants and adds them to the replace pairs.
378,640
def console_print(con: tcod.console.Console, x: int, y: int, fmt: str) -> None: lib.TCOD_console_printf(_console(con), x, y, _fmt(fmt))
Print a color formatted string on a console. Args: con (Console): Any Console instance. x (int): Character x position from the left. y (int): Character y position from the top. fmt (AnyStr): A unicode or bytes string optionaly using color codes. .. deprecated:: 8.5 Use :any:`Console.print_` instead.
378,641
def _show_or_dump(self, dump=False, indent=3, lvl="", label_lvl="", first_call=True):
    if dump:
        from scapy.themes import AnsiColorTheme
        ct = AnsiColorTheme()
    else:
        ct = conf.color_theme
    # the "###[" / "]###" banner strings were garbled during extraction and
    # are restored here from the Scapy source
    s = "%s%s %s %s \n" % (label_lvl,
                           ct.punct("###["),
                           ct.layer_name(self.name),
                           ct.punct("]###"))
    for f in self.fields_desc:
        if isinstance(f, ConditionalField) and not f._evalcond(self):
            continue
        if isinstance(f, Emph) or f in conf.emph:
            ncol = ct.emph_field_name
            vcol = ct.emph_field_value
        else:
            ncol = ct.field_name
            vcol = ct.field_value
        fvalue = self.getfieldval(f.name)
        if isinstance(fvalue, Packet) or (f.islist and f.holds_packets and isinstance(fvalue, list)):
            s += "%s \\%-10s\\\n" % (label_lvl + lvl, ncol(f.name))
            fvalue_gen = SetGen(fvalue, _iterpacket=0)
            for fvalue in fvalue_gen:
                s += fvalue._show_or_dump(dump=dump, indent=indent,
                                          label_lvl=label_lvl + lvl + " |",
                                          first_call=False)
        else:
            begn = "%s %-10s%s " % (label_lvl + lvl, ncol(f.name), ct.punct("="),)
            reprval = f.i2repr(self, fvalue)
            if isinstance(reprval, str):
                reprval = reprval.replace("\n", "\n" + " " * (len(label_lvl) + len(lvl) + len(f.name) + 4))
            s += "%s%s\n" % (begn, vcol(reprval))
    if self.payload:
        s += self.payload._show_or_dump(dump=dump, indent=indent,
                                        lvl=lvl + (" " * indent * self.show_indent),
                                        label_lvl=label_lvl, first_call=False)
    if first_call and not dump:
        print(s)
    else:
        return s
Internal method that shows or dumps a hierarchical view of a packet. Called by show. :param dump: determine if it prints or returns the string value :param int indent: the size of indentation for each layer :param str lvl: additional information about the layer lvl :param str label_lvl: additional information about the layer fields :param first_call: determine if the current function is the first :return: return a hierarchical view if dump, else print it
378,642
def view_include(view_module, namespace=None, app_name=None):
    # the attribute name 'urljects_view' follows the attribute accessed just
    # after it; the chain() over priorities is a reconstruction of the
    # flattening expression lost in extraction (assumption)
    view_dict = defaultdict(list)
    if isinstance(view_module, six.string_types):
        view_module = importlib.import_module(view_module)
    for member_name, member in inspect.getmembers(view_module):
        is_class_view = inspect.isclass(member) and issubclass(member, URLView)
        is_func_view = (inspect.isfunction(member)
                        and hasattr(member, 'urljects_view')
                        and member.urljects_view)
        if (is_class_view and member is not URLView) or is_func_view:
            view_dict[member.url_priority].append(
                url(member.url, member, name=member.url_name))
    view_patterns = list(chain(*[
        view_dict[priority] for priority in sorted(view_dict)
    ]))
    return urls.include(
        arg=view_patterns, namespace=namespace, app_name=app_name)
Includes view in the url, works similar to django include function. Auto imports all class based views that are subclass of ``URLView`` and all functional views that have been decorated with ``url_view``. :param view_module: object of the module or string with importable path :param namespace: name of the namespaces, it will be guessed otherwise :param app_name: application name :return: result of urls.include
378,643
def ParseOptions(cls, options, config_object, category=None, names=None): for helper_name, helper_class in cls._helper_classes.items(): if ((category and helper_class.CATEGORY != category) or (names and helper_name not in names)): continue try: helper_class.ParseOptions(options, config_object) except errors.BadConfigObject: pass
Parses and validates arguments using the appropriate helpers. Args: options (argparse.Namespace): parser options. config_object (object): object to be configured by an argument helper. category (Optional[str]): category of helpers to apply to the group, such as storage, output, where None will apply the arguments to all helpers. The category can be used to add arguments to a specific group of registered helpers. names (Optional[list[str]]): names of argument helpers to apply, where None will apply the arguments to all helpers.
378,644
def update_check(self, entity, check, label=None, name=None, disabled=None, metadata=None, monitoring_zones_poll=None, timeout=None, period=None, target_alias=None, target_hostname=None, target_receiver=None): entity.update_check(check, label=label, name=name, disabled=disabled, metadata=metadata, monitoring_zones_poll=monitoring_zones_poll, timeout=timeout, period=period, target_alias=target_alias, target_hostname=target_hostname, target_receiver=target_receiver)
Updates an existing check with any of the parameters.
378,645
def ucas_download_playlist(url, output_dir = , merge = False, info_only = False, **kwargs): html = get_content(url) parts = re.findall( r, html) assert parts, for part_path in parts: ucas_download( + part_path, output_dir=output_dir, merge=merge, info_only=info_only)
course page
378,646
def memoized_method(method=None, cache_factory=None): if method is None: return lambda f: memoized_method(f, cache_factory=cache_factory) cache_factory = cache_factory or dict @wraps(method) def memoized_method_property(self): cache = cache_factory() cache_attr = "_%s_cache" %(method.__name__, ) setattr(self, cache_attr, cache) result = partial( _memoized_call, partial(method, self), cache ) result.memoize_cache = cache return result return memoized_property(memoized_method_property)
Memoize a class's method. Arguments are similar to `memoized`, except that the cache container is specified with `cache_factory`: a function called with no arguments to create the caching container for the instance. Note that, unlike `memoized`, the result cache will be stored on the instance, so cached results will be deallocated along with the instance. Example:: >>> class Person(object): ... def __init__(self, name): ... self._name = name ... @memoized_method ... def get_name(self): ... print("Calling get_name on %r" %(self._name, )) ... return self._name >>> shazow = Person("shazow") >>> shazow.get_name() Calling get_name on 'shazow' 'shazow' >>> shazow.get_name() 'shazow' >>> shazow._get_name_cache {((), ()): 'shazow'} Example with a specific cache container:: >>> from unstdlib.standard.collections_ import RecentlyUsedContainer >>> class Foo(object): ... @memoized_method(cache_factory=lambda: RecentlyUsedContainer(maxsize=2)) ... def add(self, a, b): ... print("Calling add with %r and %r" %(a, b)) ... return a + b >>> foo = Foo() >>> foo.add(1, 1) Calling add with 1 and 1 2 >>> foo.add(1, 1) 2 >>> foo.add(2, 2) Calling add with 2 and 2 4 >>> foo.add(3, 3) Calling add with 3 and 3 6 >>> foo.add(1, 1) Calling add with 1 and 1 2
378,647
def _handle_log_rotations(self):
    for h in self.capture_handlers:
        if self._should_rotate_log(h):
            self._rotate_log(h)
Rotate each handler's log file if necessary
378,648
def get(session, api_key, **kwargs): args, kwargs = validate_args(api_key, **kwargs) resp = session.get(*args, **kwargs) return WeatherAnswer.validate(resp.json())
Performs an API call. session - the `requests` module or a session created from it api_key - API access key string rate - tariff, may be `informers` or `forecast` lat, lon - latitude and longitude ``` import yandex_weather_api import requests as req yandex_weather_api.get(req, "REPLACE_ME_WITH_A_KEY", lat=55.10, lon=60.10) ```
378,649
def _check_conflict(cls, dirPath, name):
    old_sys_path = sys.path
    try:
        sys.path = [d for d in old_sys_path
                    if os.path.realpath(d) != os.path.realpath(dirPath)]
        try:
            colliding_module = importlib.import_module(name)
        except ImportError:
            pass
        else:
            # the format string lost two placeholders in extraction; restored
            # to match the three arguments (exact wording assumed)
            raise ResourceException(
                "The user module '%s' collides with module '%s' from '%s'." % (
                    name, colliding_module.__name__, colliding_module.__file__))
    finally:
        sys.path = old_sys_path
Check whether the module of the given name conflicts with another module on the sys.path. :param dirPath: the directory from which the module was originally loaded :param name: the module name
378,650
def get_netloc_and_auth(self, netloc, scheme):
    # the scheme literal was stripped during extraction; pip's Subversion
    # class special-cases 'ssh' here (assumption)
    if scheme == 'ssh':
        return super(Subversion, self).get_netloc_and_auth(
            netloc, scheme)
    return split_auth_from_netloc(netloc)
This override allows the auth information to be passed to svn via the --username and --password options instead of via the URL.
378,651
def encode(i, *, width=-1): if i < 0: raise ValueError("value is negative") assert width != 0 data = bytearray() while i: data.append(i & 127) i >>= 7 if width > 0 and len(data) > width: raise ValueError("Integer too large") if len(data) < abs(width): data.extend([0] * (abs(width) - len(data))) data.reverse() return data
Encodes a nonnegative integer into syncsafe format When width > 0, then len(result) == width When width < 0, then len(result) >= abs(width)
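A worked example of the 7-bit packing, as computed by the function above:
# 383 = 0b10_1111111 -> low group 127, high group 2; reversed to big-endian.
assert encode(383) == bytearray([2, 127])
# width=-4 pads to at least four bytes: b'\x00\x00\x02\x7f'.
assert encode(383, width=-4) == bytearray([0, 0, 2, 127])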
378,652
def readline(self, size=-1): if size == 0: return self.string_type() index = self.expect([self.crlf, self.delimiter]) if index == 0: return self.before + self.crlf else: return self.before
This reads and returns one entire line. The newline at the end of line is returned as part of the string, unless the file ends without a newline. An empty string is returned if EOF is encountered immediately. This looks for a newline as a CR/LF pair (\\r\\n) even on UNIX because this is what the pseudotty device returns. So contrary to what you may expect you will receive newlines as \\r\\n. If the size argument is 0 then an empty string is returned. In all other cases the size argument is ignored, which is not standard behavior for a file-like object.
378,653
def verify_chunks(self, chunks): err = [] for chunk in chunks: err.extend(self.verify_data(chunk)) return err
Verify the chunks in a list of low data structures
378,654
def load_config(self, argv=None, aliases=None, flags=None): self.clear() if argv is None: argv = self.argv if aliases is None: aliases = self.aliases if flags is None: flags = self.flags self._create_parser(aliases, flags) self._parse_args(argv) self._convert_to_config() return self.config
Parse command line arguments and return as a Config object. Parameters ---------- argv : optional, list If given, a list with the structure of sys.argv[1:] to parse arguments from. If not given, the instance's self.argv attribute (given at construction time) is used.
378,655
def p_rst(p):
    val = p[2].eval()
    if val not in (0, 8, 16, 24, 32, 40, 48, 56):
        # message texts are assumptions; the original literals were stripped
        # during extraction
        error(p.lineno(1), 'Invalid RST number %i' % val)
        p[0] = None
        return
    p[0] = Asm(p.lineno(1), 'RST %XH' % val)
asm : RST expr
378,656
def del_calc(db, job_id, user): job_id = int(job_id) dependent = db( , job_id) if dependent: return {"error": % (job_id, [j.id for j in dependent])} try: owner, path = db(, job_id, one=True) except NotFound: return {"error": % job_id} deleted = db(, job_id, user).rowcount if not deleted: return {"error": % (job_id, owner, user)} fname = path + ".hdf5" try: os.remove(fname) except OSError as exc: return {"error": % (fname, exc)} return {"success": fname}
Delete a calculation and all associated outputs, if possible. :param db: a :class:`openquake.server.dbapi.Db` instance :param job_id: job ID, can be an integer or a string :param user: username :returns: None if everything went fine or an error message
378,657
def _build(self, inputs): shape_inputs = inputs.get_shape().as_list() rank = len(shape_inputs) full_multiples = [1] * rank for dim, multiple in zip(self._dims, self._multiples): full_multiples[dim] = multiple return tf.tile(inputs, multiples=full_multiples)
Connects the `TileByDim` module into the graph. Args: inputs: `Tensor` to tile. Returns: The tiled tensor.
378,658
def combine_action_handlers(*handlers): for handler in handlers: if not (iscoroutinefunction(handler) or iscoroutine(handler)): raise ValueError("Provided handler is not a coroutine: %s" % handler) async def combined_handler(*args, **kwds): for handler in handlers: await handler(*args, **kwds) return combined_handler
This function combines the given action handlers into a single function which will call all of them.
378,659
def unpack_binary(self, offset, length=False): if not length: return bytes("".encode("ascii")) o = self._offset + offset try: return bytes(struct.unpack_from("<{}s".format(length), self._buf, o)[0]) except struct.error: raise OverrunBufferException(o, len(self._buf))
Returns raw binary data from the relative offset with the given length. Arguments: - `offset`: The relative offset from the start of the block. - `length`: The length of the binary blob. If zero, an empty byte string is returned. Throws: - `OverrunBufferException`
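The core of the offset math, as a standalone sketch:
import struct
buf = b"\x00\x00hello-world"
# reading 5 bytes at relative offset 2 (absolute offset = block start + 2)
blob = struct.unpack_from("<{}s".format(5), buf, 2)[0]
assert blob == b"hello"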
378,660
def get_deffacts(self): return sorted(self._get_by_type(DefFacts), key=lambda d: d.order)
Return the existing deffacts sorted by the internal order
378,661
def finish(self):
    if self.finished:
        return self.exit_code
    checkpoint_status = self.checkpoint()
    self.exit_code = self._exit_code()
    if self.exit_code != 0:
        # the '{}' placeholder was stripped from the message during extraction
        raise TeradataPTError("BulkLoad job finished with return code {}".format(self.exit_code))
    if self.applied_count > 0:
        self._end_acquisition()
        self._apply_rows()
    self.exit_code = self._exit_code()
    if self.exit_code != 0:
        raise TeradataPTError("BulkLoad job finished with return code {}".format(self.exit_code))
    self.finished = True
    return self.exit_code
Finishes the load job. Called automatically when the connection closes. :return: The exit code returned when applying rows to the table
378,662
def defer_sync(self, func): latch = Latch() def wrapper(): try: latch.put(func()) except Exception: latch.put(sys.exc_info()[1]) self.defer(wrapper) res = latch.get() if isinstance(res, Exception): raise res return res
Arrange for `func()` to execute on :class:`Broker` thread, blocking the current thread until a result or exception is available. :returns: Return value of `func()`.
378,663
def master_callback(self, m, master): sysid = m.get_srcSystem() mtype = m.get_type() if sysid in self.mpstate.sysid_outputs: self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf()) if mtype == "GLOBAL_POSITION_INT": for modname in , , , : mod = self.module(modname) if mod is not None: mod.set_secondary_vehicle_position(m) return if getattr(m, , None) is None: master.post_message(m) self.status.counters[][master.linknum] += 1 if mtype == : for sysid in self.mpstate.sysid_outputs: self.mpstate.sysid_outputs[sysid].write(m.get_msgbuf()) if self.mpstate.settings.fwdpos: for link in self.mpstate.mav_master: if link != master: link.write(m.get_msgbuf()) if mtype not in dataPackets and self.mpstate.logqueue: usec = self.get_usec() usec = (usec & ~3) | master.linknum self.mpstate.logqueue.put(bytearray(struct.pack(, usec) + m.get_msgbuf())) self.status.msgs[mtype] = m if mtype not in self.status.msg_count: self.status.msg_count[mtype] = 0 self.status.msg_count[mtype] += 1 if m.get_srcComponent() == mavutil.mavlink.MAV_COMP_ID_GIMBAL and mtype == : return if getattr(m, , None) is not None and self.settings.target_system == m.get_srcSystem(): self.handle_msec_timestamp(m, master) if mtype in activityPackets: if master.linkerror: master.linkerror = False self.say("link %s OK" % (self.link_label(master))) self.status.last_message = time.time() master.last_message = self.status.last_message if master.link_delayed and self.mpstate.settings.checkdelay: if mtype != : if self.mpstate.settings.mavfwd_rate or mtype != : if mtype not in self.no_fwd_types: for r in self.mpstate.mav_outputs: r.write(m.get_msgbuf()) sysid = m.get_srcSystem() target_sysid = self.target_system for (mod,pm) in self.mpstate.modules: if not hasattr(mod, ): continue if not mod.multi_vehicle and sysid != target_sysid: continue try: mod.mavlink_packet(m) except Exception as msg: if self.mpstate.settings.moddebug == 1: print(msg) elif self.mpstate.settings.moddebug > 1: exc_type, exc_value, exc_traceback = sys.exc_info() traceback.print_exception(exc_type, exc_value, exc_traceback, limit=2, file=sys.stdout)
process mavlink message m on master, sending any messages to recipients
378,664
def _templated(fn): @functools.wraps(fn) def inner(ctl): return [i.format(**ctl) for i in fn(ctl)] return inner
Return a function which applies ``str.format(**ctl)`` to all results of ``fn(ctl)``.
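A usage sketch (hypothetical function; `ctl` stands in for any mapping, assuming the decorator above is in scope):
@_templated
def paths(ctl):
    return ["{name}.log", "{name}.pid"]

assert paths({"name": "worker"}) == ["worker.log", "worker.pid"]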
378,665
def get_snippet(self, snippet_id, timeout=None): return self._api_request( self.SNIPPET_ENDPOINT % (snippet_id), self.HTTP_GET, timeout=timeout )
API call to get a specific Snippet
378,666
def getlist(self, name: str, default: Any = None) -> List[Any]: return super().get(name, default)
Return the entire list
378,667
def accuracy(conf_matrix): total, correct = 0.0, 0.0 for true_response, guess_dict in conf_matrix.items(): for guess, count in guess_dict.items(): if true_response == guess: correct += count total += count return correct/total
Given a confusion matrix, returns the accuracy. Accuracy Definition: http://research.ics.aalto.fi/events/eyechallenge2005/evaluation.shtml
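A worked example (hypothetical matrix: outer keys are true responses, inner dicts are guess counts):
matrix = {"yes": {"yes": 8, "no": 2}, "no": {"yes": 1, "no": 9}}
# correct = 8 + 9 = 17, total = 20, so accuracy is 0.85
assert accuracy(matrix) == 0.85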
378,668
def writeln(self, string='', *args, **kwargs):
    self.write(string + '\n', *args, **kwargs)
    self.on_new_line = True
    self.current_indent += 1
    self.auto_added_line = False
Writes a string into the source code _and_ appends a new line, applying indentation if required
378,669
def get_rms(self): return np.sqrt(np.mean(np.square(self._entry_scores)))
Gets the root mean square of the score. If this system is based on grades, the RMS of the output score is returned. return: (decimal) - the median score *compliance: mandatory -- This method must be implemented.*
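The underlying computation, shown standalone:
import numpy as np
scores = [3.0, 4.0]
# RMS = sqrt((9 + 16) / 2) = sqrt(12.5), about 3.5355
rms = np.sqrt(np.mean(np.square(scores)))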
378,670
def result_pretty(self, number_of_runs=0, time_str=None, fbestever=None): if fbestever is None: fbestever = self.best.f s = ( + ( if number_of_runs > 1 else )) \ % number_of_runs if number_of_runs else for k, v in self.stop().items(): print( % (k, str(v), s + ( % time_str if time_str else ))) print( % (self.best.last.f, fbestever)) if self.N < 9: print( + str(list(self.gp.pheno(self.mean, into_bounds=self.boundary_handler.repair)))) print( + str(list(self.sigma * self.sigma_vec * sqrt(self.dC) * self.gp.scales))) else: print( % (str(self.gp.pheno(self.mean, into_bounds=self.boundary_handler.repair)[:8])[:-1])) print( % (str((self.sigma * self.sigma_vec * sqrt(self.dC) * self.gp.scales)[:8])[:-1])) return self.result()
pretty print result. Returns ``self.result()``
378,671
def save(self, *args, **kwargs): self._pre_save(*args, **kwargs) response = self._save(*args, **kwargs) response = self._post_save(response, *args, **kwargs) return response
Saves (creates or updates) the current resource and returns the new resource.
378,672
def String(self, str): ret = libxml2mod.xmlTextReaderConstString(self._o, str) return ret
Get an interned string from the reader; allows, for example, speeding up string name comparisons
378,673
def delete_instance(self, instance_id, project_id=None): instance = self._get_client(project_id=project_id).instance(instance_id) try: instance.delete() return except GoogleAPICallError as e: self.log.error(, e.message) raise e
Deletes an existing Cloud Spanner instance. :param instance_id: The ID of the Cloud Spanner instance. :type instance_id: str :param project_id: Optional, the ID of the GCP project that owns the Cloud Spanner database. If set to None or missing, the default project_id from the GCP connection is used. :type project_id: str :return: None
378,674
def unpack_shards(shards, stream_arn, session): if not shards: return {} if "ShardId" in shards[0]: shards = _translate_shards(shards) by_id = {shard_token["shard_id"]: Shard(stream_arn=stream_arn, shard_id=shard_token["shard_id"], iterator_type=shard_token.get("iterator_type"), sequence_number=shard_token.get("sequence_number"), parent=shard_token.get("parent"), session=session) for shard_token in shards} for shard in by_id.values(): if shard.parent: shard.parent = by_id[shard.parent] shard.parent.children.append(shard) return by_id
List[Dict] -> Dict[shard_id, Shard]. Each Shards' parent/children are hooked up with the other Shards in the list.
378,675
def extract(self, destination, format='csv', csv_delimiter=None, csv_header=True, compress=False):
    # the default for `format` was stripped during extraction; restored to
    # 'csv' per the docstring
    job = self.extract_async(destination, format=format, csv_delimiter=csv_delimiter,
                             csv_header=csv_header, compress=compress)
    if job is not None:
        job.wait()
    return job
Exports the table to GCS; blocks until complete. Args: destination: the destination URI(s). Can be a single URI or a list. format: the format to use for the exported data; one of 'csv', 'json', or 'avro' (default 'csv'). csv_delimiter: for CSV exports, the field delimiter to use. Defaults to ',' csv_header: for CSV exports, whether to include an initial header line. Default true. compress: whether to compress the data on export. Compression is not supported for AVRO format. Defaults to False. Returns: A Job object for the completed export Job if it was started successfully; else None.
378,676
def delete(event, saltenv='base', test=None):
    # the event bus name, key owner and tag strings were stripped during
    # extraction; reconstructed from the Salt reactor runner (assumption)
    sevent = salt.utils.event.get_event(
        'master',
        __opts__['sock_dir'],
        __opts__['transport'],
        opts=__opts__,
        listen=True)
    master_key = salt.utils.master.get_master_key('root', __opts__)
    __jid_event__.fire_event({'event': event, 'key': master_key},
                             'salt/reactors/manage/delete')
    res = sevent.get_event(wait=30, tag='salt/reactors/manage/delete-complete')
    return res['result']
Delete a reactor CLI Example: .. code-block:: bash salt-run reactor.delete 'salt/cloud/*/destroyed'
378,677
def auth_recv(self): id_r = auth_data = None for p in self.packets[-1].payloads: if p._type == payloads.Type.IDr: id_r = p logger.debug(.format(dump(bytes(p)))) if p._type == payloads.Type.AUTH: auth_data = p._data if p._type == payloads.Type.SA: logger.debug(.format(p.spi)) self.esp_SPIin = p.spi for proposal in p.proposals: logger.debug("Proposal: {}".format(proposal.__dict__)) logger.debug(proposal.spi) if id_r is None or auth_data is None: raise IkeError() message2 = bytes(self.packets[1]) authenticated = self.authenticate_peer(auth_data, id_r, message2) assert authenticated keymat = prfplus(self.SK_d, self.Ni + self.Nr, 4 * 32) (self.esp_ei, self.esp_ai, self.esp_er, self.esp_ar, ) = unpack("32s" * 4, keymat) self.install_ipsec_sas()
Handle peer's IKE_AUTH response.
378,678
def connect_array(self, address, connection_key, connection_type, **kwargs): data = {"management_address": address, "connection_key": connection_key, "type": connection_type} data.update(kwargs) return self._request("POST", "array/connection", data)
Connect this array with another one. :param address: IP address or DNS name of other array. :type address: str :param connection_key: Connection key of other array. :type connection_key: str :param connection_type: Type(s) of connection desired. :type connection_type: list :param \*\*kwargs: See the REST API Guide on your array for the documentation on the request: **POST array/connection** :type \*\*kwargs: optional :returns: A dictionary describing the connection to the other array. :rtype: ResponseDict .. note:: Currently, the only type of connection is "replication". .. note:: Requires use of REST API 1.2 or later.
378,679
def write_sources_list(url, codename, filename='custom.list', mode=0o644):
    # the default filename and the deb-line template are assumptions; the
    # original string literals were stripped during extraction
    repo_path = os.path.join('/etc/apt/sources.list.d', filename)
    content = 'deb {url} {codename} main\n'.format(
        url=url,
        codename=codename,
    )
    write_file(repo_path, content.encode(), mode)
add deb repo to /etc/apt/sources.list.d/
378,680
def update_context(self, context, update_mask=None,
                   retry=google.api_core.gapic_v1.method.DEFAULT,
                   timeout=google.api_core.gapic_v1.method.DEFAULT,
                   metadata=None):
    # the cache key and method-config name follow the standard GAPIC
    # pattern; the string literals were stripped during extraction (assumption)
    if 'update_context' not in self._inner_api_calls:
        self._inner_api_calls['update_context'] = \
            google.api_core.gapic_v1.method.wrap_method(
                self.transport.update_context,
                default_retry=self._method_configs['UpdateContext'].retry,
                default_timeout=self._method_configs['UpdateContext'].timeout,
                client_info=self._client_info,
            )
    request = context_pb2.UpdateContextRequest(
        context=context,
        update_mask=update_mask,
    )
    return self._inner_api_calls['update_context'](
        request, retry=retry, timeout=timeout, metadata=metadata)
Updates the specified context. Example: >>> import dialogflow_v2 >>> >>> client = dialogflow_v2.ContextsClient() >>> >>> # TODO: Initialize ``context``: >>> context = {} >>> >>> response = client.update_context(context) Args: context (Union[dict, ~google.cloud.dialogflow_v2.types.Context]): Required. The context to update. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dialogflow_v2.types.Context` update_mask (Union[dict, ~google.cloud.dialogflow_v2.types.FieldMask]): Optional. The mask to control which fields get updated. If a dict is provided, it must be of the same form as the protobuf message :class:`~google.cloud.dialogflow_v2.types.FieldMask` retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will not be retried. timeout (Optional[float]): The amount of time, in seconds, to wait for the request to complete. Note that if ``retry`` is specified, the timeout applies to each individual attempt. metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata that is provided to the method. Returns: A :class:`~google.cloud.dialogflow_v2.types.Context` instance. Raises: google.api_core.exceptions.GoogleAPICallError: If the request failed for any reason. google.api_core.exceptions.RetryError: If the request failed due to a retryable error and retry attempts failed. ValueError: If the parameters are invalid.
378,681
def comunicar_certificado_icpbrasil(self, certificado):
    # the endpoint name and response key were stripped during extraction;
    # reconstructed from the satcfe library (assumption)
    resp = self._http_post('comunicarcertificadoicpbrasil',
                           certificado=certificado)
    conteudo = resp.json()
    return RespostaSAT.comunicar_certificado_icpbrasil(
        conteudo.get('retorno'))
Overrides :meth:`~satcfe.base.FuncoesSAT.comunicar_certificado_icpbrasil`. :return: A standard SAT response. :rtype: satcfe.resposta.padrao.RespostaSAT
378,682
def get_driver(self): driver = self.__get_driver_for_channel(self.__get_channel()) if driver is None: driver = self.new_driver() return driver
Get an already running instance of Webdriver. If there is none, it will create one. Returns: Webdriver - Selenium Webdriver instance. Usage:: driver = WTF_WEBDRIVER_MANAGER.new_driver() driver.get("http://the-internet.herokuapp.com") same_driver = WTF_WEBDRIVER_MANAGER.get_driver() print(driver is same_driver) # True
378,683
def _child(details):
    if isinstance(details, list):
        return OptionsNode(details)
    elif isinstance(details, dict):
        # the key names were stripped during extraction; '__array__',
        # '__hash__' and '__type__' follow the library's node conventions
        # (assumption)
        if '__array__' in details:
            return ArrayNode(details)
        elif '__hash__' in details:
            return HashNode(details)
        elif '__type__' in details:
            if isinstance(details['__type__'], (dict, list)):
                return _child(details['__type__'])
            else:
                return Parent(details)
    elif isinstance(details, basestring):
        return Node(details)
    else:
        raise TypeError()
Child A private function to figure out the child node type Arguments: details {dict} -- A dictionary describing a data point Returns: _NodeInterface
378,684
def _get_type(self, policy): if isinstance(policy, string_types) or is_instrinsic(policy): return PolicyTypes.MANAGED_POLICY if isinstance(policy, dict) and "Statement" in policy: return PolicyTypes.POLICY_STATEMENT if self._is_policy_template(policy): return PolicyTypes.POLICY_TEMPLATE return PolicyTypes.UNKNOWN
Returns the type of the given policy :param string or dict policy: Policy data :return PolicyTypes: Type of the given policy. None, if type could not be inferred
378,685
def _do_read_config(self, config_file, pommanipext): parser = InterpolationConfigParser() dataset = parser.read(config_file) if config_file not in dataset: raise IOError("Config file %s not found." % config_file) if parser.has_option(,): include = parser.get(, ) if include is not "": sections_ = self.read_and_load(include) for section_ in sections_: if parser.has_section(section_): raise DuplicateSectionError( "The config section [%s] is existed in %s and include %s cfg file" % ( section_, config_file, re.split("\\s+", include.strip())[1])) parser._sections.update(sections_) pom_manipulator_config = {} common_section = {} package_configs = {} if pommanipext and pommanipext != and pommanipext != : parse_pom_manipulator_ext(pom_manipulator_config, parser, pommanipext) if not parser.has_section(): logging.error() raise NoSectionError() common_section[] = parser.get(, ) common_section[] = parser.get(, ) common_section[] = parser.get(, ) common_section[] = parser.get(, ) common_section[] = parser.get(, ) if parser.has_option(, ): common_section[] = parser.get(, ) if parser.has_option(, ): common_section[] = parser.get(, ) if parser.has_option(, ): common_section[] = parser.get(, ) if parser.has_option(, ): common_section[] = parser.get(, ) if parser.has_option(, ): common_section[] = parser.get(, ) if parser.has_option(, ): common_section[] = parser.get(, ) if parser.has_option(, ): common_section[] = parser.get(, ) common_section[] = parser.get(, ) config_dir = utils.get_dir(config_file) if parser.has_option(, ) and parser.get(, ) is not "": parse_shared_config(common_section, config_dir, parser) common_section[] = parser.getint(, ) common_section[] = {} if parser.has_option(, ): common_section[][] = dict(x.strip().split() for x in parser.get(, ).replace(",\n", ",").split()) else: common_section[][] = [] if os.path.dirname(config_file): config_path = os.path.dirname(config_file) else: config_path = os.getcwd() logging.info("Configuration file is %s and path %s", os.path.basename(config_file), config_path) for section in parser.sections(): config_type = self.read_config_type(parser, section) if section == or config_type == "bom-builder-meta": logging.debug (, section) continue self._do_read_section(config_path, os.path.basename(config_file), package_configs, parser, section) return (common_section, package_configs, pom_manipulator_config)
Reads the config file: the [common] section, optional POM-manipulator settings, and one package config per job section. Returns a tuple of (common_section, package_configs, pom_manipulator_config).
378,686
def list_(bank):
    redis_server = _get_redis_server()
    bank_redis_key = _get_bank_redis_key(bank)
    try:
        banks = redis_server.smembers(bank_redis_key)
    except (RedisConnectionError, RedisResponseError) as rerr:
        # NOTE: the original error message was lost in extraction;
        # this text is a reconstruction.
        mesg = 'Cannot list the Redis cache key {rkey}: {rerr}'.format(
            rkey=bank_redis_key, rerr=rerr)
        log.error(mesg)
        raise SaltCacheError(mesg)
    if not banks:
        return []
    return list(banks)
Lists entries stored in the specified bank.
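A hedged usage sketch; it assumes the module is loaded with a reachable Redis backend, and the bank name follows Salt's `minions/<id>` convention:

try:
    entries = list_('minions/minion1')  # e.g. ['data', 'mine']
except SaltCacheError as exc:
    print('Redis cache unavailable: {0}'.format(exc))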
378,687
def iterator_cycle(variables: VarType, parent: str) -> Iterable[VarMatrix]:
    if isinstance(variables, dict):
        if variables.get("times"):
            times = int(variables["times"])
            del variables["times"]
            yield list(variable_matrix(variables, parent, "product")) * times
        else:
            raise ValueError("times is a required keyword for the repeat iterator.")
    else:
        raise ValueError(
            f"The repeat operator only takes a dict as arguments, got {variables} of type {type(variables)}"
        )
Cycle through a list of values a specified number of times.

Args:
    variables: The input variable specification; must contain a ``times`` key giving the repeat count.
    parent: The variable for which the values are being generated.

Yields:
    A list of dictionaries mapping the parent to each value, repeated ``times`` times.
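A sketch of the repeat iterator in use; it assumes the module's `variable_matrix` helper is available, and the spec below is hypothetical:

spec = {"times": 3, "temperature": [0.5, 1.0]}
matrix = next(iterator_cycle(spec, "temperature"))
# Presumably the product [{'temperature': 0.5}, {'temperature': 1.0}]
# repeated 3 times, i.e. a list of 6 dictionaries. Note that the call
# deletes the "times" key from spec as a side effect.
print(len(matrix))  # 6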
378,688
async def debug_create_unit(self, unit_spawn_commands: List[List[Union[UnitTypeId, int, Point2, Point3]]]):
    assert isinstance(unit_spawn_commands, list)
    assert unit_spawn_commands
    assert isinstance(unit_spawn_commands[0], list)
    assert len(unit_spawn_commands[0]) == 4
    assert isinstance(unit_spawn_commands[0][0], UnitTypeId)
    assert unit_spawn_commands[0][1] > 0
    assert isinstance(unit_spawn_commands[0][2], (Point2, Point3))
    assert 1 <= unit_spawn_commands[0][3] <= 2

    await self._execute(
        debug=sc_pb.RequestDebug(
            debug=[
                debug_pb.DebugCommand(
                    create_unit=debug_pb.DebugCreateUnit(
                        unit_type=unit_type.value,
                        owner=owner_id,
                        pos=common_pb.Point2D(x=position.x, y=position.y),
                        quantity=amount_of_units,
                    )
                )
                for unit_type, amount_of_units, position, owner_id in unit_spawn_commands
            ]
        )
    )
Usage example (will spawn 1 marine in the center of the map for player ID 1):

    await self._client.debug_create_unit([[UnitTypeId.MARINE, 1, self._game_info.map_center, 1]])
378,689
import numbers
import collections  # collections.Iterable moved to collections.abc in Python 3 (removed in 3.10)


def get_xml_type(val):
    # NOTE: the quoted return values were lost in extraction; they are
    # reconstructed to mirror the Python type being tested.
    if type(val).__name__ in ('str', 'unicode'):
        return 'str'
    if type(val).__name__ in ('int', 'long'):
        return 'int'
    if type(val).__name__ == 'float':
        return 'float'
    if type(val).__name__ == 'bool':
        return 'bool'
    if isinstance(val, numbers.Number):
        return 'number'
    if type(val).__name__ == 'NoneType':
        return 'null'
    if isinstance(val, dict):
        return 'dict'
    if isinstance(val, collections.Iterable):
        return 'list'
    return type(val).__name__
Returns the data type for the xml type attribute
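Given the reconstructed return strings, the mapping behaves like this (the 'unicode'/'long' branches only fire on Python 2):

print(get_xml_type("hello"))    # 'str'
print(get_xml_type(42))         # 'int'
print(get_xml_type(3.14))       # 'float'
print(get_xml_type(True))       # 'bool'
print(get_xml_type(None))       # 'null'
print(get_xml_type({'a': 1}))   # 'dict'
print(get_xml_type([1, 2, 3]))  # 'list'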
378,690
def partition(pred, iterable):
    pos, neg = [], []
    pos_append, neg_append = pos.append, neg.append
    for elem in iterable:
        if pred(elem):
            pos_append(elem)
        else:
            neg_append(elem)
    return neg, pos
Partition an iterable.

Arguments
---------
pred : function
    A function that takes an element of the iterable and returns a boolean
    indicating to which partition it belongs
iterable : iterable

Returns
-------
A two-tuple of lists with the first list containing the elements on which
the predicate indicated False and the second list containing the elements
on which the predicate indicated True.

Note that, unlike the recipe which returns generators, this version
returns lists.
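A quick usage example; note that the false-partition comes back first:

evens, odds = partition(lambda n: n % 2, range(10))
print(evens)  # [0, 2, 4, 6, 8]  (predicate was falsy)
print(odds)   # [1, 3, 5, 7, 9]  (predicate was truthy)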
378,691
def _push_tag_buffer(self, data):
    if data.context & data.CX_QUOTED:
        self._emit_first(tokens.TagAttrQuote(char=data.quoter))
        self._emit_all(self._pop())
    buf = data.padding_buffer
    self._emit_first(tokens.TagAttrStart(
        pad_first=buf["first"],
        pad_before_eq=buf["before_eq"],
        pad_after_eq=buf["after_eq"]))
    self._emit_all(self._pop())
    for key in data.padding_buffer:
        data.padding_buffer[key] = ""
Write a pending tag attribute from *data* to the stack.
378,692
def calcinds(data, threshold, ignoret=None):
    # NOTE: the dict keys and the debug message were lost in extraction;
    # 'snrs' and 'times' are assumed names for the candidate arrays, and the
    # original debug call referenced loop variables leaked from the list
    # comprehension, which is simplified away here.
    inds = []
    for i in range(len(data['snrs'])):
        snr = data['snrs'][i]
        time = data['times'][i]
        if threshold >= 0 and snr > threshold:
            if ignoret:
                # Keep the index only if its rounded time falls in no ignored window
                incl = [t0 for (t0, t1) in ignoret
                        if np.round(time).astype(int) in range(t0, t1)]
                logger.debug('time {0} in ignored window? {1}'.format(
                    np.round(time).astype(int), incl))
                if not incl:
                    inds.append(i)
            else:
                inds.append(i)
        elif threshold < 0 and snr < threshold:
            if ignoret:
                incl = [t0 for (t0, t1) in ignoret
                        if np.round(time).astype(int) in range(t0, t1)]
                logger.debug('time {0} in ignored window? {1}'.format(
                    np.round(time).astype(int), incl))
                if not incl:
                    inds.append(i)
            else:
                inds.append(i)
    return inds
Find indexes for data above (or below) a given threshold.
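A sketch using the key names assumed in the reconstruction above ('snrs'/'times' are not confirmed by the source), with the module's logger assumed to be configured:

import numpy as np

data = {'snrs': np.array([3.0, 8.0, -9.0]),
        'times': np.array([10.0, 20.0, 30.0])}
print(calcinds(data, 6.5))                      # [1]: only snr 8.0 exceeds 6.5
print(calcinds(data, 6.5, ignoret=[(18, 25)]))  # []: t=20 falls in the ignored window
print(calcinds(data, -6.5))                     # [2]: negative threshold selects snr < -6.5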
378,693
def activities(self, name=None, pk=None, scope=None, **kwargs):
    # NOTE: quoted literals were lost in extraction; the parameter names,
    # endpoint, version labels and result key below are reconstructed from
    # the surrounding pykechain-style code and may differ from the source.
    request_params = {
        'id': pk,
        'name': name,
        'scope': scope
    }
    if self.match_app_version(label='wim', version='>=2.0.0', default=False):
        request_params.update(API_EXTRA_PARAMS['activity'])
    if kwargs:
        request_params.update(**kwargs)

    response = self._request('GET', self._build_url('activities'), params=request_params)

    if response.status_code != requests.codes.ok:
        raise NotFoundError("Could not retrieve activities. Server responded with {}".format(str(response)))

    data = response.json()
    # Older WIM versions return Activity objects, newer ones Activity2
    if self.match_app_version(label='wim', version='<2.0.0', default=True):
        return [Activity(a, client=self) for a in data['results']]
    else:
        return [Activity2(a, client=self) for a in data['results']]
Search for activities with optional name, pk and scope filter.

If additional `keyword=value` arguments are provided, these are added to the request parameters. Please refer to the documentation of the KE-chain API for additional query parameters.

:param pk: id (primary key) of the activity to retrieve
:type pk: basestring or None
:param name: filter the activities by name
:type name: basestring or None
:param scope: filter by scope id
:type scope: basestring or None
:return: list of :class:`models.Activity`
:raises NotFoundError: If no `Activities` are found
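A hedged usage sketch against a pykechain-style `Client`; the URL, credentials and task name are hypothetical:

client = Client(url='https://my-kechain-instance.example.com')
client.login(username='demo_user', password='...')
tasks = client.activities(name='Specify wheel diameter')
for task in tasks:
    print(task.name)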
378,694
def drop(self, index=None, columns=None):
    if self._is_transposed:
        return self.transpose().drop(index=columns, columns=index).transpose()
    if index is None:
        new_data = self.data
        new_index = self.index
    else:

        def delitem(df, internal_indices=[]):
            return df.drop(index=df.index[internal_indices])

        numeric_indices = list(self.index.get_indexer_for(index))
        new_data = self.data.apply_func_to_select_indices(
            1, delitem, numeric_indices, keep_remaining=True
        )
        new_index = self.index[~self.index.isin(index)]
    if columns is None:
        new_columns = self.columns
        new_dtypes = self.dtypes
    else:

        def delitem(df, internal_indices=[]):
            return df.drop(columns=df.columns[internal_indices])

        numeric_indices = list(self.columns.get_indexer_for(columns))
        new_data = new_data.apply_func_to_select_indices(
            0, delitem, numeric_indices, keep_remaining=True
        )
        new_columns = self.columns[~self.columns.isin(columns)]
        new_dtypes = self.dtypes.drop(columns)
    return self.__constructor__(new_data, new_index, new_columns, new_dtypes)
Remove row data for target index and columns.

Args:
    index: Target index to drop.
    columns: Target columns to drop.

Returns:
    A new QueryCompiler.
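This compiler method ultimately backs the user-facing `DataFrame.drop`; a sketch of the behaviour at that level, assuming Modin is installed:

import modin.pandas as pd

df = pd.DataFrame({'a': [1, 2], 'b': [3, 4]}, index=['x', 'y'])
print(df.drop(index=['x'], columns=['b']))
#    a
# y  2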
378,695
def decrypt(self, ciphertext, encoder=encoding.RawEncoder):
    ciphertext = encoder.decode(ciphertext)

    plaintext = nacl.bindings.crypto_box_seal_open(
        ciphertext,
        self._public_key,
        self._private_key,
    )

    return plaintext
Decrypts the ciphertext using the ephemeral public key enclosed in the ciphertext and the SealedBox private key, returning the plaintext message.

:param ciphertext: [:class:`bytes`] The encrypted message to decrypt
:param encoder: The encoder used to decode the ciphertext.
:return bytes: The original plaintext
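For context, the full round trip with PyNaCl's public API looks like this (standard SealedBox usage):

from nacl.public import PrivateKey, SealedBox

receiver_key = PrivateKey.generate()
sealed_box = SealedBox(receiver_key.public_key)  # anyone can seal with the public key
ciphertext = sealed_box.encrypt(b'secret message')
unseal_box = SealedBox(receiver_key)             # only the private-key holder can open
print(unseal_box.decrypt(ciphertext))            # b'secret message'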
378,696
def _save(self):
    # NOTE: the database/collection names and the '_id' key were lost in
    # extraction; the literals below are reconstructions.
    collection = JSONClientValidated('assessment',
                                     collection='AssessmentSection',
                                     runtime=self._runtime)
    if '_id' in self._my_map:
        # Already persisted: overwrite the stored document
        collection.save(self._my_map)
    else:
        # First save: insert, then re-read to pick up the generated _id
        insert_result = collection.insert_one(self._my_map)
        self._my_map = collection.find_one({'_id': insert_result.inserted_id})
Saves the current state of this AssessmentSection to the database. Should be called every time the question map changes.
378,697
def init_instance(self, key):
    with self._mor_lock:
        if key not in self._mor:
            self._mor[key] = {}
Create an empty instance if it doesn't exist. If the instance already exists, this is a noop.
378,698
def _find_longest_parent_path(path_set, path):
    while path not in path_set:
        if not path:
            return None
        path = os.path.dirname(path)
    return path
Finds the longest "parent-path" of 'path' in 'path_set'.

This function takes and returns "path-like" strings, which are strings made of strings separated by os.sep. No file access is performed here, so these strings need not correspond to actual files in some file-system. This function returns the longest ancestor path. For example, for path_set=["/foo/bar", "/foo", "/bar/foo"] and path="/foo/bar/sub_dir", it returns "/foo/bar".

Args:
    path_set: set of path-like strings -- e.g. a list of strings separated by os.sep. No actual disk-access is performed here, so these need not correspond to actual files.
    path: a path-like string.

Returns:
    The element in path_set which is the longest parent directory of 'path'.
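The docstring's own example, runnable as-is:

path_set = {"/foo/bar", "/foo", "/bar/foo"}
print(_find_longest_parent_path(path_set, "/foo/bar/sub_dir"))  # /foo/bar
# A relative path with no ancestor in the set walks down to "" and returns None;
# beware that an absolute miss would loop forever, since os.path.dirname('/') == '/'.
print(_find_longest_parent_path(path_set, "other/dir"))         # None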
378,699
def load(self, value):
    # NOTE: the attribute-name literals were lost in extraction; 'validator'
    # and 'env' are inferred from the keyword arguments they feed.
    self.reset(
        value,
        validator=self.__dict__.get('validator'),
        env=self.__dict__.get('env'),
    )
Load ``value`` from file, enforcing the precedence env > value (an environment setting overrides the file-supplied value).