Columns: "Unnamed: 0" (int64, 0 to 389k), "code" (string, lengths 26 to 79.6k), "docstring" (string, lengths 1 to 46.9k).
25,300
def rate_unstable(self):
    if not self.started or self.stalled:
        return 0.0
    x1, y1 = self._timing_data[-2]
    x2, y2 = self._timing_data[-1]
    return (y2 - y1) / (x2 - x1)
Returns an unstable rate based on the last two entries in the timing data. Less intensive to compute.
25,301
def post_event(api_key=None, app_key=None, title=None, text=None,
               date_happened=None, priority=None, host=None, tags=None,
               alert_type=None, aggregation_key=None, source_type_name=None):
    _initialize_connection(api_key, app_key)
    if title is None:
        raise SaltInvocationError('title must be specified')
    if text is None:
        raise SaltInvocationError('text must be specified')
    if alert_type not in [None, 'error', 'warning', 'info', 'success']:
        ...  # remainder of the body (the Datadog API call that builds `ret`) is truncated in this record
    return ret
Post an event to the Datadog stream. CLI Example .. code-block:: bash salt-call datadog.post_event api_key='0123456789' \\ app_key='9876543210' \\ title='Salt Highstate' \\ text="Salt highstate was run on $(salt-call grains.get id)" \\ tags='["service:salt", "event:highstate"]' Required arguments :param title: The event title. Limited to 100 characters. :param text: The body of the event. Limited to 4000 characters. The text supports markdown. Optional arguments :param date_happened: POSIX timestamp of the event. :param priority: The priority of the event ('normal' or 'low'). :param host: Host name to associate with the event. :param tags: A list of tags to apply to the event. :param alert_type: "error", "warning", "info" or "success". :param aggregation_key: An arbitrary string to use for aggregation, max length of 100 characters. :param source_type_name: The type of event being posted.
25,302
def disable_nn_ha(self, active_name, snn_host_id, snn_check_point_dir_list,
                  snn_name=None):
    args = dict(
        activeNnName=active_name,
        snnHostId=snn_host_id,
        snnCheckpointDirList=snn_check_point_dir_list,
        snnName=snn_name,
    )
    return self._cmd('hdfsDisableNnHa', data=args, api_version=6)
Disable high availability with automatic failover for an HDFS NameNode. @param active_name: Name of the NameNode role that is going to be active after High Availability is disabled. @param snn_host_id: Id of the host where the new SecondaryNameNode will be created. @param snn_check_point_dir_list: List of directories used for checkpointing by the new SecondaryNameNode. @param snn_name: Name of the new SecondaryNameNode role (Optional). @return: Reference to the submitted command. @since: API v6
25,303
def get_agent_sock_path(env=None, sp=subprocess):
    args = [util.which('gpgconf'), '--list-dirs']
    output = check_output(args=args, env=env, sp=sp)
    lines = output.strip().split(b'\n')
    dirs = dict(line.split(b':', 1) for line in lines)
    log.debug('%s: %s', args, dirs)
    return dirs[b'agent-socket']
Parse gpgconf output to find out GPG agent UNIX socket path.
25,304
def run_global_hook(hook_name, *args):
    hook_finder = HookFinder(get_global_hook_path())
    hook = hook_finder(hook_name)
    if hook:
        hook.run(*args)
Attempt to run a global hook by name with args
25,305
def top_prior(name, z_shape, learn_prior="normal", temperature=1.0):
    with tf.variable_scope(name, reuse=tf.AUTO_REUSE):
        h = tf.zeros(z_shape, dtype=tf.float32)
        if learn_prior == "normal":
            prior_dist = tfp.distributions.Normal(h, tf.exp(h))
        elif learn_prior == "single_conv":
            prior_dist = single_conv_dist("top_learn_prior", h)
        else:
            raise ValueError("Expected learn_prior to be normal or single_conv "
                             "got %s" % learn_prior)
        return TemperedNormal(prior_dist.loc, prior_dist.scale, temperature)
Unconditional prior distribution. Args: name: variable scope z_shape: Shape of the mean / scale of the prior distribution. learn_prior: Possible options are "normal" and "single_conv". If set to "single_conv", the Gaussian is parametrized by a single convolutional layer whose input is an array of zeros and which is initialized such that the mean and std are zero and one. If set to "normal", the prior is just a Gaussian with zero mean and unit variance. temperature: Temperature with which to sample from the Gaussian. Returns: objective: 1-D Tensor shape=(batch_size,) summed across spatial components. Raises: ValueError: If learn_prior not in "normal" or "single_conv"
25,306
def code(item):
    _res = []
    i = 0
    for attr in ATTR:
        val = getattr(item, attr)
        if val:
            _res.append("%d=%s" % (i, quote(val)))
        i += 1
    return ",".join(_res)
Turn a NameID class instance into a quoted string of comma separated attribute,value pairs. The attribute names are replaced with digits. Depends on knowledge on the specific order of the attributes for the class that is used. :param item: The class instance :return: A quoted string
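A minimal sketch of the digit encoding, using a hypothetical stand-in for the NameID class and an illustrative ATTR order (the real order comes from the module that defines code()):

    from collections import namedtuple
    from urllib.parse import quote

    # Hypothetical attribute order; the real ATTR list is module-level state.
    ATTR = ["name_qualifier", "sp_name_qualifier", "format", "sp_provided_id", "text"]
    FakeNameID = namedtuple("FakeNameID", ATTR)

    item = FakeNameID("idp.example.org", None, "persistent", None, "abc123")
    # code(item) -> "0=idp.example.org,2=persistent,4=abc123"
    # Missing attributes are skipped, but the digit still tracks the position.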
25,307
def running_state(self, running_state):
    allowed_values = ["ONGOING", "PENDING", "ENDED"]
    if running_state not in allowed_values:
        raise ValueError(
            "Invalid value for `running_state` ({0}), must be one of {1}"
            .format(running_state, allowed_values)
        )
    self._running_state = running_state
Sets the running_state of this MaintenanceWindow. :param running_state: The running_state of this MaintenanceWindow. # noqa: E501 :type: str
25,308
def send_signal(self, signum):
    if self._process is None:
        raise RuntimeError()
    self._process.kill(signum)
Send the signal *signum* to the child. On Windows, SIGTERM, SIGKILL and SIGINT are emulated using TerminateProcess(). This will cause the child to exit unconditionally with status 1. No other signals can be sent on Windows.
25,309
def get_username(uid, **kwargs):
    rs = db.DBSession.query(User.username).filter(User.id == uid).one()
    if rs is None:
        raise ResourceNotFoundError("User with ID %s not found" % uid)
    return rs.username
Return the username of a given user_id
25,310
def index(self, row, column, parent):
    item = parent.internalPointer()
    d = self.declaration if item is None else item.declaration
    if row < len(d._items):
        proxy = d._items[row].proxy
        assert isinstance(proxy, QtTreeViewItem), \
            "Invalid item {}".format(proxy)
    else:
        proxy = d.proxy
    return self.createIndex(row, column, proxy)
The index should point to the corresponding QtControl in the enaml object hierarchy.
25,311
def remove(self, indices):
    if not self._is_valid_index(indices):
        self.log_exc(u"The given list of indices is not valid", None, True, ValueError)
    new_fragments = []
    sorted_indices = sorted(indices)
    i = 0
    j = 0
    while (i < len(self)) and (j < len(sorted_indices)):
        if i != sorted_indices[j]:
            new_fragments.append(self[i])
        else:
            j += 1
        i += 1
    while i < len(self):
        new_fragments.append(self[i])
        i += 1
    self.__fragments = new_fragments
Remove the fragments corresponding to the given list of indices. :param indices: the list of indices to be removed :type indices: list of int :raises ValueError: if one of the indices is not valid
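A plain-list sketch of the same two-pointer merge, assuming indices are valid:

    def remove_by_indices(items, indices):
        # Keep every element whose position is not in the sorted index list.
        sorted_indices = sorted(indices)
        kept, j = [], 0
        for i, item in enumerate(items):
            if j < len(sorted_indices) and i == sorted_indices[j]:
                j += 1          # skip this position
            else:
                kept.append(item)
        return kept

    # remove_by_indices(['a', 'b', 'c', 'd'], [1, 3]) -> ['a', 'c']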
25,312
def _package_exists(path):
    while path:
        if os.path.exists(path):
            return True
        else:
            path = os.path.dirname(path)
    return False
Checks if the given Python path matches a valid file or a valid container file :param path: A Python path :return: True if the module or its container exists
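A quick usage sketch (hypothetical paths, assuming _package_exists is in scope): the walk succeeds as soon as any ancestor exists, which is what makes container files such as zip archives work.

    import os
    import tempfile

    base = tempfile.mkdtemp()                                    # exists
    probe = os.path.join(base, 'archive.zip', 'pkg', 'module')   # does not exist
    print(_package_exists(probe))   # True: an ancestor (base) exists on disk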
25,313
def repartition(self, numPartitions): return self.transform( lambda rdd: (rdd.repartition(numPartitions) if not isinstance(rdd, EmptyRDD) else rdd) )
Repartition every RDD. :rtype: DStream Example: >>> import pysparkling >>> sc = pysparkling.Context() >>> ssc = pysparkling.streaming.StreamingContext(sc, 0.1) >>> ( ... ssc ... .queueStream([['hello', 'world']]) ... .repartition(2) ... .foreachRDD(lambda rdd: print(len(rdd.partitions()))) ... ) >>> ssc.start() >>> ssc.awaitTermination(0.25) 2 0
25,314
def to_int(b: Any) -> Union[int, List[int]]:
    "Convert `b` to an int or list of ints (if `is_listy`); raises exception if not convertible"
    if is_listy(b):
        return [to_int(x) for x in b]
    else:
        return int(b)
Convert `b` to an int or list of ints (if `is_listy`); raises exception if not convertible
25,315
def add_primary_key(self, column="id"):
    if not self.primary_key:
        sql = 'ALTER TABLE "{s}"."{t}" ADD PRIMARY KEY ("{c}")'.format(
            s=self.schema, t=self.name, c=column
        )
        self.db.execute(sql)
Add primary key constraint to specified column
25,316
def _resize_panels(self):
    self.theme.setup_figure(self.figure)
    self.facet.spaceout_and_resize_panels()
Resize panels
25,317
def handle(cls, value, **kwargs):
    try:
        env_var_name, default_val = value.split("::", 1)
    except ValueError:
        raise ValueError("Invalid value for default: %s. Must be in "
                         "<env_var>::<default value> format." % value)
    if env_var_name in kwargs['context'].environment:
        return kwargs['context'].environment[env_var_name]
    else:
        return default_val
Use a value from the environment or fall back to a default if the environment doesn't contain the variable. Format of value: <env_var>::<default value> For example: Groups: ${default app_security_groups::sg-12345,sg-67890} If `app_security_groups` is defined in the environment, its defined value will be returned. Otherwise, `sg-12345,sg-67890` will be the returned value. This allows defaults to be set at the config file level.
25,318
def _signal_handler(self, signal_interupt, frame):
    if self.container is not None:
        print('{}{}Stopping container ...'.format(c.Style.BRIGHT, c.Fore.YELLOW))
        self.container.stop()
        print('{}{}Container stopped.'.format(c.Style.BRIGHT, c.Fore.RED))
    self.log.error('signal interrupt received, exiting')
    sys.exit(1)
Handle signal interrupt. Args: signal_interupt ([type]): [Description] frame ([type]): [Description]
25,319
def _init_browser(self): self.browser = splinter.Browser() self.browser.visit(self.server_url) self.browser.find_link_by_partial_text("Sign in").click() self.browser.fill( , self.user) self.browser.fill( , self.password) self.browser.find_by_css().click() self.browser.find_by_css().click()
Update this everytime the CERN SSO login form is refactored.
25,320
def load_phonopy(filename, structure, dim, symprec=0.01, primitive_matrix=None,
                 factor=VaspToTHz, symmetrise=True, born=None, write_fc=False):
    unitcell = get_phonopy_structure(structure)
    num_atom = unitcell.get_number_of_atoms()
    num_satom = determinant(dim) * num_atom
    phonon = Phonopy(unitcell, dim, primitive_matrix=primitive_matrix,
                     factor=factor, symprec=symprec)
    if 'FORCE_CONSTANTS' == filename or '.hdf5' in filename:
        if '.hdf5' in filename:
            fc = file_IO.read_force_constants_hdf5(filename)
        elif 'FORCE_CONSTANTS' == filename:
            fc = file_IO.parse_FORCE_CONSTANTS(filename=filename)
        if fc.shape[0] != num_satom:
            msg = ("\nNumber of atoms in supercell is not consistent with the "
                   "matrix shape of\nforce constants read from {}.\nPlease "
                   "carefully check --dim.")
            logging.error(msg.format(filename))
            sys.exit()
        phonon.set_force_constants(fc)
    elif 'FORCE_SETS' == filename:
        fs = file_IO.parse_FORCE_SETS()
        if fs['natom'] != num_satom:
            msg = ("\nNumber of atoms in supercell is not consistent with the "
                   "the data in FORCE_SETS\nPlease carefully check --dim.")
            logging.error(msg.format(filename))
            sys.exit()
        phonon.set_displacement_dataset(fs)
        logging.info("Calculating force constants...")
        phonon.produce_force_constants()
    if born:
        nac_params = file_IO.parse_BORN(unitcell, filename=born)
        nac_params['factor'] = Hartree * Bohr
        phonon.set_nac_params(nac_params)
    if symmetrise:
        phonon.symmetrize_force_constants()
    if write_fc == 'hdf5':
        file_IO.write_force_constants_to_hdf5(phonon.get_force_constants())
        logging.info("Force constants written to force_constants.hdf5.")
    elif write_fc:
        file_IO.write_FORCE_CONSTANTS(phonon.get_force_constants())
        logging.info("Force constants written to FORCE_CONSTANTS.")
    return phonon
Load phonopy output and return an ``phonopy.Phonopy`` object. Args: filename (str): Path to phonopy output. Can be any of ``FORCE_SETS``, ``FORCE_CONSTANTS``, or ``force_constants.hdf5``. structure (:obj:`~pymatgen.core.structure.Structure`): The unitcell structure. dim (list): The supercell size, as a :obj:`list` of :obj:`float`. symprec (:obj:`float`, optional): The tolerance for determining the crystal symmetry. primitive_matrix (:obj:`list`, optional): The transformation matrix from the conventional to primitive cell. Only required when the conventional cell was used as the starting structure. Should be provided as a 3x3 :obj:`list` of :obj:`float`. factor (:obj:`float`, optional): The conversion factor for phonon frequency. Defaults to :obj:`phonopy.units.VaspToTHz`. symmetrise (:obj:`bool`, optional): Symmetrise the force constants. Defaults to ``True``. born (:obj:`str`, optional): Path to file containing Born effective charges. Should be in the same format as the file produced by the ``phonopy-vasp-born`` script provided by phonopy. write_fc (:obj:`bool` or :obj:`str`, optional): Write the force constants to disk. If ``True``, a ``FORCE_CONSTANTS`` file will be written. Alternatively, if set to ``"hdf5"``, a ``force_constants.hdf5`` file will be written. Defaults to ``False`` (force constants not written).
25,321
def parse_command_line(self, argv=None):
    argv = sys.argv[1:] if argv is None else argv
    if '-pylab' in argv:
        # replace the deprecated '-pylab' flag with the current '--pylab' syntax
        idx = argv.index('-pylab')
        sub = '--pylab'
        argv = argv[:]
        argv.pop(idx + 1)
        argv[idx] = sub
    return super(TerminalIPythonApp, self).parse_command_line(argv)
override to allow old '-pylab' flag with deprecation warning
25,322
def start(self):
    assert not self.is_running()
    self._measurement_process = subprocess.Popen(
        [self._executable],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        bufsize=10000,
        preexec_fn=os.setpgrp,
    )
Starts the external measurement program.
25,323
def dist(self, src, tar):
    if src == tar:
        return 0.0
    src = src.encode()
    tar = tar.encode()
    self._compressor.compress(src)
    src_comp = self._compressor.flush(zlib.Z_FULL_FLUSH)
    self._compressor.compress(tar)
    tar_comp = self._compressor.flush(zlib.Z_FULL_FLUSH)
    self._compressor.compress(src + tar)
    concat_comp = self._compressor.flush(zlib.Z_FULL_FLUSH)
    self._compressor.compress(tar + src)
    concat_comp2 = self._compressor.flush(zlib.Z_FULL_FLUSH)
    return (
        min(len(concat_comp), len(concat_comp2))
        - min(len(src_comp), len(tar_comp))
    ) / max(len(src_comp), len(tar_comp))
Return the NCD between two strings using zlib compression. Parameters ---------- src : str Source string for comparison tar : str Target string for comparison Returns ------- float Compression distance Examples -------- >>> cmp = NCDzlib() >>> cmp.dist('cat', 'hat') 0.3333333333333333 >>> cmp.dist('Niall', 'Neil') 0.45454545454545453 >>> cmp.dist('aluminum', 'Catalan') 0.5714285714285714 >>> cmp.dist('ATCG', 'TAGC') 0.4
25,324
def _nested_relations(self, relation):
    nested = {}
    for name, constraints in self._eager_load.items():
        if self._is_nested(name, relation):
            nested[name[len(relation + "."):]] = constraints
    return nested
Get the deeply nested relations for a given top-level relation. :rtype: dict
25,325
def _add_default_tz_bindings(self, context, switch, network_id):
    default_tz = CONF.NVP.default_tz
    if not default_tz:
        LOG.warn("additional_default_tz_types specified, "
                 "but no default_tz. Skipping "
                 "_add_default_tz_bindings().")
        return
    if not network_id:
        LOG.warn("neutron network_id not specified, skipping "
                 "_add_default_tz_bindings()")
        return
    for net_type in CONF.NVP.additional_default_tz_types:
        if net_type in TZ_BINDINGS:
            binding = TZ_BINDINGS[net_type]
            binding.add(context, switch, default_tz, network_id)
        else:
            LOG.warn("Unknown default tz type %s" % (net_type))
Configure any additional default transport zone bindings.
25,326
def yticksize(self, size, index=1):
    self.layout['yaxis' + str(index)]['tickfont']['size'] = size
    return self
Set the tick font size. Parameters ---------- size : int Returns ------- Chart
25,327
def cursor(self, offset=0, limit=None, order_by=None, as_dict=False):
    if offset and limit is None:
        raise DataJointError('limit is required when offset is set')
    sql = self.make_sql()
    if order_by is not None:
        sql += ' ORDER BY ' + ', '.join(order_by)
    if limit is not None:
        sql += ' LIMIT %d' % limit + (' OFFSET %d' % offset if offset else "")
    logger.debug(sql)
    return self.connection.query(sql, as_dict=as_dict)
See expression.fetch() for input description. :return: query cursor
25,328
def line_line_collide(line1, line2):
    s, t, success = segment_intersection(
        line1[:, 0], line1[:, 1], line2[:, 0], line2[:, 1]
    )
    if success:
        return _helpers.in_interval(s, 0.0, 1.0) and _helpers.in_interval(
            t, 0.0, 1.0
        )
    else:
        disjoint, _ = parallel_lines_parameters(
            line1[:, 0], line1[:, 1], line2[:, 0], line2[:, 1]
        )
        return not disjoint
Determine if two line segments meet. This is a helper for :func:`convex_hull_collide` in the special case that the two convex hulls are actually just line segments. (Even in this case, this is only problematic if both segments are on a single line.) Args: line1 (numpy.ndarray): ``2 x 2`` array of start and end nodes. line2 (numpy.ndarray): ``2 x 2`` array of start and end nodes. Returns: bool: Indicating if the line segments collide.
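A usage sketch, assuming the 2 x 2 column layout of start/end nodes described above and that the module's segment helpers are importable:

    import numpy as np

    # Columns are the start and end points of each segment.
    line1 = np.asfortranarray([[0.0, 1.0],
                               [0.0, 1.0]])   # (0,0) -> (1,1)
    line2 = np.asfortranarray([[0.0, 1.0],
                               [1.0, 0.0]])   # (0,1) -> (1,0)
    # line_line_collide(line1, line2) -> True (the segments cross at (0.5, 0.5))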
25,329
def get_loss_maps(dstore, kind):
    oq = dstore['oqparam']
    name = 'loss_maps-%s' % kind
    if name in dstore:
        return _to_loss_maps(dstore[name].value, oq.loss_maps_dt())
    name = 'loss_curves-%s' % kind
    if name in dstore:
        loss_curves = dstore[name]
        loss_maps = scientific.broadcast(
            scientific.loss_maps, loss_curves, oq.conditional_loss_poes)
        return loss_maps
    raise KeyError('no loss_maps or loss_curves found in %s' % dstore)
:param dstore: a DataStore instance :param kind: 'rlzs' or 'stats'
25,330
def get_wbfmt(self, data_nt=None):
    if data_nt is None or self.b_plain:
        return self.fmtname2wbfmtobj.get('plain')
    if self.ntfld_wbfmt is not None:
        return self.__get_wbfmt_usrfld(data_nt)
    if self.b_format_txt:
        wbfmt = self.__get_wbfmt_format_txt(data_nt)
        if wbfmt is not None:
            return wbfmt
    return self.fmtname2wbfmtobj.get('plain')
Return format for text cell.
25,331
def get_distance_to(self, origin=None, other_atoms=None, sort=False):
    if origin is None:
        origin = np.zeros(3)
    elif pd.api.types.is_list_like(origin):
        origin = np.array(origin, dtype='f8')
    else:
        origin = self.loc[origin, ['x', 'y', 'z']]
    if other_atoms is None:
        other_atoms = self.index
    new = self.loc[other_atoms, :].copy()
    norm = np.linalg.norm
    try:
        new['distance'] = norm((new - origin).loc[:, ['x', 'y', 'z']], axis=1)
    except AttributeError:
        new['distance'] = norm((new - origin).loc[:, ['x', 'y', 'z']])
    if sort:
        new.sort_values(by='distance', inplace=True)
    return new
Return a Cartesian with a column for the distance from origin.
25,332
def load(path=None, first_data_line='auto', filters='*.*',
         text='Select a file, FACEHEAD.', default_directory='default_directory',
         quiet=True, header_only=False, transpose=False, **kwargs):
    d = databox(**kwargs)
    d.load_file(path=path, first_data_line=first_data_line, filters=filters,
                text=text, default_directory=default_directory,
                header_only=header_only)
    if not quiet:
        print("\nloaded", d.path, "\n")
    if transpose:
        return d.transpose()
    return d
Loads a data file into the databox data class. Returns the data object. Most keyword arguments are sent to databox.load() so check there for documentation (if their function isn't obvious). Parameters ---------- path=None Supply a path to a data file; None means use a dialog. first_data_line="auto" Specify the index of the first data line, or have it figure this out automatically. filters="*.*" Specify file filters. text="Select a file, FACEHEAD." Window title text. default_directory="default_directory" Which directory to start in (by key). This lives in spinmob.settings. quiet=True Don't print stuff while loading. header_only=False Load only the header information. transpose=False Return databox.transpose(). Additional optional keyword arguments are sent to spinmob.data.databox(), so check there for more information.
25,333
def verify_message(self, message):
    if verify_checksum(
        message,
        self.in_checksum.get(message.id, 0),
    ):
        self.in_checksum[message.id] = message.checksum[1]
        if message.flags == FlagsType.none:
            self.in_checksum.pop(message.id)
    else:
        self.in_checksum.pop(message.id, None)
        raise InvalidChecksumError(
            description="Checksum does not match!",
            id=message.id,
        )
Verify the checksum of the message.
25,334
def aggregate(self, rankings, epsilon, max_iters):
    w = np.zeros((self.m, self.m))
    for ranking in rankings:
        localw = np.zeros((self.m, self.m))
        for ind1, alt1 in enumerate(self.alts):
            for ind2, alt2 in enumerate(self.alts):
                if ind1 == ind2:
                    continue
                alt1_rank = util.get_index_nested(ranking, alt1)
                alt2_rank = util.get_index_nested(ranking, alt2)
                if alt1_rank < alt2_rank:
                    localw[ind1][ind2] = 1
        w += localw
    W = w.sum(axis=1)
    gamma_t = np.ones(self.m) / self.m
    gamma_t1 = np.empty(self.m)
    for f in range(max_iters):
        for i in range(self.m):
            s = 0
            for j in range(self.m):
                if j != i:
                    s += (w[j][i] + w[i][j]) / (gamma_t[i] + gamma_t[j])
            gamma_t1[i] = W[i] / s
        gamma_t1 /= np.sum(gamma_t1)
        if epsilon is not None and np.all(np.absolute(gamma_t1 - gamma_t) < epsilon):
            alt_scores = {cand: gamma_t1[ind] for ind, cand in enumerate(self.alts)}
            self.create_rank_dicts(alt_scores)
            return gamma_t1
        gamma_t = gamma_t1
    alt_scores = {cand: gamma_t1[ind] for ind, cand in enumerate(self.alts)}
    self.create_rank_dicts(alt_scores)
    return gamma_t1
Description: Minorization-Maximization algorithm which returns an estimate of the ground-truth parameters, gamma for the given data. Parameters: rankings: set of rankings to aggregate epsilon: convergence condition value, set to None for iteration only max_iters: maximum number of iterations of MM algorithm
25,335
def user_parse(data): _user = data.get(, {}) _id = _user.get() or _user.get() yield , _id yield , _user.get() yield , _user.get() first_name, _, last_name = _user.get(, ).partition() yield , first_name yield , last_name yield , .format(_id) city, country = map(lambda s: s.strip(), _user.get(, ).split()) yield , city yield , country
Parse information from the provider.
25,336
def update_alias_mapping(settings, alias, new_mapping):
    mapping = aliases_database[alias]
    new_mapping = Mapping(mapping=new_mapping, backend=mapping.backend)
    aliases_database[alias] = new_mapping
    settings.add_alias(alias, new_mapping)
    settings.write_config()
Override `alias` mapping in the user configuration file with the given `new_mapping`, which should be a tuple with 2 or 3 elements (in the form `(project_id, activity_id, role_id)`).
25,337
def want_host_notification(self, notifways, timeperiods, timestamp, state,
                           n_type, business_impact, cmd=None):
    if not self.host_notifications_enabled:
        return False
    for downtime in self.downtimes:
        if downtime.is_in_effect:
            self.in_scheduled_downtime = True
            return False
    self.in_scheduled_downtime = False
    for notifway_id in self.notificationways:
        notifway = notifways[notifway_id]
        nw_b = notifway.want_host_notification(timeperiods, timestamp, state,
                                               n_type, business_impact, cmd)
        if nw_b:
            return True
    return False
Check if notification options match the state of the host :param timestamp: time we want to notify the contact (usually now) :type timestamp: int :param state: host or service state ("UP", "DOWN" ..) :type state: str :param n_type: type of notification ("PROBLEM", "RECOVERY" ..) :type n_type: str :param business_impact: impact of this host :type business_impact: int :param cmd: command launch to notify the contact :type cmd: str :return: True if contact wants notification, otherwise False :rtype: bool
25,338
def post_mortem(traceback):
    try:
        from ipdb import post_mortem
    except ImportError:
        from pdb import post_mortem
    message = "Entering post-mortem debugger. Type `help` for help."
    redline = colorized("{autored}%s{/autored}") % "{0:=^{1}}"
    print()
    print(redline.format(" CRASH! ", len(message)))
    print(message)
    print(redline.format("", len(message)))
    print()
    post_mortem(traceback)
Work with an exception in a post-mortem debugger. Try to use `ipdb` first, falling back to `pdb`.
25,339
def update_interfaces(self, added_sg, updated_sg, removed_sg):
    if not (added_sg or updated_sg or removed_sg):
        return
    with self.sessioned() as session:
        self._set_security_groups(session, added_sg)
        self._unset_security_groups(session, removed_sg)
        combined = added_sg + updated_sg + removed_sg
        self._refresh_interfaces(session, combined)
Handles changes to interfaces' security groups Calls refresh_interfaces on argument VIFs. Set security groups on added_sg's VIFs. Unsets security groups on removed_sg's VIFs.
25,340
def is_holiday(self, date):
    time = [
        date.year, date.month, date.day,
        date.isoweekday(), _extract_week_number(date)
    ]
    target = []
    for key, data in list(zip(TIME_LABEL, time)):
        d = getattr(self, key)
        asterisk = d.get("*", set())
        s = asterisk.union(d.get(data, set()))
        target.append(list(s))
    for result in map(set, product(*target)):
        if len(result) == 1:
            return True
    return False
Judge whether the given date is a holiday. :param datetime date: datetime.date object :rtype: bool
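The core trick is that a rule matches when its id appears in the candidate set for every time component; a minimal standalone sketch of that intersection test (hypothetical rule ids):

    from itertools import product

    # Candidate rule ids per component (year, month, day, weekday, week number).
    target = [{'r1'}, {'r1', 'r2'}, {'r1'}, {'r1', 'r2'}, {'r1'}]
    # A combination that collapses to a single id means one rule matched all components.
    matched = any(len(set(combo)) == 1 for combo in product(*target))
    # matched -> True ('r1' appears in every component set)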
25,341
def send_handle_delete_request(self, **args):
    handle = args['handle']
    indices = args.get('indices')
    op = 'deleting handle (values)'
    url = self.make_handle_URL(handle, indices)
    if indices is not None and len(indices) > 0:
        LOGGER.debug('__send_handle_delete_request: Deleting values '
                     + str(indices) + ' from handle ' + handle + '.')
    else:
        LOGGER.debug('__send_handle_delete_request: Deleting handle '
                     + handle + '.')
    LOGGER.debug('DELETE Request to ' + url)
    head = self.__get_headers('DELETE')
    veri = self.__HTTPS_verify
    resp = self.__session.delete(url, headers=head, verify=veri)
    self.__log_request_response_to_file(
        op='DELETE', handle=handle, url=url,
        headers=head, verify=veri, resp=resp
    )
    if b2handle.hsresponses.not_authenticated(resp):
        raise HandleAuthenticationError(
            operation=op,
            handle=handle,
            response=resp,
            username=self.__username
        )
    self.__first_request = False
    return resp
Send a HTTP DELETE request to the handle server to delete either an entire handle or to some specified values from a handle record, using the requests module. :param handle: The handle. :param indices: Optional. A list of indices to delete. Defaults to None (i.e. the entire handle is deleted.). The list can contain integers or strings. :return: The server's response.
25,342
def get_comments(self, commentable_type, id_): return _get_request(_COM.format(c_api=_C_API_BEGINNING, ct=commentable_type, id_=id_, api=_API_VERSION, at=self.access_token))
commentable_type: 'Press', 'Review', 'Startup', 'StartupRole', 'StatusUpdate'
25,343
def salt_minion():
    import signal
    import salt.utils.platform
    import salt.utils.process
    salt.utils.process.notify_systemd()
    import salt.cli.daemons
    import multiprocessing
    if '' in sys.path:
        sys.path.remove('')
    if salt.utils.platform.is_windows():
        minion = salt.cli.daemons.Minion()
        minion.start()
        return
    elif six.PY2:
        from salt.utils.versions import warn_until
        warn_until(
            'Sodium',
            'Python 2.7 will reach the end of its life on January 1st, 2020. '
            'Salt will drop support for Python 2.7 in the Sodium release or later.'
        )
    if '--disable-keepalive' in sys.argv:
        sys.argv.remove('--disable-keepalive')
        minion = salt.cli.daemons.Minion()
        minion.start()
        return

    def escalate_signal_to_process(pid, signum, sigframe):
        os.kill(pid, signum)

    prev_sigint_handler = signal.getsignal(signal.SIGINT)
    prev_sigterm_handler = signal.getsignal(signal.SIGTERM)
    while True:
        try:
            process = multiprocessing.Process(target=minion_process)
            process.start()
            signal.signal(signal.SIGTERM,
                          functools.partial(escalate_signal_to_process, process.pid))
            signal.signal(signal.SIGINT,
                          functools.partial(escalate_signal_to_process, process.pid))
            signal.signal(signal.SIGHUP,
                          functools.partial(escalate_signal_to_process, process.pid))
        except Exception:
            minion = salt.cli.daemons.Minion()
            minion.start()
            break
        process.join()
        signal.signal(signal.SIGINT, prev_sigint_handler)
        signal.signal(signal.SIGTERM, prev_sigterm_handler)
        if not process.exitcode == salt.defaults.exitcodes.SALT_KEEPALIVE:
            sys.exit(process.exitcode)
        time.sleep(2 + randint(1, 10))
    rlogger = logging.getLogger()
    for handler in rlogger.handlers:
        rlogger.removeHandler(handler)
    logging.basicConfig()
Start the salt minion in a subprocess. Auto restart minion on error.
25,344
def post_process_data(self, voltage=None, incremental_capacity=None,
                      voltage_step=None):
    logging.debug("post-processing data")
    if voltage is None:
        voltage = self.voltage_processed
        incremental_capacity = self.incremental_capacity
        voltage_step = self.voltage_inverted_step
    if self.post_smoothing:
        logging.debug(" - post smoothing (gaussian)")
        logging.debug(f" * using voltage fwhm: {self.voltage_fwhm}")
        points_fwhm = int(self.voltage_fwhm / voltage_step)
        sigma = np.amax([1, points_fwhm / 2])
        self.incremental_capacity = gaussian_filter1d(
            incremental_capacity, sigma=sigma, order=self.gaussian_order,
            mode=self.gaussian_mode, cval=self.gaussian_cval,
            truncate=self.gaussian_truncate
        )
    if self.normalize:
        logging.debug(" - normalizing")
        area = simps(incremental_capacity, voltage)
        self.incremental_capacity = incremental_capacity * self.normalizing_factor / abs(area)
    fixed_range = False
    if isinstance(self.fixed_voltage_range, np.ndarray):
        fixed_range = True
    else:
        if self.fixed_voltage_range:
            fixed_range = True
    if fixed_range:
        logging.debug(" - using fixed voltage range (interpolating)")
        v1, v2, number_of_points = self.fixed_voltage_range
        v = np.linspace(v1, v2, number_of_points)
        f = interp1d(x=self.voltage_processed, y=self.incremental_capacity,
                     kind=self.interpolation_method, bounds_error=False,
                     fill_value=np.NaN)
        self.incremental_capacity = f(v)
        self.voltage_processed = v
perform post-processing (smoothing, normalisation, interpolation) of the data
25,345
def parseJSON(js):
    decoded = json.JSONDecoder().decode(js)
    actors = decoded.get("actors")
    if actors is None:
        raise Exception("No actors found in the system !")
    s = System()
    for a in actors:
        a_name = a.get("name")
        a_formula = a.get("formula")
        a_trace = a.get("trace")
        sa_events = a.get("events")
        a_events = []
        a_speed = 1 if a.get("speed") is None else int(a["speed"])
        for e in sa_events:
            tmp = e.split("|")
            tmp2 = []
            for x in tmp:
                tmp2.append(Actor.Event.parse(x))
            a_events.append(tmp2)
        a_formula = eval(a_formula)
        a_trace = Trace.parse(a_trace)
        actor = Actor(name=a_name, formula=a_formula, trace=a_trace,
                      events=a_events, speed=a_speed)
        s.add_actors(actor)
    s.generate_monitors()
    return s
{ kv_type : "", type : "", actors : <Actors list> [ { actorName : <String>, formula: <String>, events: ["->b", "b->"], trace: [], speed: 1,2,3... } ] } :param json: :return:
25,346
def get_dataset(self, ds_name, mode='r+'):
    if ds_name in self._datasets:
        return self._datasets[ds_name]
    else:
        return self.create_empty_dataset(ds_name)
Returns a h5py dataset given its registered name. :param ds_name: string Name of the dataset to be returned. :return:
25,347
def meter_data_from_json(data, orient="list"):
    if orient == "list":
        df = pd.DataFrame(data, columns=["start", "value"])
        df["start"] = pd.DatetimeIndex(df.start).tz_localize("UTC")
        df = df.set_index("start")
        return df
    else:
        raise ValueError("orientation not recognized.")
Load meter data from json. Default format:: [ ['2017-01-01T00:00:00+00:00', 3.5], ['2017-02-01T00:00:00+00:00', 0.4], ['2017-03-01T00:00:00+00:00', 0.46], ] Parameters ---------- data : :any:`list` List elements are each a rows of data. Returns ------- df : :any:`pandas.DataFrame` DataFrame with a single column (``'value'``) and a :any:`pandas.DatetimeIndex`.
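A minimal usage sketch with the default "list" orientation (naive timestamps here, which the function localizes to UTC):

    data = [
        ["2017-01-01T00:00:00", 3.5],
        ["2017-02-01T00:00:00", 0.4],
    ]
    df = meter_data_from_json(data)
    # df.index: UTC DatetimeIndex named 'start'; df.columns: ['value']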
25,348
def inject(self, raw_data, row_change_callback=None): return self._update(raw_data, row_change_callback, delete_rows=False)
Use this function to add rows or update existing rows in the spreadsheet. Args: raw_data (dict): A dictionary of dictionaries. Where the keys of the outer dictionary uniquely identify each row of data, and the inner dictionaries represent the field,value pairs for a row of data. row_change_callback (Optional) (func): A callback function that you can use to track changes to rows on the spreadsheet. The row_change_callback function must take four parameters like so: change_callback(row_key, row_dict_before, row_dict_after, list_of_changed_keys) Returns: UpdateResults (object): A simple counter object providing statistics about the changes made by sheetsync.
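A hypothetical callback and call under the documented signature; `sheet` is assumed to be an already-constructed sheetsync object exposing inject():

    def log_changes(row_key, row_before, row_after, changed_keys):
        # Hypothetical tracker: report which fields changed for each row.
        print(row_key, changed_keys)

    raw_data = {
        "emp-001": {"name": "Ada", "team": "Platform"},
        "emp-002": {"name": "Grace", "team": "Compilers"},
    }
    results = sheet.inject(raw_data, row_change_callback=log_changes)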
25,349
def on_done(self):
    if self._d:
        self._d.callback(self)
        self._d = None
Reimplemented from :meth:`~AsyncViewBase.on_done`
25,350
def enforce_timezone(cls, value):
    field_timezone = cls.default_timezone()
    if (field_timezone is not None) and not is_aware(value):
        return make_aware(value, field_timezone)
    elif (field_timezone is None) and is_aware(value):
        return make_naive(value, utc)
    return value
When `self.default_timezone` is `None`, always return naive datetimes. When `self.default_timezone` is not `None`, always return aware datetimes.
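A sketch of the two branches, assuming Django's timezone helpers (pre-Django-5, where `utc` is still importable from django.utils.timezone):

    from datetime import datetime
    from django.utils.timezone import is_aware, make_aware, make_naive, utc

    naive = datetime(2021, 6, 1, 12, 0)
    aware = make_aware(naive, utc)   # what the field returns when a default tz is set
    back = make_naive(aware, utc)    # what it returns when default_timezone() is None
    assert is_aware(aware) and not is_aware(back)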
25,351
def create(name, dry_run, verbose, query=None, parent=None):
    if parent is not None:
        parent = Collection.query.filter_by(name=parent).one().id
    collection = Collection(name=name, dbquery=query, parent_id=parent)
    db.session.add(collection)
    if verbose:
        click.secho('Collection {0} created.'.format(collection))
Create new collection.
25,352
def box(self, x0, y0, width, height):
    assert width > 1
    assert height > 1
    width -= 1
    height -= 1
    for x in range(x0, x0 + width):
        self.point(x, y0, "-")
        self.point(x, y0 + height, "-")
    for y in range(y0, y0 + height):
        self.point(x0, y, "|")
        self.point(x0 + width, y, "|")
    self.point(x0, y0, "+")
    self.point(x0 + width, y0, "+")
    self.point(x0, y0 + height, "+")
    self.point(x0 + width, y0 + height, "+")
Create a box on ASCII canvas. Args: x0 (int): x coordinate of the box corner. y0 (int): y coordinate of the box corner. width (int): box width. height (int): box height.
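A usage sketch, assuming an AsciiCanvas-style class that exposes point() and this box() method (constructor arguments are hypothetical):

    canvas = AsciiCanvas(6, 4)   # hypothetical constructor: width, height in chars
    canvas.box(0, 0, 6, 4)
    # Drawn result:
    #   +----+
    #   |    |
    #   |    |
    #   +----+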
25,353
def masses_of_galaxies_within_circles_in_units(self, radius: dim.Length,
                                               unit_mass='angular',
                                               critical_surface_density=None):
    return list(map(lambda galaxy: galaxy.mass_within_circle_in_units(
        radius=radius, unit_mass=unit_mass,
        kpc_per_arcsec=self.kpc_per_arcsec,
        critical_surface_density=critical_surface_density),
        self.galaxies))
Compute the total mass of all galaxies in this plane within a circle of specified radius. See *galaxy.angular_mass_within_circle* and *mass_profiles.angular_mass_within_circle* for details of how this is performed. Parameters ---------- radius : float The radius of the circle to compute the dimensionless mass within. unit_mass : str The units the mass is returned in (angular | solMass). critical_surface_density : float The critical surface mass density of the strong lens configuration, which converts mass from angular \ units to physical units (e.g. solar masses).
25,354
def convert_pmod(pmod):
    if pmod.args[0].value in spec["bel1_migration"]["protein_modifications"]:
        pmod.args[0].value = spec["bel1_migration"]["protein_modifications"][
            pmod.args[0].value
        ]
    return pmod
Update BEL1 pmod() protein modification term
25,355
def _tensor_proto_to_health_pill(self, tensor_event, node_name, device, output_slot): return self._process_health_pill_value( wall_time=tensor_event.wall_time, step=tensor_event.step, device_name=device, output_slot=output_slot, node_name=node_name, tensor_proto=tensor_event.tensor_proto)
Converts an event_accumulator.TensorEvent to a HealthPillEvent. Args: tensor_event: The event_accumulator.TensorEvent to convert. node_name: The name of the node (without the output slot). device: The device. output_slot: The integer output slot this health pill is relevant to. Returns: A HealthPillEvent.
25,356
def set_step(self, value, block_events=False):
    if block_events:
        self.block_events()
    self._widget.setSingleStep(value)
    if block_events:
        self.unblock_events()
Sets the step of the number box. Setting block_events=True will temporarily block the widget from sending any signals when setting the value.
25,357
def download_and_compile_igraph(self):
    print("We will now try to download and compile the C core from scratch.")
    print("Version number of the C core: %s" % self.c_core_versions[0])
    if len(self.c_core_versions) > 1:
        print("We will also try: %s" % ", ".join(self.c_core_versions[1:]))
    print("")
    igraph_builder = IgraphCCoreBuilder(self.c_core_versions, self.c_core_url,
                                        show_progress_bar=self.show_progress_bar)
    if not igraph_builder.run():
        print("Could not download and compile the C core of igraph.")
        print("")
        return False
    else:
        return True
Downloads and compiles the C core of igraph.
25,358
def setCurveModel(self, model):
    self.stimModel = model
    self.ui.curveWidget.setModel(model)
Sets the stimulus model for the calibration curve test :param model: Stimulus model that has a tone curve configured :type model: :class:`StimulusModel <sparkle.stim.stimulus_model.StimulusModel>`
25,359
def value_eq(self, other):
    self_sorted = ordered.ordered(self.getvalues())
    other_sorted = ordered.ordered(repeated.getvalues(other))
    return self_sorted == other_sorted
Sorted comparison of values.
25,360
async def reset_config(self, to_default): app_facade = client.ApplicationFacade.from_connection(self.connection) log.debug( , self.name, to_default) return await app_facade.Unset(self.name, to_default)
Restore application config to default values. :param list to_default: A list of config options to be reset to their default value.
25,361
def preferred(self):
    if 'Preferred-Value' in self.data['record']:
        preferred = self.data['record']['Preferred-Value']
        type = self.data['type']
        if type == 'extlang':
            type = 'language'
        return Subtag(preferred, type)
    return None
Get the preferred subtag. :return: preferred :class:`language_tags.Subtag.Subtag` if exists, otherwise None.
25,362
def _expand_syntax_quote(
    ctx: ReaderContext, form: IterableLispForm
) -> Iterable[LispForm]:
    expanded = []
    for elem in form:
        if _is_unquote(elem):
            expanded.append(llist.l(_LIST, elem[1]))
        elif _is_unquote_splicing(elem):
            expanded.append(elem[1])
        else:
            expanded.append(llist.l(_LIST, _process_syntax_quoted_form(ctx, elem)))
    return expanded
Expand syntax quoted forms to handle unquoting and unquote-splicing. The unquoted form (unquote x) becomes: (list x) The unquote-spliced form (unquote-splicing x) becomes x All other forms are recursively processed as by _process_syntax_quoted_form and are returned as: (list form)
25,363
def show_firmware_version_output_show_firmware_version_build_time(self, **kwargs):
    config = ET.Element("config")
    show_firmware_version = ET.Element("show_firmware_version")
    config = show_firmware_version
    output = ET.SubElement(show_firmware_version, "output")
    show_firmware_version = ET.SubElement(output, "show-firmware-version")
    build_time = ET.SubElement(show_firmware_version, "build-time")
    build_time.text = kwargs.pop('build_time')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
25,364
def get_maintenance_window(self, id, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.get_maintenance_window_with_http_info(id, **kwargs)
    else:
        (data) = self.get_maintenance_window_with_http_info(id, **kwargs)
        return data
Get a specific maintenance window # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_maintenance_window(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :return: ResponseContainerMaintenanceWindow If the method is called asynchronously, returns the request thread.
25,365
def _hmm_command(self, input_pipe, pairs_to_run):
    element = pairs_to_run.pop()
    hmmsearch_cmd = self._individual_hmm_command(element[0][0], element[0][1],
                                                 element[1])
    while len(pairs_to_run) > 0:
        element = pairs_to_run.pop()
        hmmsearch_cmd = "tee >(%s) | %s" % (
            self._individual_hmm_command(element[0][0], element[0][1], element[1]),
            hmmsearch_cmd)
    hmmsearch_cmd = "%s | %s" % (input_pipe, hmmsearch_cmd)
    return hmmsearch_cmd
r"""INTERNAL method for getting cmdline for running a batch of HMMs. Parameters ---------- input_pipe: as hmmsearch pairs_to_run: list list with 2 members: (1) list of hmm and output file, (2) number of CPUs to use when searching Returns ------- A string command to be run with bash
25,366
def scene_on(self):
    user_data = Userdata({'d1': self._group, 'd2': 0x00, 'd3': 0x00,
                          'd4': 0x11, 'd5': 0xff, 'd6': 0x00})
    self._set_sent_property(DIMMABLE_KEYPAD_SCENE_ON_LEVEL, 0xff)
    cmd = ExtendedSend(self._address,
                       COMMAND_EXTENDED_TRIGGER_ALL_LINK_0X30_0X00,
                       user_data)
    cmd.set_checksum()
    _LOGGER.debug('Sending scene on command')
    self._send_method(cmd, self._received_scene_triggered)
Trigger group/scene to ON level.
25,367
def set_hosts_file_entry_for_role(self, role_name, network_name=, fqdn=None, domain_name=None): log = logging.getLogger(self.cls_logger + ) if fqdn: host_file_entry = fqdn else: if domain_name: host_file_entry = .format(r=role_name, d=domain_name) else: host_file_entry = role_name log.info(.format(e=host_file_entry)) log.info(.format(r=role_name, n=network_name)) for scenario_host in self.scenario_network_info: if scenario_host[] == role_name: for host_network_info in scenario_host[]: if host_network_info[] == network_name: self.update_hosts_file(ip=host_network_info[], entry=host_file_entry)
Adds an entry to the hosts file for a scenario host given the role name and network name :param role_name: (str) role name of the host to add :param network_name: (str) Name of the network to add to the hosts file :param fqdn: (str) Fully qualified domain name to use in the hosts file entry (trumps domain name) :param domain_name: (str) Domain name to include in the hosts file entries if provided :return:
25,368
def mkdir(path):
    try:
        os.makedirs(path)
        if not os.path.isdir(path):
            raise IOError()
    except OSError as e:
        if e.errno == 17 and os.path.isdir(path):
            return
        raise
Make a directory and its parents. Args: path (str): path to create Returns: None Raises: OSError if the directory cannot be created.
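On Python 3.2+, the same race-tolerant behavior (errno 17 is EEXIST) is available directly from the standard library:

    import os

    os.makedirs('/tmp/demo/nested', exist_ok=True)   # no error if it already exists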
25,369
def events(self, year, simple=False, keys=False):
    if keys:
        return self._get('events/%s/keys' % year)
    else:
        return [Event(raw) for raw in
                self._get('events/%s%s' % (year, '/simple' if simple else ''))]
Get a list of events in a given year. :param year: Year to get events from. :param keys: Get only keys of the events rather than full data. :param simple: Get only vital data. :return: List of string event keys or Event objects.
25,370
def move(self, path, raise_if_exists=False): self.fs.move(self.path, path, raise_if_exists)
Call MockFileSystem's move command
25,371
def validate_is_primary(self, is_primary):
    if is_primary and not (self.instance and self.instance.is_verified):
        raise serializers.ValidationError(
            _(
                "Unverified email addresses may not be used as the "
                "primary address."
            )
        )
    return is_primary
Validate the provided 'is_primary' parameter. Returns: The validated 'is_primary' value. Raises: serializers.ValidationError: If the user attempted to mark an unverified email as their primary email address.
25,372
def referenceable(method):
    def wrapper(self, value, *args):
        deref = self._prepare(value)
        if deref is not None:
            return deref
        packer, data = method(self, value, *args)
        return self._preserve(value, packer, data)
    return wrapper
Used in BaseSerializer and its sub-classes to flatten referenceable values. Hides the reference handling from sub-classes. For example, to make strings referenceable in a sub-class, use this decorator to decorate flatten_str().
25,373
def get_annotated_chain_sequence_string(self, chain_id,
                                        use_seqres_sequences_if_possible,
                                        raise_Exception_if_not_found=True):
    if (use_seqres_sequences_if_possible and self.seqres_sequences
            and self.seqres_sequences.get(chain_id)):
        return ('SEQRES', self.seqres_sequences[chain_id])
    elif self.atom_sequences.get(chain_id):
        return ('ATOM', self.atom_sequences[chain_id])
    elif raise_Exception_if_not_found:
        raise Exception('Chain %s not found.' % (str(chain_id)))
    else:
        return None
A helper function to return the Sequence for a chain. If use_seqres_sequences_if_possible then we return the SEQRES Sequence if it exists. We return a tuple of values, the first identifying which sequence was returned.
25,374
def create_binding(site, hostheader='', ipaddress='*', port=80, protocol='http',
                   sslflags=None):
    protocol = six.text_type(protocol).lower()
    name = _get_binding_info(hostheader, ipaddress, port)
    if protocol not in _VALID_PROTOCOLS:
        message = ("Invalid protocol '{0}' specified. Valid formats:"
                   " {1}").format(protocol, _VALID_PROTOCOLS)
        raise SaltInvocationError(message)
    if sslflags:
        sslflags = int(sslflags)
        if sslflags not in _VALID_SSL_FLAGS:
            message = ("Invalid sslflags '{0}' specified. Valid sslflags range:"
                       " {1}..{2}").format(sslflags, _VALID_SSL_FLAGS[0],
                                           _VALID_SSL_FLAGS[-1])
            raise SaltInvocationError(message)
    current_bindings = list_bindings(site)
    if name in current_bindings:
        log.debug('Binding already present: %s', name)
        return True
    if sslflags:
        ps_cmd = ['New-WebBinding',
                  '-Name', "'{0}'".format(site),
                  '-HostHeader', "'{0}'".format(hostheader),
                  '-IPAddress', "'{0}'".format(ipaddress),
                  '-Port', "'{0}'".format(port),
                  '-Protocol', "'{0}'".format(protocol),
                  '-SslFlags', '{0}'.format(sslflags)]
    else:
        ps_cmd = ['New-WebBinding',
                  '-Name', "'{0}'".format(site),
                  '-HostHeader', "'{0}'".format(hostheader),
                  '-IPAddress', "'{0}'".format(ipaddress),
                  '-Port', "'{0}'".format(port),
                  '-Protocol', "'{0}'".format(protocol)]
    cmd_ret = _srvmgr(ps_cmd)
    if cmd_ret['retcode'] != 0:
        msg = 'Unable to create binding: {0}\nError: {1}' \
              .format(site, cmd_ret['stderr'])
        raise CommandExecutionError(msg)
    if name in list_bindings(site):
        log.debug('Binding created successfully: %s', site)
        return True
    log.error('Unable to create binding: %s', site)
    return False
Create an IIS Web Binding. .. note:: This function only validates against the binding ipaddress:port:hostheader combination, and will return True even if the binding already exists with a different configuration. It will not modify the configuration of an existing binding. Args: site (str): The IIS site name. hostheader (str): The host header of the binding. Usually a hostname. ipaddress (str): The IP address of the binding. port (int): The TCP port of the binding. protocol (str): The application protocol of the binding. sslflags (str): The flags representing certificate type and storage of the binding. Returns: bool: True if successful, otherwise False CLI Example: .. code-block:: bash salt '*' win_iis.create_binding site='site0' hostheader='example.com' ipaddress='*' port='80'
25,375
def _show_loading_page(self):
    self.shellwidget.hide()
    self.infowidget.show()
    self.info_page = self.loading_page
    self.set_info_page()
Show animation while the kernel is loading.
25,376
def pop_marker(self, reset):
    saved = self.saved_markers.pop()
    if reset:
        self.marker = saved
    elif self.saved_markers:
        self.saved_markers[-1] = saved
Pop a marker off of the marker stack. If reset is True then the iterator will be returned to the state it was in before the corresponding call to push_marker().
25,377
def plot_all(*args, **kwargs): dfs = do_all(*args, **kwargs) ps = [] for line in dfs: f, df, config = line df.plot(title=config[]) ps.append(df) return ps
Read all the trial data and plot the result of applying a function on them.
25,378
def gather_facts_list(self, file):
    facts = []
    contents = utils.file_to_string(os.path.join(self.paths["role"], file))
    contents = re.sub(r"\s+", "", contents)
    matches = self.regex_facts.findall(contents)
    for match in matches:
        facts.append(match.split(":")[1])
    return facts
Return a list of facts.
25,379
def select_objects(self, json_string, expr):
    load_input_json = self.string_to_json(json_string)
    tree = objectpath.Tree(load_input_json)
    values = tree.execute(expr)
    return list(values)
Return list of elements from _json_string_, matching [ http://objectpath.org// | ObjectPath] expression. *Args:*\n _json_string_ - JSON string;\n _expr_ - ObjectPath expression; *Returns:*\n List of found elements. If no elements were found, empty list will be returned *Example:*\n | *Settings* | *Value* | | Library | JsonValidator | | Library | OperatingSystem | | *Test Cases* | *Action* | *Argument* | *Argument* | | Select json objects | ${json_example}= | OperatingSystem.Get File | ${CURDIR}${/}json_example.json | | | ${json_objectss}= | Select objects | ${json_example} | $..book[@.author.name is "Evelyn Waugh"].price | =>\n | [12.99]
25,380
def checksum(command):
    crc = 0x147A
    for b in command:
        crc = ((crc << 1) & 0xFFFF) | (crc & 0x8000) >> 15
        crc = crc ^ 0xFFFF
        crc = (crc + (crc >> 8) + b) & 0xFFFF
    return crc
Function to calculate checksum as per Satel manual.
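A usage sketch over raw frame bytes (the byte values here are illustrative, not taken from the Satel manual):

    frame = bytearray([0x09, 0x00])   # command byte plus one data byte
    crc = checksum(frame)
    hi, lo = crc >> 8, crc & 0xFF     # the two checksum bytes appended to the frame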
25,381
def reinitialize_command(self, command, reinit_subcommands=0):
    cmd = self.distribution.reinitialize_command(
        command, reinit_subcommands)
    if command in ('install', 'install_lib'):
        cmd.install_lib = None
    return cmd
Supplement reinitialize_command to work around http://bugs.python.org/issue20819
25,382
def get_output_volume():
    cmd = 'osascript -e "get output volume of (get volume settings)"'
    call = __salt__['cmd.run_all'](
        cmd,
        output_loglevel='debug',
        python_shell=False
    )
    _check_cmd(call)
    return call.get('stdout')
Get the output volume (range 0 to 100) CLI Example: .. code-block:: bash salt '*' desktop.get_output_volume
25,383
def _add_junction(item):
    type_, channels = _expand_one_key_dictionary(item)
    junction = UnnamedStatement(type='junction')
    for item in channels:
        type_, value = _expand_one_key_dictionary(item)
        channel = UnnamedStatement(type='channel')
        for val in value:
            if _is_reference(val):
                _add_reference(val, channel)
            elif _is_inline_definition(val):
                _add_inline_definition(val, channel)
        junction.add_child(channel)
    _current_statement.add_child(junction)
Adds a junction to the _current_statement.
25,384
def query(self, query_text, n=10):
    if query_text.startswith("learn:"):
        doctext = os.popen(query_text[len("learn:"):], 'r').read()
        self.index_document(doctext, query_text)
        return []
    qwords = [w for w in words(query_text) if w not in self.stopwords]
    shortest = argmin(qwords, lambda w: len(self.index[w]))
    docs = self.index[shortest]
    results = [(sum([self.score(w, d) for w in qwords]), d) for d in docs]
    results.sort()
    results.reverse()
    return results[:n]
Return a list of n (score, docid) pairs for the best matches. Also handle the special syntax for 'learn: command'.
25,385
def getClassPath():
    global _CLASSPATHS
    global _SEP
    out = []
    for path in _CLASSPATHS:
        if path == '':
            continue
        if path.endswith('*'):
            paths = _glob.glob(path + ".jar")
            if len(paths) == 0:
                continue
            out.extend(paths)
        else:
            out.append(path)
    return _SEP.join(out)
Get the full java class path. Includes user added paths and the environment CLASSPATH.
25,386
def bookSSE(symbols=None, on_data=None, token='', version=''):
    return _runSSE('book', symbols, on_data, token, version)
Book shows IEX’s bids and asks for given symbols. https://iexcloud.io/docs/api/#deep-book Args: symbols (string); Tickers to request on_data (function): Callback on data token (string); Access token version (string); API version
25,387
def chart_type(cls, plot):
    try:
        chart_type_method = {
            'AreaPlot': cls._differentiate_area_chart_type,
            'Area3DPlot': cls._differentiate_area_3d_chart_type,
            'BarPlot': cls._differentiate_bar_chart_type,
            'BubblePlot': cls._differentiate_bubble_chart_type,
            'DoughnutPlot': cls._differentiate_doughnut_chart_type,
            'LinePlot': cls._differentiate_line_chart_type,
            'PiePlot': cls._differentiate_pie_chart_type,
            'RadarPlot': cls._differentiate_radar_chart_type,
            'XyPlot': cls._differentiate_xy_chart_type,
        }[plot.__class__.__name__]
    except KeyError:
        raise NotImplementedError(
            "chart_type() not implemented for %s" % plot.__class__.__name__
        )
    return chart_type_method(plot)
Return the member of :ref:`XlChartType` that corresponds to the chart type of *plot*.
25,388
def get_inline_views_from_fieldsets(fieldsets):
    inline_views = []
    for _, opts in fieldsets or ():
        if 'fieldsets' in opts:
            inline_views += get_inline_views_from_fieldsets(opts.get('fieldsets'))
        elif 'inline_view' in opts:
            inline_views.append(opts.get('inline_view'))
    return inline_views
Returns a list of inline views from an admin fieldsets structure.
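For example, with one nested fieldset (view names are hypothetical):

    fieldsets = (
        ('Detail', {'fieldsets': (
            (None, {'inline_view': 'NotesInlineView'}),
        )}),
        (None, {'inline_view': 'AttachmentsInlineView'}),
    )
    print(get_inline_views_from_fieldsets(fieldsets))
    # ['NotesInlineView', 'AttachmentsInlineView']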
25,389
def plot_border(mask, should_plot_border, units, kpc_per_arcsec, pointsize,
                zoom_offset_pixels):
    if should_plot_border and mask is not None:
        plt.gca()
        border_pixels = mask.masked_grid_index_to_pixel[mask.border_pixels]
        if zoom_offset_pixels is not None:
            border_pixels -= zoom_offset_pixels
        border_arcsec = mask.grid_pixels_to_grid_arcsec(grid_pixels=border_pixels)
        border_units = convert_grid_units(array=mask, grid_arcsec=border_arcsec,
                                          units=units,
                                          kpc_per_arcsec=kpc_per_arcsec)
        plt.scatter(y=border_units[:, 0], x=border_units[:, 1],
                    s=pointsize, c='y')
Plot the borders of the mask or the array on the figure. Parameters ---------- mask : ndarray of data.array.mask.Mask The mask applied to the array, the edge of which is plotted as a set of points over the plotted array. should_plot_border : bool If a mask is supplied, its border pixels (e.g. the exterior edge) are plotted if this is *True*. units : str The units of the y / x axis of the plots, in arc-seconds ('arcsec') or kiloparsecs ('kpc'). kpc_per_arcsec : float or None The conversion factor between arc-seconds and kiloparsecs, required to plot the units in kpc. pointsize : int The size of the points plotted to show the border.
25,390
def to_file(self, output_file, smooth_fwhm=0, outdtype=None):
    outmat, mask_indices, mask_shape = self.to_matrix(smooth_fwhm, outdtype)
    exporter = ExportData()
    content = {'data': outmat,
               'labels': self.labels,
               'mask_indices': mask_indices,
               'vol_shape': mask_shape,
               }
    if self.others:
        content.update(self.others)
    log.debug('Saving data in {0}.'.format(output_file))
    try:
        exporter.save_variables(output_file, content)
    except Exception as exc:
        raise Exception('Error saving variables to file {0}.'.format(output_file)) from exc
Save the Numpy array created from to_matrix function to the output_file. Will save into the file: outmat, mask_indices, vol_shape and self.others (put here whatever you want) data: Numpy array with shape N x prod(vol.shape) containing the N files as flat vectors. mask_indices: matrix with indices of the voxels in the mask vol_shape: Tuple with shape of the volumes, for reshaping. Parameters ---------- output_file: str Path to the output file. The extension of the file will be taken into account for the file format. Choices of extensions: '.pyshelf' or '.shelf' (Python shelve) '.mat' (Matlab archive), '.hdf5' or '.h5' (HDF5 file) smooth_fwhm: int Integer indicating the size of the FWHM Gaussian smoothing kernel to smooth the subject volumes before creating the data matrix outdtype: dtype Type of the elements of the array, if None will obtain the dtype from the first nifti file.
25,391
def null_space(M, k, k_skip=1, eigen_solver='arpack', random_state=None,
               solver_kwds=None):
    eigen_solver, solver_kwds = check_eigen_solver(eigen_solver, solver_kwds,
                                                   size=M.shape[0],
                                                   nvec=k + k_skip)
    random_state = check_random_state(random_state)
    if eigen_solver == 'arpack':
        v0 = random_state.uniform(-1, 1, M.shape[0])
        try:
            eigen_values, eigen_vectors = eigsh(M, k + k_skip, sigma=0.0, v0=v0,
                                                **(solver_kwds or {}))
        except RuntimeError as msg:
            raise ValueError("Error in determining null-space with ARPACK. "
                             "Error message: '%s'. "
                             "Note that method='arpack' can fail when the "
                             "weight matrix is singular or otherwise "
                             "ill-behaved. method='dense' is recommended. "
                             "See online documentation for more information." % msg)
        return eigen_vectors[:, k_skip:], np.sum(eigen_values[k_skip:])
    elif eigen_solver == 'dense':
        if hasattr(M, 'toarray'):
            M = M.toarray()
        eigen_values, eigen_vectors = eigh(M, eigvals=(0, k + k_skip),
                                           overwrite_a=True,
                                           **(solver_kwds or {}))
        index = np.argsort(np.abs(eigen_values))
        eigen_vectors = eigen_vectors[:, index]
        eigen_values = eigen_values[index]
        return eigen_vectors[:, k_skip:k + 1], np.sum(eigen_values[k_skip:k + 1])
    elif (eigen_solver == 'amg' or eigen_solver == 'lobpcg'):
        try:
            M = sparse.identity(M.shape[0]) + M
            n_components = min(k + k_skip + 10, M.shape[0])
            eigen_values, eigen_vectors = eigen_decomposition(
                M, n_components, eigen_solver=eigen_solver, drop_first=False,
                largest=False, random_state=random_state,
                solver_kwds=solver_kwds)
            eigen_values = eigen_values - 1
            index = np.argsort(np.abs(eigen_values))
            eigen_values = eigen_values[index]
            eigen_vectors = eigen_vectors[:, index]
            return eigen_vectors[:, k_skip:k + 1], np.sum(eigen_values[k_skip:k + 1])
        except np.linalg.LinAlgError:
            warnings.warn("LOBPCG failed the first time. Increasing Pos Def adjustment.")
            M = 2.0 * sparse.identity(M.shape[0]) + M
            n_components = min(k + k_skip + 10, M.shape[0])
            eigen_values, eigen_vectors = eigen_decomposition(
                M, n_components, eigen_solver=eigen_solver, drop_first=False,
                largest=False, random_state=random_state,
                solver_kwds=solver_kwds)
            eigen_values = eigen_values - 2
            index = np.argsort(np.abs(eigen_values))
            eigen_values = eigen_values[index]
            eigen_vectors = eigen_vectors[:, index]
            return eigen_vectors[:, k_skip:k + 1], np.sum(eigen_values[k_skip:k + 1])
    else:
        raise ValueError("Unrecognized eigen_solver '%s'" % eigen_solver)
Find the null space of a matrix M: eigenvectors associated with 0 eigenvalues Parameters ---------- M : {array, matrix, sparse matrix, LinearOperator} Input covariance matrix: should be symmetric positive semi-definite k : integer Number of eigenvalues/vectors to return k_skip : integer, optional Number of low eigenvalues to skip. eigen_solver : {'auto', 'dense', 'arpack', 'lobpcg', or 'amg'} 'auto' : algorithm will attempt to choose the best method for input data 'dense' : use standard dense matrix operations for the eigenvalue decomposition. For this method, M must be an array or matrix type. This method should be avoided for large problems. 'arpack' : use arnoldi iteration in shift-invert mode. For this method, M may be a dense matrix, sparse matrix, or general linear operator. Warning: ARPACK can be unstable for some problems. It is best to try several random seeds in order to check results. 'lobpcg' : Locally Optimal Block Preconditioned Conjugate Gradient Method. A preconditioned eigensolver for large symmetric positive definite (SPD) generalized eigenproblems. 'amg' : AMG requires pyamg to be installed. It can be faster on very large, sparse problems, but may also lead to instabilities. random_state: numpy.RandomState or int, optional The generator or seed used to determine the starting vector for arpack iterations. Defaults to numpy.random. solver_kwds : any additional keyword arguments to pass to the selected eigen_solver Returns ------- null_space : estimated k vectors of the null space error : estimated error (sum of eigenvalues) Notes ----- dense solver key words: see http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.linalg.eigh.html for symmetric problems and http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.linalg.eig.html#scipy.linalg.eig for non symmetric problems. arpack sovler key words: see http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.sparse.linalg.eigsh.html for symmetric problems and http://docs.scipy.org/doc/scipy-0.14.0/reference/generated/scipy.sparse.linalg.eigs.html#scipy.sparse.linalg.eigs for non symmetric problems. lobpcg solver keywords: see http://docs.scipy.org/doc/scipy/reference/generated/scipy.sparse.linalg.lobpcg.html amg solver keywords: see http://pyamg.googlecode.com/svn/branches/1.0.x/Docs/html/pyamg.aggregation.html#module-pyamg.aggregation.aggregation (Note amg solver uses lobpcg and also accepts lobpcg keywords)
25,392
def coupleTo_vswitch(userid, vswitch_name):
    print("\nCoupling to vswitch for %s ..." % userid)
    vswitch_info = client.send_request('guest_nic_couple_to_vswitch',
                                       userid, '1000', vswitch_name)
    if vswitch_info['overallRC']:
        raise RuntimeError("Failed to couple to vswitch for guest %s!\n%s"
                           % (userid, vswitch_info))
    else:
        print("Succeeded to couple to vswitch for guest %s!" % userid)
Couple to vswitch. Input parameters: :userid: USERID of the guest, last 8 if length > 8 :vswitch_name: name of the vswitch to couple the guest NIC to
25,393
def start(grains=False, grain_keys=None, pillar=False, pillar_keys=None):
    state = salt.thorium.ThorState(
        __opts__, grains, grain_keys, pillar, pillar_keys)
    state.start_runtime()
Execute the Thorium runtime
25,394
def uchroot(*args, **kwargs):
    uchroot_cmd = with_mounts(*args, uchroot_cmd_fn=no_llvm, **kwargs)
    return uchroot_cmd["--"]
Return a customizable uchroot command. Args: args: List of additional arguments for uchroot (typical: mounts) Return: chroot_cmd
25,395
def remove_handlers_bound_to_instance(self, obj):
    for handler in self.handlers:
        # Python 2 bound methods expose their instance as im_self
        # (the equivalent attribute in Python 3 is __self__).
        if handler.im_self == obj:
            self -= handler
Remove all handlers bound to given object instance. This is useful to remove all handler methods that are part of an instance. :param object obj: Remove handlers that are methods of this instance
25,396
def _get_available_encodings():
    available_encodings = set(encodings.aliases.aliases.values())
    paths = [os.path.dirname(encodings.__file__)]
    for importer, modname, ispkg in pkgutil.walk_packages(path=paths):
        available_encodings.add(modname)
    available_encodings = list(available_encodings)
    available_encodings.sort()
    return available_encodings
Get a list of the available encodings to make it easy to tab-complete the command line interface. Inspiration from http://stackoverflow.com/a/3824405/564709
25,397
def save(self) -> None:
    for name, field in self.fields.items():
        value = self.cleaned_data[name]
        if isinstance(value, UploadedFile):
            fname = self._s.get(name, as_type=File)
            if fname:
                try:
                    default_storage.delete(fname.name)
                except OSError:
                    logger.error('Deleting file %s failed.' % fname.name)
            newname = default_storage.save(self.get_new_filename(value.name), value)
            value._name = newname
            self._s.set(name, value)
        elif isinstance(value, File):
            continue
        elif isinstance(field, forms.FileField):
            fname = self._s.get(name, as_type=File)
            if fname:
                try:
                    default_storage.delete(fname.name)
                except OSError:
                    logger.error('Deleting file %s failed.' % fname.name)
            del self._s[name]
        elif value is None:
            del self._s[name]
        elif self._s.get(name, as_type=type(value)) != value:
            self._s.set(name, value)
Saves all changed values to the database.
25,398
def delete_job(job_id, deployment_name, token_manager=None, app_url=defaults.APP_URL):
    headers = token_manager.get_access_token_headers()
    data_url = get_data_url_for_job(job_id, deployment_name,
                                    token_manager=token_manager, app_url=app_url)
    url = '%s/api/v1/jobs/%s' % (data_url, job_id)
    response = requests.delete(url, headers=headers)
    if response.status_code != 200:
        raise JutException('Error %s: %s' % (response.status_code, response.text))
delete a job with a specific job id
25,399
def open(self, filename, mode='r', bufsize=-1):
    sftp_client = self.open_sftp()
    return sftp_client.open(filename, mode, bufsize)
Open a file on the remote system and return a file-like object.