Dataset schema (from the dataset viewer): column "Unnamed: 0" is an int64 row index (values 0 to 389k); column "code" holds function source strings (lengths 26 to 79.6k); column "docstring" holds documentation strings (lengths 1 to 46.9k). Each row below is an (index, code, docstring) triple.
22,100
def reply_regexp(self, user, regexp):
    if regexp in self.master._regexc["trigger"]:
        return self.master._regexc["trigger"][regexp]
    regexp = re.sub(RE.zero_star, r'<zerowidthstar>', regexp)
    arrays = re.findall(RE.array, regexp)
    for array in arrays:
        rep = ''
        if array in self.master._array:
            rep = r'(?:' + '|'.join(self.expand_array(array)) + ')'
        regexp = re.sub(r'\@' + re.escape(array) + r'\b', rep, regexp)
    regexp = regexp.replace('*', '(.+?)')
    regexp = regexp.replace('#', r'(\d+?)')
    regexp = regexp.replace('_', r'(\w+?)')
    regexp = re.sub(RE.weight, '', regexp)
    regexp = regexp.replace('<zerowidthstar>', r'(.*?)')
    optionals = re.findall(RE.optionals, regexp)
    for match in optionals:
        parts = match.split("|")
        new = []
        for p in parts:
            p = r'(?:\s|\b)+{}(?:\s|\b)+'.format(p.strip())
            new.append(p)
        pipes = '|'.join(new)
        pipes = pipes.replace(r'(.+?)', r'(?:.+?)')
        pipes = pipes.replace(r'(\d+?)', r'(?:\d+?)')
        pipes = pipes.replace(r'(\w+?)', r'(?:\w+?)')
        regexp = re.sub(r'\s*\[' + re.escape(match) + r'\]\s*',
                        '(?:' + pipes + r'|(?:\s|\b))', regexp)
    if self.utf8:
        return re.compile(r'^' + regexp.lower() + r'$', re.UNICODE)
    else:
        return re.compile(r'^' + regexp.lower() + r'$')
Prepares a trigger for the regular expression engine. :param str user: The user ID invoking a reply. :param str regexp: The original trigger text to be turned into a regexp. :return regexp: The final regexp object.
22,101
def resize(self, shape):
    if not (isinstance(shape, tuple) and len(shape) == 2):
        raise ValueError()
    for buf in (self.color_buffer, self.depth_buffer, self.stencil_buffer):
        if buf is None:
            continue
        shape_ = shape
        if isinstance(buf, Texture2D):
            shape_ = shape + (self.color_buffer.shape[-1], )
        buf.resize(shape_, buf.format)
Resize all attached buffers with the given shape Parameters ---------- shape : tuple of two integers New buffer shape (h, w), to be applied to all currently attached buffers. For buffers that are a texture, the number of color channels is preserved.
22,102
def update(self, get_running_apps=True):
    screen_on, awake, wake_lock_size, _current_app, running_apps = \
        self.get_properties(get_running_apps=get_running_apps, lazy=True)
    if not screen_on:
        state = STATE_OFF
        current_app = None
        running_apps = None
    elif not awake:
        state = STATE_IDLE
        current_app = None
        running_apps = None
    else:
        if isinstance(_current_app, dict) and 'package' in _current_app:
            current_app = _current_app['package']
        else:
            current_app = None
        if running_apps is None and current_app:
            running_apps = [current_app]
        if current_app in [PACKAGE_LAUNCHER, PACKAGE_SETTINGS]:
            state = STATE_STANDBY
        elif current_app == AMAZON_VIDEO:
            if wake_lock_size == 5:
                state = STATE_PLAYING
            else:
                state = STATE_PAUSED
        elif current_app == NETFLIX:
            if wake_lock_size > 3:
                state = STATE_PLAYING
            else:
                state = STATE_PAUSED
        elif wake_lock_size == 1:
            state = STATE_PLAYING
        else:
            state = STATE_PAUSED
    return state, current_app, running_apps
Get the state of the device, the current app, and the running apps. :param get_running_apps: whether or not to get the ``running_apps`` property :return state: the state of the device :return current_app: the current app :return running_apps: the running apps
22,103
def ignore(name):
    # Strip the '-' and version number from the end of the update name.
    to_ignore = name.rsplit('-', 1)[0]
    cmd = ['softwareupdate', '--ignore', to_ignore]
    salt.utils.mac_utils.execute_return_success(cmd)
    return to_ignore in list_ignored()
Ignore a specific program update. When an update is ignored the '-' and version number at the end will be omitted, so "SecUpd2014-001-1.0" becomes "SecUpd2014-001". It will be removed automatically if present. An update is successfully ignored when it no longer shows up after list_updates. :param name: The name of the update to add to the ignore list. :type name: str :return: True if successful, False if not :rtype: bool CLI Example: .. code-block:: bash salt '*' softwareupdate.ignore <update-name>
22,104
def _prep_subsampled_bams(data, work_dir):
    sr_bam, disc_bam = sshared.get_split_discordants(data, work_dir)
    ds_bam = bam.downsample(dd.get_align_bam(data), data, 1e8,
                            read_filter="-F ", always_run=True,
                            work_dir=work_dir)
    out_bam = "%s-final%s" % utils.splitext_plus(ds_bam)
    if not utils.file_exists(out_bam):
        bam.merge([ds_bam, sr_bam, disc_bam], out_bam, data["config"])
    bam.index(out_bam, data["config"])
    return [out_bam]
Prepare a subsampled BAM file with discordants from samblaster and minimal correct pairs. This attempts to minimize run times by pre-extracting useful reads mixed with subsampled normal pairs to estimate paired end distributions: https://groups.google.com/d/msg/delly-users/xmia4lwOd1Q/uaajoBkahAIJ Subsamples correctly aligned reads to 100 million based on speedseq defaults and evaluations on NA12878 whole genome data: https://github.com/cc2qe/speedseq/blob/ca624ba9affb0bd0fb88834ca896e9122639ec94/bin/speedseq#L1102 XXX Currently not used as new versions of delly do not get good sensitivity with downsampled BAMs.
22,105
def bool_value(self):
    context = contextmod.InferenceContext()
    context.callcontext = contextmod.CallContext(args=[])
    context.boundnode = self
    try:
        result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
    except (exceptions.InferenceError, exceptions.AttributeInferenceError):
        try:
            result = _infer_method_result_truth(self, "__len__", context)
        except (exceptions.AttributeInferenceError, exceptions.InferenceError):
            return True
    return result
Infer the truth value for an Instance The truth value of an instance is determined by these conditions: * if it implements __bool__ on Python 3 or __nonzero__ on Python 2, then its bool value will be determined by calling this special method and checking its result. * when this method is not defined, __len__() is called, if it is defined, and the object is considered true if its result is nonzero. If a class defines neither __len__() nor __bool__(), all its instances are considered true.
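A minimal illustration of the truth protocol this method mirrors, in plain Python and independent of astroid's inference machinery:

class AlwaysFalse:
    def __bool__(self):  # consulted first
        return False

class Sized:
    def __len__(self):   # fallback when __bool__ is absent
        return 0

class Plain:
    pass                 # neither defined: instances are truthy

assert not bool(AlwaysFalse())
assert not bool(Sized())
assert bool(Plain())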
22,106
def add_op(state, op_func, *args, **kwargs):
    frameinfo = get_caller_frameinfo()
    kwargs['frameinfo'] = frameinfo
    for host in state.inventory:
        op_func(state, host, *args, **kwargs)
Prepare & add an operation to ``pyinfra.state`` by executing it on all hosts. Args: state (``pyinfra.api.State`` obj): the deploy state to add the operation to op_func (function): the operation function from one of the modules, ie ``server.user`` args/kwargs: passed to the operation function
22,107
def netloc_no_www(url):
    ext = tldextract.extract(url)
    if ext.subdomain and ext.subdomain != 'www':
        return '%s.%s.%s' % (ext.subdomain, ext.domain, ext.tld)
    else:
        return '%s.%s' % (ext.domain, ext.tld)
For a given URL return the netloc with any www. stripped.
22,108
def recipients(cls, bigchain):
    recipients = []
    for public_key, voting_power in cls.get_validators(bigchain).items():
        recipients.append(([public_key], voting_power))
    return recipients
Convert validator dictionary to a recipient list for `Transaction`
22,109
def read_uint(data, start, length): return int.from_bytes(data[start:start+length], byteorder=)
Extract a uint from a position in a sequence.
22,110
def resizeEvent(self, event):
    super(XWalkthroughWidget, self).resizeEvent(event)
    if self.isVisible():
        self.autoLayout()
Moves the widgets around the system. :param event | <QtGui.QResizeEvent>
22,111
def remove_root_vault(self, vault_id):
    if self._catalog_session is not None:
        return self._catalog_session.remove_root_catalog(catalog_id=vault_id)
    return self._hierarchy_session.remove_root(id_=vault_id)
Removes a root vault from this hierarchy. arg: vault_id (osid.id.Id): the ``Id`` of a vault raise: NotFound - ``vault_id`` not a parent of ``child_id`` raise: NullArgument - ``vault_id`` or ``child_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
22,112
def init_remote(self):
    new = False
    if not os.listdir(self.cachedir):
        self.repo = git.Repo.init(self.cachedir)
        new = True
    else:
        try:
            self.repo = git.Repo(self.cachedir)
        except git.exc.InvalidGitRepositoryError:
            log.error(_INVALID_REPO, self.cachedir, self.url, self.role)
            return new
    self.gitdir = salt.utils.path.join(self.repo.working_dir, '.git')
    self.enforce_git_config()
    return new
Initialize/attach to a remote using GitPython. Return a boolean which will let the calling function know whether or not a new repo was initialized by this function.
22,113
def _get_horoscope(self, day='today'):
    if not is_valid_day(day):
        raise HoroscopeException("Invalid day. Allowed days: [today|yesterday|tomorrow]")
    horoscope = ' '.join([str(s).strip() for s in self.tree.xpath( % day)])
    if day == 'yesterday':
        date = self.date_today - timedelta(days=1)
    elif day == 'today':
        date = self.date_today
    elif day == 'tomorrow':
        date = self.date_today + timedelta(days=1)
    return {
        'date': date.strftime("%Y-%m-%d"),
        'sunsign': self.sunsign.capitalize(),
        'horoscope': horoscope + "(c) Kelli Fox, The Astrologer, http://new.theastrologer.com",
        'meta': self._get_horoscope_meta(day),
    }
Gets a horoscope from the site HTML. :param day: day for which to get horoscope. Default is 'today' :returns: dictionary of horoscope details
22,114
def select_all(self, serial_numbers):
    sheet = self.table
    col = self.db_sheet_cols.id
    rows = sheet.loc[:, col].isin(serial_numbers)
    return sheet.loc[rows, :]
Select rows for identification for a list of serial_number. Args: serial_numbers: list (or ndarray) of serial numbers Returns: pandas.DataFrame
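A quick self-contained sketch of the same isin-based selection on a toy frame (the column name "id" is illustrative; the real column comes from db_sheet_cols.id):

import pandas as pd

sheet = pd.DataFrame({"id": [101, 102, 103], "mass": [0.2, 0.4, 0.3]})
rows = sheet.loc[:, "id"].isin([101, 103])   # boolean mask, one entry per row
print(sheet.loc[rows, :])                    # only the rows with id 101 and 103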
22,115
def filter_batch(self, batch):
    for item in batch:
        if self.filter(item):
            yield item
        else:
            self.set_metadata(, self.get_metadata() + 1)
        self.total += 1
        self._log_progress()
Receives the batch, filters it, and returns it.
22,116
def _set_transmitted_stp_type(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u: {: 1}, u: {: 3}, u: {: 4}, u: {: 6}, u: {: 5}, u: {: 2}},), is_leaf=True, yang_name="transmitted-stp-type", rest_name="transmitted-stp-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, namespace=, defining_module=, yang_type=, is_config=True) except (TypeError, ValueError): raise ValueError({ : , : "brocade-xstp-ext:stp-type", : , }) self.__transmitted_stp_type = t if hasattr(self, ): self._set()
Setter method for transmitted_stp_type, mapped from YANG variable /brocade_xstp_ext_rpc/get_stp_mst_detail/output/cist/port/transmitted_stp_type (stp-type) If this variable is read-only (config: false) in the source YANG file, then _set_transmitted_stp_type is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_transmitted_stp_type() directly. YANG Description: Transmitted (tx) stp type
22,117
def rlmb_ppo_quick():
    hparams = rlmb_ppo_base()
    hparams.epochs = 2
    hparams.model_train_steps = 25000
    hparams.ppo_epochs_num = 700
    hparams.ppo_epoch_length = 50
    return hparams
Base setting but quicker with only 2 epochs.
22,118
def expect(self, use_proportions=True):
    changed = self.get_changed(self.partition, self.prev_partition)
    lk_table = self.generate_lktable(self.partition, changed, use_proportions)
    self.table = self.likelihood_table_to_probs(lk_table)
The Expectation step of the CEM algorithm
22,119
def get_identities(self, item):
    item = item['data']
    if 'event_hosts' in item:
        user = self.get_sh_identity(item['event_hosts'][0])
        yield user
    rsvps = item.get('rsvps', [])
    for rsvp in rsvps:
        user = self.get_sh_identity(rsvp['member'])
        yield user
    for comment in item['comments']:
        user = self.get_sh_identity(comment['member'])
        yield user
Return the identities from an item
22,120
def Chisholm_Armand(x, rhol, rhog):
    alpha_h = homogeneous(x, rhol, rhog)
    return alpha_h/(alpha_h + (1 - alpha_h)**0.5)
r'''Calculates void fraction in two-phase flow according to the model presented in [1]_ based on that of [2]_ as shown in [3]_, [4]_, and [5]_. .. math:: \alpha = \frac{\alpha_h}{\alpha_h + (1-\alpha_h)^{0.5}} Parameters ---------- x : float Quality at the specific tube interval [] rhol : float Density of the liquid [kg/m^3] rhog : float Density of the gas [kg/m^3] Returns ------- alpha : float Void fraction (area of gas / total area of channel), [-] Notes ----- Examples -------- >>> Chisholm_Armand(.4, 800, 2.5) 0.9357814394262114 References ---------- .. [1] Chisholm, Duncan. Two-Phase Flow in Pipelines and Heat Exchangers. Institution of Chemical Engineers, 1983. .. [2] Armand, Aleksandr Aleksandrovich. The Resistance During the Movement of a Two-Phase System in Horizontal Pipes. Atomic Energy Research Establishment, 1959. .. [3] Xu, Yu, and Xiande Fang. "Correlations of Void Fraction for Two- Phase Refrigerant Flow in Pipes." Applied Thermal Engineering 64, no. 1-2 (March 2014): 242–51. doi:10.1016/j.applthermaleng.2013.12.032. .. [4] Dalkilic, A. S., S. Laohalertdecha, and S. Wongwises. "Effect of Void Fraction Models on the Two-Phase Friction Factor of R134a during Condensation in Vertical Downward Flow in a Smooth Tube." International Communications in Heat and Mass Transfer 35, no. 8 (October 2008): 921-27. doi:10.1016/j.icheatmasstransfer.2008.04.001. .. [5] Woldesemayat, Melkamu A., and Afshin J. Ghajar. "Comparison of Void Fraction Correlations for Different Flow Patterns in Horizontal and Upward Inclined Pipes." International Journal of Multiphase Flow 33, no. 4 (April 2007): 347-370. doi:10.1016/j.ijmultiphaseflow.2006.09.004.
22,121
def analysis(self):
    if self._analysis is None:
        with open(self.path, 'rb') as f:
            self.read_analysis(f)
    return self._analysis
Get ANALYSIS segment of the FCS file.
22,122
def gross_lev(positions):
    exposure = positions.drop('cash', axis=1).abs().sum(axis=1)
    return exposure / positions.sum(axis=1)
Calculates the gross leverage of a strategy. Parameters ---------- positions : pd.DataFrame Daily net position values. - See full explanation in tears.create_full_tear_sheet. Returns ------- pd.Series Gross leverage.
22,123
def _get_connection(self, uri, headers=None):
    connection = None
    if uri.scheme == 'https':
        if not uri.port:
            connection = httplib.HTTPSConnection(uri.host)
        else:
            connection = httplib.HTTPSConnection(uri.host, int(uri.port))
    else:
        if not uri.port:
            connection = httplib.HTTPConnection(uri.host)
        else:
            connection = httplib.HTTPConnection(uri.host, int(uri.port))
    return connection
Opens a socket connection to the server to set up an HTTP request. Args: uri: The full URL for the request as a Uri object. headers: A dict of string pairs containing the HTTP headers for the request.
22,124
def zyz_circuit(t0: float, t1: float, t2: float, q0: Qubit) -> Circuit:
    circ = Circuit()
    circ += TZ(t0, q0)
    circ += TY(t1, q0)
    circ += TZ(t2, q0)
    return circ
Circuit equivalent of 1-qubit ZYZ gate
22,125
def set_hr_widths(result):
    mw = 0
    hrs = []
    if not hr_marker in result:
        return result
    for line in result.splitlines():
        if hr_marker in line:
            hrs.append(line)
            continue
        if len(line) < mw:
            continue
        l = len(clean_ansi(line))
        if l > mw:
            mw = l
    for hr in hrs:
        hcl = clean_ansi(hr)
        ind = len(hcl) - len(hcl.split(hr_marker, 1)[1]) - 1
        w = min(term_columns, mw) - 2 * ind
        hrf = hr.replace(hr_marker, hr_sep * w)
        result = result.replace(hr, hrf)
    return result
We want the hrs indented by hierarchy... A bit too much effort to calculate; maybe fixed separators in 10 styles would have been enough visually: β—ˆβ”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β”€β—ˆ
22,126
def _timestamp(when):
    return (time.mktime(when.timetuple())
            if sys.version_info < (3,) else when.timestamp())
Python 2 compatibility for `datetime.timestamp()`.
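A quick check of the helper's two branches; for a naive datetime both computations interpret the value as local time, so they agree up to fractional seconds:

import datetime, time

when = datetime.datetime(2020, 1, 1, 12, 0, 0)
print(when.timestamp())                 # Python 3 path
print(time.mktime(when.timetuple()))    # Python 2-era equivalent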
22,127
def convert(self, value, *args, **kwargs):
    value = os.path.expanduser(value)
    return super(ExpandPath, self).convert(value, *args, **kwargs)
Take a path with $HOME variables and resolve it to full path.
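The expansion step on its own (ExpandPath and its click-style base class are assumed from the surrounding project):

import os.path

# '~' resolves against the current user's home directory;
# strings without '~' pass through unchanged.
print(os.path.expanduser("~/data/run.cfg"))  # e.g. /home/alice/data/run.cfg
print(os.path.expanduser("/etc/hosts"))      # unchanged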
22,128
def description(self):
    name = None
    try:
        name = self._TYPE_NAMES[self.audioObjectType]
    except IndexError:
        pass
    if name is None:
        return
    if self.sbrPresentFlag == 1:
        name += "+SBR"
    if self.psPresentFlag == 1:
        name += "+PS"
    return text_type(name)
string or None if unknown
22,129
def valid_input(val): ip_value = re.compile(r) all_num = re.compile(r) sections_comments = re.compile(r, re.VERBOSE) options_values = re.compile(r) line_num = 0 warning_str = error_str = trimmed_val = [] for entry in val.split(): line_num += 1 trimmed_val.append(re.sub(r, , entry).strip()) if entry.strip() == : continue if options_values.match(entry): value = entry.split(, 1)[1] for val in value.split(): val = val.strip() if val == : error_str += \ + str(line_num) + if re.match(, val): if val.find() >= 0 and not ip_value.match(val): error_str += \ \ + str(line_num) + elif val.find() < 0 and not all_num.match(val): warning_str += \ + \ str(line_num) + elif val.find() >= 0: for num in val.strip().split(): num = int(num) if num > 255 or num < 0: error_str += \ \ \ + str(line_num) + if re.search(, val): error_str += \ + str(line_num) + elif not sections_comments.match(entry): lit_val = try: opt_val = entry.split(, 1) if opt_val[0].strip() == : error_str += \ + str(line_num) + else: lit_val = opt_val[1].strip() except IndexError: lit_val = error_str += \ + str(line_num) + if lit_val: try: ast.literal_eval(lit_val) except SyntaxError: error_str += \ + str(line_num) + \ if error_str: npyscreen.notify_confirm( " can\n-\nError in inputYou have may have some error(s) that you want to check before proceeding:\n-\n\n-\nDo you want to continue?Double check\n\n'.join(trimmed_val))
Ensure the input the user gave is of a valid format
22,130
def remove_allocated_node_name(self, name):
    if name in self._allocated_node_names:
        self._allocated_node_names.remove(name)
Removes an allocated node name :param name: allocated node name
22,131
def retrieve(self, request, *args, **kwargs):
    return super(UserViewSet, self).retrieve(request, *args, **kwargs)
User fields can be updated by account owner or user with staff privilege (is_staff=True). Following user fields can be updated: - organization (deprecated, use `organization plugin <http://waldur_core-organization.readthedocs.org/en/stable/>`_ instead) - full_name - native_name - job_title - phone_number - email Can be done by **PUT**ing a new data to the user URI, i.e. */api/users/<UUID>/* by staff user or account owner. Valid request example (token is user specific): .. code-block:: http PUT /api/users/e0c058d06864441fb4f1c40dee5dd4fd/ HTTP/1.1 Content-Type: application/json Accept: application/json Authorization: Token c84d653b9ec92c6cbac41c706593e66f567a7fa4 Host: example.com { "email": "[email protected]", "organization": "Bells organization", }
22,132
def check_qt():
    qt_infos = dict(pyqt5=("PyQt5", "5.6"))
    try:
        import qtpy
        package_name, required_ver = qt_infos[qtpy.API]
        actual_ver = qtpy.PYQT_VERSION
        if LooseVersion(actual_ver) < LooseVersion(required_ver):
            show_warning("Please check Spyder installation requirements:\n"
                         "%s %s+ is required (found v%s)."
                         % (package_name, required_ver, actual_ver))
    except ImportError:
        show_warning("Failed to import qtpy.\n"
                     "Please check Spyder installation requirements:\n\n"
                     "qtpy 1.2.0+ and\n"
                     "%s %s+\n\n"
                     "are required to run Spyder."
                     % (qt_infos['pyqt5']))
Check Qt binding requirements
22,133
def sign_extend(self, new_length):
    msb = self.extract(self.bits - 1, self.bits - 1).eval(2)
    if msb == [0]:
        return self.zero_extend(new_length)
    if msb == [1]:
        si = self.copy()
        si._bits = new_length
        mask = (2 ** new_length - 1) - (2 ** self.bits - 1)
        si._lower_bound |= mask
        si._upper_bound |= mask
    else:
        numbers = self._nsplit()
        all_resulting_intervals = list()
        assert len(numbers) > 0
        for n in numbers:
            a, b = n.lower_bound, n.upper_bound
            mask_a = 0
            mask_b = 0
            mask_n = ((1 << (new_length - n.bits)) - 1) << n.bits
            if StridedInterval._get_msb(a, n.bits) == 1:
                mask_a = mask_n
            if StridedInterval._get_msb(b, n.bits) == 1:
                mask_b = mask_n
            si_ = StridedInterval(bits=new_length, stride=n.stride,
                                  lower_bound=a | mask_a,
                                  upper_bound=b | mask_b)
            all_resulting_intervals.append(si_)
        si = StridedInterval.least_upper_bound(*all_resulting_intervals).normalize()
    si.uninitialized = self.uninitialized
    return si
Unary operation: SignExtend :param new_length: New length after sign-extension :return: A new StridedInterval
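A worked instance of the mask arithmetic used above, extending a 4-bit value to 8 bits with plain integers (no StridedInterval needed):

bits, new_length = 4, 8
value = 0b1010                                   # MSB set: negative in two's complement
mask = (2 ** new_length - 1) - (2 ** bits - 1)   # 0b11110000
assert value | mask == 0b11111010                # sign-extended to 8 bits
assert (0b0101 | 0) == 0b0101                    # MSB clear: zero-extension suffices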
22,134
def keep_folder(raw_path):
    keep = True
    for pattern in DIR_EXCLUDE_PATTERNS:
        if pattern in raw_path:
            LOGGER.debug(, raw_path)
            keep = False
    return keep
Keep only folders that don't contain patterns in `DIR_EXCLUDE_PATTERNS`.
22,135
def eval_permission(self, token, resource, scope, submit_request=False):
    return self.eval_permissions(
        token=token,
        resource_scopes_tuples=[(resource, scope)],
        submit_request=submit_request
    )
Evaluates if user has permission for scope on resource. :param str token: client access token :param str resource: resource to access :param str scope: scope on resource :param boolean submit_request: submit request if not allowed to access? :rtype: boolean
22,136
def paths(self):
    if self.format is None:
        raise ArcanaFileFormatError(
            "Cannot get paths of fileset ({}) that hasn't had its format "
            "set".format(self))
    if self.format.directory:
        return chain(*((op.join(root, f) for f in files)
                       for root, _, files in os.walk(self.path)))
    else:
        return chain([self.path], self.aux_files.values())
Iterates through all files in the set
22,137
def log(self, message, severity=INFO, tag=u""):
    entry = _LogEntry(
        severity=severity,
        time=datetime.datetime.now(),
        tag=tag,
        indentation=self.indentation,
        message=self._sanitize(message)
    )
    self.entries.append(entry)
    if self.tee:
        gf.safe_print(entry.pretty_print(show_datetime=self.tee_show_datetime))
    return entry.time
Add a given message to the log, and return its time. :param string message: the message to be added :param severity: the severity of the message :type severity: :class:`~aeneas.logger.Logger` :param string tag: the tag associated with the message; usually, the name of the class generating the entry :rtype: datetime
22,138
def parse(args): parser = make_parser() if not len(args): parser.print_help() sys.exit(1) parsed_args = parser.parse_args(args) if parsed_args.error == : parsed_args.error = None version = parsed_args.version if version is not None: version = str(version).upper() if not parsed_args.micro and version in (, , , ): parsed_args.micro = None return _AttrDict(vars(parsed_args))
Parses the arguments and returns the result.
22,139
def filter_queryset(self, request, queryset, view):
    if request.user.is_superuser:
        return queryset
    return queryset.filter(status__user=request.user)
Filter out any artifacts which the requesting user does not have permission to view.
22,140
def remove_stale_javascripts(portal):
    logger.info("Removing stale javascripts ...")
    for js in JAVASCRIPTS_TO_REMOVE:
        logger.info("Unregistering JS %s" % js)
        portal.portal_javascripts.unregisterResource(js)
Removes stale javascripts
22,141
def visit_NameConstant(self, node: AST, dfltChaining: bool = True) -> str:
    return str(node.value)
Return `node`'s value as a string.
22,142
def plot_magnitude_time_scatter(catalogue, plot_error=False, fmt_string='o',
                                filename=None, figure_size=(8, 6),
                                filetype='png', dpi=300, ax=None):
    if ax is None:
        fig, ax = plt.subplots(figsize=figure_size)
    else:
        fig = ax.get_figure()
    dtime = catalogue.get_decimal_time()
    if len(catalogue.data['sigmaMagnitude']) == 0:
        print('Magnitude Error is missing - neglecting error bars!')
        plot_error = False
    if plot_error:
        ax.errorbar(dtime, catalogue.data['magnitude'], xerr=None,
                    yerr=catalogue.data['sigmaMagnitude'], fmt=fmt_string)
    else:
        ax.plot(dtime, catalogue.data['magnitude'], fmt_string)
    ax.set_xlabel('Year')
    ax.set_ylabel('Magnitude')
    ax.set_title('Magnitude-Time Plot')
    _save_image(fig, filename, filetype, dpi)
Creates a simple scatter plot of magnitude with time :param catalogue: Earthquake catalogue as instance of :class: openquake.hmtk.seismicity.catalogue.Catalogue :param bool plot_error: Choose to plot error bars (True) or not (False) :param str fmt_string: Symbology of plot
22,143
def createDbusProxyObject(bus_name, object_path, bus=None):
    bus = bus or dbus.SessionBus.get_session()
    return bus.get_object(bus_name, object_path)
Create dbus proxy object
22,144
def data(self, index, role):
    if index.column() == 0 and role == QtCore.Qt.DecorationRole:
        if self.isPyz(index):
            with ZipFile(str(self.filePath(index)), 'r') as myzip:
                try:
                    myzip.extract('icon.png', self._tmp_dir_work)
                    p = os.path.join(self._tmp_dir_work, 'icon.png')
                    return QtGui.QIcon(p)
                except KeyError:
                    pass
    return super(_FileSystemModel, self).data(index, role)
use zipped icon.png as icon
22,145
def std_ratio(sim=None, obs=None, node=None, skip_nan=False):
    sim, obs = prepare_arrays(sim, obs, node, skip_nan)
    return numpy.std(sim)/numpy.std(obs)-1.
Calculate the ratio between the standard deviation of the simulated and the observed values. >>> from hydpy import round_ >>> from hydpy import std_ratio >>> round_(std_ratio(sim=[1.0, 2.0, 3.0], obs=[1.0, 2.0, 3.0])) 0.0 >>> round_(std_ratio(sim=[1.0, 1.0, 1.0], obs=[1.0, 2.0, 3.0])) -1.0 >>> round_(std_ratio(sim=[0.0, 3.0, 6.0], obs=[1.0, 2.0, 3.0])) 2.0 See the documentation on function |prepare_arrays| for some additional instructions for use of function |std_ratio|.
22,146
def remove_file(path, conn=None):
    if conn is None:
        conn = init()
    log.debug(, path)
    os.remove(path)
Remove a single file from the file system
22,147
def zero_extend(self, duration_s=None, num_samples=None):
    if duration_s is not None and num_samples is not None:
        raise ValueError("`duration_s` and `num_samples` cannot both be specified.")
    elif duration_s is not None:
        num_samples = self.frame_rate * duration_s
    seg = AudioSegment(self.seg, self.name)
    zeros = silent(duration=num_samples / self.frame_rate,
                   frame_rate=self.frame_rate)
    return zeros.overlay(seg)
Adds a number of zeros (digital silence) to the AudioSegment (returning a new one). :param duration_s: The number of seconds of zeros to add. If this is specified, `num_samples` must be None. :param num_samples: The number of zeros to add. If this is specified, `duration_s` must be None. :returns: A new AudioSegment object that has been zero extended. :raises: ValueError if duration_s and num_samples are both specified.
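The sample/duration bookkeeping implied by the two parameters, assuming a hypothetical 16 kHz segment:

frame_rate = 16000                      # samples per second (assumed)
duration_s = 0.5
num_samples = frame_rate * duration_s   # 8000 samples of digital silence
assert num_samples / frame_rate == duration_s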
22,148
def _parse(data: str) -> list:
    if isinstance(data, bytes):
        data = data.decode()
    lines = (
        item for item in (item.strip() for item in data.split('\n'))
        if len(item) and not item.startswith('#')
    )
    rules = []
    for line in lines:
        rules.append(
            Rule.parse(line)
        )
    return rules
Parses the given data string and returns a list of rule objects.
22,149
def log_verbose(self, message):
    if self.get_verbosity() >= Output.VERBOSITY_VERBOSE:
        self.writeln(message)
Logs a message only when logging level is verbose. :param str|list[str] message: The message.
22,150
def update_conversation(self, conversation):
    new_state = conversation.self_conversation_state
    old_state = self._conversation.self_conversation_state
    self._conversation = conversation
    if not new_state.delivery_medium_option:
        new_state.delivery_medium_option.extend(
            old_state.delivery_medium_option
        )
    old_timestamp = old_state.self_read_state.latest_read_timestamp
    new_timestamp = new_state.self_read_state.latest_read_timestamp
    if new_timestamp == 0:
        new_state.self_read_state.latest_read_timestamp = old_timestamp
    for new_entry in conversation.read_state:
        tstamp = parsers.from_timestamp(new_entry.latest_read_timestamp)
        if tstamp == 0:
            continue
        uid = parsers.from_participantid(new_entry.participant_id)
        if uid not in self._watermarks or self._watermarks[uid] < tstamp:
            self._watermarks[uid] = tstamp
Update the internal state of the conversation. This method is used by :class:`.ConversationList` to maintain this instance. Args: conversation: ``Conversation`` message.
22,151
def load_stylesheet(self, id, path):
    self.add_child(HeadLink(id=id, link_type="stylesheet", path=path))
Proper way to dynamically inject a stylesheet in a page. :param path: Path of the stylesheet to inject.
22,152
def send_file_external(self, url_json, chunk):
    http_verb = url_json['http_verb']
    host = url_json['host']
    url = url_json['url']
    http_headers = url_json['http_headers']
    resp = self._send_file_external_with_retry(http_verb, host, url,
                                               http_headers, chunk)
    if resp.status_code != 200 and resp.status_code != 201:
        raise ValueError("Failed to send file to external store. Error:"
                         + str(resp.status_code) + host + url)
Send chunk to external store specified in url_json. Raises ValueError on upload failure. :param data_service: data service to use for sending chunk :param url_json: dict contains where/how to upload chunk :param chunk: data to be uploaded
22,153
def normalize(l):
    s = float(sum(l))
    if s == 0:
        raise ValueError("Cannot normalize list with sum 0")
    return [x / s for x in l]
Normalizes input list. Parameters ---------- l: list The list to be normalized Returns ------- The normalized list or numpy array Raises ------ ValueError, if the list sums to zero
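Usage, assuming the function above is in scope:

assert normalize([1, 1, 2]) == [0.25, 0.25, 0.5]
try:
    normalize([0, 0])
except ValueError as e:
    print(e)   # Cannot normalize list with sum 0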
22,154
def load_raw(args):
    path = args.ceph_conf or '{cluster}.conf'.format(cluster=args.cluster)
    try:
        with open(path) as ceph_conf:
            return ceph_conf.read()
    except (IOError, OSError) as e:
        raise exc.ConfigError(
            "%s; has `ceph-deploy new` been run in this directory?" % e
        )
Read the actual file *as is* without parsing/modifying it so that it can be written maintaining its same properties. :param args: Will be used to infer the proper configuration name :param path: alternatively, use a path for any configuration file loading
22,155
def _maybe_download_corpus(tmp_dir, vocab_type):
    filename = os.path.basename(PTB_URL)
    compressed_filepath = generator_utils.maybe_download(
        tmp_dir, filename, PTB_URL)
    ptb_files = []
    ptb_char_files = []
    with tarfile.open(compressed_filepath, "r:gz") as tgz:
        files = []
        for m in tgz.getmembers():
            if "ptb" in m.name and ".txt" in m.name:
                if "char" in m.name:
                    ptb_char_files += [m.name]
                else:
                    ptb_files += [m.name]
                files += [m]
        tgz.extractall(tmp_dir, members=files)
    if vocab_type == text_problems.VocabType.CHARACTER:
        return ptb_char_files
    else:
        return ptb_files
Download and unpack the corpus. Args: tmp_dir: directory containing dataset. vocab_type: which vocabulary are we using. Returns: The list of names of files.
22,156
def p_expr_LT_expr(p):
    p[0] = make_binary(p.lineno(2), 'LT', p[1], p[3], lambda x, y: x < y)
expr : expr LT expr
22,157
def get_html(grafs):
    html = [format_html('<p>{}</p>', p) for p in grafs]
    html = [p.replace("\n", "<br>") for p in html]
    return format_html(six.text_type("\n".join(html)))
Renders the grafs provided in HTML by wrapping them in <p> tags. Linebreaks are replaced with <br> tags.
22,158
def insertDatasetWOannex(self, dataset, blockcontent, otptIdList, conn, insertDataset = True, migration = False): tran = conn.begin() try: if insertDataset: dataset[] = self.datasetid.execute(conn, dataset[]) if dataset[] <= 0: dataset[] = self.sm.increment(conn, "SEQ_DS") if not migration: dataset[] = dbsUtils().getCreateBy() dataset[] = dbsUtils().getCreateBy() dataset[] = dataset.get(, dbsUtils().getTime()) dataset[] = dataset.get(, dbsUtils().getTime()) dataset[] = dataset.get(, None) dataset[] = dataset.get(, None) try: self.datasetin.execute(conn, dataset, tran) except exceptions.IntegrityError as ei: if str(ei).find("ORA-00001") != -1 or str(ei).lower().find("duplicate") !=-1: if conn.closed: conn = self.dbi.connection() dataset[] = self.datasetid.execute(conn, dataset[]) if dataset[] <= 0: if tran:tran.rollback() if conn:conn.close() dbsExceptionHandler(, , self.logger.exception, + str(ei)) elif str(ei).find("ORA-01400") > -1: if tran:tran.rollback() if conn:conn.close() dbsExceptionHandler(, , self.logger.exception, + str(ei)) else: if tran: tran.rollback() if conn: conn.close() dbsExceptionHandler(, , self.logger.exception, + str(ei)) except Exception: if tran:tran.rollback() if conn:conn.close() raise for c in otptIdList: try: dcObj = { : dataset[], : c } self.dcin.execute(conn, dcObj, tran) except exceptions.IntegrityError as ei: if (str(ei).find("ORA-00001") != -1 and str(ei).find("TUC_DC_1") != -1) or \ str(ei).lower().find("duplicate")!=-1: pass else: if tran:tran.rollback() if conn:conn.close() dbsExceptionHandler(, , self.logger.exception, + str(ei)) except Exception as ex: if tran:tran.rollback() if conn:conn.close() raise tran.commit() except exceptions.IntegrityError as ei:
_insertDatasetOnly_ Insert the dataset and only the dataset Meant to be called after everything else is put into place. The insertDataset flag is set to false if the dataset already exists
22,159
def serialize(exc):
    return {
        'exc_type': type(exc).__name__,
        'exc_path': get_module_path(type(exc)),
        'exc_args': list(map(safe_for_serialization, exc.args)),
        'value': safe_for_serialization(exc),
    }
Serialize `self.exc` into a data dictionary representing it.
22,160
def delete_record(self, instance):
    adapter = self.get_adapter_from_instance(instance)
    adapter.delete_record(instance)
Deletes the record.
22,161
def perpendicular_vector(n):
    dim = len(n)
    if dim == 2:
        return n[::-1]
    for ix in range(dim):
        _ = N.zeros(dim)
        _[dim-ix-1] = 1
        v1 = N.cross(n, _)
        if N.linalg.norm(v1) != 0:
            return v1
    raise ValueError("Cannot find perpendicular vector")
Get a random vector perpendicular to the given vector
22,162
def get_utm_zone(longitude):
    zone = int((math.floor((longitude + 180.0) / 6.0) + 1) % 60)
    if zone == 0:
        zone = 60
    return zone
Return utm zone.
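A worked check of the formula: Berlin at longitude 13.4 degrees east falls in UTM zone 33.

import math

longitude = 13.4
zone = int((math.floor((longitude + 180.0) / 6.0) + 1) % 60)
assert zone == 33
# Longitude 174.8 (Auckland) gives floor(354.8/6)+1 = 60 and 60 % 60 == 0;
# the wrap-around branch maps that 0 back to zone 60.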
22,163
def delete(self):
    req = self.request(self.mist_client.uri + '/clouds/' + self.id)
    req.delete()
    self.mist_client.update_clouds()
Delete the cloud from the list of added clouds in mist.io service. :returns: A list of mist.clients' updated clouds.
22,164
def _grid_distance(self, index):
    dimensions = np.cumprod(self.map_dimensions[1::][::-1])[::-1]
    coord = []
    for idx, dim in enumerate(dimensions):
        if idx != 0:
            value = (index % dimensions[idx-1]) // dim
        else:
            value = index // dim
        coord.append(value)
    coord.append(index % self.map_dimensions[-1])
    for idx, (width, row) in enumerate(zip(self.map_dimensions, coord)):
        x = np.abs(np.arange(width) - row) ** 2
        dims = self.map_dimensions[::-1]
        if idx:
            dims = dims[:-idx]
        x = np.broadcast_to(x, dims).T
        if idx == 0:
            distance = np.copy(x)
        else:
            distance += x.T
    return distance
Calculate the distance grid for a single index position. This is pre-calculated for fast neighborhood calculations later on (see _calc_influence).
22,165
def save(self, path=None, filter_name=None):
    if path is None:
        try:
            self._target.store()
        except _IOException as e:
            raise IOError(e.Message)
        return
    url = uno.systemPathToFileUrl(os.path.abspath(path))
    if filter_name:
        format_filter = uno.createUnoStruct('com.sun.star.beans.PropertyValue')
        format_filter.Name = 'FilterName'
        format_filter.Value = filter_name
        filters = (format_filter,)
    else:
        filters = ()
    try:
        self._target.storeToURL(url, filters)
    except _IOException as e:
        raise IOError(e.Message)
Saves this document to a local file system. The optional first argument defaults to the document's path. Accept optional second argument which defines type of the saved file. Use one of FILTER_* constants or see list of available filters at http://wakka.net/archives/7 or http://www.oooforum.org/forum/viewtopic.phtml?t=71294.
22,166
def isMember(userid, password, group):
    try:
        certfile = getCert(userid, password)
        group_url = getGroupsURL(certfile, group)
        logging.debug("group url: %s" % (group_url))
        con = httplib.HTTPSConnection(_SERVER, 443,
                                      key_file=certfile.name,
                                      cert_file=certfile.name,
                                      timeout=600)
        con.connect()
        con.request("GET", group_url)
        resp = con.getresponse()
        if resp.status == 200:
            return True
    except Exception as e:
        logging.error(str(e))
    return False
Test to see if the given userid/password combo is an authenticated member of group. userid: CADC Username (str) password: CADC Password (str) group: CADC GMS group (str)
22,167
def sample_stats_prior_to_xarray(self):
    prior = self.prior
    data = get_sample_stats(prior)
    return dict_to_dataset(data, library=self.pystan,
                           coords=self.coords, dims=self.dims)
Extract sample_stats_prior from prior.
22,168
def parse_favorites(self, favorites_page):
    character_info = self.parse_sidebar(favorites_page)
    second_col = favorites_page.find(u'div', {'id': 'content'}).find(
        u'table').find(u'tr').find_all(u'td', recursive=False)[1]
    try:
        character_info[u'favorites'] = []
        favorite_links = second_col.find_all('a', recursive=False)
        for link in favorite_links:
            character_info[u'favorites'].append(
                self.session.user(username=link.text))
    except:
        if not self.session.suppress_parse_exceptions:
            raise
    return character_info
Parses the DOM and returns character favorites attributes. :type favorites_page: :class:`bs4.BeautifulSoup` :param favorites_page: MAL character favorites page's DOM :rtype: dict :return: Character favorites attributes.
22,169
def send(self, sender: PytgbotApiBot):
    return sender.send_game(
        game_short_name=self.game_short_name, chat_id=self.receiver,
        reply_to_message_id=self.reply_id,
        disable_notification=self.disable_notification,
        reply_markup=self.reply_markup
    )
Send the message via pytgbot. :param sender: The bot instance to send with. :type sender: pytgbot.bot.Bot :rtype: PytgbotApiMessage
22,170
def literal(node):
    rendered = []
    try:
        if node.info is not None:
            l = Lexer(node.literal, node.info, tokennames="long")
            for _ in l:
                rendered.append(node.inline(classes=_[0], text=_[1]))
    except:
        pass
    classes = []
    if node.info is not None:
        classes.append(node.info)
    if len(rendered) > 0:
        o = nodes.literal(classes=classes)
        for element in rendered:
            o += element
    else:
        o = nodes.literal(text=node.literal, classes=classes)
    for n in MarkDown(node):
        o += n
    return o
Inline code
22,171
def create_dampening(self, trigger_id, dampening):
    data = self._serialize_object(dampening)
    url = self._service_url(['triggers', trigger_id, 'dampenings'])
    return Dampening(self._post(url, data))
Create a new dampening. :param trigger_id: TriggerId definition attached to the dampening :param dampening: Dampening definition to be created. :type dampening: Dampening :return: Created dampening
22,172
def draw_path_collection(self, paths, path_coordinates, path_transforms,
                         offsets, offset_coordinates, offset_order,
                         styles, mplobj=None):
    if offset_order == "before":
        raise NotImplementedError("offset before transform")
    for tup in self._iter_path_collection(paths, path_transforms,
                                          offsets, styles):
        (path, path_transform, offset, ec, lw, fc) = tup
        vertices, pathcodes = path
        path_transform = transforms.Affine2D(path_transform)
        vertices = path_transform.transform(vertices)
        if path_coordinates == "figure":
            path_coordinates = "points"
        style = {"edgecolor": utils.color_to_hex(ec),
                 "facecolor": utils.color_to_hex(fc),
                 "edgewidth": lw,
                 "dasharray": "10,0",
                 "alpha": styles['alpha'],
                 "zorder": styles['zorder']}
        self.draw_path(data=vertices, coordinates=path_coordinates,
                       pathcodes=pathcodes, style=style, offset=offset,
                       offset_coordinates=offset_coordinates, mplobj=mplobj)
Draw a collection of paths. The paths, offsets, and styles are all iterables, and the number of paths is max(len(paths), len(offsets)). By default, this is implemented via multiple calls to the draw_path() function. For efficiency, Renderers may choose to customize this implementation. Examples of path collections created by matplotlib are scatter plots, histograms, contour plots, and many others. Parameters ---------- paths : list list of tuples, where each tuple has two elements: (data, pathcodes). See draw_path() for a description of these. path_coordinates: string the coordinates code for the paths, which should be either 'data' for data coordinates, or 'figure' for figure (pixel) coordinates. path_transforms: array_like an array of shape (*, 3, 3), giving a series of 2D Affine transforms for the paths. These encode translations, rotations, and scalings in the standard way. offsets: array_like An array of offsets of shape (N, 2) offset_coordinates : string the coordinates code for the offsets, which should be either 'data' for data coordinates, or 'figure' for figure (pixel) coordinates. offset_order : string either "before" or "after". This specifies whether the offset is applied before the path transform, or after. The matplotlib backend equivalent is "before"->"data", "after"->"screen". styles: dictionary A dictionary in which each value is a list of length N, containing the style(s) for the paths. mplobj : matplotlib object the matplotlib plot element which generated this collection
22,173
def boolean(value):
    if isinstance(value, bool):
        return value
    if not value:
        raise ValueError("boolean type must be non-null")
    value = value.lower()
    if value in ('true', '1',):
        return True
    if value in ('false', '0',):
        return False
    raise ValueError("Invalid literal for boolean(): {0}".format(value))
Parse the string ``"true"`` or ``"false"`` as a boolean (case insensitive). Also accepts ``"1"`` and ``"0"`` as ``True``/``False`` (respectively). If the input is from the request JSON body, the type is already a native python boolean, and will be passed through without further parsing.
22,174
def parse(str_, lsep=",", avsep=":", vssep=",", avssep=";"): if avsep in str_: return parse_attrlist(str_, avsep, vssep, avssep) if lsep in str_: return parse_list(str_, lsep) return parse_single(str_)
Generic parser
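How the dispatch plays out with the default separators (parse_attrlist, parse_list, and parse_single are assumed from the same module):

parse("a:1;b:2")   # contains ':' -> parse_attrlist handles attribute/value pairs
parse("x,y,z")     # contains ',' only -> parse_list -> ['x', 'y', 'z']
parse("solo")      # neither separator -> parse_single -> 'solo'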
22,175
def extension_context(extension_name='cpu', **kw):
    from nnabla import logger
    logger.warn(
        'Deprecated. Use `nnabla.ext_utils.get_extension_context` instead.')
    from nnabla.ext_utils import get_extension_context
    return get_extension_context(extension_name, **kw)
Get the context of the specified extension. All extension's module must provide `context(**kw)` function. Args: extension_name (str) : Module path relative to `nnabla_ext`. kw (dict) : Additional keyword arguments for context function in a extension module. Returns: :class:`nnabla.Context`: The current extension context. Note: Deprecated. Use :function:`nnabla.ext_utils.get_extension_context` instead. Example: .. code-block:: python ctx = extension_context('cuda.cudnn', device_id=0) nn.set_default_context(ctx)
22,176
def qc_to_rec(samples):
    samples = [utils.to_single_data(x) for x in samples]
    samples = cwlutils.assign_complex_to_samples(samples)
    to_analyze, extras = _split_samples_by_qc(samples)
    recs = cwlutils.samples_to_records([utils.to_single_data(x)
                                        for x in to_analyze + extras])
    return [[x] for x in recs]
CWL: Convert a set of input samples into records for parallelization.
22,177
def url(self, host):
    path = '/'.join(str(v) for v in self._path)
    return 'coaps://{}:5684/{}'.format(host, path)
Generate url for coap client.
22,178
def activate_output(self, universe: int) -> None:
    check_universe(universe)
    if universe in self._outputs:
        return
    new_output = Output(DataPacket(cid=self.__CID,
                                   sourceName=self.source_name,
                                   universe=universe))
    self._outputs[universe] = new_output
Activates a universe, which then starts sending every second. See http://tsp.esta.org/tsp/documents/docs/E1-31-2016.pdf for more information :param universe: the universe to activate
22,179
def clear(self, database, callback=None):
    token = self._get_token()
    self._enqueue(self._PendingItem(token,
                                    BlobCommand(token=token, database=database,
                                                content=ClearCommand()),
                                    callback))
Wipe the given database. This only affects items inserted remotely; items inserted on the watch (e.g. alarm clock timeline pins) are not removed. :param database: The database to wipe. :type database: .BlobDatabaseID :param callback: A callback to be called on success or failure.
22,180
def _download_article(self, article_number, max_retries=10): log.debug(.format(article_number, self.name)) _connection = self.session.connections.get() try: i = 0 while True: if i >= max_retries: return False try: _connection.group(self.name) resp = _connection.article(article_number) log.debug(.format(article_number, self.name)) return resp except EOFError: log.warning( .format(article_number, self.name)) self.session.refresh_connection(_connection) time.sleep(2) _connection = self.session.connections.get() except nntplib.NNTPError as exc: log.warning( .format(exc, article_number, self.name)) if any(s in exc.response for s in [, ]): i = max_retries else: i += 1 except: self.session.refresh_connection(_connection) time.sleep(2) _connection = self.session.connections.get() finally: self.session.connections.put(_connection)
Download a given article. :type article_number: str :param article_number: the article number to download. :type group: str :param group: the group that contains the article to be downloaded. :returns: nntplib article response object if successful, else False.
22,181
def get_signing_key(self, key_type="", owner="", kid=None, **kwargs): return self.get("sig", key_type, owner, kid, **kwargs)
Shortcut to use for signing keys only. :param key_type: Type of key (rsa, ec, oct, ..) :param owner: Who is the owner of the keys, "" == me (default) :param kid: A Key Identifier :param kwargs: Extra key word arguments :return: A possibly empty list of keys
22,182
def running_covar(xx=True, xy=False, yy=False, remove_mean=False,
                  symmetrize=False, sparse_mode='auto', modify_data=False,
                  column_selection=None, diag_only=False, nsave=5):
    return RunningCovar(compute_XX=xx, compute_XY=xy, compute_YY=yy,
                        sparse_mode=sparse_mode, modify_data=modify_data,
                        remove_mean=remove_mean, symmetrize=symmetrize,
                        column_selection=column_selection,
                        diag_only=diag_only, nsave=nsave)
Returns a running covariance estimator Returns an estimator object that can be fed chunks of X and Y data, and that can generate on-the-fly estimates of mean, covariance, running sum and second moment matrix. Parameters ---------- xx : bool Estimate the covariance of X xy : bool Estimate the cross-covariance of X and Y yy : bool Estimate the covariance of Y remove_mean : bool Remove the data mean in the covariance estimation symmetrize : bool Use symmetric estimates with sum defined by sum_t x_t + y_t and second moment matrices defined by X'X + Y'Y and Y'X + X'Y. modify_data : bool If remove_mean=True, the mean will be removed in the input data, without creating an independent copy. This option is faster but should only be selected if the input data is not used elsewhere. sparse_mode : str one of: * 'dense' : always use dense mode * 'sparse' : always use sparse mode if possible * 'auto' : automatic column_selection: ndarray(k, dtype=int) or None Indices of those columns that are to be computed. If None, all columns are computed. diag_only: bool If True, the computation is restricted to the diagonal entries (autocorrelations) only. nsave : int Depth of Moment storage. Moments computed from each chunk will be combined with Moments of similar statistical weight using the pairwise combination algorithm described in [1]_. References ---------- .. [1] http://i.stanford.edu/pub/cstr/reports/cs/tr/79/773/CS-TR-79-773.pdf
22,183
def stop_notifications(self):
    with self._notifications_lock:
        if not self.has_active_notification_thread:
            return
        thread = self._notifications_thread
        self._notifications_thread = None
        stopping = thread.stop()
        api = self._get_api(mds.NotificationsApi)
        api.delete_long_poll_channel()
        return stopping.wait()
Stop the notifications thread. :returns:
22,184
def __is_current(filepath):
    if not __DOWNLOAD_PARAMS[]:
        return True
    if not os.path.isfile(filepath):
        return False
    return datetime.datetime.utcfromtimestamp(os.path.getmtime(filepath)) \
        > __get_last_update_time()
Checks whether file is current
22,185
def convert_json_node(self, json_input):
    if type(json_input) in text_types:
        if self.escape:
            return cgi.escape(text(json_input))
        else:
            return text(json_input)
    if hasattr(json_input, 'items'):
        return self.convert_object(json_input)
    if hasattr(json_input, '__iter__') and hasattr(json_input, '__getitem__'):
        return self.convert_list(json_input)
    return text(json_input)
Dispatch JSON input according to the outermost type and process it to generate the super awesome HTML format. We try to adhere to duck typing such that users can just pass all kinds of funky objects to json2html that *behave* like dicts and lists and other basic JSON types.
22,186
def initialize_connection(self): tries = self.tries if self.socket is not None: self.socket.close() self.socket = None for callback in self._request_callbacks.values(): callback[].set() self.app_namespaces = [] self.destination_id = None self.session_id = None self._request_id = 0 self._request_callbacks = {} self._open_channels = [] self.connecting = True retry_log_fun = self.logger.error retries = {} def mdns_backoff(service, retry): now = time.time() retry[] = now + retry[] retry[] = min(retry[]*2, 300) retries[service] = retry while not self.stop.is_set() and (tries is None or tries > 0): retries = {key: retries[key] for key in self.services if ( key is not None and key in retries)} for service in self.services.copy(): now = time.time() retry = retries.get( service, {: self.retry_wait, : now}) if service and now < retry[]: continue try: self.socket = new_socket() self.socket.settimeout(self.timeout) self._report_connection_status( ConnectionStatus(CONNECTION_STATUS_CONNECTING, NetworkAddress(self.host, self.port))) mdns_backoff(service, retry) else: retry_log_fun( "[%s:%s] Failed to connect, retrying in %.1fs", self.fn or self.host, self.port, self.retry_wait) retry_log_fun = self.logger.debug if tries is None or tries > 1: self.logger.debug( "[%s:%s] Not connected, sleeping for %.1fs. Services: %s", self.fn or self.host, self.port, self.retry_wait, self.services) time.sleep(self.retry_wait) if tries: tries -= 1 self.stop.set() self.logger.error("[%s:%s] Failed to connect. No retries.", self.fn or self.host, self.port) raise ChromecastConnectionError("Failed to connect")
Initialize a socket to a Chromecast, retrying as necessary.
22,187
def to_json(self):
    a_per = self.analysis_period.to_json() if self.analysis_period else None
    return {'data_type': self.data_type.to_json(), 'unit': self.unit,
            'analysis_period': a_per, 'metadata': self.metadata}
Return a header as a dictionary.
22,188
async def run(*cmd):
    stdout = await checked_run(*cmd)
    log_path = os.path.join(FLAGS.base_dir, get_cmd_name(cmd) + '.log')
    with gfile.Open(log_path, 'w') as f:
        f.write(expand_cmd_str(cmd))
        f.write('\n')
        f.write(stdout)
        f.write('\n')
    return stdout.split('\n')
Run the given subprocess command in a coroutine. Args: *cmd: the command to run and its arguments. Returns: The output that the command wrote to stdout as a list of strings, one line per element (stderr output is piped to stdout). Raises: RuntimeError: if the command returns a non-zero result.
22,189
def _get_color(self):
    if self.clicked and self.hovered:
        color = mix(self.color, BLACK, 0.8)
    elif self.hovered and not self.flags & self.NO_HOVER:
        color = mix(self.color, BLACK, 0.93)
    else:
        color = self.color
    self.text.bg_color = color
    return color
Return the color of the button, depending on its state
22,190
def do_add_signature(input_file, output_file, signature_file):
    signature = open(signature_file, 'rb').read()
    if len(signature) == 256:
        hash_algo = 'sha1'
    elif len(signature) == 512:
        hash_algo = 'sha384'
    else:
        raise ValueError()
    with open(output_file, 'w+b') as dst:
        with open(input_file, 'rb') as src:
            add_signature_block(src, dst, hash_algo, signature)
Add a signature to the MAR file.
22,191
def generate_by_deltas(cls, options, width, put_inner_lte_delta, call_inner_lte_delta): raise Exception("Not Implemented starting at the 0.3.0 release") put_options_unsorted = list( filter(lambda x: x[] == , options)) put_options = cls.sort_by_strike_price(put_options_unsorted) deltas_as_strings = [x[] for x in put_options] deltas = cls.strings_to_np_array(deltas_as_strings) put_inner_index = np.argmin(deltas >= put_inner_lte_delta) - 1 put_outer_index = put_inner_index - width put_inner_leg = cls.gen_leg( put_options[put_inner_index]["instrument"], "sell") put_outer_leg = cls.gen_leg( put_options[put_outer_index]["instrument"], "buy") call_options_unsorted = list( filter(lambda x: x[] == , options)) call_options = cls.sort_by_strike_price(call_options_unsorted) deltas_as_strings = [x[] for x in call_options] x = np.array(deltas_as_strings) deltas = x.astype(np.float) ) ic_options = [ put_options[put_outer_index], put_options[put_inner_index], call_options[call_inner_index], call_options[call_outer_index] ] max_bid_ask_spread = cls.max_bid_ask_spread(ic_options) return {"legs": legs, "price": price, "max_bid_ask_spread": max_bid_ask_spread}
totally just playing around with ideas for the API. this IC sells - credit put spread - credit call spread the approach - set width for the wing spread (eg, 1, ie, 1 unit width spread) - set delta for inner leg of the put credit spread (eg, -0.2) - set delta for inner leg of the call credit spread (eg, 0.1)
22,192
def init_app(self, app):
    if self.path:
        self.register_endpoint(self.path, app)
    if self._export_defaults:
        self.export_defaults(
            self.buckets, self.group_by,
            self._defaults_prefix, app
        )
This callback can be used to initialize an application for the use with this prometheus reporter setup. This is usually used with a flask "app factory" configuration. Please see: http://flask.pocoo.org/docs/1.0/patterns/appfactories/ Note, that you need to use `PrometheusMetrics(app=None, ...)` for this mode, otherwise it is called automatically. :param app: the Flask application
22,193
def load_plugins():
    plugin_cls = {}
    for entry_point in pkg_resources.iter_entry_points():
        cls = entry_point.load()
        assert cls.COMMANDS is not None, \
            "plugin %s does not define its commands" % entry_point.name
        assert cls.ORDER is not None, \
            "plugin %s does not define its priority" % entry_point.name
        plugin_cls[entry_point.name] = cls
    return plugin_cls
Load all availabe plugins. Returns ------- plugin_cls : dict mapping from plugin names to plugin classes
22,194
def save_raw_pickle(hwr_objects): converted_hwr = [] translate = {} translate_id = {} model_path = pkg_resources.resource_filename(, ) translation_csv = os.path.join(model_path, ) arguments = {: , : } with open(translation_csv, , **arguments) as csvfile: contents = csvfile.read() lines = contents.split("\n") for csvrow in lines: csvrow = csvrow.split() if len(csvrow) == 1: writemathid = csvrow[0] latex = "" else: writemathid, latex = int(csvrow[0]), csvrow[1:] latex = .join(latex) translate[latex] = writemathid translate_id[writemathid] = latex for hwr in hwr_objects: hwr.formula_in_latex = translate_id[hwr.formula_id] formula_id2latex = {} for el in hwr_objects: if el.formula_id not in formula_id2latex: formula_id2latex[el.formula_id] = el.formula_in_latex for hwr in hwr_objects: hwr.formula_in_latex = translate_id[hwr.formula_id] hwr.raw_data_id = 42 converted_hwr.append({: 0, : hwr.formula_id, : hwr, : 42, : hwr.formula_in_latex}) with open(, ) as f: pickle.dump({: formula_id2latex, : converted_hwr}, f, protocol=pickle.HIGHEST_PROTOCOL)
Parameters ---------- hwr_objects : list of hwr objects
22,195
def convert(self):
    c = self.config
    c &= (~MCP342x._continuous_mode_mask & 0x7f)
    c |= MCP342x._not_ready_mask
    logger.debug( + hex(self.address) + + bin(c))
    self.bus.write_byte(self.address, c)
Initiate one-shot conversion. The current settings are used, with the exception of continuous mode.
22,196
def has_no_checked_field(self, locator, **kwargs):
    kwargs["checked"] = True
    return self.has_no_selector("field", locator, **kwargs)
Checks if the page or current node has no radio button or checkbox with the given label, value, or id that is currently checked. Args: locator (str): The label, name, or id of a checked field. **kwargs: Arbitrary keyword arguments for :class:`SelectorQuery`. Returns: bool: Whether it doesn't exist.
22,197
def set_value_all(self, twig=None, value=None, check_default=False, **kwargs):
    if twig is not None and value is None:
        if not isinstance(twig, str):
            value = twig
            twig = None
        elif not len(self.filter(twig=twig, check_default=check_default, **kwargs)):
            value = twig
            twig = None
    params = self.filter(twig=twig, check_default=check_default, **kwargs).to_list()
    if not kwargs.pop(, False) and not len(params):
        raise ValueError("no parameters found")
    for param in params:
        if "index" in kwargs.keys():
            return self.get_parameter(twig=twig, **kwargs).set_index_value(value=value, **kwargs)
        param.set_value(value=value, **kwargs)
Set the value of all returned :class:`Parameter`s in this ParameterSet. Any :class:`Parameter` that would be included in the resulting ParameterSet from a :func:`filter` call with the same arguments will have their value set. Note: setting the value of a Parameter in a ParameterSet WILL change that Parameter across any parent ParameterSets (including the :class:`phoebe.frontend.bundle.Bundle`) :parameter str twig: the twig to search for the parameter :parameter value: the value to set. Provide units, if necessary, by sending a Quantity object (ie 2.4*u.rad) :parameter bool check_default: whether to exclude any default values. Defaults to False (unlike all filtering). Note that this acts on the current ParameterSet so any filtering done before this call will EXCLUDE defaults by default. :parameter **kwargs: meta-tags to search
22,198
def get_event_attendee(self, id, attendee_id, **data):
    return self.get("/events/{0}/attendees/{1}/".format(id, attendee_id),
                    data=data)
GET /events/:id/attendees/:attendee_id/ Returns a single :format:`attendee` by ID, as the key ``attendee``.
22,199
def require_parents(packages):
    found = []
    for pkg in packages:
        base, sep, child = pkg.rpartition('.')
        if base and base not in found:
            continue
        found.append(pkg)
        yield pkg
Exclude any apparent package that apparently doesn't include its parent. For example, exclude 'foo.bar' if 'foo' is not present.
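Usage on a package list where one parent is missing (hypothetical names):

pkgs = ["foo", "foo.bar", "baz.qux"]
# 'baz.qux' is dropped because its parent 'baz' never appeared
assert list(require_parents(pkgs)) == ["foo", "foo.bar"]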