code: string (lengths 51–2.38k)
docstring: string (lengths 4–15.2k)
def merge_all_sections(prnt_sctns, child_sctns, style):
    doc = []

    prnt_only_raises = prnt_sctns["Raises"] and not (prnt_sctns["Returns"] or prnt_sctns["Yields"])
    if prnt_only_raises and (child_sctns["Returns"] or child_sctns["Yields"]):
        prnt_sctns["Raises"] = None

    for key in prnt_sctns:
        sect = merge_section(key, prnt_sctns[key], child_sctns[key], style)
        if sect is not None:
            doc.append(sect)
    return "\n\n".join(doc) if doc else None
Merge the doc-sections of the parent's and child's attribute into a single docstring.

Parameters
----------
prnt_sctns: OrderedDict[str, Union[None, str]]
child_sctns: OrderedDict[str, Union[None, str]]

Returns
-------
str
    Output docstring of the merged docstrings.
def default(self):
    if ZONE_NAME:
        log.info("Getting or creating default environment for zone with name '{0}'".format(DEFAULT_ENV_NAME()))
        zone_id = self.organization.zones[ZONE_NAME].id
        return self.organization.get_or_create_environment(name=DEFAULT_ENV_NAME(), zone=zone_id)

    def_envs = [env_j["id"] for env_j in self.json() if env_j["isDefault"]]
    if len(def_envs) > 1:
        log.warning('Found more than one default environment. Picking last.')
        return self[def_envs[-1]]
    elif len(def_envs) == 1:
        return self[def_envs[0]]
    raise exceptions.NotFoundError('Unable to get default environment')
Returns the environment marked as default. When a zone is set, the default mark makes no sense, so a special environment with the proper zone is returned instead.
def add_items_to_message(msg, log_dict):
    out = msg
    for key, value in log_dict.items():
        out += " {}={}".format(key, value)
    return out
Utility function to add dictionary items to a log message.
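A quick usage sketch (hypothetical values; on Python 3.7+ dict order is preserved, so items append in insertion order):

    msg = add_items_to_message("request failed", {"status": 500, "retries": 3})
    assert msg == "request failed status=500 retries=3"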
def is_already_locked(request, get_username=get_username_from_request, username=None):
    user_blocked = is_user_already_locked(username or get_username(request))
    ip_blocked = is_source_ip_already_locked(get_ip(request))

    if config.LOCKOUT_BY_IP_USERNAME:
        # Lock out only when both the IP and the username are blocked.
        return ip_blocked and user_blocked

    return ip_blocked or user_blocked
Parse the username & IP from the request, and see if it's already locked.
def load(self, json_file):
    cart_file = os.path.join(CART_LOCATION, json_file)
    try:
        cart_body = juicer.utils.read_json_document(cart_file)
    except IOError as e:
        juicer.utils.Log.log_error('an error occurred while accessing %s:' % cart_file)
        raise JuicerError(e.message)

    self.cart_name = cart_body['_id']
    if cart_body['current_env'] == '':
        self.current_env = juicer.utils.get_login_info()[1]['start_in']
    else:
        self.current_env = cart_body['current_env']

    for repo, items in cart_body['repos_items'].iteritems():
        self.add_repo(repo, items)
Build a cart from a JSON file.
def _remove_unit_rule(g, rule):
    new_rules = [x for x in g.rules if x != rule]
    refs = [x for x in g.rules if x.lhs == rule.rhs[0]]
    new_rules += [build_unit_skiprule(rule, ref) for ref in refs]
    return Grammar(new_rules)
Removes 'rule' from 'g' without changing the language produced by 'g'.
def to_obj(self, ns_info=None):
    if ns_info:
        ns_info.collect(self)

    if not hasattr(self, "_binding_class"):
        return None

    entity_obj = self._binding_class()
    for field, val in six.iteritems(self._fields):
        # Empty EntityLists serialize as absent elements.
        if isinstance(val, EntityList) and len(val) == 0:
            val = None
        elif field.multiple:
            if val:
                val = [_objectify(field, x, ns_info) for x in val]
            else:
                val = []
        else:
            val = _objectify(field, val, ns_info)
        setattr(entity_obj, field.name, val)

    self._finalize_obj(entity_obj)
    return entity_obj
Convert to a GenerateDS binding object.

Subclasses can override this function.

Returns:
    An instance of this Entity's ``_binding_class`` with properties
    set from this Entity.
def hpo_genes(phenotype_ids, username, password):
    if phenotype_ids:
        try:
            results = query_phenomizer.query(username, password, phenotype_ids)
            return [result for result in results if result['p_value'] is not None]
        except (SystemExit, RuntimeError):
            pass
    return None
Return list of HGNC symbols matching HPO phenotype ids.

Args:
    phenotype_ids (list): list of phenotype ids
    username (str): username to connect to phenomizer
    password (str): password to connect to phenomizer

Returns:
    query_result: a list of dictionaries on the form
    {
        'p_value': float,
        'gene_id': str,
        'omim_id': int,
        'orphanet_id': int,
        'decipher_id': int,
        'any_id': int,
        'mode_of_inheritance': str,
        'description': str,
        'raw_line': str
    }
def get_broadcast_transactions(coin_symbol='btc', limit=10, api_key=None):
    url = make_url(coin_symbol, 'txs')

    params = {}
    if api_key:
        params['token'] = api_key
    if limit:
        params['limit'] = limit

    r = requests.get(url, params=params, verify=True, timeout=TIMEOUT_IN_SECONDS)
    response_dict = get_valid_json(r)

    unconfirmed_txs = []
    for unconfirmed_tx in response_dict:
        # Convert the 'received' timestamp string to a datetime object.
        unconfirmed_tx['received'] = parser.parse(unconfirmed_tx['received'])
        unconfirmed_txs.append(unconfirmed_tx)
    return unconfirmed_txs
Get a list of broadcast but unconfirmed transactions.

Similar to bitcoind's getrawmempool method.
def OnCellSelected(self, event):
    key = row, col, tab = event.Row, event.Col, self.grid.current_table

    cell_attributes = self.grid.code_array.cell_attributes
    merging_cell = cell_attributes.get_merging_cell(key)

    # Redirect the selection to the top-left cell of a merge area.
    if merging_cell is not None and merging_cell != key:
        post_command_event(self.grid, self.grid.GotoCellMsg, key=merging_cell)
        if cell_attributes[merging_cell]["button_cell"]:
            self.grid.EnableCellEditControl()
        return

    if not self.grid.IsEditable():
        return

    self.grid.ForceRefresh()
    self.grid.lock_entry_line(
        self.grid.code_array.cell_attributes[key]["locked"])
    self.grid.update_entry_line(key)
    self.grid.update_attribute_toolbar(key)
    self.grid._last_selected_cell = key

    event.Skip()
Cell selection event handler
def generateDHCPOptionsTemplate(self, address_family):
    from ns1.ipam import DHCPOptions
    options = {}
    for option in DHCPOptions.OPTIONS[address_family]:
        options[option] = ""
    return options
Generate boilerplate dictionary to hold dhcp options

:param str address_family: dhcpv4 or dhcpv6
:return: dict containing valid option set for address family
def doseigs(s):
    A = s2a(s)  # convert s format to a 3x3 matrix
    tau, V = tauV(A)  # get eigenvalues and eigenvectors
    Vdirs = []
    for v in V:
        Vdir = cart2dir(v)
        # Flip eigenvectors with negative inclination to the other hemisphere.
        if Vdir[1] < 0:
            Vdir[1] = -Vdir[1]
            Vdir[0] = (Vdir[0] + 180.) % 360.
        Vdirs.append([Vdir[0], Vdir[1]])
    return tau, Vdirs
Convert s format to eigenvalues and eigenvectors.

Parameters
----------
s = [x11, x22, x33, x12, x23, x13] : the six tensor elements

Returns
-------
tau : [t1, t2, t3]
    a list of eigenvalues in decreasing order
V : [[V1_dec, V1_inc], [V2_dec, V2_inc], [V3_dec, V3_inc]]
    a list of the eigenvector directions
def crc_ihex(hexstr):
    crc = sum(bytearray(binascii.unhexlify(hexstr)))
    crc &= 0xff
    crc = ((~crc + 1) & 0xff)  # two's complement
    return crc
Calculate the CRC for given Intel HEX hexstring.
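As a sanity check, the data bytes of the classic Intel HEX record `:0300300002337A1E` (everything between the ':' and the stored checksum `1E`) should reproduce that checksum:

    import binascii  # already required by crc_ihex

    assert crc_ihex("0300300002337A") == 0x1E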
def maximum_size_estimated(self, sz):
    if not isinstance(sz, str):
        sz = str(sz)
    self._attributes["sz"] = sz
Set the CoRE Link Format sz attribute of the resource.

:param sz: the CoRE Link Format sz attribute
def get_works(self):
    Work = self._session.get_class(surf.ns.EFRBROO['F1_Work'])
    return list(Work.all())
Return the author's works.

:return: a list of `HucitWork` instances.
def id(self):
    def normalize(distro_id, table):
        distro_id = distro_id.lower().replace(' ', '_')
        return table.get(distro_id, distro_id)

    distro_id = self.os_release_attr('id')
    if distro_id:
        return normalize(distro_id, NORMALIZED_OS_ID)

    distro_id = self.lsb_release_attr('distributor_id')
    if distro_id:
        return normalize(distro_id, NORMALIZED_LSB_ID)

    distro_id = self.distro_release_attr('id')
    if distro_id:
        return normalize(distro_id, NORMALIZED_DISTRO_ID)

    distro_id = self.uname_attr('id')
    if distro_id:
        return normalize(distro_id, NORMALIZED_DISTRO_ID)

    return ''
Return the distro ID of the OS distribution, as a string. For details, see :func:`distro.id`.
def startswith(text, ignore_case=True):
    # Escape backslashes so the text is matched literally by the regex engine.
    if ignore_case:
        compiled = re.compile(
            "^%s" % text.replace("\\", "\\\\"), re.IGNORECASE)
    else:
        compiled = re.compile("^%s" % text.replace("\\", "\\\\"))
    return {"$regex": compiled}
Test if a string field starts with ``text``.

Example::

    filters = {"path": Text.startswith(r"C:\\")}
def try_acquire(self, permits=1, timeout=0):
    check_not_negative(permits, "Permits cannot be negative!")
    return self._encode_invoke(semaphore_try_acquire_codec, permits=permits,
                               timeout=to_millis(timeout))
Tries to acquire one or the given number of permits, if they are available, and returns
immediately, with the value ``true``, reducing the number of available permits by the
given amount.

If there are insufficient permits and a timeout is provided, the current thread becomes
disabled for thread scheduling purposes and lies dormant until one of following happens:

* some other thread invokes the release() method for this semaphore and the current
  thread is next to be assigned a permit, or
* some other thread interrupts the current thread, or
* the specified waiting time elapses.

If there are insufficient permits and no timeout is provided, this method will return
immediately with the value ``false`` and the number of available permits is unchanged.

:param permits: (int), the number of permits to acquire (optional).
:param timeout: (long), the maximum time in seconds to wait for the permit(s) (optional).
:return: (bool), ``true`` if desired amount of permits was acquired, ``false`` otherwise.
def p0(self):
    if self._p0 is None:
        raise ValueError("initial positions not set; run set_p0")
    # Convert to a dictionary keyed on the sampling parameter names.
    p0 = {param: self._p0[..., k]
          for (k, param) in enumerate(self.sampling_params)}
    return p0
A dictionary of the initial position of the walkers. This is set by using ``set_p0``. If not set yet, a ``ValueError`` is raised when the attribute is accessed.
def imethodcallPayload(self, methodname, localnsp, **kwargs):
    param_list = [pywbem.IPARAMVALUE(x[0], pywbem.tocimxml(x[1]))
                  for x in kwargs.items()]

    payload = cim_xml.CIM(
        cim_xml.MESSAGE(
            cim_xml.SIMPLEREQ(
                cim_xml.IMETHODCALL(
                    methodname,
                    cim_xml.LOCALNAMESPACEPATH(
                        [cim_xml.NAMESPACE(ns) for ns in localnsp.split('/')]),
                    param_list)),
            '1001', '1.0'),
        '2.0', '2.0')

    return self.xml_header + payload.toxml()
Generate the XML payload for an intrinsic method call.
def getProvIden(self, provstack):
    iden = _providen(provstack)
    misc, frames = provstack
    # Convert each frame's info tuples into dicts so they can be msgpacked.
    dictframes = [(typ, {k: v for (k, v) in info}) for (typ, info) in frames]
    bytz = s_msgpack.en((misc, dictframes))
    didwrite = self.slab.put(iden, bytz, overwrite=False, db=self.db)
    if didwrite:
        self.provseq.save([iden])
    return iden
Returns the iden corresponding to a provenance stack, storing the stack if it hasn't been seen before.
def scene_velocity(frames):
    reader = MessageReader(frames)
    results = reader.string("command").uint32("scene_id").uint32("velocity").assert_end().get()
    if results.command != "scene.velocity":
        raise MessageParserError("Command is not 'scene.velocity'")
    # Velocity is transmitted in thousandths.
    return (results.scene_id, results.velocity / 1000)
Parse a scene.velocity message.
def available():
    proc = popen_multiple(
        COMMANDS,
        ['-version'],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        creationflags=PROC_FLAGS,
    )
    proc.wait()
    return (proc.returncode == 0)
Detect if the FFmpeg backend can be used on this system.
def parse_geometry(geometry, ratio=None):
    if "%" not in geometry:
        return xy_geometry_parser(geometry, ratio)
    # "50%" -> 0.5
    return float(geometry.strip("%")) / 100.0
Enhanced parse_geometry parser with percentage support.
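A minimal sketch of the percentage branch (the non-percentage branch delegates to the library's own `xy_geometry_parser`, so only the ratio case is shown):

    assert parse_geometry("75%") == 0.75
    assert parse_geometry("5%") == 0.05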
def set(self, key, value, expires=None, future=None):
    with self._lock:
        try:
            self._dict[key].set(value, expires=expires, future=future)
        except KeyError:
            self._dict[key] = moment(value, expires=expires, future=future,
                                     lock=self._lock)
    return value
Set a value
def updateAARText(self):
    'Updates the displayed airspeed, altitude, climb rate Text'
    self.airspeedText.set_text('AR: %.1f m/s' % self.airspeed)
    self.altitudeText.set_text('ALT: %.1f m ' % self.relAlt)
    self.climbRateText.set_text('CR: %.1f m/s' % self.climbRate)
Updates the displayed airspeed, altitude, climb rate Text
def receive(self, sequence, args):
    if not self._reorder:
        self._callback(*args)
        return

    if self._next_expected is not None and sequence < self._next_expected:
        print("Dropping out of order packet, seq=%d" % sequence)
        return

    self._out_of_order.append((sequence, args))
    self._out_of_order.sort(key=lambda x: x[0])

    # Flush the queue while the head matches the next expected sequence.
    while len(self._out_of_order) > 0:
        seq, args = self._out_of_order[0]

        if self._next_expected is not None and seq != self._next_expected:
            return

        self._callback(*args)
        self._out_of_order.pop(0)
        self._next_expected = seq + 1
Receive one packet

If the sequence number is one we've already seen before, it is dropped.
If it is not the next expected sequence number, it is put into the
_out_of_order queue to be processed once the holes in sequence number
are filled in.

Args:
    sequence (int): The sequence number of the received packet
    args (list): The list of packet contents that will be passed to
        callback as callback(*args)
def list_buckets(
    self,
    max_results=None,
    page_token=None,
    prefix=None,
    projection="noAcl",
    fields=None,
    project=None,
):
    if project is None:
        project = self.project

    if project is None:
        raise ValueError("Client project not set: pass an explicit project.")

    extra_params = {"project": project}

    if prefix is not None:
        extra_params["prefix"] = prefix

    extra_params["projection"] = projection

    if fields is not None:
        extra_params["fields"] = fields

    return page_iterator.HTTPIterator(
        client=self,
        api_request=self._connection.api_request,
        path="/b",
        item_to_value=_item_to_bucket,
        page_token=page_token,
        max_results=max_results,
        extra_params=extra_params,
    )
Get all buckets in the project associated to the client.

This will not populate the list of blobs available in each bucket.

.. literalinclude:: snippets.py
    :start-after: [START list_buckets]
    :end-before: [END list_buckets]

This implements "storage.buckets.list".

:type max_results: int
:param max_results: Optional. The maximum number of buckets to return.

:type page_token: str
:param page_token: Optional. If present, return the next batch of buckets, using the
    value, which must correspond to the ``nextPageToken`` value returned in the previous
    response. Deprecated: use the ``pages`` property of the returned iterator instead of
    manually passing the token.

:type prefix: str
:param prefix: Optional. Filter results to buckets whose names begin with this prefix.

:type projection: str
:param projection: (Optional) Specifies the set of properties to return. If used, must
    be 'full' or 'noAcl'. Defaults to 'noAcl'.

:type fields: str
:param fields: (Optional) Selector specifying which fields to include in a partial
    response. Must be a list of fields. For example to get a partial response with just
    the next page token and the language of each bucket returned: 'items/id,nextPageToken'

:type project: str
:param project: (Optional) the project whose buckets are to be listed. If not passed,
    uses the project set on the client.

:rtype: :class:`~google.api_core.page_iterator.Iterator`
:raises ValueError: if both ``project`` is ``None`` and the client's project is also ``None``.
:returns: Iterator of all :class:`~google.cloud.storage.bucket.Bucket` belonging to this project.
def remote_pdb_handler(signum, frame):
    try:
        from remote_pdb import RemotePdb
        rdb = RemotePdb(host="127.0.0.1", port=0)
        rdb.set_trace(frame=frame)
    except ImportError:
        log.warning(
            "remote_pdb unavailable. Please install remote_pdb to "
            "allow remote debugging."
        )
    # Re-arm the handler so subsequent signals also drop into the debugger.
    signal.signal(signum, remote_pdb_handler)
Handler to drop us into a remote debugger upon receiving SIGUSR1
def _read_header(filename):
    with filename.open('rb') as f:
        h = f.read(HDR_LENGTH).decode()

    header = {}
    for line in h.split('\n'):
        if '=' in line:
            key, value = line.split(' = ')
            key = key.strip()[7:]
            value = value.strip()[:-1]
            header[key] = value
    return header
Read the text header for each file

Parameters
----------
channel_file : Path
    path to single filename with the header

Returns
-------
dict
    header
def make_response(response):
    # A bare string is interpreted as an HTML body.
    if isinstance(response, unicode) or \
            isinstance(response, str):
        response = (response, 'text/html')
    return response
Make response tuple

Potential features to be added:
- Parameters validation
def delete_vlan_entry(self, vlan_id):
    with self.session.begin(subtransactions=True):
        try:
            self.session.query(ucsm_model.PortProfile).filter_by(
                vlan_id=vlan_id).delete()
        except orm.exc.NoResultFound:
            return
Deletes entry for a vlan_id if it exists.
def has_group(user, group_name):
    return user.groups.filter(name=group_name).exists()
This allows specification of group-based permissions in templates. In most instances, creating model-based permissions and giving them to the desired group is preferable.
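A sketch of how this could be wired up, assuming it is registered as a Django template filter (the registration itself is hypothetical, not shown in the source):

    from django import template

    register = template.Library()

    # With this registration, templates can test membership with:
    #   {% if user|has_group:"editors" %} ... {% endif %}
    register.filter('has_group', has_group)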
def getFilepaths(self, filename):
    return (os.path.join(os.environ['HOME'], filename),
            os.path.join(self.mackup.mackup_folder, filename))
Get home and mackup filepaths for given file

Args:
    filename (str)

Returns:
    home_filepath, mackup_filepath (str, str)
def get_sources(self, plate, plate_value, sources=None):
    if sources is None:
        sources = []

    if self.sources:
        for si, source in enumerate(self.sources):
            if len(source.streams) == 1 and None in source.streams:
                sources.append(source.streams[None])
            elif plate_value in source.streams:
                sources.append(source.streams[plate_value])

    if not plate.is_root:
        # Strip this plate's meta-data to get the parent's plate value.
        parent_plate_value = tuple(pv for pv in plate_value if pv[0] != plate.meta_data_id)
        sources = self.get_sources(plate.parent, parent_plate_value, sources)

    return sources
Gets the source streams for a given plate value on a plate.

Also populates with source streams that are valid for the parent plates of this plate,
with the appropriate meta-data for the parent plate.

:param plate: The plate being operated on
:param plate_value: The specific plate value of interest
:param sources: The currently found sources (for recursion)
:return: The appropriate source streams
:type plate: Plate
:type plate_value: tuple
:type sources: list[Stream] | None
def _get_by_id(cls, id, parent=None, **ctx_options):
    return cls._get_by_id_async(id, parent=parent, **ctx_options).get_result()
Returns an instance of Model class by ID.

This is really just a shorthand for Key(cls, id, ...).get().

Args:
    id: A string or integer key ID.
    parent: Optional parent key of the model to get.
    namespace: Optional namespace.
    app: Optional app ID.
    **ctx_options: Context options.

Returns:
    A model instance or None if not found.
def deriv2(self, p):
    numer = -(1 + 2 * self.alpha * p)
    denom = (p + self.alpha * p**2)**2
    return numer / denom
Second derivative of the negative binomial link function.

Parameters
----------
p : array-like
    Mean parameters

Returns
-------
g''(p) : array
    The second derivative of the negative binomial transform link function

Notes
-----
g''(x) = -(1+2*alpha*x)/(x+alpha*x^2)^2
def dewpoint_temperature(temp, hum):
    assert(temp.shape == hum.shape)

    vap_press = vapor_pressure(temp, hum)

    positives = np.array(temp >= 273.15)
    dewpoint_temp = temp.copy() * np.nan

    # Magnus formula with different coefficients over water and over ice.
    dewpoint_temp[positives] = 243.12 * np.log(vap_press[positives] / 6.112) / \
        (17.62 - np.log(vap_press[positives] / 6.112))
    dewpoint_temp[~positives] = 272.62 * np.log(vap_press[~positives] / 6.112) / \
        (22.46 - np.log(vap_press[~positives] / 6.112))

    return dewpoint_temp + 273.15
Computes the dewpoint temperature.

Parameters
----------
temp : temperature [K]
hum : relative humidity

Returns
-------
dewpoint temperature in K
def __argument(self, ttype, tvalue):
    if ttype in ["multiline", "string"]:
        return self.__curcommand.check_next_arg("string", tvalue.decode("utf-8"))

    if ttype in ["number", "tag"]:
        return self.__curcommand.check_next_arg(ttype, tvalue.decode("ascii"))

    if ttype == "left_bracket":
        self.__cstate = self.__stringlist
        self.__curstringlist = []
        self.__set_expected("string")
        return True

    condition = (
        ttype in ["left_cbracket", "comma"]
        and self.__curcommand.non_deterministic_args
    )
    if condition:
        self.__curcommand.reassign_arguments()
        # rewind the lexer so the token is parsed again
        self.lexer.pos -= 1
        return True

    return False
Argument parsing method

This method acts as an entry point for 'argument' parsing.

Syntax:
    string-list / number / tag

:param ttype: current token type
:param tvalue: current token value
:return: False if an error is encountered, True otherwise
def get(self):
    data = dict()
    for label, entry in zip(self.keys, self.values):
        data[label.cget('text')] = entry.get()
    return data
Retrieve the GUI elements for program use.

:return: a dictionary containing all of the data from the key/value entries
def ListOf(element_type, element_none_value=None):
    from pyws.functions.args.types import TypeFactory
    element_type = TypeFactory(element_type)
    return type(element_type.__name__ + 'List', (List,), {
        'element_type': element_type,
        'element_none_value': element_none_value})
This function creates a list type with element type ``element_type``
and an empty element value ``element_none_value``.

>>> from pyws.functions.args import Integer, ListOf
>>> lst = ListOf(int)
>>> issubclass(lst, List)
True
>>> lst.__name__
'IntegerList'
>>> lst.element_type == Integer
True
def read_locations(filename):
    data = ConfigParser()
    if filename == '-':
        data.read_file(sys.stdin)
    else:
        data.read(filename)

    if not data.sections():
        logging.debug('Config file is empty')

    locations = {}
    for name in data.sections():
        if data.has_option(name, 'locator'):
            latitude, longitude = utils.from_grid_locator(data.get(name, 'locator'))
        else:
            latitude = data.getfloat(name, 'latitude')
            longitude = data.getfloat(name, 'longitude')
        locations[name] = (latitude, longitude)
    return locations
Pull locations from a user's config file.

Args:
    filename (str): Config file to parse

Returns:
    dict: Locations from the config file, keyed by name
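For illustration, a config file exercising both branches might look like this (hypothetical names and coordinates; a `locator` option is resolved through `from_grid_locator`, otherwise explicit latitude/longitude is read):

    [Home]
    latitude = 52.015
    longitude = -0.221

    [Work]
    locator = IO92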
def make_directory(path):
    try:
        makedirs(path)
        logging.debug('Directory created: {0}'.format(path))
    except OSError as e:
        # Ignore the error if the directory already exists.
        if e.errno != errno.EEXIST:
            raise
Create the directory if it does not exist.
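Because EEXIST is swallowed, the helper is idempotent; a quick sketch (hypothetical path):

    make_directory('/tmp/example/nested')
    make_directory('/tmp/example/nested')  # second call is a no-op, not an error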
def _get_description(self, args: Tuple, kwargs: Dict[str, Any]) -> Dict[str, Any]:
    return {
        'id': uuid1().hex,
        'args': args,
        'kwargs': kwargs,
        'module': self._module_name,
        'function': self.f.__name__,
        'sender_hostname': socket.gethostname(),
        'sender_pid': os.getpid(),
        'sender_cmd': ' '.join(sys.argv),
        'sender_timestamp': datetime.utcnow().isoformat()[:19],
    }
Return the dictionary to be sent to the queue.
def fromOctetString(cls, value, internalFormat=False, prepend=None, padding=0):
    value = SizedInteger(integer.from_bytes(value) >> padding).setBitLength(
        len(value) * 8 - padding)

    if prepend is not None:
        value = SizedInteger(
            (SizedInteger(prepend) << len(value)) | value
        ).setBitLength(len(prepend) + len(value))

    if not internalFormat:
        value = cls(value)

    return value
Create a |ASN.1| object initialized from a string.

Parameters
----------
value: :class:`str` (Py2) or :class:`bytes` (Py3)
    Text string like '\\\\x01\\\\xff' (Py2) or b'\\\\x01\\\\xff' (Py3)
def release(ctx, deploy=False, test=False, version=''):
    if test:
        run("python setup.py check")
        run("python setup.py register sdist upload --dry-run")

    if deploy:
        run("python setup.py check")
        if version:
            run("git checkout master")
            run("git tag -a v{ver} -m 'v{ver}'".format(ver=version))
            run("git push")
            run("git push origin --tags")
        run("python setup.py sdist bdist_wheel")
        run("twine upload --skip-existing dist/*")
    else:
        print("- Have you updated the version?")
        print("- Have you updated CHANGELOG.md, README.md, and AUTHORS.md?")
        print("- Have you fixed any last minute bugs?")
        print("- Have you merged changes for release into the master branch?")
        print("If you answered yes to all of the above questions,")
        print("then run `inv release --deploy -vX.YY.ZZ` to:")
        print("- Checkout master")
        print("- Tag the git release with provided vX.YY.ZZ version")
        print("- Push the master branch and tags to repo")
Tag release, run Travis-CI, and deploy to PyPI
def dumps(d, indent=4, spacer=" ", quote='"', newlinechar="\n", end_comment=False, **kwargs):
    return _pprint(d, indent, spacer, quote, newlinechar, end_comment, **kwargs)
Output a Mapfile dictionary as a string

Parameters
----------
d: dict
    A Python dictionary based on the mappyfile schema
indent: int
    The number of ``spacer`` characters to indent structures in the Mapfile
spacer: string
    The character to use for indenting structures in the Mapfile. Typically
    spaces or tab characters (``\\t``)
quote: string
    The quote character to use in the Mapfile (double or single quotes)
newlinechar: string
    The character used to insert newlines in the Mapfile
end_comment: bool
    Add a comment with the block type at each closing END statement e.g. END # MAP

Returns
-------
string
    The Mapfile as a string

Example
-------
To open a Mapfile from a string, and then print it back out as a string using tabs::

    s = '''MAP NAME "TEST" END'''
    d = mappyfile.loads(s)
    print(mappyfile.dumps(d, indent=1, spacer="\\t"))
def collect_publications(self):
    pubs = list(self.sub_publications)
    for sub_tree in self.sub_trees:
        pubs.extend(sub_tree.collect_publications())
    return pubs
Recursively collect list of all publications referenced in this tree and all sub-trees.

Returns:
    list: List of UUID strings.
def _combine_nt_vals(lst0_lstn, flds, dflt_null):
    vals = []
    for fld in flds:
        fld_seen = False
        # Take the value from the first namedtuple that carries this field.
        for nt_curr in lst0_lstn:
            if hasattr(nt_curr, fld):
                vals.append(getattr(nt_curr, fld))
                fld_seen = True
                break
        if fld_seen is False:
            vals.append(dflt_null)
    return vals
Given a list of lists of nts, return a single namedtuple.
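A minimal sketch of the field-resolution order (hypothetical namedtuples; the first namedtuple that has a field wins, and missing fields fall back to the null default):

    from collections import namedtuple

    A = namedtuple('A', 'x')
    B = namedtuple('B', 'y')
    assert _combine_nt_vals([A(1), B(2)], ('x', 'y', 'z'), None) == [1, 2, None]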
def value(self):
    result = self._value

    if result is None and self._svalue is not None:
        # Lazily parse the serialized value.
        try:
            result = self._value = self.resolve()
        except Exception as e:
            reraise(
                Parameter.Error,
                Parameter.Error('Call the method "resolve" first.')
            )
    return result
Get parameter value.

If this cached value is None and this serialized value is not None,
calculate the new value from the serialized one.

:return: parameter value.
:raises: TypeError if serialized value is not an instance of self ptype.
    ParserError if parsing step raised an error.
def rsum(self, time_period: str, num_col: str="Number", dateindex: str=None):
    try:
        df = self._resample_("sum", time_period, num_col, dateindex)
        self.df = df
        if df is None:
            self.err("Can not sum data")
    except Exception as e:
        self.err(e, "Can not sum data")
Resample the main dataframe to a time period and add a sum column

:param time_period: unit + period: periods are Y, M, D, H, Min, S
:type time_period: str
:param num_col: name of the new column, defaults to "Number"
:type num_col: str, optional
:param dateindex: column name to use as date index, defaults to None
:type dateindex: str, optional

:example: ``ds.rsum("1D")``
def _seek_to_extent(self, extent):
    self._cdfp.seek(extent * self.pvd.logical_block_size())
An internal method to seek to a particular extent on the input ISO.

Parameters:
    extent - The extent to seek to.
Returns:
    Nothing.
def port_manager(self):
    if self._port_manager is None:
        self._port_manager = PortManager.instance()
    return self._port_manager
Returns the port manager.

:returns: Port manager
def pack(self):
    self.headers.setdefault('content-length', len(self.body))

    headerparts = ("{0}:{1}\n".format(key, value)
                   for key, value in self.headers.items())

    return six.b("{0}\n{1}\n".format(self.cmd, "".join(headerparts))) + \
        (self.body if isinstance(self.body, six.binary_type) else six.b(self.body)) + \
        six.b('\x00')
Create a string representation from object state.

@return: The string (bytes) for this stomp frame.
@rtype: C{str}
def devices(self):
    devices = []
    count = self.lib.tdGetNumberOfDevices()
    for i in range(count):
        device = DeviceFactory(self.lib.tdGetDeviceId(i), lib=self.lib)
        devices.append(device)
    return devices
Return all known devices.

:return: list of :class:`Device` or :class:`DeviceGroup` instances.
def run(self):
    self._initialize_run()

    stimuli = self.protocol_model.allTests()
    self.acq_thread = threading.Thread(target=self._worker, args=(stimuli,))

    if self.save_data:
        info = {'calibration_used': self.calname,
                'calibration_range': self.cal_frange}
        self.datafile.set_metadata(self.current_dataset_name, info)

    self.start_time = time.time()
    self.last_tick = self.start_time - (self.interval / 1000)
    self.acq_thread.start()
    return self.acq_thread
Runs the acquisition
def get_legacy_storage_path(self):
    config_dir = os.path.dirname(
        self.py3_wrapper.config.get("i3status_config_path", "/tmp")
    )
    storage_path = os.path.join(config_dir, "py3status.data")
    if os.path.exists(storage_path):
        return storage_path
    else:
        return None
Detect and return existing legacy storage path.
def _get_rest_doc(self, request, start_response):
    api = request.body_json['api']
    version = request.body_json['version']

    generator = discovery_generator.DiscoveryGenerator(request=request)
    services = [s for s in self._backend.api_services
                if s.api_info.name == api and s.api_info.api_version == version]
    doc = generator.pretty_print_config_to_json(services)
    if not doc:
        error_msg = ('Failed to convert .api to discovery doc for '
                     'version %s of api %s') % (version, api)
        _logger.error('%s', error_msg)
        return util.send_wsgi_error_response(error_msg, start_response)
    return self._send_success_response(doc, start_response)
Sends back HTTP response with API directory.

This calls start_response and returns the response body. It will return
the discovery doc for the requested api/version.

Args:
    request: An ApiRequest, the transformed request sent to the Discovery API.
    start_response: A function with semantics defined in PEP-333.

Returns:
    A string, the response body.
def set_status(self, status):
    self.status = status
    for callback in self._update_status_callbacks:
        callback(self)
Save the new status and call all defined callbacks
def __plain_bfs(adj, source):
    seen = set()
    nextlevel = {source}
    while nextlevel:
        thislevel = nextlevel
        nextlevel = set()
        for v in thislevel:
            if v not in seen:
                yield v
                seen.add(v)
                nextlevel.update(adj[v])
Modified NetworkX fast BFS node generator.
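A small sketch over a plain adjacency dict (hypothetical graph; the generator yields each node once, level by level, starting from the source):

    adj = {'a': {'b', 'c'}, 'b': {'a', 'd'}, 'c': {'a'}, 'd': {'b'}}
    assert list(__plain_bfs(adj, 'a'))[0] == 'a'
    assert set(__plain_bfs(adj, 'a')) == {'a', 'b', 'c', 'd'}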
def compose_capability(base, *classes):
    if _debug:
        compose_capability._debug("compose_capability %r %r", base, classes)

    if not issubclass(base, Collector):
        raise TypeError("base must be a subclass of Collector")
    for cls in classes:
        if not issubclass(cls, Capability):
            raise TypeError("%s is not a Capability subclass" % (cls,))

    bases = (base,) + classes

    # build a name like "Base+Cap1+Cap2"
    name = base.__name__
    for cls in classes:
        name += '+' + cls.__name__

    return type(name, bases, {})
Create a new class starting with the base and adding capabilities.
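A sketch, assuming `Collector` and `Capability` are the library's base classes that the function checks against:

    class Reader(Collector):
        pass

    class Logging(Capability):
        pass

    ReaderWithLogging = compose_capability(Reader, Logging)
    assert ReaderWithLogging.__name__ == 'Reader+Logging'
    assert issubclass(ReaderWithLogging, (Reader, Logging))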
def tableNames(self, dbName=None):
    if dbName is None:
        return [name for name in self._ssql_ctx.tableNames()]
    else:
        return [name for name in self._ssql_ctx.tableNames(dbName)]
Returns a list of names of tables in the database ``dbName``.

:param dbName: string, name of the database to use. Default to the current database.
:return: list of table names, in string

>>> sqlContext.registerDataFrameAsTable(df, "table1")
>>> "table1" in sqlContext.tableNames()
True
>>> "table1" in sqlContext.tableNames("default")
True
def on(self, val=None):
    if val is False:
        raise ParameterError("Turning the ValuedParameter on with value "
                             "False is the same as turning it off. Use "
                             "another value.")
    elif self.IsPath:
        self.Value = FilePath(val)
    else:
        self.Value = val
Turns the MixedParameter ON by setting its Value to val

An attempt to turn the parameter on with value 'False' will result in an
error, since this is the same as turning the parameter off.

Turning the MixedParameter ON without a value or with value 'None' will
let the parameter behave as a flag.
def add_paths_argument(cls, group, argname, dest=None, help_=None):
    prefixed = '%s-%s' % (cls.argument_prefix, argname)
    if dest is None:
        dest = prefixed.replace('-', '_')
        final_dest = dest[len(cls.argument_prefix) + 1:]
    else:
        final_dest = dest
        dest = '%s_%s' % (cls.argument_prefix, dest)

    group.add_argument('--%s' % prefixed, action='store', nargs='+',
                       dest=dest, help=help_)
    cls.paths_arguments[dest] = final_dest
Subclasses may call this to expose a paths argument.

Args:
    group: argparse.ArgumentGroup, the extension argument group
    argname: str, the name of the argument, will be namespaced.
    dest: str, similar to the `dest` argument of
        `argparse.ArgumentParser.add_argument`, will be namespaced.
    help_: str, similar to the `help` argument of
        `argparse.ArgumentParser.add_argument`.
def items(self):
    request = get(str(self.url), headers={'User-Agent': "Magic Browser",
                                          "origin_req_host": "thepiratebay.se"})
    root = html.fromstring(request.text)
    items = [self._build_torrent(row) for row in self._get_torrent_rows(root)]
    for item in items:
        yield item
Request URL and parse response.

Yield a ``Torrent`` for every torrent on page.
def local_scope(self):
    self.scope = self.scope.new_child()
    try:
        yield self.scope
    finally:
        # Restore the enclosing scope even if the block raises.
        self.scope = self.scope.parents
Assign symbols to local variables.
def main(argv):
    p = argparse.ArgumentParser()
    p.add_argument('-s', '--scope', nargs='+')
    p.add_argument('-o', '--oauth-service', default='google')
    p.add_argument('-i', '--client-id')
    p.add_argument('-x', '--client-secret')
    p.add_argument('-r', '--redirect-uri')
    p.add_argument('-f', '--client-secrets')
    args = p.parse_args(argv)

    client_args = (args.client_id, args.client_secret, args.redirect_uri)
    if any(client_args) and not all(client_args):
        print('Must provide none of client-id, client-secret and redirect-uri;'
              ' or all of them.')
        p.print_usage()
        return 1

    print(args.scope)
    if not args.scope:
        print('Scope must be provided.')
        p.print_usage()
        return 1

    config = WizardClientConfig()
    config.scope = ' '.join(args.scope)
    print(run_local(UserOAuth2(config))['access_token'])
    return 0
Entry point for command line script to perform OAuth 2.0.
def compute_hash(func, string):
    h = func()
    h.update(string)
    return h.hexdigest()
Compute hash of string using given hash function.
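For example, with a hash constructor from ``hashlib`` (the well-known SHA-256 digest of ``b"abc"`` starts with ``ba7816bf``):

    import hashlib

    digest = compute_hash(hashlib.sha256, b"abc")
    assert digest.startswith("ba7816bf")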
def _split_repo_str(repo):
    split = sourceslist.SourceEntry(repo)
    return split.type, split.architectures, split.uri, split.dist, split.comps
Return APT source entry as a tuple.
def intersect(self, range_):
    self.solver.intersection_broad_tests_count += 1

    if range_.is_any():
        return self

    if self.solver.optimised:
        if range_ in self.been_intersected_with:
            return self

    if self.pr:
        self.pr.passive("intersecting %s wrt range '%s'...", self, range_)

    self.solver.intersection_tests_count += 1

    with self.solver.timed(self.solver.intersection_time):
        entries = [x for x in self.entries if x.version in range_]

    if not entries:
        return None
    elif len(entries) < len(self.entries):
        copy_ = self._copy(entries)
        copy_.been_intersected_with.add(range_)
        return copy_
    else:
        self.been_intersected_with.add(range_)
        return self
Remove variants whose version fall outside of the given range.
def unsigned_request(self, path, payload=None):
    headers = {"content-type": "application/json",
               "accept": "application/json",
               "X-accept-version": "2.0.0"}
    try:
        if payload:
            response = requests.post(self.uri + path, verify=self.verify,
                                     data=json.dumps(payload), headers=headers)
        else:
            response = requests.get(self.uri + path, verify=self.verify,
                                    headers=headers)
    except Exception:
        raise BitPayConnectionError('Connection refused')
    return response
Generic BitPay unsigned wrapper. Passing a payload will do a POST, otherwise a GET.
def command_line_runner():
    parser = get_parser()
    args = vars(parser.parse_args())

    if args['version']:
        print(__version__)
        return

    if args['clear_cache']:
        utils.clear_cache()
        print('Cleared {0}.'.format(utils.CACHE_DIR))
        return

    if not args['query']:
        parser.print_help()
        return

    if not os.getenv('SCRAPE_DISABLE_CACHE'):
        utils.enable_cache()

    if os.getenv('SCRAPE_DISABLE_IMGS'):
        args['no_images'] = True

    prompt_filetype(args)
    prompt_save_images(args)
    scrape(args)
Handle command-line interaction.
def _firestore_api(self):
    if self._firestore_api_internal is None:
        self._firestore_api_internal = firestore_client.FirestoreClient(
            credentials=self._credentials
        )
    return self._firestore_api_internal
Lazy-loading getter for the GAPIC Firestore API.

Returns:
    ~.gapic.firestore.v1beta1.firestore_client.FirestoreClient: The
    GAPIC client with the credentials of the current client.
def request_halt(self, req, msg):
    f = Future()

    @gen.coroutine
    def _halt():
        req.reply("ok")
        yield gen.moment
        self.stop(timeout=None)
        raise AsyncReply

    self.ioloop.add_callback(lambda: chain_future(_halt(), f))
    return f
Halt the device server.

Returns
-------
success : {'ok', 'fail'}
    Whether scheduling the halt succeeded.

Examples
--------
::

    ?halt
    !halt ok
def get_devices(self):
    devices = []
    for element in self.get_device_elements():
        device = FritzhomeDevice(self, node=element)
        devices.append(device)
    return devices
Get the list of all known devices.
def retrieve_all(self, subset=None):
    get_object = self.factory.get_object
    obj_class = self.obj_class
    full_objects = [get_object(obj_class, list_obj.id, subset)
                    for list_obj in self]
    return JSSObjectList(self.factory, obj_class, full_objects)
Return a list of all JSSListData elements as full JSSObjects.

This can take a long time given a large number of objects, and depending
on the size of each object. Subsetting to only include the data you need
can improve performance.

Args:
    subset: For objects which support it, a list of sub-tags to request,
        or an "&" delimited string, (e.g. "general&purchasing").
        Default to None.
def save(self, filename):
    options = conf.lib.clang_defaultSaveOptions(self)
    result = int(conf.lib.clang_saveTranslationUnit(self, filename, options))
    if result != 0:
        raise TranslationUnitSaveError(result, 'Error saving TranslationUnit.')
Saves the TranslationUnit to a file.

This is equivalent to passing -emit-ast to the clang frontend. The saved
file can be loaded back into a TranslationUnit. Or, if it corresponds to
a header, it can be used as a pre-compiled header file.

If an error occurs while saving, a TranslationUnitSaveError is raised.
If the error was TranslationUnitSaveError.ERROR_INVALID_TU, this means
the constructed TranslationUnit was not valid at time of save. In this
case, the reason(s) why should be available via
TranslationUnit.diagnostics().

filename -- The path to save the translation unit to.
def mutualReceptions(self, idA, idB):
    AB = self.receives(idA, idB)
    BA = self.receives(idB, idA)
    # Cartesian product of dignities received in both directions.
    return [(a, b) for a in AB for b in BA]
Returns all pairs of dignities in mutual reception.
def parse_problem(self, problem_content):
    del problem_content["@order"]
    return self.task_factory.get_problem_types().get(problem_content["type"]).parse_problem(problem_content)
Parses a problem, modifying some data
def _autodiscover(self):
    if not getattr(self, '_registerable_class', None):
        raise ImproperlyConfigured('You must set a '
                                   '"_registerable_class" property '
                                   'in order to use autodiscovery.')

    for mod_name in ('dashboard', 'panel'):
        for app in settings.INSTALLED_APPS:
            mod = import_module(app)
            try:
                # Snapshot the registry so a failed import can be rolled back.
                before_import_registry = copy.copy(self._registry)
                import_module('%s.%s' % (app, mod_name))
            except Exception:
                self._registry = before_import_registry
                if module_has_submodule(mod, mod_name):
                    raise
Discovers modules to register from ``settings.INSTALLED_APPS``. This makes sure that the appropriate modules get imported to register themselves with Horizon.
def load_tag_library(libname):
    from django.template.backends.django import get_installed_libraries
    from django.template.library import InvalidTemplateLibrary
    try:
        lib = get_installed_libraries()[libname]
        lib = importlib.import_module(lib).register
        return lib
    except (InvalidTemplateLibrary, KeyError):
        return None
Load a templatetag library on multiple Django versions. Returns None if the library isn't loaded.
def _get_generic_schema(self):
    schema = Schema(
        identifier=ID(stored=True),
        type=ID(stored=True),
        name=NGRAM(phrase=True, stored=True, minsize=2, maxsize=8))
    return schema
Returns whoosh's generic schema.
def spin1x_from_xi1_phi_a_phi_s(xi1, phi_a, phi_s):
    phi1 = phi1_from_phi_a_phi_s(phi_a, phi_s)
    return xi1 * numpy.cos(phi1)
Returns x-component spin for primary mass.
def serialize(self, value, state):
    if not value and self.required:
        state.raise_error(MissingValue,
                          'Missing required aggregate "{}"'.format(self.element_path))
    start_element, end_element = _element_path_create_new(self.element_path)
    self._serialize(end_element, value, state)
    return start_element
Serialize the value to a new element and return the element.
def upgrade_account(self, account=None, **kwargs):
    if not account:
        if "default_account" in self.config:
            account = self.config["default_account"]
    if not account:
        raise ValueError("You need to provide an account")
    account = Account(account, blockchain_instance=self)
    op = operations.Account_upgrade(
        **{
            "fee": {"amount": 0, "asset_id": "1.3.0"},
            "account_to_upgrade": account["id"],
            "upgrade_to_lifetime_member": True,
            "prefix": self.prefix,
        }
    )
    return self.finalizeOp(op, account["name"], "active", **kwargs)
Upgrade an account to Lifetime membership

:param str account: (optional) the account to allow access to
    (defaults to ``default_account``)
def add(workflow_definition: dict, templates_root: str):
    schema_path = join(dirname(__file__), 'schema', 'workflow_definition.json')
    with open(schema_path, 'r') as file:
        schema = json.loads(file.read())
    jsonschema.validate(workflow_definition, schema)

    _id = workflow_definition['id']
    _version = workflow_definition['version']
    _load_templates(workflow_definition, templates_root)

    workflow_id = workflow_definition['id']
    version = workflow_definition['version']
    name = "workflow_definitions:{}:{}".format(workflow_id, version)
    if DB.get_keys(name):
        raise KeyError('Workflow definition already exists: {}'.format(name))
    DB.save_dict(name, workflow_definition, hierarchical=False)
Add a workflow definition to the Configuration Database.

Templates are expected to be found in a directory tree with the
following structure:

- workflow_id:
  |- workflow_version
     |- stage_id
        |- stage_version
           |- <templates>

Args:
    workflow_definition (dict): Workflow definition.
    templates_root (str): Workflow templates root path
def to_hg_scheme_url(cls, url):
    regexes = cls._get_url_scheme_regexes()
    for scheme_key, pattern, regex in regexes:
        match = regex.match(url)
        if match is not None:
            groups = match.groups()
            if len(groups) == 2:
                return u''.join((
                    scheme_key, '://',
                    pattern.replace('{1}', groups[0]),
                    groups[1]))
            elif len(groups) == 1:
                return u''.join((scheme_key, '://', pattern, groups[0]))
Convert a URL to local mercurial URL schemes

Args:
    url (str): URL to map to local mercurial URL schemes

example::

    # schemes.gh = git://github.com/
    >> remote_url = 'git://github.com/westurner/dotfiles'
    >> to_hg_scheme_url(remote_url)
    << gh://westurner/dotfiles
def get_repo_parent(path):
    if is_repo(path):
        return Local(path)
    elif not os.path.isdir(path):
        _rel = ''
        # Walk up the tree until a repo root or the filesystem root is found.
        while path and path != '/':
            if is_repo(path):
                return Local(path)
            else:
                _rel = os.path.join(os.path.basename(path), _rel)
                path = os.path.dirname(path)
    return path
Returns parent repo or input path if none found.

:return: grit.Local or path
def sortByIndex(self, index):
    self.table_level.horizontalHeader().setSortIndicatorShown(True)
    sort_order = self.table_level.horizontalHeader().sortIndicatorOrder()
    self.table_index.model().sort(index, sort_order)
    self._sort_update()
Implement an index sort.
def __log_number_of_constants(self):
    n_id = len(self._labels)
    n_widths = len(self._constants) - n_id

    self._io.writeln('')
    self._io.text('Number of constants based on column widths: {0}'.format(n_widths))
    self._io.text('Number of constants based on database IDs : {0}'.format(n_id))
Logs the number of constants generated.
def get_geotiff_area_def(filename, crs):
    from osgeo import gdal
    from pyresample.geometry import AreaDefinition
    from pyresample.utils import proj4_str_to_dict

    fid = gdal.Open(filename)
    geo_transform = fid.GetGeoTransform()
    pcs_id = fid.GetProjection().split('"')[1]
    min_x = geo_transform[0]
    max_y = geo_transform[3]
    x_size = fid.RasterXSize
    y_size = fid.RasterYSize
    max_x = min_x + geo_transform[1] * x_size
    min_y = max_y + geo_transform[5] * y_size
    area_extent = [min_x, min_y, max_x, max_y]
    area_def = AreaDefinition('geotiff_area', pcs_id, pcs_id,
                              proj4_str_to_dict(crs),
                              x_size, y_size, area_extent)
    return area_def
Read area definition from a geotiff.
def masked_middle_mfcc(self):
    begin, end = self._masked_middle_begin_end()
    return (self.masked_mfcc)[:, begin:end]
Return the MFCC speech frames in the MIDDLE portion of the wave.

:rtype: :class:`numpy.ndarray` (2D)
async def get_speaker_settings(self) -> List[Setting]:
    speaker_settings = await self.services["audio"]["getSpeakerSettings"]({})
    return [Setting.make(**x) for x in speaker_settings]
Return speaker settings.
def exists(self):
    client = self._instance._client
    try:
        client.instance_admin_client.get_cluster(name=self.name)
        return True
    except NotFound:
        return False
Check whether the cluster already exists.

For example:

.. literalinclude:: snippets.py
    :start-after: [START bigtable_check_cluster_exists]
    :end-before: [END bigtable_check_cluster_exists]

:rtype: bool
:returns: True if the cluster exists, else False.
def get_owner_asset_ids(self, address):
    block_filter = self._get_event_filter(owner=address)
    log_items = block_filter.get_all_entries(max_tries=5)
    did_list = []
    for log_i in log_items:
        did_list.append(id_to_did(log_i.args['_did']))
    return did_list
Get the list of assets owned by an address owner.

:param address: ethereum account address, hex str
:return:
def people(self):
    if self.cache['people']:
        return self.cache['people']

    people_xml = self.bc.people_within_project(self.id)
    for person_node in ET.fromstring(people_xml).findall('person'):
        p = Person(person_node)
        self.cache['people'][p.id] = p
    return self.cache['people']
Dictionary of people on the project, keyed by id
def get_methods(*objs):
    return set(
        attr
        for obj in objs
        for attr in dir(obj)
        if not attr.startswith('_') and callable(getattr(obj, attr))
    )
Return the names of all public callable attributes of the given objects.
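A quick sketch; the result is the union of public callables across all arguments:

    methods = get_methods([], {})
    assert 'append' in methods       # from the list
    assert 'keys' in methods         # from the dict
    assert '__len__' not in methods  # underscore-prefixed names are excluded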
def reference(cls, value):
    from sqlpuzzle._common.utils import force_text
    value = force_text(value)
    # Split on dots, but keep already-quoted parts intact.
    parts = re.split(r'{quote}([^{quote}]+){quote}|\.'.format(quote=cls.reference_quote), value)
    parts = ('{quote}{i}{quote}'.format(quote=cls.reference_quote, i=i) if i != '*' else i
             for i in parts if i)
    return '.'.join(parts)
Convert as reference on column.

table => "table"
table.column => "table"."column"
db.table.column => "db"."table"."column"
table."col.umn" => "table"."col.umn"
"table"."col.umn" => "table"."col.umn"
def connection_factory_absent(name, both=True, server=None):
    ret = {'name': name, 'result': None, 'comment': None, 'changes': {}}
    pool_name = '{0}-Connection-Pool'.format(name)
    pool_ret = _do_element_absent(pool_name, 'connector_c_pool',
                                  {'cascade': both}, server)

    if not pool_ret['error']:
        if __opts__['test'] and pool_ret['delete']:
            ret['comment'] = 'Connection Factory set to be deleted'
        elif pool_ret['delete']:
            ret['result'] = True
            ret['comment'] = 'Connection Factory deleted'
        else:
            ret['result'] = True
            ret['comment'] = 'Connection Factory doesn\'t exist'
    else:
        ret['result'] = False
        ret['comment'] = 'Error: {0}'.format(pool_ret['error'])
    return ret
Ensures the connection factory is absent.

name
    Name of the connection factory

both
    Delete both the pool and the resource, defaults to ``true``
def get_mailbox_stats(self):
    resp = self.request_single('GetMailboxStats')
    ret = {}
    for k, v in resp.items():
        ret[k] = int(v)
    return ret
Get global stats about mailboxes

Parses <stats numMboxes="6" totalSize="141077"/>

:returns: dict with stats