def order(self, order):
    order = order if isinstance(order, Order) else Order(order)
    order.object_getattr = self.object_getattr
    self.orders.append(order)
    return self
Adds an Order to this query. Args: see :py:class:`Order <datastore.query.Order>` constructor Returns self for JS-like method chaining:: query.order('+age').order('-home')
def getAggregator(cls, instanceId, name):
    parent = cls.parentMap.get(instanceId)
    while parent:
        stat = cls.getStat(parent, name)
        if stat:
            return stat, parent
        parent = cls.parentMap.get(statsId(parent))
Gets the aggregate stat for the given stat.
def accept_quality(accept, default=1):
    quality = default
    if accept and ";" in accept:
        accept, rest = accept.split(";", 1)
        accept_quality = RE_ACCEPT_QUALITY.search(rest)
        if accept_quality:
            quality = float(accept_quality.groupdict().get('quality', quality).strip())
    return (quality, accept.strip())
Separates out the quality score from the accepted content_type
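For illustration, given a header fragment like "text/html;q=0.8" (and assuming RE_ACCEPT_QUALITY captures the q-value into a 'quality' group):

    >>> accept_quality("text/html;q=0.8")
    (0.8, 'text/html')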
async def delay(source, delay):
    await asyncio.sleep(delay)
    async with streamcontext(source) as streamer:
        async for item in streamer:
            yield item
Delay the iteration of an asynchronous sequence.
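A minimal usage sketch, assuming `source` is an existing asynchronous iterable:

    async def main():
        # Items are yielded only after the initial 1.5 second delay.
        async for item in delay(source, 1.5):
            print(item)

    asyncio.run(main())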
def _transfers(reaction, delta, elements, result, epsilon):
    left = set(c for c, _ in reaction.left)
    right = set(c for c, _ in reaction.right)
    for c1, c2 in product(left, right):
        items = {}
        for e in elements:
            v = result.get_value(delta[c1, c2, e])
            nearest_int = round(v)
            if abs(v - nearest_int) < epsilon:
                v = int(nearest_int)
            if v >= epsilon:
                items[e] = v
        if len(items) > 0:
            yield (c1, c2), Formula(items)
Yield transfers obtained from result.
def equal_length(*args):
    for i, arg in enumerate(args):
        if not isinstance(arg, Sized):
            raise ExpectedTypeError(arg, ["Sized"])
        if i >= 1 and len(arg) != len(args[0]):
            raise DifferentLengthError(args, arg)
Check that arguments have the same length.
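For illustration, assuming the module's ExpectedTypeError and DifferentLengthError exceptions:

    >>> equal_length([1, 2], (3, 4))   # same length: passes silently
    >>> equal_length([1, 2], [3])      # raises DifferentLengthError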
def load_config(config, expand_env=False, force=False):
    if not os.path.exists(config):
        raise ConfigException('Unable to find configuration file: %s' % config)
    file_extension = os.path.splitext(config)[1][1:]
    conf = kaptan.Kaptan(handler=kaptan.HANDLER_EXT.get(file_extension))
    if expand_env:
        with open(config, 'r') as file_handler:
            config = Template(file_handler.read())
            config = config.substitute(os.environ)
    conf.import_config(config)
    return get_repos(conf.export('dict') or {}, force)
Return repos from a directory and fnmatch. Not recursive.

:param config: path to config file
:type config: str
:param expand_env: True to expand environment variables in the config.
:type expand_env: bool
:param bool force: True to aggregate even if repo is dirty.
:returns: expanded config dict item
:rtype: iter(dict)
def module_can_run_parallel(test_module: unittest.TestSuite) -> bool:
    for test_class in test_module:
        if hasattr(unittest.loader, '_FailedTest'):
            if isinstance(test_class, unittest.loader._FailedTest):
                continue
        if not isinstance(test_class, collections.Iterable):
            raise TestClassNotIterable()
        for test_case in test_class:
            return not getattr(sys.modules[test_case.__module__], "__no_parallel__", False)
Checks if a given module of tests can be run in parallel or not

:param test_module: the module to run
:return: True if the module can be run in parallel, False otherwise
def _record_call(func):
    @wraps(func)
    def wrapper(*args, **kwargs):
        global global_error_context
        if global_error_context is not None:
            key = CallLogKey(
                name=func.__name__,
                args=[serialize_object_for_logging(arg) for arg in args],
                kwargs={k: serialize_object_for_logging(v) for k, v in kwargs.items()})
            pre_entry = CallLogValue(
                timestamp_in=datetime.utcnow(),
                timestamp_out=None,
                return_value=None)
            global_error_context.log[key] = pre_entry
        val = func(*args, **kwargs)
        if global_error_context is not None:
            post_entry = CallLogValue(
                timestamp_in=pre_entry.timestamp_in,
                timestamp_out=datetime.utcnow(),
                return_value=serialize_object_for_logging(val))
            global_error_context.log[key] = post_entry
        return val
    return wrapper
A decorator that logs a call into the global error context. This is probably for internal use only.
def clean():
    os.chdir(os.path.join(project_root, 'docs'))
    sh("make clean")
    os.chdir(project_root)
    sh("rm -rf pyoauth2.egg-info")
Clean up previous garbage
def blocks(self):
    for block_addr, block in self._local_blocks.items():
        try:
            yield self._get_block(
                block_addr,
                size=block.size,
                byte_string=block.bytestr if isinstance(block, BlockNode) else None)
        except (SimEngineError, SimMemoryError):
            pass
An iterator of all local blocks in the current function. :return: angr.lifter.Block instances.
def dump_dict(cfg, f, indent=0):
    for key in cfg:
        if not isstr(key):
            raise ConfigSerializeError("Dict keys must be strings: %r" % (key,))
        dump_value(key, cfg[key], f, indent)
        f.write(u';\n')
Save a dictionary of attributes
def get_parameters(params=None, path='', grad_only=True):
    global current_scope
    if params is None:
        params = OrderedDict()
    for k, v in iteritems(current_scope):
        if isinstance(v, dict):
            with parameter_scope(k):
                params = get_parameters(
                    params,
                    '/'.join([path, k]) if path else k,
                    grad_only=grad_only)
        else:
            assert isinstance(v, nn.Variable)
            if not grad_only or v.need_grad:
                params['/'.join([path, k]) if path else k] = v
    return params
Get parameter Variables under the current parameter scope.

Args:
    params (dict): Internal use. User doesn't set it manually.
    path (str): Internal use. User doesn't set it manually.
    grad_only (bool): Retrieve all parameters under the current scope if False, while only parameters with need_grad=True are retrieved if True.

Returns:
    dict: {:obj:`str` : :obj:`~nnabla.Variable`}
def export(self):
    top = self._top_element()
    properties = self._properties_element(top)
    self._fill_requirements(top)
    self._fill_lookup_prop(properties)
    return utils.prettify_xml(top)
Returns requirements XML.
def create_pull(self, title, base, head, body=None):
    data = {'title': title, 'body': body, 'base': base, 'head': head}
    return self._create_pull(data)
Create a pull request of ``head`` onto ``base`` branch in this repo. :param str title: (required) :param str base: (required), e.g., 'master' :param str head: (required), e.g., 'username:branch' :param str body: (optional), markdown formatted description :returns: :class:`PullRequest <github3.pulls.PullRequest>` if successful, else None
def plot(self):
    import pylab as p
    p.clf()
    fig = p.figure(1)
    nspw = len(self.gain[0])
    ext = n.ceil(n.sqrt(nspw))
    for spw in range(len(self.gain[0])):
        ax = fig.add_subplot(ext, ext, spw + 1)
        for pol in [0, 1]:
            ax.scatter(range(len(self.gain)),
                       n.abs(self.gain.data[:, spw, pol]),
                       color=n.array(['k', 'y']).take(self.gain.mask[:, spw, pol]),
                       marker=['x', '.'][pol])
    fig.show()
Quick visualization of calibration solution.
def cyan(cls):
    "Make the text foreground color cyan."
    wAttributes = cls._get_text_attributes()
    wAttributes &= ~win32.FOREGROUND_MASK
    wAttributes |= win32.FOREGROUND_CYAN
    cls._set_text_attributes(wAttributes)
Make the text foreground color cyan.
def _load_custom(self, settings_name='localsettings.py'):
    if settings_name[-3:] == '.py':
        settings_name = settings_name[:-3]
    new_settings = {}
    try:
        settings = importlib.import_module(settings_name)
        new_settings = self._convert_to_dict(settings)
    except ImportError:
        log.info("No override settings found")
    for key in new_settings:
        if key in self.my_settings:
            item = new_settings[key]
            if isinstance(item, dict) and \
                    isinstance(self.my_settings[key], dict):
                for key2 in item:
                    self.my_settings[key][key2] = item[key2]
            else:
                self.my_settings[key] = item
        else:
            self.my_settings[key] = new_settings[key]
Load the user defined settings, overriding the defaults
def hypergeometric_like(x, n, m, N):
    return flib.hyperg(x, n, m, N)
R""" Hypergeometric log-likelihood. Discrete probability distribution that describes the number of successes in a sequence of draws from a finite population without replacement. .. math:: f(x \mid n, m, N) = \frac{\left({ \begin{array}{c} {m} \\ {x} \\ \end{array} }\right)\left({ \begin{array}{c} {N-m} \\ {n-x} \\ \end{array}}\right)}{\left({ \begin{array}{c} {N} \\ {n} \\ \end{array}}\right)} :Parameters: - `x` : [int] Number of successes in a sample drawn from a population. - `n` : [int] Size of sample drawn from the population. - `m` : [int] Number of successes in the population. - `N` : [int] Total number of units in the population. .. note:: :math:`E(X) = \frac{n n}{N}`
def convert_camel_case_string(name: str) -> str:
    string = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
    return re.sub("([a-z0-9])([A-Z])", r"\1_\2", string).lower()
Convert camel case string to snake case
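For illustration:

    >>> convert_camel_case_string('CamelCaseString')
    'camel_case_string'
    >>> convert_camel_case_string('HTTPResponse')
    'http_response'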
def find_all_declarations(
        declarations,
        decl_type=None,
        name=None,
        parent=None,
        recursive=True,
        fullname=None):
    if recursive:
        decls = make_flatten(declarations)
    else:
        decls = declarations
    return list(
        filter(
            algorithm.match_declaration_t(
                decl_type=decl_type,
                name=name,
                fullname=fullname,
                parent=parent),
            decls))
Returns a list of all declarations that match criteria, defined by developer. For more information about arguments see :class:`match_declaration_t` class. :rtype: [ matched declarations ]
def cmd_connection_type(self):
    https = 0
    non_https = 0
    for line in self._valid_lines:
        if line.is_https():
            https += 1
        else:
            non_https += 1
    return https, non_https
Generates statistics on how many requests are made via HTTP and how many are made via SSL. .. note:: This only works if the request path contains the default port for SSL (443). .. warning:: The ports are hardcoded, they should be configurable.
def is_alive(self):
    running = False
    if not self.instance_id:
        return False
    try:
        log.debug("Getting information for instance %s", self.instance_id)
        running = self._cloud_provider.is_instance_running(self.instance_id)
    except Exception as ex:
        log.debug("Ignoring error while looking for vm id %s: %s",
                  self.instance_id, str(ex))
    if running:
        log.debug("node `%s` (instance id %s) is up and running",
                  self.name, self.instance_id)
        self.update_ips()
    else:
        log.debug("node `%s` (instance id `%s`) still building...",
                  self.name, self.instance_id)
    return running
Checks if the current node is up and running in the cloud. It only checks the status provided by the cloud interface. Therefore a node might be running, but not yet ready to ssh into it.
def build_idx_set(branch_id, start_date):
    code_set = branch_id.split("/")
    code_set.insert(3, "Rates")
    idx_set = {
        "sec": "/".join([code_set[0], code_set[1], "Sections"]),
        "mag": "/".join([code_set[0], code_set[1], code_set[2], "Magnitude"])}
    idx_set["rate"] = "/".join(code_set)
    idx_set["rake"] = "/".join([code_set[0], code_set[1], "Rake"])
    idx_set["msr"] = "-".join(code_set[:3])
    idx_set["geol"] = code_set[0]
    if start_date:
        idx_set["grid_key"] = "_".join(
            branch_id.replace("/", "_").split("_")[:-1])
    else:
        idx_set["grid_key"] = branch_id.replace("/", "_")
    idx_set["total_key"] = branch_id.replace("/", "|")
    return idx_set
Builds a dictionary of keys based on the branch code
def split_thousands(s):
    if s is None:
        return "0"
    if isinstance(s, basestring):
        s = float(s)
    if isinstance(s, float) and s.is_integer():
        s = int(s)
    result = "{:,}".format(s)
    result = result.replace(',', "'")
    return result
Splits a number on thousands. >>> split_thousands(1000012) "1'000'012"
def _connect_nntp(self, nntpserver):
    tries = 0
    nntp = None
    while tries < 2:
        tries += 1
        try:
            nntp = nntplib.NNTP(nntpserver, usenetrc=False)
        except nntplib.NNTPTemporaryError:
            self.wait()
        except nntplib.NNTPPermanentError as msg:
            if re.compile("^50[45]").search(str(msg)):
                self.wait()
            else:
                raise
    if nntp is None:
        raise LinkCheckerError(
            _("NNTP server too busy; tried more than %d times.") % tries)
    if log.is_debug(LOG_CHECK):
        nntp.set_debuglevel(1)
    self.add_info(nntp.getwelcome())
    return nntp
This is done only once per checking task. Also, the newly introduced error codes 504 and 505 (both meaning "Too busy, retry later") are caught.
def create_chunked_body_end(trailers=None):
    chunk = []
    chunk.append('0\r\n')
    if trailers:
        for name, value in trailers:
            chunk.append(name)
            chunk.append(': ')
            chunk.append(value)
            chunk.append('\r\n')
    chunk.append('\r\n')
    return s2b(''.join(chunk))
Create the ending that terminates a chunked body.
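For illustration (assuming s2b simply encodes the string to bytes):

    >>> create_chunked_body_end()
    b'0\r\n\r\n'
    >>> create_chunked_body_end([('X-Checksum', 'abc')])
    b'0\r\nX-Checksum: abc\r\n\r\n'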
def pipe_yql(context=None, _INPUT=None, conf=None, **kwargs):
    url = "http://query.yahooapis.com/v1/public/yql"
    conf = DotDict(conf)
    query = conf['yqlquery']
    for item in _INPUT:
        item = DotDict(item)
        yql = utils.get_value(query, item, **kwargs)
        r = requests.get(url, params={'q': yql}, stream=True)
        tree = parse(r.raw)
        if context and context.verbose:
            print "pipe_yql loading xml:", yql
        root = tree.getroot()
        results = root.find('results')
        for element in results.getchildren():
            yield utils.etree_to_dict(element)
        if item.get('forever'):
            break
A source that issues YQL queries. Loopable.

Parameters
----------
context : pipe2py.Context object
_INPUT : pipeforever pipe or an iterable of items or fields
conf : yqlquery -- YQL query
    # todo: handle envURL

Yields
------
_OUTPUT : query results
def GetMemActiveMB(self):
    counter = c_uint()
    ret = vmGuestLib.VMGuestLib_GetMemActiveMB(self.handle.value, byref(counter))
    if ret != VMGUESTLIB_ERROR_SUCCESS:
        raise VMGuestLibException(ret)
    return counter.value
Retrieves the amount of memory the virtual machine is actively using, i.e. its estimated working set size.
def extensions():
    import numpy
    from Cython.Build import cythonize
    ext = [
        Extension('phydmslib.numutils', ['phydmslib/numutils.pyx'],
                  include_dirs=[numpy.get_include()],
                  extra_compile_args=['-Wno-unused-function']),
    ]
    return cythonize(ext)
Returns list of `cython` extensions for `lazy_cythonize`.
def get_confirmations_per_page(self, per_page=1000, page=1, params=None):
    return self._get_resource_per_page(
        resource=CONFIRMATIONS, per_page=per_page, page=page, params=params)
Get confirmations per page :param per_page: How many objects per page. Default: 1000 :param page: Which page. Default: 1 :param params: Search parameters. Default: {} :return: list
def close(self):
    if self.connected:
        obj = [self.proto.max_id, [[2], self.proto.send_count]]
        ARBITRATOR.send_sync_message(self.proto, f"4{to_json(obj)}")
        self.proto.send_count += 1
    ARBITRATOR.close(self.proto)
    self.listeners.clear()
    self.proto.connected = False
    super().close()
    del self.room
    del self.proto
Closes connection pair
def onChange(self, min_changed_pixels=None, handler=None):
    if isinstance(min_changed_pixels, int) and (callable(handler) or handler is None):
        return self._observer.register_event(
            "CHANGE",
            pattern=(min_changed_pixels, self.getBitmap()),
            handler=handler)
    elif (callable(min_changed_pixels) or min_changed_pixels is None) and \
            (callable(handler) or handler is None):
        handler = min_changed_pixels or handler
        return self._observer.register_event(
            "CHANGE",
            pattern=(Settings.ObserveMinChangedPixels, self.getBitmap()),
            handler=handler)
    else:
        raise ValueError("Unsupported arguments for onChange method")
Registers an event to call ``handler`` when at least ``min_changed_pixels`` change in this region. (Default for min_changed_pixels is set in Settings.ObserveMinChangedPixels) The ``handler`` function should take one parameter, an ObserveEvent object (see below). This event is ignored in the future unless the handler calls the repeat() method on the provided ObserveEvent object. Returns the event's ID as a string.
def reset_logging_framework():
    logging._lock = threading.RLock()
    for name in [None] + list(logging.Logger.manager.loggerDict):
        for handler in logging.getLogger(name).handlers:
            handler.createLock()
    root = logging.getLogger()
    root.handlers = [
        handler for handler in root.handlers
        if not isinstance(handler, mitogen.core.LogHandler)
    ]
After fork, ensure any logging.Handler locks are recreated, as a variety of threads in the parent may have been using the logging package at the moment of fork. It is not possible to solve this problem in general; see https://github.com/dw/mitogen/issues/150 for a full discussion.
def required_from_env(key):
    val = os.environ.get(key)
    if not val:
        raise ValueError(
            "Required argument '{}' not supplied and not found in "
            "environment variables".format(key))
    return val
Retrieve a required variable from the current environment variables. Raises a ValueError if the env variable is not found or has no value.
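A minimal usage sketch:

    >>> os.environ['API_TOKEN'] = 'secret'
    >>> required_from_env('API_TOKEN')
    'secret'
    >>> required_from_env('MISSING_KEY')   # raises ValueError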
def toggle_template_selector(self):
    if self.search_directory_radio.isChecked():
        self.template_combo.setEnabled(True)
    else:
        self.template_combo.setEnabled(False)
    if self.search_on_disk_radio.isChecked():
        self.template_path.setEnabled(True)
        self.template_chooser.setEnabled(True)
    else:
        self.template_path.setEnabled(False)
        self.template_chooser.setEnabled(False)
Slot for template selector elements behaviour.

.. versionadded:: 4.3.0
def merge_lists(l, base):
    for i in base:
        if i not in l:
            l.append(i)
Merge in undefined list entries from given list.

@param l: List to be merged into.
@type l: list
@param base: List to merge entries from.
@type base: list
def _list_fields(self):
    response = self.__proxy__.list_fields()
    return [s for s in response['value'] if not s.startswith("_")]
Get the current settings of the model. The keys depend on the type of model. Returns ------- out : list A list of fields that can be queried using the ``get`` method.
def _build_url(self, host, handler):
    scheme = 'https' if self.use_https else 'http'
    return '%s://%s/%s' % (scheme, host, handler)
Build a url for our request based on the host, handler and use_https property
def getctime(self, path):
    try:
        file_obj = self.filesystem.resolve(path)
    except IOError:
        self.filesystem.raise_os_error(errno.ENOENT)
    return file_obj.st_ctime
Returns the creation time of the fake file. Args: path: the path to fake file. Returns: (int, float) the creation time of the fake file in number of seconds since the epoch. Raises: OSError: if the file does not exist.
def get_asset_url(self, path):
    url = self.root_url + '/assets/' + path
    if path in self.asset_hash:
        url += '?' + self.asset_hash[path]
    return url
Get the URL of an asset. If asset hashes are added and one exists for the path, it will be appended as a query string. Args: path (str): Path to the file, relative to your "assets" directory.
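For illustration, with a hypothetical instance whose root_url is 'https://example.com' and whose asset_hash maps 'css/site.css' to 'abc123':

    >>> app.get_asset_url('css/site.css')
    'https://example.com/assets/css/site.css?abc123'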
def create(self, path, value='', acl=None, ephemeral=False,
           sequence=False, makepath=False):
    _log.debug("ZK: Creating node " + path)
    return self.zk.create(path, value, acl, ephemeral, sequence, makepath)
Creates a Zookeeper node.

:param: path: The zookeeper node path
:param: value: Zookeeper node value
:param: acl: ACL list
:param: ephemeral: Boolean indicating whether this node is tied to this session.
:param: sequence: Boolean indicating whether path is suffixed with a unique index.
:param: makepath: Whether the path should be created if it doesn't exist.
def upgrade_all(self):
    for pkg in self.installed_package_names:
        self.install(pkg, upgrade=True)
Upgrades all installed packages to their latest versions.
def disable_key(key_id, region=None, key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    r = {}
    try:
        key = conn.disable_key(key_id)
        r['result'] = True
    except boto.exception.BotoServerError as e:
        r['result'] = False
        r['error'] = __utils__['boto.get_error'](e)
    return r
Mark key as disabled. CLI example:: salt myminion boto_kms.disable_key 'alias/mykey'
def handle_namespace_invalid(self, line: str, position: int,
                             tokens: ParseResults) -> None:
    name = tokens[NAME]
    raise NakedNameWarning(self.get_line_number(), line, position, name)
Raise an exception when parsing a name missing a namespace.
def softwareUpdateAvailable(self, timeout=1):
    namespace = System.getServiceType("softwareUpdateAvailable")
    uri = self.getControlURL(namespace)
    results = self.execute(uri, namespace, "GetInfo", timeout=timeout)
    return bool(int(results["NewUpgradeAvailable"]))
Returns whether a software update is available

:return: whether a software update is available
:rtype: bool
def entropy_variance(data, class_attr=None, method=DEFAULT_CONTINUOUS_METRIC):
    assert method in CONTINUOUS_METRICS, \
        "Unknown entropy variance metric: %s" % (method,)
    assert (class_attr is None and isinstance(data, dict)) \
        or (class_attr is not None and isinstance(data, list))
    if isinstance(data, dict):
        lst = data
    else:
        lst = [record.get(class_attr) for record in data]
    return get_variance(lst)
Calculates the variance of a continuous class attribute, to be used as an entropy metric.
def participation_policy_changed(ob, event):
    workspace = IWorkspace(ob)
    old_group_name = workspace.group_for_policy(event.old_policy)
    old_group = api.group.get(old_group_name)
    for member in old_group.getAllGroupMembers():
        groups = workspace.get(member.getId()).groups
        groups -= set([event.old_policy.title()])
        groups.add(event.new_policy.title())
Move all the existing users to a new group
def parse_yaml(self, y):
    # Parameter renamed to `y` to match its uses below; the flattened
    # original declared `node` but referenced `y` throughout.
    self.group_id = y['groupId']
    self._members = []
    if 'members' in y:
        for m in y.get('members'):
            self._members.append(TargetComponent().parse_yaml(m))
    return self
Parse a YAML specification of a component group into this object.
def write(self, pos, size, **kwargs):
    if type(pos) is str:
        raise TypeError("SimFileDescriptor.write takes an address and size. "
                        "Did you mean write_data?")
    if self.state.solver.symbolic(size):
        try:
            passed_max_size = self.state.solver.max(
                size,
                extra_constraints=(size < self.state.libc.max_packet_size,))
        except SimSolverError:
            passed_max_size = self.state.solver.min(size)
            l.warning("Symbolic write size is too large for threshold - "
                      "concretizing to min (%d)", passed_max_size)
            self.state.solver.add(size == passed_max_size)
    else:
        passed_max_size = self.state.solver.eval(size)
        if passed_max_size > 2**13:
            l.warning("Program performing extremely large write")
    data = self.state.memory.load(pos, passed_max_size)
    return self.write_data(data, size, **kwargs)
Writes some data, loaded from the state, into the file.

:param pos: The memory address from which the data to write is loaded
:param size: The requested size of the write
:return: The real length of the write
def branches_containing(commit):
    lines = run('branch --contains %s' % commit).splitlines()
    return [l.lstrip('* ') for l in lines]
Return a list of branches containing that commit
def storage_get(self, key):
    if not self._module:
        return
    self._storage_init()
    module_name = self._module.module_full_name
    return self._storage.storage_get(module_name, key)
Retrieve a value for the module.
def execute(self, transition):
    self._transitions.append(transition)
    if self._thread is None or not self._thread.isAlive():
        self._thread = threading.Thread(target=self._transition_loop)
        self._thread.setDaemon(True)
        self._thread.start()
Queue a transition for execution. :param transition: The transition
def read_file_to_string(path):
    bytes_string = tf.gfile.Open(path, 'r').read()
    return dlutils.python_portable_string(bytes_string)
Read a file into a string.
def _request_internal(self, command, **kwargs):
    args = dict(kwargs)
    if self.ssid:
        args['ssid'] = self.ssid
    method = getattr(self.api, command)
    response = method(**args)
    if response and 'status' in response:
        if response['status'] == 'error':
            raise SubregError(
                message=response['error']['errormsg'],
                major=response['error']['errorcode']['major'],
                minor=response['error']['errorcode']['minor']
            )
        if response['status'] == 'ok':
            return response['data'] if 'data' in response else dict()
        raise Exception("Invalid status found in SOAP response")
    raise Exception('Invalid response')
Make a request and parse the response
def _check_signal(self, s):
    s = np.asanyarray(s)
    if s.shape[0] != self.n_vertices:
        raise ValueError('First dimension must be the number of vertices '
                         'G.N = {}, got {}.'.format(self.N, s.shape))
    return s
r"""Check if signal is valid.
def mouseMoveEvent(self, event):
    c = self.cursorForPosition(event.pos())
    block = c.block()
    self._link_match = None
    self.viewport().setCursor(QtCore.Qt.IBeamCursor)
    for match in self.link_regex.finditer(block.text()):
        if not match:
            continue
        start, end = match.span()
        if start <= c.positionInBlock() <= end:
            self._link_match = match
            self.viewport().setCursor(QtCore.Qt.PointingHandCursor)
            break
    self._last_hovered_block = block
    super(OutputWindow, self).mouseMoveEvent(event)
Handle mouse over file link.
def pages(self):
    if self.per_page == 0 or self.total is None:
        pages = 0
    else:
        pages = int(ceil(self.total / float(self.per_page)))
    return pages
The total number of pages
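For illustration, a hypothetical paginator holding total=103 and per_page=25 reports ceil(103 / 25.0) = 5 pages, while per_page=0 or total=None yields 0.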
def replant_tree(self, config=None, exclude=None):
    self.__init__(key=self.key, config=config, update=True, exclude=exclude)
Replant the tree with a different config setup Parameters: config (str): The config name to reload exclude (list): A list of environment variables to exclude from forced updates
def process_bulk_queue(self, es_bulk_kwargs=None):
    with current_celery_app.pool.acquire(block=True) as conn:
        consumer = Consumer(
            connection=conn,
            queue=self.mq_queue.name,
            exchange=self.mq_exchange.name,
            routing_key=self.mq_routing_key,
        )
        req_timeout = current_app.config['INDEXER_BULK_REQUEST_TIMEOUT']
        es_bulk_kwargs = es_bulk_kwargs or {}
        count = bulk(
            self.client,
            self._actionsiter(consumer.iterqueue()),
            stats_only=True,
            request_timeout=req_timeout,
            **es_bulk_kwargs
        )
        consumer.close()
        return count
Process bulk indexing queue. :param dict es_bulk_kwargs: Passed to :func:`elasticsearch:elasticsearch.helpers.bulk`.
def _get_file_md5(filename):
    md5_data = md5()
    with open(filename, 'rb') as f:
        for chunk in iter(lambda: f.read(128 * md5_data.block_size), b''):
            md5_data.update(chunk)
    return md5_data.hexdigest()
Compute the md5 checksum of a file
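The iter(callable, sentinel) idiom above reads the file in bounded chunks, so arbitrarily large files never have to fit in memory. An equivalent explicit loop, as a minimal self-contained sketch:

    from hashlib import md5

    def file_md5_simple(filename, chunk_size=8192):
        digest = md5()
        with open(filename, 'rb') as f:
            while True:
                chunk = f.read(chunk_size)
                if not chunk:  # b'' signals end of file
                    break
                digest.update(chunk)
        return digest.hexdigest()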
def _os_x_font_directories(cls):
    os_x_font_dirs = [
        '/Library/Fonts',
        '/Network/Library/Fonts',
        '/System/Library/Fonts',
    ]
    home = os.environ.get('HOME')
    if home is not None:
        os_x_font_dirs.extend([
            os.path.join(home, 'Library', 'Fonts'),
            os.path.join(home, '.fonts')
        ])
    return os_x_font_dirs
Return a sequence of directory paths on a Mac in which fonts are likely to be located.
def directories(self):
    dirlist_p = new_gp_object("CameraList")
    lib.gp_camera_folder_list_folders(self._cam._cam, self.path.encode(),
                                      dirlist_p, self._cam._ctx)
    for idx in range(lib.gp_list_count(dirlist_p)):
        name = os.path.join(
            self.path, get_string(lib.gp_list_get_name, dirlist_p, idx))
        yield Directory(name=name, parent=self, camera=self._cam)
    lib.gp_list_free(dirlist_p)
Get a generator that yields all subdirectories in the directory.
def pattern_input(self, question, message='Invalid entry',
                  pattern='^[a-zA-Z0-9_ ]+$', default='', required=True):
    result = ''
    requiredFlag = True
    while not result and requiredFlag:
        result = input('%s: ' % question)
        if result and pattern and not re.match(pattern, result):
            self.stdout.write(self.style.ERROR(message))
            result = ''
        elif not result and default:
            return default
        elif not result and required:
            self.stdout.write(self.style.ERROR('Answer is required.'))
        elif not required:
            requiredFlag = False
    return result
Method for input disallowing special characters, with optionally specifiable regex pattern and error message.
def _emit_table_tag(self, open_open_markup, tag, style, padding,
                    close_open_markup, contents, open_close_markup):
    self._emit(tokens.TagOpenOpen(wiki_markup=open_open_markup))
    self._emit_text(tag)
    if style:
        self._emit_all(style)
    if close_open_markup:
        self._emit(tokens.TagCloseOpen(wiki_markup=close_open_markup,
                                       padding=padding))
    else:
        self._emit(tokens.TagCloseOpen(padding=padding))
    if contents:
        self._emit_all(contents)
    self._emit(tokens.TagOpenClose(wiki_markup=open_close_markup))
    self._emit_text(tag)
    self._emit(tokens.TagCloseClose())
Emit a table tag.
def reverse_timezone(self, query, at_time=None, timeout=DEFAULT_SENTINEL):
    ensure_pytz_is_installed()
    location = self._coerce_point_to_string(query)
    timestamp = self._normalize_timezone_at_time(at_time)
    params = {
        "location": location,
        "timestamp": timestamp,
    }
    if self.api_key:
        params['key'] = self.api_key
    url = "?".join((self.tz_api, urlencode(params)))
    logger.debug("%s.reverse_timezone: %s", self.__class__.__name__, url)
    return self._parse_json_timezone(
        self._call_geocoder(url, timeout=timeout)
    )
Find the timezone a point in `query` was in for a specified `at_time`.

.. versionadded:: 1.18.0

.. versionchanged:: 1.18.1
    Previously a :class:`KeyError` was raised for a point without an assigned Olson timezone id (e.g. for Antarctica). Now this method returns None for such requests.

:param query: The coordinates for which you want a timezone.
:type query: :class:`geopy.point.Point`, list or tuple of (latitude, longitude), or string as "%(latitude)s, %(longitude)s"

:param at_time: The time at which you want the timezone of this location. This is optional, and defaults to the time that the function is called in UTC. Timezone-aware datetimes are correctly handled and naive datetimes are silently treated as UTC.
:type at_time: :class:`datetime.datetime` or None

:param int timeout: Time, in seconds, to wait for the geocoding service to respond before raising a :class:`geopy.exc.GeocoderTimedOut` exception. Set this only if you wish to override, on this call only, the value set during the geocoder's initialization.

:rtype: ``None`` or :class:`geopy.timezone.Timezone`
def percentage_progress(self):
    if self.total_progress != 0:
        return float(self.progress) / self.total_progress
    else:
        return self.progress
Returns a float between 0 and 1, representing the current job's progress in its task. If total_progress is not given or 0, just return self.progress. :return: float corresponding to the total percentage progress of the job.
def ycbcr2rgb(y__, cb_, cr_):
    kb_ = 0.114
    kr_ = 0.299
    r__ = 2 * cr_ / (1 - kr_) + y__
    b__ = 2 * cb_ / (1 - kb_) + y__
    g__ = (y__ - kr_ * r__ - kb_ * b__) / (1 - kr_ - kb_)
    return r__, g__, b__
Convert the three YCbCr channels to RGB channels.
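For reference, kb_ and kr_ above are the ITU-R BT.601 chroma weights, under which luma is defined as Y = Kr*R + (1 - Kr - Kb)*G + Kb*B with Kr = 0.299 and Kb = 0.114; the function inverts that definition together with the chroma scaling used by the matching forward transform (an assumption about the library's rgb2ycbcr counterpart) to recover R, G and B.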
def canRender(filename):
    name, ext = os.path.splitext(filename)
    ext = ext.lstrip('.').lower()
    if ext in ImageRenderer._extensions:
        return 100
    else:
        return False
Check extensions.
def frequencies_iter(self):
    f = self.__matrix.mean(axis=0)
    for i, m in self.mappings.iteritems():
        yield m, f[i]
Iterates over all non-zero frequencies of logical conjunction mappings in this list Yields ------ tuple[caspo.core.mapping.Mapping, float] The next pair (mapping,frequency)
def _unpack_episode(element: ET.Element):
    return Episode(
        epno=element.find('epno').text,
        type=int(element.find('epno').get('type')),
        length=int(element.find('length').text),
        titles=tuple(_unpack_episode_title(title)
                     for title in element.iterfind('title')),
    )
Unpack Episode from episode XML element.
def participants(self, **kwargs):
    path = '%s/%s/participants' % (self.manager.path, self.get_id())
    return self.manager.gitlab.http_get(path, **kwargs)
List the participants. Args: all (bool): If True, return all the items, without pagination per_page (int): Number of items to retrieve per request page (int): ID of the page to return (starts with page 1) as_list (bool): If set to False and no pagination option is defined, return a generator instead of a list **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabListError: If the list could not be retrieved Returns: RESTObjectList: The list of participants
def parse_resources(self, resources):
    self.resources = {}
    resource_factory = ResourceFactory()
    for res_id, res_value in resources.items():
        r = resource_factory.create_resource(res_id, res_value)
        if r:
            if r.resource_type in self.resources:
                self.resources[r.resource_type].append(r)
            else:
                self.resources[r.resource_type] = [r]
Parses and sets resources in the model using a factory.
def will_not_clone(self, request, *args, **kwargs):
    paths = request.path_info.split('/')
    index_of_object_id = paths.index("will_not_clone") - 1
    object_id = paths[index_of_object_id]
    self.change_view(request, object_id)
    admin_wordInUrl = index_of_object_id - 3
    path = '/' + '/'.join(paths[admin_wordInUrl:index_of_object_id])
    return HttpResponseRedirect(path)
Add save but not clone capability in the changeview
def _start_new_warc_file(self, meta=False):
    if self._params.max_size and not meta and self._params.appending:
        while True:
            self._warc_filename = self._generate_warc_filename()
            if os.path.exists(self._warc_filename):
                _logger.debug('Skip {0}', self._warc_filename)
                self._sequence_num += 1
            else:
                break
    else:
        self._warc_filename = self._generate_warc_filename(meta=meta)
    _logger.debug('WARC file at {0}', self._warc_filename)
    if not self._params.appending:
        wpull.util.truncate_file(self._warc_filename)
    self._warcinfo_record = WARCRecord()
    self._populate_warcinfo(self._params.extra_fields)
    self.write_record(self._warcinfo_record)
Create and set as current WARC file.
def scandir(self, relpath):
    if self.isignored(relpath, directory=True):
        self._raise_access_ignored(relpath)
    return self._filter_ignored(self._scandir_raw(relpath),
                                selector=lambda e: e.path)
Return paths relative to the root, which are in the given directory and not ignored.
def _get_mixed_actions(tableaux, bases):
    nums_actions = tableaux[1].shape[0], tableaux[0].shape[0]
    num = nums_actions[0] + nums_actions[1]
    out = np.zeros(num)
    for pl, (start, stop) in enumerate(zip((0, nums_actions[0]),
                                           (nums_actions[0], num))):
        sum_ = 0.
        for i in range(nums_actions[1 - pl]):
            k = bases[pl][i]
            if start <= k < stop:
                out[k] = tableaux[pl][i, -1]
                sum_ += tableaux[pl][i, -1]
        if sum_ != 0:
            out[start:stop] /= sum_
    return out[:nums_actions[0]], out[nums_actions[0]:]
From `tableaux` and `bases`, extract non-slack basic variables and return a tuple of the corresponding, normalized mixed actions.

Parameters
----------
tableaux : tuple(ndarray(float, ndim=2))
    Tuple of two arrays containing the tableaux, of shape (n, m+n+1) and (m, m+n+1), respectively.
bases : tuple(ndarray(int, ndim=1))
    Tuple of two arrays containing the bases, of shape (n,) and (m,), respectively.

Returns
-------
tuple(ndarray(float, ndim=1))
    Tuple of mixed actions as given by the non-slack basic variables in the tableaux.
def execute_command_in_process(command, shell=False, cwd=None, logger=None):
    if logger is None:
        logger = _logger
    logger.debug("Run shell command: {0}".format(command))
    try:
        subprocess.Popen(command, shell=shell, cwd=cwd)
        return True
    except OSError as e:
        logger.error('The operating system raised an error: {}'.format(e))
    return False
Executes a specific command in a separate process

:param command: the command to be executed
:param bool shell: Whether to use a shell
:param str cwd: The working directory of the command
:param logger: optional logger instance which can be handed from other module
:return: True if the process was launched, False if the operating system raised an error
def restriction(lam, mu, orbitals, U, beta):
    return 2 * orbitals * fermi_dist(-(mu + lam), beta) - \
        expected_filling(-1 * lam, orbitals, U, beta)
Equation that determines the restriction on the Lagrange multiplier
def this_year(self):
    start_date, end_date = get_date_range_this_year()
    return self.filter(date__gte=start_date, date__lte=end_date)
Get EighthBlocks from this school year only.
def on_arc_right(self, speed, radius_mm, distance_mm, brake=True, block=True):
    self._on_arc(speed, radius_mm, distance_mm, brake, block, True)
Drive clockwise in a circle with 'radius_mm' for 'distance_mm'
def convert_path_to_module_parts(path):
    module_parts = splitall(path)
    if module_parts[-1] in ['__init__.py', '__init__.pyc']:
        module_parts = module_parts[:-1]
    else:
        module_parts[-1], _ = os.path.splitext(module_parts[-1])
    return module_parts
Convert path to a python file into list of module names.
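For illustration (assuming splitall splits a path on every separator):

    >>> convert_path_to_module_parts('pkg/sub/mod.py')
    ['pkg', 'sub', 'mod']
    >>> convert_path_to_module_parts('pkg/sub/__init__.py')
    ['pkg', 'sub']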
def get_sample(self, md5):
    if len(md5) < 32:
        md5 = self.get_full_md5(md5, self.sample_collection)
    sample_info = self.database[self.sample_collection].find_one({'md5': md5})
    if not sample_info:
        return None
    try:
        grid_fs_id = sample_info['__grid_fs']
        sample_info = self.clean_for_serialization(sample_info)
        sample_info.update({'raw_bytes': self.gridfs_handle.get(grid_fs_id).read()})
        return sample_info
    except gridfs.errors.CorruptGridFile:
        self.database[self.sample_collection].update({'md5': md5}, {'md5': None})
        return None
Get the sample from the data store. This method first fetches the data from datastore, then cleans it for serialization and then updates it with 'raw_bytes' item. Args: md5: The md5 digest of the sample to be fetched from datastore. Returns: The sample dictionary or None
async def processClaim(self, schemaId: ID,
                       claimAttributes: Dict[str, ClaimAttributeValues],
                       signature: Claims):
    await self.wallet.submitContextAttr(schemaId, signature.primaryClaim.m2)
    await self.wallet.submitClaimAttributes(schemaId, claimAttributes)
    await self._initPrimaryClaim(schemaId, signature.primaryClaim)
    if signature.nonRevocClaim:
        await self._initNonRevocationClaim(schemaId, signature.nonRevocClaim)
Processes and saves a received Claim for the given Schema.

:param schemaId: The schema ID (reference to claim definition schema)
:param claimAttributes: claim attribute values to be processed and saved
:param signature: claim signatures to be processed and saved
def request_bytesize(self):
    return sum(len(str(e)) for elts in self._in_deque for e in elts)
The size in bytes of the bundled field elements.
def install_scripts(distributions):
    try:
        if "__PEX_UNVENDORED__" in __import__("os").environ:
            from setuptools.command import easy_install
        else:
            from pex.third_party.setuptools.command import easy_install
        if "__PEX_UNVENDORED__" in __import__("os").environ:
            import pkg_resources
        else:
            import pex.third_party.pkg_resources as pkg_resources
    except ImportError:
        raise RuntimeError("'wheel install_scripts' needs setuptools.")
    for dist in distributions:
        pkg_resources_dist = pkg_resources.get_distribution(dist)
        install = get_install_command(dist)
        command = easy_install.easy_install(install.distribution)
        command.args = ['wheel']
        command.finalize_options()
        command.install_egg_scripts(pkg_resources_dist)
Regenerate the entry_points console_scripts for the named distribution.
def start_pan(self, x, y, button):
    bd = self.viewer.get_bindings()
    data_x, data_y = self.viewer.get_data_xy(x, y)
    event = PointEvent(button=button, state='down',
                       data_x=data_x, data_y=data_y,
                       viewer=self.viewer)
    if button == 1:
        bd.ms_pan(self.viewer, event, data_x, data_y)
    elif button == 3:
        bd.ms_zoom(self.viewer, event, data_x, data_y)
Called when a pan operation has started. *x*, *y* are the mouse coordinates in display coords. button is the mouse button number: * 1: LEFT * 2: MIDDLE * 3: RIGHT .. note:: Intended to be overridden by new projection types.
def _do_create(di):
    track = di['track'].strip()
    artists = di['artist']
    if isinstance(artists, StringType):
        artists = [artists]
    tracks = Track.objects.filter(title=track, state='published')
    if tracks:
        track = tracks[0]
        track_created = False
    else:
        track = Track.objects.create(title=track, state='published')
        track_created = True
    last_played = di.get('last_played', None)
    if last_played and (track.last_played != last_played):
        track.last_played = last_played
        track.save()
    if track_created:
        track.length = di.get('length', 240)
        track.sites = Site.objects.all()
        track.save(set_image=False)
        for artist in artists:
            track.create_credit(artist.strip(), 'artist')
        track.set_image()
Function that interprets a dictionary and creates objects
def is_disjoint(self, other):
    if self.is_empty() or other.is_empty():
        return True
    if self.bounds[0] < other.bounds[0]:
        i1, i2 = self, other
    elif self.bounds[0] > other.bounds[0]:
        i2, i1 = self, other
    else:
        if self.is_discrete() and not other.included[0]:
            return True
        elif other.is_discrete() and not self.included[0]:
            return True
        else:
            return False
    return not i2.bounds[0] in i1
Check whether two Intervals are disjoint. :param Interval other: The Interval to check disjointedness with.
def assertNone(expr, message=None):
    if expr is not None:
        raise TestStepFail(
            format_message(message) if message is not None
            else "Assert: %s != None" % str(expr))
Assert that expr is None. :param expr: expression. :param message: Message set to raised Exception :raises: TestStepFail if expr is not None.
def get_session(ec=None, create=True):
    ec = ec or __default_engine__
    if isinstance(ec, (str, unicode)):
        session = engine_manager[ec].session(create=True)
    elif isinstance(ec, Session):
        session = ec
    else:
        raise Error("Connection %r should be an existing engine name "
                    "or Session object" % ec)
    return session
ec - engine_name or connection
def _configure_detail_level(cls, detail_level):
    if isinstance(detail_level, six.string_types):
        if detail_level not in LOG_DETAIL_LEVELS:
            raise ValueError(
                _format("Invalid log detail level string: {0!A}; must be "
                        "one of: {1!A}", detail_level, LOG_DETAIL_LEVELS))
    elif isinstance(detail_level, int):
        if detail_level < 0:
            raise ValueError(
                _format("Invalid log detail level integer: {0}; must be a "
                        "positive integer.", detail_level))
    elif detail_level is None:
        detail_level = DEFAULT_LOG_DETAIL_LEVEL
    else:
        raise ValueError(
            _format("Invalid log detail level: {0!A}; must be one of: "
                    "{1!A}, or a positive integer",
                    detail_level, LOG_DETAIL_LEVELS))
    return detail_level
Validate the `detail_level` parameter and return it. This accepts a string or integer for `detail_level`.
def _initSwapInfo(self):
    self._swapList = []
    sysinfo = SystemInfo()
    for (swap, attrs) in sysinfo.getSwapStats().iteritems():
        if attrs['type'] == 'partition':
            dev = self._getUniqueDev(swap)
            if dev is not None:
                self._swapList.append(dev)
Initialize swap partition to device mappings.
def handle(cls, value, context, **kwargs):
    try:
        hook_name, key = value.split("::")
    except ValueError:
        raise ValueError("Invalid value for hook_data: %s. Must be in "
                         "<hook_name>::<key> format." % value)
    return context.hook_data[hook_name][key]
Returns the value of a key for a given hook in hook_data. Format of value: <hook_name>::<key>
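A minimal usage sketch with hypothetical names (HookDataLookup and the hook data contents are illustrative only):

    >>> context.hook_data = {'create_domain': {'domain_id': 'd-123'}}
    >>> HookDataLookup.handle('create_domain::domain_id', context)
    'd-123'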
def LinSpace(start, stop, num):
    # Trailing comma: ops return a tuple of outputs.
    return np.linspace(start, stop, num=num, dtype=np.float32),
Linspace op.
def getMask(self, ifname):
    if sys.platform == 'darwin':
        return ifconfig_inet(ifname).get('netmask')
    return self._getaddr(ifname, self.SIOCGIFNETMASK)
Get the netmask for an interface. @param ifname: interface name @type ifname: string
def aes_encrypt(mode, aes_key, aes_iv, *data):
    encryptor = Cipher(
        algorithms.AES(aes_key), mode(aes_iv),
        backend=default_backend()).encryptor()
    result = None
    for value in data:
        result = encryptor.update(value)
    encryptor.finalize()
    return result, None if not hasattr(encryptor, 'tag') else encryptor.tag
Encrypt data with AES in specified mode.
def _sibpath(path, sibling):
    return os.path.join(os.path.dirname(os.path.abspath(path)), sibling)
Return the path to a sibling of a file in the filesystem. This is useful in conjunction with the special C{__file__} attribute that Python provides for modules, so modules can load associated resource files. (Stolen from twisted.python.util)
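For illustration (POSIX paths):

    >>> _sibpath('/pkg/mod/util.py', 'data.json')
    '/pkg/mod/data.json'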
def find_ip6_by_id(self, id_ip):
    if not is_valid_int_param(id_ip):
        raise InvalidParameterError(
            u'Ipv6 identifier is invalid or was not informed.')
    url = 'ipv6/get/' + str(id_ip) + "/"
    code, xml = self.submit(None, 'GET', url)
    return self.response(code, xml)
Get an IP6 by ID

:param id_ip: IP6 identifier. Integer value and greater than zero.

:return: Dictionary with the following structure:

::

    {'ip': {'id': < id >,
            'block1': <block1>,
            'block2': <block2>,
            'block3': <block3>,
            'block4': <block4>,
            'block5': <block5>,
            'block6': <block6>,
            'block7': <block7>,
            'block8': <block8>,
            'descricao': < description >,
            'equipamento': [ { all name equipamentos related } ], }}

:raise IpNotAvailableError: Network dont have available IPv6.
:raise NetworkIPv4NotFoundError: Network was not found.
:raise UserNotAuthorizedError: User dont have permission to perform operation.
:raise InvalidParameterError: IPv6 identifier is none or invalid.
:raise XMLError: Networkapi failed to generate the XML response.
:raise DataBaseError: Networkapi failed to access the database.
def int_to_varbyte(self, value):
    length = int(log(max(value, 1), 0x80)) + 1
    bytes = [value >> i * 7 & 0x7F for i in range(length)]
    bytes.reverse()
    for i in range(len(bytes) - 1):
        bytes[i] = bytes[i] | 0x80
    return pack('%sB' % len(bytes), *bytes)
Convert an integer into a variable length byte. How it works: the bytes are stored in big-endian (significant bit first), the highest bit of the byte (mask 0x80) is set when there are more bytes following. The remaining 7 bits (mask 0x7F) are used to store the value.
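A worked example: 300 in binary is 100101100, which splits into the 7-bit groups 0000010 (2) and 0101100 (44); every byte except the last gets the continuation bit 0x80 set, so the encoding is the two bytes 0x82 0x2C.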