Unnamed: 0: int64, values 0 to 389k
code: string, lengths 26 to 79.6k
docstring: string, lengths 1 to 46.9k
4,500
def unzip_file(source_file, dest_dir=None, mkdir=False):
    # set the default directory, creating it if need be
    if dest_dir is None:
        dest_dir, fname = os.path.split(source_file)
    elif not os.path.isdir(dest_dir):
        if mkdir:
            preparedir(dest_dir)
        else:
            created = preparedir(dest_dir, False)
            if not created:
                raise ValueError("Failed to find %s." % dest_dir)

    # extract each member, sanitizing path components along the way
    with zipfile.ZipFile(source_file) as zf:
        for member in zf.infolist():
            words = member.filename.split('/')
            for word in words[:-1]:
                drive, word = os.path.splitdrive(word)
                head, word = os.path.split(word)
                if word in (os.curdir, os.pardir, ''):
                    continue
                dest_dir = os.path.join(dest_dir, word)
            zf.extract(member, dest_dir)
Unzip a compressed file. Args: source_file: Full path to a valid compressed file (e.g. c:/ladybug/testPts.zip) dest_dir: Target folder to extract to (e.g. c:/ladybug). Default is set to the same directory as the source file. mkdir: Set to True to create the directory if doesn't exist (Default: False)
4,501
def collect(self, target):
    statusobjects = set()
    callables = set()

    objs_from_this_obj = getattr(self, '_give_%s' % target)()
    if not is_iterable(objs_from_this_obj):
        objs_from_this_obj = [objs_from_this_obj]
    if is_iterable(objs_from_this_obj):
        for i in (self.name_to_system_object(j) for j in objs_from_this_obj):
            if isinstance(i, AbstractStatusObject):
                statusobjects.add(i)
            elif isinstance(i, AbstractCallable):
                callables.add(i)
    for i in (self.name_to_system_object(j) for j in deep_iterate(callables)):
        if isinstance(i, AbstractCallable):
            statusobjects.update(getattr(i, target))
    return statusobjects
Recursively collect all potential triggers/targets in this node and its children. Define targets and triggers of this particular callable in :meth:`_give_triggers` and :meth:`_give_targets`. :param str target: valid values: ``'targets'`` and ``'triggers'``
4,502
def run(self): logger = getLogger(__name__) try: config_class_name = self.arguments[0] except IndexError: raise SphinxError( .format(self.directive_name)) logger.debug(, self.directive_name, config_class_name) config_class = get_type(config_class_name) config_fields = get_task_config_fields(config_class) all_nodes = [] for field_name, field in config_fields.items(): field_id = format_configfield_id( .join((config_class.__module__, config_class.__name__)), field_name) try: format_field_nodes = get_field_formatter(field) except ValueError: logger.debug( .format(field)) continue all_nodes.append( format_field_nodes(field_name, field, field_id, self.state, self.lineno) ) if len(all_nodes) == 0: message = return [nodes.paragraph(text=message)] return all_nodes
Main entrypoint method. Returns ------- new_nodes : `list` Nodes to add to the doctree.
4,503
def visit_extslice(self, node, parent):
    newnode = nodes.ExtSlice(parent=parent)
    newnode.postinit([self.visit(dim, newnode) for dim in node.dims])
    return newnode
visit an ExtSlice node by returning a fresh instance of it
4,504
def add_f95_to_env(env):
    try:
        F95Suffixes = env['F95FILESUFFIXES']
    except KeyError:
        F95Suffixes = ['.f95']
    try:
        F95PPSuffixes = env['F95PPFILESUFFIXES']
    except KeyError:
        F95PPSuffixes = []

    DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes,
                    support_module=1)
Add Builders and construction variables for f95 to an Environment.
4,505
def check_cluster(
    cluster_config,
    data_path,
    java_home,
    check_replicas,
    batch_size,
    minutes,
    start_time,
    end_time,
):
    brokers = get_broker_list(cluster_config)
    broker_files = find_files(data_path, brokers, minutes, start_time, end_time)
    if not check_replicas:
        broker_files = filter_leader_files(cluster_config, broker_files)
    processes = []
    print("Starting {n} parallel processes".format(n=len(broker_files)))
    try:
        for broker, host, files in broker_files:
            print(
                "  Broker: {host}, {n} files to check".format(
                    host=host, n=len(files)),
            )
            p = Process(
                name="dump_process_" + host,
                target=check_files_on_host,
                args=(java_home, host, files, batch_size),
            )
            p.start()
            processes.append(p)
        print("Processes running:")
        for process in processes:
            process.join()
    except KeyboardInterrupt:
        print("Terminating all processes")
        for process in processes:
            process.terminate()
            process.join()
        print("All processes terminated")
        sys.exit(1)
Check the integrity of the Kafka log files in a cluster. start_time and end_time should be in the format specified by TIME_FORMAT_REGEX. :param data_path: the path to the log folder on the broker :type data_path: str :param java_home: the JAVA_HOME of the broker :type java_home: str :param check_replicas: also checks the replica files :type check_replicas: bool :param batch_size: the size of the batch :type batch_size: int :param minutes: check the files modified in the last N minutes :type minutes: int :param start_time: check the files modified after start_time :type start_time: str :param end_time: check the files modified before end_time :type end_time: str
4,506
def read_config(config_path=default_config_path):
    config_path = os.path.expanduser(config_path)
    if not os.path.isfile(config_path):
        raise OSError(errno.ENOENT,
                      "Artifactory configuration file not found: %s" % config_path)

    p = configparser.ConfigParser()
    p.read(config_path)

    result = {}

    for section in p.sections():
        username = p.get(section, 'username') if p.has_option(section, 'username') else None
        password = p.get(section, 'password') if p.has_option(section, 'password') else None
        verify = p.getboolean(section, 'verify') if p.has_option(section, 'verify') else True
        cert = p.get(section, 'cert') if p.has_option(section, 'cert') else None

        result[section] = {'username': username,
                           'password': password,
                           'verify': verify,
                           'cert': cert}

    return result
Read configuration file and produce a dictionary of the following structure: {'<instance1>': {'username': '<user>', 'password': '<pass>', 'verify': <True/False>, 'cert': '<path-to-cert>'} '<instance2>': {...}, ...} Format of the file: [https://artifactory-instance.local/artifactory] username = foo password = @dmin verify = false cert = ~/path-to-cert config-path - specifies where to read the config from
4,507
def plot(self, ax=None, **kwargs):
    ax, fig, plt = get_ax_fig_plt(ax)
    yy = [len(v) for v in self.values]
    ax.plot(self.binvals, yy, **kwargs)
    return fig
Plot the histogram with matplotlib, returns `matplotlib` figure.
4,508
def _set_blob_properties(self, ud):
    if ud.requires_non_encrypted_md5_put:
        digest = blobxfer.util.base64_encode_as_string(ud.md5.digest())
    else:
        digest = None
    blobxfer.operations.azure.blob.set_blob_properties(ud.entity, digest)
    if blobxfer.util.is_not_empty(ud.entity.replica_targets):
        for ase in ud.entity.replica_targets:
            blobxfer.operations.azure.blob.set_blob_properties(ase, digest)
Set blob properties (md5, cache control) :param Uploader self: this :param blobxfer.models.upload.Descriptor ud: upload descriptor
4,509
def fetch(url, binary, outfile, noprint, rendered):
    # Fetch the URL's content (text/html or binary), write it to ``outfile``
    # and/or print it, encoding text content as UTF-8.
    ...
Fetch a specified URL's content, and output it to the console.
4,510
def _prepare_ws(self, w0, mmap, n_steps):
    from ..dynamics import PhaseSpacePosition

    if not isinstance(w0, PhaseSpacePosition):
        w0 = PhaseSpacePosition.from_w(w0)

    arr_w0 = w0.w(self._func_units)

    self.ndim, self.norbits = arr_w0.shape
    self.ndim = self.ndim // 2

    return_shape = (2 * self.ndim, n_steps + 1, self.norbits)
    if mmap is None:
        # create the return array
        ws = np.zeros(return_shape, dtype=float)
    else:
        if mmap.shape != return_shape:
            raise ValueError("Shape of memory-mapped array doesn't match "
                             "expected shape {}".format(return_shape))
        if not mmap.flags.writeable:
            raise TypeError("Memory-mapped array must be a writable mode, "
                            "not '{}'".format(mmap.mode))
        ws = mmap

    return w0, arr_w0, ws
Decide how to make the return array. If mmap is False, this returns a full array of zeros, but with the correct shape as the output. If mmap is True, return a pointer to a memory-mapped array. The latter is particularly useful for integrating a large number of orbits or integrating a large number of time steps.
4,511
def find_boundary_types(model, boundary_type, external_compartment=None):
    if not model.boundary:
        LOGGER.warning("There are no boundary reactions in this model. "
                       "Therefore specific types of boundary reactions such "
                       "as 'exchanges', 'demands' or 'sinks' cannot be "
                       "identified.")
        return []
    if external_compartment is None:
        external_compartment = find_external_compartment(model)
    return model.reactions.query(
        lambda r: is_boundary_type(r, boundary_type, external_compartment))
Find specific boundary reactions. Arguments --------- model : cobra.Model A cobra model. boundary_type : str What boundary type to check for. Must be one of "exchange", "demand", or "sink". external_compartment : str or None The id for the external compartment. If None it will be detected automatically. Returns ------- list of cobra.reaction A list of likely boundary reactions of a user defined type.
4,512
def masked_within_block_local_attention_1d(q, k, v, block_length=64, name=None): with tf.variable_scope( name, default_name="within_local_attention_1d", values=[q, k, v]): batch, heads, length, depth_k = common_layers.shape_list(q) depth_v = common_layers.shape_list(v)[-1] if isinstance(block_length, tf.Tensor): const = tf.contrib.util.constant_value(block_length) if const is not None: block_length = int(const) original_length = length padding_size = tf.mod(-length, block_length) length += padding_size padding = [[0, 0], [0, 0], [0, padding_size], [0, 0]] q = tf.pad(q, padding) k = tf.pad(k, padding) v = tf.pad(v, padding) num_blocks = tf.div(length, block_length) q = tf.reshape(q, [batch, heads, num_blocks, block_length, depth_k]) k = tf.reshape(k, [batch, heads, num_blocks, block_length, depth_k]) v = tf.reshape(v, [batch, heads, num_blocks, block_length, depth_v]) attention = tf.matmul(q, k, transpose_b=True) attention += tf.reshape(attention_bias_lower_triangle(block_length), [1, 1, 1, block_length, block_length]) attention = tf.nn.softmax(attention) output = tf.matmul(attention, v) output = tf.reshape(output, [batch, heads, -1, depth_v]) output = tf.slice(output, [0, 0, 0, 0], [-1, -1, original_length, -1]) output.set_shape([None if isinstance(dim, tf.Tensor) else dim for dim in (batch, heads, length, depth_v)]) return output
Attention to the source and a neighborhood to the left within a block. The sequence is divided into blocks of length block_length. Attention for a given query position can only see memory positions less than or equal to the query position in the corresponding block. Args: q: a Tensor with shape [batch, heads, length, depth_k] k: a Tensor with shape [batch, heads, length, depth_k] v: a Tensor with shape [batch, heads, length, depth_v] block_length: an integer name: an optional string Returns: a Tensor of shape [batch, heads, length, depth_v]
4,513
def gpg_profile_put_key( blockchain_id, key_id, key_name=None, immutable=True, txid=None, key_url=None, use_key_server=True, key_server=None, proxy=None, wallet_keys=None, gpghome=None ): if key_name is not None: assert is_valid_keyname(key_name) if key_server is None: key_server = DEFAULT_KEY_SERVER if gpghome is None: gpghome = get_default_gpg_home() put_res = {} extra_fields = {} key_data = None if key_name is not None: extra_fields = {: key_name} if key_url is None: gpg = gnupg.GPG( homedir=gpghome ) if use_key_server: res = gpg.send_keys( key_server, key_id ) if len(res.data) > 0: log.error("GPG failed to upload key " % key_id) log.error("GPG error:\n%s" % res.stderr) return {: } key_data = gpg.export_keys( [key_id] ) if immutable: immutable_result = client.put_immutable( blockchain_id, key_id, {key_id: key_data}, proxy=proxy, txid=txid, wallet_keys=wallet_keys ) if in immutable_result: return {: % (key_id, immutable_result[])} else: put_res[] = immutable_result[] put_res[] = immutable_result[] key_url = client.make_immutable_data_url( blockchain_id, key_id, client.get_data_hash(key_data) ) else: mutable_name = key_name if key_name is None: mutable_name = key_id mutable_result = client.put_mutable( blockchain_id, key_id, {mutable_name: key_data}, proxy=proxy, wallet_keys=wallet_keys ) if in mutable_result: return {: % (key_id, mutable_result[])} key_url = client.make_mutable_data_url( blockchain_id, key_id, mutable_result[] ) put_account_res = client.put_account( blockchain_id, "pgp", key_id, key_url, proxy=proxy, wallet_keys=wallet_keys, **extra_fields ) if in put_account_res: return put_account_res else: put_account_res.update( put_res ) put_account_res[] = key_url put_account_res[] = key_id return put_account_res
Put a local GPG key into a blockchain ID's global account. If the URL is not given, the key will be replicated to the default PGP key server and to either immutable (if @immutable) or mutable data. Return {'status': True, 'key_url': key_url, 'key_id': key fingerprint, ...} on success Return {'error': ...} on error
4,514
def bucket(cls, bucket_name, connection=None):
    connection = cls.connection if connection is None else connection
    if bucket_name not in cls._buckets:
        connection = "{connection}/{bucket_name}".format(connection=connection,
                                                         bucket_name=bucket_name)
        if cls.password:
            cls._buckets[connection] = Bucket(connection, password=cls.password)
        else:
            cls._buckets[connection] = Bucket(connection)
    return cls._buckets[connection]
Gives the bucket from couchbase server. :param bucket_name: Bucket name to fetch. :type bucket_name: str :returns: couchbase driver's Bucket object. :rtype: :class:`couchbase.client.Bucket` :raises: :exc:`RuntimeError` If the credentials weren't set.
4,515
def from_group(cls, group):
    if not group:
        return
    tag_items = group.split(";")
    return list(map(cls.parse, tag_items))
Construct tags from the regex group
4,516
def from_extension(extension):
    if not extension.startswith('.'):
        raise ValueError("Extensions must begin with a period.")
    try:
        return EXTENSION_TO_TYPE[extension.lower()]
    except KeyError:
        raise UnknownExtensionError(
            "seqmagick does not know how to handle " +
            "files with extensions like this: " + extension)
Look up the BioPython file type corresponding with input extension. Look up is case insensitive.
4,517
def IsSocket(self):
    if self._stat_object is None:
        self._stat_object = self._GetStat()
    if self._stat_object is not None:
        self.entry_type = self._stat_object.type
    return self.entry_type == definitions.FILE_ENTRY_TYPE_SOCKET
Determines if the file entry is a socket. Returns: bool: True if the file entry is a socket.
4,518
def hash_data(data, hashlen=None, alphabet=None):
    if alphabet is None:
        alphabet = ALPHABET_27
    if hashlen is None:
        hashlen = HASH_LEN2
    if isinstance(data, stringlike) and len(data) == 0:
        # special hash for empty data
        text = (alphabet[0] * hashlen)
    else:
        hasher = hashlib.sha512()
        _update_hasher(hasher, data)
        text = hasher.hexdigest()
        # convert to the target alphabet and truncate
        hashstr2 = convert_hexstr_to_bigbase(text, alphabet, bigbase=len(alphabet))
        text = hashstr2[:hashlen]
    return text
r""" Get a unique hash depending on the state of the data. Args: data (object): any sort of loosely organized data hashlen (None): (default = None) alphabet (None): (default = None) Returns: str: text - hash string CommandLine: python -m utool.util_hash hash_data Example: >>> # ENABLE_DOCTEST >>> from utool.util_hash import * # NOQA >>> import utool as ut >>> counter = [0] >>> failed = [] >>> def check_hash(input_, want=None): >>> count = counter[0] = counter[0] + 1 >>> got = ut.hash_data(input_) >>> print('({}) {}'.format(count, got)) >>> if want is not None and not got.startswith(want): >>> failed.append((got, input_, count, want)) >>> check_hash('1', 'wuvrng') >>> check_hash(['1'], 'dekbfpby') >>> check_hash(tuple(['1']), 'dekbfpby') >>> check_hash(b'12', 'marreflbv') >>> check_hash([b'1', b'2'], 'nwfs') >>> check_hash(['1', '2', '3'], 'arfrp') >>> check_hash(['1', np.array([1,2,3]), '3'], 'uyqwcq') >>> check_hash('123', 'ehkgxk') >>> check_hash(zip([1, 2, 3], [4, 5, 6]), 'mjcpwa') >>> import numpy as np >>> rng = np.random.RandomState(0) >>> check_hash(rng.rand(100000), 'bdwosuey') >>> for got, input_, count, want in failed: >>> print('failed {} on {}'.format(count, input_)) >>> print('got={}, want={}'.format(got, want)) >>> assert not failed
4,519
def generate_project(self): if not self.name or not self.destdir or \ not os.path.isdir(self.destdir): raise ValueError("Empty or invalid property values: run with command") _log("Generating project " % self.name) _log("Destination directory is: " % self.destdir) top = os.path.join(self.destdir, self.name) src = os.path.join(top, self.src_name) resources = os.path.join(top, self.res_name) utils = os.path.join(src, "utils") if self.complex: models = os.path.join(src, "models") ctrls = os.path.join(src, "ctrls") views = os.path.join(src, "views") else: models = ctrls = views = src res = self.__generate_tree(top, src, resources, models, ctrls, views, utils) res = self.__generate_classes(models, ctrls, views) or res res = self.__mksrc(os.path.join(utils, "globals.py"), templates.glob) or res if self.complex: self.templ.update({ : "from models.application import ApplModel", : "from ctrls.application import ApplCtrl", : "from views.application import ApplView"}) else: self.templ.update({ : "from ApplModel import ApplModel", : "from ApplCtrl import ApplCtrl", : "from ApplView import ApplView"}) res = self.__mksrc(os.path.join(top, "%s.py" % self.name), templates.main) or res if self.builder: res = self.__generate_builder(resources) or res if self.dist_gtkmvc3: res = self.__copy_framework(os.path.join(resources, "external")) or res if not res: _log("No actions were taken") else: _log("Done") return res
Generate the whole project. Returns True if at least one file has been generated, False otherwise.
4,520
def ext_pillar(minion_id, pillar, *args, **kwargs): if minion_id == : log.info(s no data to collect from NetBox for the Masterapi_url/api_tokensite_detailssite_prefixesproxy_usernameproxy_returnAuthorizationToken {}{api_url}/{app}/{endpoint}dcimdevicesnameerrorAPI query failed for "%s", status code: %dstatuserrordictresultsnetboxMore than one device found for "%s"Unable to pull NetBox data for "%s"netboxsiteidnetboxsitenameRetrieving site details for "%s" - site %s (ID %d){api_url}/{app}/{endpoint}/{site_id}/dcimsiteserrorUnable to retrieve site details for %s (ID %d)Status code: %d, error: %sstatuserrornetboxsitedictRetrieving site prefixes for "%s" - site %s (ID %d){api_url}/{app}/{endpoint}ipamprefixessite_iderrorUnable to retrieve site prefixes for %s (ID %d)Status code: %d, error: %sstatuserrornetboxsiteprefixesdictresultsnetboxplatformurlerrorAPI query failed for "%s": %serrordictnapalm_driverproxyhostnetboxprimary_ip4addressdriverproxytypenapalmproxyusernameCould not create proxy config data for "%s"', minion_id) return ret
Query NetBox API for minion data
4,521
def deserialize(self, data): ct_in_map = { : self._form_loader, : salt.utils.json.loads, : salt.utils.yaml.safe_load, : salt.utils.yaml.safe_load, return ct_in_map[value](tornado.escape.native_str(data)) except KeyError: self.send_error(406) except ValueError: self.send_error(400)
Deserialize the data based on request content type headers
4,522
def createHeaderMenu(self, index): menu = QtGui.QMenu(self) act = menu.addAction("Hide " % self.columnOf(index)) act.triggered.connect( self.headerHideColumn ) menu.addSeparator() act = menu.addAction() act.setIcon(QtGui.QIcon(resources.find())) act.triggered.connect( self.headerSortAscending ) act = menu.addAction() act.setIcon(QtGui.QIcon(resources.find())) act.triggered.connect( self.headerSortDescending ) act = menu.addAction() act.setIcon(QtGui.QIcon(resources.find())) act.triggered.connect( self.resizeToContents ) menu.addSeparator() colmenu = menu.addMenu( ) colmenu.setIcon(QtGui.QIcon(resources.find())) colmenu.addAction() colmenu.addAction() colmenu.addSeparator() hitem = self.headerItem() columns = self.columns() for column in sorted(columns): col = self.column(column) action = colmenu.addAction(column) action.setCheckable(True) action.setChecked(not self.isColumnHidden(col)) colmenu.triggered.connect( self.toggleColumnByAction ) menu.addSeparator() exporters = self.exporters() if exporters: submenu = menu.addMenu() submenu.setIcon(QtGui.QIcon(resources.find())) for exporter in exporters: act = submenu.addAction(exporter.name()) act.setData(wrapVariant(exporter.filetype())) submenu.triggered.connect(self.exportAs) return menu
Creates a new header menu to be displayed. :return <QtGui.QMenu>
4,523
def l2traceroute_result_input_session_id(self, **kwargs):
    config = ET.Element("config")
    l2traceroute_result = ET.Element("l2traceroute_result")
    config = l2traceroute_result
    input = ET.SubElement(l2traceroute_result, "input")
    session_id = ET.SubElement(input, "session-id")
    session_id.text = kwargs.pop('session_id')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
4,524
def parse(self, filename=None, file=None, debuglevel=0): self.scope.push() if not file: file = filename else: if hasattr(file, ): if filename is not None: raise AssertionError( ) filename = file.name else: filename = self.target = filename if self.verbose and not self.fail_with_exc: print( % filename, file=sys.stderr) self.result = self.parser.parse(file, lexer=self.lex, debug=debuglevel) self.post_parse() self.register.close()
Parse file. kwargs: filename (str): File to parse debuglevel (int): Parser debuglevel
4,525
def undo(self):
    if self.canundo():
        undoable = self._undos.pop()
        with self._pausereceiver():
            try:
                undoable.undo()
            except:
                self.clear()
                raise
            else:
                self._redos.append(undoable)
        self.undocallback()
Undo the last action.
4,526
def _qteRunQueuedMacro(self, macroName: str, widgetObj: QtGui.QWidget=None, keysequence: QtmacsKeysequence=None): app = qteGetAppletFromWidget(widgetObj) if app is not None: if sip.isdeleted(app): msg = msg += .format(macroName) self.qteLogger.warning(msg) return macroObj = self.qteGetMacroObject(macroName, widgetObj) if macroObj is None: msg = msg = msg.format(macroName, app.qteAppletSignature(), widgetObj._qteAdmin.widgetSignature) self.qteLogger.warning(msg) return self.qteDefVar(, keysequence, doc="Last valid key sequence that triggered a macro.") if app is None: macroObj.qteApplet = macroObj.qteWidget = None else: macroObj.qteApplet = app macroObj.qteWidget = widgetObj macroObj.qtePrepareToRun()
Execute the next macro in the macro queue. This method is triggered by the ``timerEvent`` in conjunction with the focus manager to ensure the event loop updates the GUI in between any two macros. .. warning:: Never call this method directly. |Args| * ``macroName`` (**str**): name of macro * ``widgetObj`` (**QWidget**): widget (if any) for which the macro applies * ``keysequence`` (**QtmacsKeysequence**): key sequence that triggered the macro. |Returns| * **None** |Raises| * **QtmacsArgumentError** if at least one argument has an invalid type.
4,527
def _parse(fileobj):
    fileobj.seek(0)
    try:
        part = fileobj.read(2)
    except UnicodeDecodeError:
        part = ""
    if part == "#!":
        shebang = shlex.split(fileobj.readline().strip())
        # On Windows, skip an "env" wrapper and return the real interpreter
        if (platform.system() == "Windows" and len(shebang) and
                os.path.basename(shebang[0]) == "env"):
            return shebang[1:]
        return shebang
    return []
Parse fileobj for a shebang.
4,528
def get_face_mask(self, subdomain):
    if subdomain is None:
        return numpy.s_[:]

    if subdomain not in self.subdomains:
        self._mark_vertices(subdomain)

    is_in = self.subdomains[subdomain]["vertices"][self.idx_hierarchy]
    # Take the "all" over every axis except the last two.
    n = len(is_in.shape)
    is_inside = numpy.all(is_in, axis=tuple(range(n - 2)))

    if subdomain.is_boundary_only:
        is_inside = is_inside & self.is_boundary_facet

    return is_inside
Get faces which are fully in subdomain.
4,529
def circlescan(x0, y0, r1, r2): if r1 < 0: raise ValueError("Initial radius must be non-negative") if r2 < 0: raise ValueError("Final radius must be non-negative") previous = [] rstep = 1 if r2 >= r1 else -1 for distance in range(r1, r2 + rstep, rstep): if distance == 0: yield x0, y0 else: a = 0.707107 rotations = {0: [[ 1, 0], [ 0, 1]], 1: [[ a, a], [-a, a]], 2: [[ 0, 1], [-1, 0]], 3: [[-a, a], [-a,-a]], 4: [[-1, 0], [ 0,-1]], 5: [[-a,-a], [ a,-a]], 6: [[ 0,-1], [ 1, 0]], 7: [[ a,-a], [ a, a]]} nangles = len(rotations) current = [] for angle in range(nangles): x = 0 y = distance d = 1 - distance while x < y: xr = rotations[angle][0][0]*x + rotations[angle][0][1]*y yr = rotations[angle][1][0]*x + rotations[angle][1][1]*y xr = x0 + xr yr = y0 + yr point = (int(round(xr)), int(round(yr))) if point not in previous: yield xr, yr current.append(point) if (d < 0): d += 3 + 2 * x else: d += 5 - 2 * (y-x) y -= 1 x += 1 previous = current
Scan pixels in a circle pattern around a center point :param x0: Center x-coordinate :type x0: float :param y0: Center y-coordinate :type y0: float :param r1: Initial radius :type r1: float :param r2: Final radius :type r2: float :returns: Coordinate generator :rtype: function
4,530
def parseDockerAppliance(appliance):
    appliance = appliance.lower()

    # Grab the tag if one is present, otherwise default to 'latest'.
    if ':' in appliance:
        tag = appliance.split(':')[-1]
        appliance = appliance[:-(len(':' + tag))]
    else:
        tag = 'latest'

    # Defaults when no registry is given.
    registryName = 'docker.io'
    imageName = appliance

    # A registry prefix looks like a hostname before the first slash.
    if '/' in appliance and '.' in appliance.split('/')[0]:
        registryName = appliance.split('/')[0]
        imageName = appliance[len(registryName):]

    registryName = registryName.strip('/')
    imageName = imageName.strip('/')

    return registryName, imageName, tag
Takes string describing a docker image and returns the parsed registry, image reference, and tag for that image. Example: "quay.io/ucsc_cgl/toil:latest" Should return: "quay.io", "ucsc_cgl/toil", "latest" If a registry is not defined, the default is: "docker.io" If a tag is not defined, the default is: "latest" :param appliance: The full url of the docker image originally specified by the user (or the default). e.g. "quay.io/ucsc_cgl/toil:latest" :return: registryName, imageName, tag
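A minimal usage sketch based on the behavior described in the docstring above; the import path is an assumption and would need to match wherever parseDockerAppliance actually lives.

# Hypothetical import path, adjust as needed:
# from toil.lib.docker import parseDockerAppliance

registry, image, tag = parseDockerAppliance("quay.io/ucsc_cgl/toil:latest")
# registry == "quay.io", image == "ucsc_cgl/toil", tag == "latest"

registry, image, tag = parseDockerAppliance("ubuntu")
# defaults applied: registry == "docker.io", image == "ubuntu", tag == "latest"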
4,531
def prefetch_docker_image_on_private_agents(
        image,
        timeout=timedelta(minutes=5).total_seconds()):
    agents = len(shakedown.get_private_agents())
    app = {
        "id": "/prefetch",
        "instances": agents,
        "container": {
            "type": "DOCKER",
            "docker": {"image": image}
        },
        "cpus": 0.1,
        "mem": 128
    }

    client = marathon.create_client()
    client.add_app(app)

    shakedown.deployment_wait(timeout)

    shakedown.delete_all_apps()
    shakedown.deployment_wait(timeout)
Given a docker image, an app with the image is scaled across the private agents to ensure that the image is prefetched to all nodes. :param image: docker image name :type image: str :param timeout: timeout for deployment wait in secs (default: 5m) :type timeout: int
4,532
def get_mode(device): * ret = {} cmd = .format(device) out = __salt__[](cmd, python_shell=False) for line in out.splitlines(): comps = line.strip().split() if comps[3] not in ret: if comps[0].startswith(): if comps[1].startswith(): ret[comps[4]] = continue elif comps[1].startswith(): ret[device] = return ret continue ret[comps[3]] = { : comps[4].replace(, ).replace(, ), } ret[comps[3]][comps[0]] = comps[6] return ret
Report whether the quota system for this device is on or off CLI Example: .. code-block:: bash salt '*' quota.get_mode
4,533
def clearkml(self):
    # remove all current layers from the map
    for layer in self.curlayers:
        self.mpstate.map.remove_object(layer)
    for layer in self.curtextlayers:
        self.mpstate.map.remove_object(layer)
    self.allayers = []
    self.curlayers = []
    self.alltextlayers = []
    self.curtextlayers = []
    self.menu_needs_refreshing = True
Clear the kmls from the map
4,534
def get(self, key, default=None):
    if self.in_memory:
        return self._memory_db.get(key, default)
    else:
        db = self._read_file()
        return db.get(key, default)
Get key value, return default if key doesn't exist
4,535
def ds_discrete(self, d_min=None, d_max=None, pts=20, limit=1e-9,
                method='logarithmic'):
    if method[0] not in ('R', 'r'):
        if d_min is None:
            d_min = self.dn(limit)
        if d_max is None:
            d_max = self.dn(1.0 - limit)
    return psd_spacing(d_min=d_min, d_max=d_max, pts=pts, method=method)
r'''Create a particle spacing mesh to perform calculations with, according to one of several ways. The allowable meshes are 'linear', 'logarithmic', a geometric series specified by a Renard number such as 'R10', or the meshes available in one of several sieve standards. Parameters ---------- d_min : float, optional The minimum diameter at which the mesh starts, [m] d_max : float, optional The maximum diameter at which the mesh ends, [m] pts : int, optional The number of points to return for the mesh (note this is not respected by sieve meshes), [-] limit : float If `d_min` or `d_max` is not specified, it will be calculated as the `dn` at which this limit or 1-limit exists (this is ignored for Renard numbers), [-] method : str, optional Either 'linear', 'logarithmic', a Renard number like 'R10' or 'R5' or'R2.5', or one of the sieve standards 'ISO 3310-1 R40/3', 'ISO 3310-1 R20', 'ISO 3310-1 R20/3', 'ISO 3310-1', 'ISO 3310-1 R10', 'ASTM E11', [-] Returns ------- ds : list[float] The generated mesh diameters, [m] Notes ----- Note that when specifying a Renard series, only one of `d_min` or `d_max` can be respected! Provide only one of those numbers. Note that when specifying a sieve standard the number of points is not respected! References ---------- .. [1] ASTM E11 - 17 - Standard Specification for Woven Wire Test Sieve Cloth and Test Sieves. .. [2] ISO 3310-1:2016 - Test Sieves -- Technical Requirements and Testing -- Part 1: Test Sieves of Metal Wire Cloth.
4,536
def create_png(cls_name, meth_name, graph, dir_name=): m_name = .join(x for x in meth_name if x.isalnum()) name = .join((cls_name.split()[-1][:-1], , m_name)) graph.draw(name, dir_name)
Creates a PNG from a given :class:`~androguard.decompiler.dad.graph.Graph`. :param str cls_name: name of the class :param str meth_name: name of the method :param androguard.decompiler.dad.graph.Graph graph: :param str dir_name: output directory
4,537
def heartbeat_encode(self, type, autopilot, base_mode, custom_mode,
                     system_status, mavlink_version=2):
    return MAVLink_heartbeat_message(type, autopilot, base_mode, custom_mode,
                                     system_status, mavlink_version)
The heartbeat message shows that a system is present and responding. The type of the MAV and Autopilot hardware allow the receiving system to treat further messages from this system appropriate (e.g. by laying out the user interface based on the autopilot). type : Type of the MAV (quadrotor, helicopter, etc., up to 15 types, defined in MAV_TYPE ENUM) (uint8_t) autopilot : Autopilot type / class. defined in MAV_AUTOPILOT ENUM (uint8_t) base_mode : System mode bitfield, see MAV_MODE_FLAGS ENUM in mavlink/include/mavlink_types.h (uint8_t) custom_mode : A bitfield for use for autopilot-specific flags. (uint32_t) system_status : System status flag, see MAV_STATE ENUM (uint8_t) mavlink_version : MAVLink version (uint8_t)
4,538
def place_items_in_square(items, t):
    rows = [(t, y, []) for y in range(t)]
    for item in items:
        x = item % t
        y = item // t
        inverse_length, _, row_contents = rows[y]
        heapq.heappush(row_contents, (x, item))
        rows[y] = inverse_length - 1, y, row_contents
    assert all(inv_len == t - len(rows) for inv_len, _, rows in rows)
    heapq.heapify(rows)
    return [row for row in rows if row[2]]
Returns a list of rows that are stored as a priority queue to be used with heapq functions. >>> place_items_in_square([1,5,7], 4) [(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])] >>> place_items_in_square([1,5,7], 3) [(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])]
4,539
def get_and_set(self, value):
    with self._reference.get_lock():
        oldval = self._reference.value
        self._reference.value = value
        return oldval
Atomically sets the value to `value` and returns the old value. :param value: The value to set.
4,540
def build_arch(self, arch): env = self.get_recipe_env(arch, with_flags_in_cc=False) for path in ( self.get_build_dir(arch.arch), join(self.ctx.python_recipe.get_build_dir(arch.arch), ), join(self.ctx.python_recipe.get_build_dir(arch.arch), )): if not exists(path): info("creating {}".format(path)) shprint(sh.mkdir, , path) cli = env[].split()[0] if in cli: cli = env[].split()[1] cc = sh.Command(cli) with current_directory(self.get_build_dir(arch.arch)): cflags = env[].split() cflags.extend([, , , , ]) shprint(cc, *cflags, _env=env) cflags = env[].split() cflags.extend([, , , , ]) cflags.extend(env[].split()) shprint(cc, *cflags, _env=env) shprint(sh.cp, , self.ctx.get_libs_dir(arch.arch))
simple shared compile
4,541
def attributes_diagram(rel_objs, obj_labels, colors, markers, filename, figsize=(8, 8), xlabel="Forecast Probability", ylabel="Observed Relative Frequency", ticks=np.arange(0, 1.05, 0.05), dpi=300, title="Attributes Diagram", legend_params=None, inset_params=None, inset_position=(0.12, 0.72, 0.25, 0.25), bootstrap_sets=None, ci=(2.5, 97.5)): if legend_params is None: legend_params = dict(loc=4, fontsize=10, framealpha=1, frameon=True) if inset_params is None: inset_params = dict(width="25%", height="25%", loc=2, axes_kwargs=dict(axisbg=)) fig, ax = plt.subplots(figsize=figsize) plt.plot(ticks, ticks, "k--") inset_hist = inset_axes(ax, **inset_params) ip = InsetPosition(ax, inset_position) inset_hist.set_axes_locator(ip) climo = rel_objs[0].climatology() no_skill = 0.5 * ticks + 0.5 * climo skill_x = [climo, climo, 1, 1, climo, climo, 0, 0, climo] skill_y = [climo, 1, 1, no_skill[-1], climo, 0, 0, no_skill[0], climo] f = ax.fill(skill_x, skill_y, "0.8") f[0].set_zorder(1) ax.plot(ticks, np.ones(ticks.shape) * climo, "k--") if bootstrap_sets is not None: for b, b_set in enumerate(bootstrap_sets): brel_curves = np.vstack([b_rel.reliability_curve()["Positive_Relative_Freq"].values for b_rel in b_set]) rel_range = np.nanpercentile(brel_curves, ci, axis=0) fb = ax.fill_between(b_rel.thresholds[:-1], rel_range[1], rel_range[0], alpha=0.5, color=colors[b]) fb.set_zorder(2) for r, rel_obj in enumerate(rel_objs): rel_curve = rel_obj.reliability_curve() ax.plot(rel_curve["Bin_Start"], rel_curve["Positive_Relative_Freq"], color=colors[r], marker=markers[r], label=obj_labels[r]) inset_hist.semilogy(rel_curve["Bin_Start"] * 100, rel_obj.frequencies["Total_Freq"][:-1], color=colors[r], marker=markers[r]) inset_hist.set_xlabel("Forecast Probability") inset_hist.set_ylabel("Frequency") ax.annotate("No Skill", (0.6, no_skill[12]), rotation=22.5) ax.set_xlabel(xlabel) ax.set_ylabel(ylabel) ax.set_xticks(ticks) ax.set_xticklabels((ticks * 100).astype(int)) ax.set_yticks(ticks) ax.set_yticklabels((ticks * 100).astype(int)) ax.legend(**legend_params) ax.set_title(title) plt.savefig(filename, dpi=dpi, bbox_inches="tight") plt.close()
Plot reliability curves against a 1:1 diagonal to determine if probability forecasts are consistent with their observed relative frequency. Also adds gray areas to show where the climatological probabilities lie and what areas result in a positive Brier Skill Score. Args: rel_objs (list): List of DistributedReliability objects. obj_labels (list): List of labels describing the forecast model associated with each curve. colors (list): List of colors for each line markers (list): List of line markers filename (str): Where to save the figure. figsize (tuple): (Width, height) of the figure in inches. xlabel (str): X-axis label ylabel (str): Y-axis label ticks (array): Tick value labels for the x and y axes. dpi (int): resolution of the saved figure in dots per inch. title (str): Title of figure legend_params (dict): Keyword arguments for the plot legend. inset_params (dict): Keyword arguments for the inset axis. inset_position (tuple): Position of the inset axis in normalized axes coordinates (left, bottom, width, height) bootstrap_sets (list): A list of arrays of bootstrapped DistributedROC objects. If not None, confidence regions will be plotted. ci (tuple): tuple of bootstrap confidence interval percentiles
4,542
def rolling_count(self, window_start, window_end): agg_op = return SArray(_proxy=self.__proxy__.builtin_rolling_apply(agg_op, window_start, window_end, 0))
Count the number of non-NULL values of different subsets over this SArray. The subset that the count is executed on is defined as an inclusive range relative to the position to each value in the SArray, using `window_start` and `window_end`. For a better understanding of this, see the examples below. Parameters ---------- window_start : int The start of the subset to count relative to the current value. window_end : int The end of the subset to count relative to the current value. Must be greater than `window_start`. Returns ------- out : SArray Examples -------- >>> import pandas >>> sa = SArray([1,2,3,None,5]) >>> series = pandas.Series([1,2,3,None,5]) A rolling count with a window including the previous 2 entries including the current: >>> sa.rolling_count(-2,0) dtype: int Rows: 5 [1, 2, 3, 2, 2] Pandas equivalent: >>> pandas.rolling_count(series, 3) 0 1 1 2 2 3 3 2 4 2 dtype: float64 A rolling count with a size of 3, centered around the current: >>> sa.rolling_count(-1,1) dtype: int Rows: 5 [2, 3, 2, 2, 1] Pandas equivalent: >>> pandas.rolling_count(series, 3, center=True) 0 2 1 3 2 2 3 2 4 1 dtype: float64 A rolling count with a window including the current and the 2 entries following: >>> sa.rolling_count(0,2) dtype: int Rows: 5 [3, 2, 2, 1, 1] A rolling count with a window including the previous 2 entries NOT including the current: >>> sa.rolling_count(-2,-1) dtype: int Rows: 5 [0, 1, 2, 2, 1]
4,543
def derive_and_set_name_fields_and_slug( self, set_name_sort=True, set_slug=True ): super(PersonCreator, self).derive_and_set_name_fields_and_slug( set_name_sort=False, set_slug=False) person_names = [ name for name in [self.name_family, self.name_given] if not is_empty(name) ] if set_name_sort and is_empty(self.name_sort): if person_names: self.name_sort = .join(person_names) else: self.name_sort = self.name_full if set_slug and is_empty(self.slug): if person_names: self.slug = slugify(.join(person_names)) else: self.slug = slugify(self.name_full)
Override this method from `CreatorBase` to handle additional name fields for Person creators. This method is called during `save()`
4,544
def _inverse_i(self, y, i):
    lb = self._lb[self._index(i)]
    ub = self._ub[self._index(i)]
    al = self._al[self._index(i)]
    au = self._au[self._index(i)]
    if 1 < 3:
        if not lb <= y <= ub:
            raise ValueError()
        if y < lb + al:
            return (lb - al) + 2 * (al * (y - lb))**0.5
        elif y < ub - au:
            return y
        else:
            return (ub + au) - 2 * (au * (ub - y))**0.5
return inverse of y in component i
4,545
def build(self, paths, tags=None, wheel_version=None): if tags is None: tags = {} libkey = list(filter(lambda o: o in paths, (, )))[0] if libkey == : is_pure = default_pyver = [IMPVER] default_abi = [ABI] default_arch = [ARCH] else: is_pure = default_pyver = [PYVER] default_abi = [] default_arch = [] self.pyver = tags.get(, default_pyver) self.abi = tags.get(, default_abi) self.arch = tags.get(, default_arch) libdir = paths[libkey] name_ver = % (self.name, self.version) data_dir = % name_ver info_dir = % name_ver archive_paths = [] for key in (, , ): if key not in paths: continue path = paths[key] if os.path.isdir(path): for root, dirs, files in os.walk(path): for fn in files: p = fsdecode(os.path.join(root, fn)) rp = os.path.relpath(p, path) ap = to_posix(os.path.join(data_dir, key, rp)) archive_paths.append((ap, p)) if key == and not p.endswith(): with open(p, ) as f: data = f.read() data = self.process_shebang(data) with open(p, ) as f: f.write(data) path = libdir distinfo = None for root, dirs, files in os.walk(path): if root == path: for i, dn in enumerate(dirs): dn = fsdecode(dn) if dn.endswith(): distinfo = os.path.join(root, dn) del dirs[i] break assert distinfo, for fn in files: if fsdecode(fn).endswith((, )): continue p = os.path.join(root, fn) rp = to_posix(os.path.relpath(p, path)) archive_paths.append((rp, p)) files = os.listdir(distinfo) for fn in files: if fn not in (, , , ): p = fsdecode(os.path.join(distinfo, fn)) ap = to_posix(os.path.join(info_dir, fn)) archive_paths.append((ap, p)) wheel_metadata = [ % (wheel_version or self.wheel_version), % __version__, % is_pure, ] for pyver, abi, arch in self.tags: wheel_metadata.append( % (pyver, abi, arch)) p = os.path.join(distinfo, ) with open(p, ) as f: f.write(.join(wheel_metadata)) ap = to_posix(os.path.join(info_dir, )) archive_paths.append((ap, p)) self.write_records((distinfo, info_dir), libdir, archive_paths) pathname = os.path.join(self.dirname, self.filename) self.build_zip(pathname, archive_paths) return pathname
Build a wheel from files in specified paths, and use any specified tags when determining the name of the wheel.
4,546
def fetch_html(self, msg_nums):
    if not msg_nums:
        raise Exception("Invalid Message Number!")
    return self.__imap_fetch_content_type(msg_nums, self.HTML)
Given a message number that we found with imap_search, get the text/html content. @Params msg_nums - message number to get html message for @Returns HTML content of message matched by message number
4,547
def main(): config = Common.open_file(F_CONFIG) Common.clean_build(config[]) Common.make_dir(config[]) for language in config[]: Common.make_dir(config[] + language)
d
4,548
def splitread(args):
    p = OptionParser(splitread.__doc__)

    p.add_option("-n", dest="n", default=76, type="int",
                 help="Split at N-th base position [default: %default]")
    p.add_option("--rc", default=False, action="store_true",
                 help="Reverse complement second read [default: %default]")
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    pairsfastq, = args

    base = op.basename(pairsfastq).split(".")[0]
    fq1 = base + ".1.fastq"
    fq2 = base + ".2.fastq"
    fw1 = must_open(fq1, "w")
    fw2 = must_open(fq2, "w")

    fp = must_open(pairsfastq)
    n = opts.n
    minsize = n * 8 / 5

    for name, seq, qual in FastqGeneralIterator(fp):
        if len(seq) < minsize:
            logging.error("Skipping read {0}, length={1}".format(name, len(seq)))
            continue

        name = "@" + name
        rec1 = FastqLite(name, seq[:n], qual[:n])
        rec2 = FastqLite(name, seq[n:], qual[n:])
        if opts.rc:
            rec2.rc()

        print(rec1, file=fw1)
        print(rec2, file=fw2)

    logging.debug("Reads split into `{0},{1}`".format(fq1, fq2))
    fw1.close()
    fw2.close()
%prog splitread fastqfile Split fastqfile into two read fastqfiles, cut in the middle.
4,549
def mmap(func, iterable):
    if sys.version_info[0] > 2:
        return [i for i in map(func, iterable)]
    else:
        return map(func, iterable)
Wrapper to make map() behave the same on Py2 and Py3.
4,550
def nn_getsockopt(socket, level, option, value):
    if memoryview(value).readonly:
        raise TypeError("value must be a writable buffer")
    size_t_size = ctypes.c_size_t(len(value))
    rtn = _nn_getsockopt(socket, level, option, ctypes.addressof(value),
                         ctypes.byref(size_t_size))
    return (rtn, size_t_size.value)
retrieve a socket option socket - socket number level - option level option - option value - a writable byte buffer (e.g. a bytearray) which the option value will be copied to returns - number of bytes copied or on error number < 0
4,551
def _start_update_server(auth_token):
    server = AccumulatorServer(("localhost", 0), _UpdateRequestHandler, auth_token)
    thread = threading.Thread(target=server.serve_forever)
    thread.daemon = True
    thread.start()
    return server
Start a TCP server to receive accumulator updates in a daemon thread, and returns it
4,552
def _prefix_from_ip_string(cls, ip_str):
    # Parse the netmask/hostmask like an IP address.
    try:
        ip_int = cls._ip_int_from_string(ip_str)
    except AddressValueError:
        cls._report_invalid_netmask(ip_str)

    # Try matching a netmask first.
    try:
        return cls._prefix_from_ip_int(ip_int)
    except ValueError:
        pass

    # Invert the bits and try matching a hostmask instead.
    ip_int ^= cls._ALL_ONES
    try:
        return cls._prefix_from_ip_int(ip_int)
    except ValueError:
        cls._report_invalid_netmask(ip_str)
Turn a netmask/hostmask string into a prefix length Args: ip_str: The netmask/hostmask to be converted Returns: An integer, the prefix length. Raises: NetmaskValueError: If the input is not a valid netmask/hostmask
4,553
def _normalize_stmt_idx(self, block_addr, stmt_idx):
    if type(stmt_idx) is int:
        return stmt_idx

    if stmt_idx == DEFAULT_STATEMENT:
        vex_block = self.project.factory.block(block_addr).vex
        return len(vex_block.statements)

    raise AngrBackwardSlicingError('Unsupported statement ID "%s"' % stmt_idx)
For each statement ID, convert 'default' to (last_stmt_idx+1) :param block_addr: The block address. :param stmt_idx: Statement ID. :returns: New statement ID.
4,554
def build_data(self):
    if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
        return self.dutinformation.get(0).build.get_data()
    return None
get build data. :return: build data or None if not found
4,555
def _set_batch(self, batch, fg, bg, bgblend=1, nullChar=False):
    for (x, y), char in batch:
        self._set_char(x, y, char, fg, bg, bgblend)
Try to perform a batch operation otherwise fall back to _set_char. If fg and bg are defined then this is faster but not by very much. if any character is None then nullChar is True batch is a iterable of [(x, y), ch] items
4,556
def approve( self, allowed_address: Address, allowance: TokenAmount, ): log_details = { : pex(self.node_address), : pex(self.address), : pex(allowed_address), : allowance, } checking_block = self.client.get_checking_block() error_prefix = gas_limit = self.proxy.estimate_gas( checking_block, , to_checksum_address(allowed_address), allowance, ) if gas_limit: error_prefix = log.debug(, **log_details) transaction_hash = self.proxy.transact( , safe_gas_limit(gas_limit), to_checksum_address(allowed_address), allowance, ) self.client.poll(transaction_hash) receipt_or_none = check_transaction_threw(self.client, transaction_hash) transaction_executed = gas_limit is not None if not transaction_executed or receipt_or_none: if transaction_executed: block = receipt_or_none[] else: block = checking_block self.proxy.jsonrpc_client.check_for_insufficient_eth( transaction_name=, transaction_executed=transaction_executed, required_gas=GAS_REQUIRED_FOR_APPROVE, block_identifier=block, ) msg = self._check_why_approved_failed(allowance, block) error_msg = f log.critical(error_msg, **log_details) raise RaidenUnrecoverableError(error_msg) log.info(, **log_details)
Aprove `allowed_address` to transfer up to `deposit` amount of token. Note: For channel deposit please use the channel proxy, since it does additional validations.
4,557
def verify_signature(message_path: str, sigfile_path: str, cert_path: str) -> None:
    with tempfile.TemporaryDirectory() as pubkey_dir:
        # Extract the public key from the certificate
        pubkey_contents = subprocess.check_output(
            ['openssl', 'x509', '-in', cert_path, '-pubkey', '-noout'])
        pubkey_file = os.path.join(pubkey_dir, 'pubkey.pem')
        open(pubkey_file, 'wb').write(pubkey_contents)
        try:
            verification = subprocess.check_output(
                ['openssl', 'dgst', '-sha256', '-verify', pubkey_file,
                 '-signature', sigfile_path, message_path],
                stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as cpe:
            verification = cpe.output
    if verification.strip() == b'Verified OK':
        LOG.info(f"Verification passed from cert {cert_path}")
    else:
        LOG.error(
            f"Verification failed with cert {cert_path}: {verification}")
        raise SignatureMismatch(f"Signature verification failed with cert {cert_path}")
Verify the signature (assumed, of the hash file) It is assumed that the public key for the signature is in the keyring :param message_path: The path to the message file to check :param sigfile_path: The path to the signature to check :param cert_path: The path to the certificate to check the signature with :returns True: If the signature verifies :raises SignatureMismatch: If the signature does not verify
4,558
def get_context(pid_file, daemon=False):
    port_file = get_context_file_name(pid_file)
    if not os.path.exists(port_file):
        return None

    with open(port_file, "rt") as f:
        json_data = f.read()
        try:
            data = json.loads(json_data)
        except ValueError as e:
            logger.error("Damaged context json data %s", json_data)
            return None

    if not daemon:
        # Perform the death check unless we are inside the daemon itself
        pid = data.get("pid")
        if pid and not check_pid(int(pid)):
            return None

    return data
Get context of running notebook. A context file is created when notebook starts. :param daemon: Are we trying to fetch the context inside the daemon. Otherwise do the death check. :return: dict or None if the process is dead/not launcherd
4,559
def plot_spectra(self, nmax, convention=, unit=, base=10., maxcolumns=3, xscale=, yscale=, grid=True, xlim=(None, None), ylim=(None, None), show=True, title=True, axes_labelsize=None, tick_labelsize=None, title_labelsize=None, ax=None, fname=None): if axes_labelsize is None: axes_labelsize = _mpl.rcParams[] if tick_labelsize is None: tick_labelsize = _mpl.rcParams[] if title_labelsize is None: title_labelsize = _mpl.rcParams[] degrees = self.degrees() spectrum = self.spectra(nmax=nmax, convention=convention, unit=unit, base=base) ncolumns = min(maxcolumns, nmax) nrows = _np.ceil(nmax / ncolumns).astype(int) figsize = (_mpl.rcParams[][0], _mpl.rcParams[][0] * 0.7 * nrows / ncolumns + 0.41) if ax is None: fig, axes = _plt.subplots(nrows, ncolumns, figsize=figsize, sharex=, sharey=) else: if hasattr(ax, ) and ax.size < nmax: raise ValueError( + .format(repr(nmax)) + .format(repr(ax.size))) axes = ax if ax is None: if nrows > 1: for axtemp in axes[:-1, :].flatten(): for xlabel_i in axtemp.get_xticklabels(): xlabel_i.set_visible(False) axtemp.set_xlabel(, visible=False) for axtemp in axes[:, 1:].flatten(): for ylabel_i in axtemp.get_yticklabels(): ylabel_i.set_visible(False) axtemp.set_ylabel(, visible=False) elif nmax > 1: for axtemp in axes[1:].flatten(): for ylabel_i in axtemp.get_yticklabels(): ylabel_i.set_visible(False) axtemp.set_ylabel(, visible=False) if ylim == (None, None): upper = spectrum[:, :min(self.nmax, nmax)].max() lower = upper * 1.e-6 ylim = (lower, 5 * upper) if xlim == (None, None): if xscale == : xlim = (degrees[0], degrees[-1]) for alpha in range(min(self.nmax, nmax)): evalue = self.eigenvalues[alpha] if min(self.nmax, nmax) == 1 and ax is None: axtemp = axes elif hasattr(axes, ): axtemp = axes.flatten()[alpha] else: axtemp = axes[alpha] if (convention == ): axtemp.set_ylabel(, fontsize=axes_labelsize) else: axtemp.set_ylabel(, fontsize=axes_labelsize) if yscale == : axtemp.set_yscale(, basey=base) if xscale == : axtemp.set_xscale(, basex=base) axtemp.plot(degrees[1:], spectrum[1:, alpha], label= .format(alpha, 1-evalue)) else: axtemp.plot(degrees[0:], spectrum[0:, alpha], label= .format(alpha, 1-evalue)) axtemp.set_xlabel(, fontsize=axes_labelsize) axtemp.set(xlim=xlim, ylim=ylim) axtemp.minorticks_on() axtemp.grid(grid, which=) axtemp.tick_params(labelsize=tick_labelsize) if title is True: axtemp.set_title( .format(alpha, 1-evalue), fontsize=title_labelsize) if ax is None: fig.tight_layout(pad=0.5) if show: fig.show() if fname is not None: fig.savefig(fname) return fig, axes
Plot the spectra of the best-concentrated Slepian functions. Usage ----- x.plot_spectra(nmax, [convention, unit, base, maxcolumns, xscale, yscale, grid, xlim, ylim, show, title, axes_labelsize, tick_labelsize, title_labelsize, ax, fname]) Parameters ---------- nmax : int The number of Slepian functions to plot. convention : str, optional, default = 'power' The type of spectra to plot: 'power' for power spectrum, and 'energy' for energy spectrum. unit : str, optional, default = 'per_l' If 'per_l', return the total contribution to the spectrum for each spherical harmonic degree l. If 'per_lm', return the average contribution to the spectrum for each coefficient at spherical harmonic degree l. If 'per_dlogl', return the spectrum per log interval dlog_a(l). base : float, optional, default = 10. The logarithm base when calculating the 'per_dlogl' spectrum. maxcolumns : int, optional, default = 3 The maximum number of columns to use when plotting the spectra of multiple localization windows. xscale : str, optional, default = 'lin' Scale of the x axis: 'lin' for linear or 'log' for logarithmic. yscale : str, optional, default = 'log' Scale of the y axis: 'lin' for linear or 'log' for logarithmic. grid : bool, optional, default = True If True, plot grid lines. xlim : tuple, optional, default = (None, None) The upper and lower limits used for the x axis. ylim : tuple, optional, default = (None, None) The lower and upper limits used for the y axis. show : bool, optional, default = True If True, plot the image to the screen. title : bool, optional, default = True If True, plot a legend on top of each subplot providing the taper number and 1 minus the concentration factor. axes_labelsize : int, optional, default = None The font size for the x and y axes labels. tick_labelsize : int, optional, default = None The font size for the x and y tick labels. title_labelsize : int, optional, default = None The font size for the subplot titles. ax : matplotlib axes object, optional, default = None An array of matplotlib axes objects where the plots will appear. fname : str, optional, default = None If present, save the image to the file.
4,560
def _get_ssh_public_key(self): key = ipa_utils.generate_public_ssh_key(self.ssh_private_key_file) return .format( user=self.ssh_user, key=key.decode() )
Generate SSH public key from private key.
4,561
def allocate(self, dut_configuration_list, args=None):
    dut_config_list = dut_configuration_list.get_dut_configuration()
    try:
        for dut_config in dut_config_list:
            if not self.can_allocate(dut_config.get_requirements()):
                raise AllocationError("Resource type is not supported")
            self._allocate(dut_config)
    except AllocationError:
        raise

    alloc_list = AllocationContextList()
    res_id = None
    for conf in dut_config_list:
        if conf.get("type") == "mbed":
            res_id = conf.get("allocated").get("target_id")
        context = AllocationContext(resource_id=res_id, alloc_data=conf)
        alloc_list.append(context)

    alloc_list.set_dut_init_function("serial", init_generic_serial_dut)
    alloc_list.set_dut_init_function("process", init_process_dut)
    alloc_list.set_dut_init_function("mbed", init_mbed_dut)

    return alloc_list
Allocates resources from available local devices. :param dut_configuration_list: List of ResourceRequirements objects :param args: Not used :return: AllocationContextList with allocated resources
4,562
def serve(path=None, host=None, port=None, user_content=False, context=None,
          username=None, password=None, render_offline=False, render_wide=False,
          render_inline=False, api_url=None, title=None, autorefresh=True,
          browser=False, quiet=None, grip_class=None):
    app = create_app(path, user_content, context, username, password,
                     render_offline, render_wide, render_inline, api_url,
                     title, None, autorefresh, quiet, grip_class)
    app.run(host, port, open_browser=browser)
Starts a server to render the specified file or directory containing a README.
4,563
def quick_plot(cmap, fname=None, fig=None, ax=None, N=10):
    x = np.linspace(0, 10, N)
    X, _ = np.meshgrid(x, x)
    if ax is None:
        fig = plt.figure()
        ax = fig.add_subplot(111)
    mappable = ax.pcolor(X, cmap=cmap)
    ax.set_title(cmap.name, fontsize=14)
    ax.set_xticks([])
    ax.set_yticks([])
    plt.colorbar(mappable)
    plt.show()
    if fname is not None:
        plt.savefig(fname + '.png', bbox_inches='tight')
Show quick test of a colormap.
4,564
def get_realms_by_explosion(self, realms):
    # If rec_tag is already set, a loop exists in the realm definition
    if getattr(self, 'rec_tag', False):
        self.add_error("Error: there is a loop in the realm definition %s"
                       % self.get_name())
        return None

    # Tag the realm and recurse into its members
    self.rec_tag = True

    self.realm_members = sorted(self.realm_members)
    for member in self.realm_members:
        realm = realms.find_by_name(member)
        if not realm:
            self.add_unknown_members(member)
            continue

        children = realm.get_realms_by_explosion(realms)
        if children is None:
            # A loop was detected in a sub-realm
            self.all_sub_members = []
            self.realm_members = []
            return None

    return self.all_sub_members
Get all members of this realm including members of sub-realms on multi-levels :param realms: realms list, used to look for a specific one :type realms: alignak.objects.realm.Realms :return: list of members and add realm to realm_members attribute :rtype: list
4,565
def reset(self):
    "Close the current failed connection and prepare for a new one"
    log.info("resetting client")
    rpc_client = self._rpc_client
    self._addrs.append(self._peer.addr)
    self.__init__(self._addrs)
    self._rpc_client = rpc_client
    self._dispatcher.rpc_client = rpc_client
    rpc_client._client = weakref.ref(self)
Close the current failed connection and prepare for a new one
4,566
def atlas_zonefile_push_dequeue(zonefile_queue=None):
    ret = None
    with AtlasZonefileQueueLocked(zonefile_queue) as zfq:
        if len(zfq) > 0:
            ret = zfq.pop(0)
    return ret
Dequeue a zonefile's information to replicate Return None if there are none queued
4,567
def generate_datafile_old(number_items=1000): from utils import get_names, generate_dataset from pprint import pprint filename = "samples.py" dataset = generate_dataset(number_items) fo = open(filename, "wb") fo.write(" fo.write(" fo.write(" fo.write("SAMPLES = ") pprint(dataset, fo) fo.close() print "%s generated with %d samples" % (filename, number_items)
Create the samples.py file
4,568
def are_dicts_equivalent(*args, **kwargs):
    if not args:
        return False
    if len(args) == 1:
        return True
    if not all(is_dict(x) for x in args):
        return False

    first_item = args[0]
    for item in args[1:]:
        if len(item) != len(first_item):
            return False

        for key in item:
            if key not in first_item:
                return False
            if not are_equivalent(item[key], first_item[key]):
                return False

        for key in first_item:
            if key not in item:
                return False
            if not are_equivalent(first_item[key], item[key]):
                return False

    return True
Indicate if :ref:`dicts <python:dict>` passed to this function have identical keys and values. :param args: One or more values, passed as positional arguments. :returns: ``True`` if ``args`` have identical keys/values, and ``False`` if not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator
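A short usage sketch consistent with the behavior described above; the import path is assumed rather than taken from the source.

# Hypothetical import path, adjust as needed:
# from validator_collection.checkers import are_dicts_equivalent

assert are_dicts_equivalent({'a': 1, 'b': 2}, {'b': 2, 'a': 1}) is True
assert are_dicts_equivalent({'a': 1}, {'a': 1, 'b': 2}) is False  # different key sets
assert are_dicts_equivalent() is False                            # no arguments
assert are_dicts_equivalent({'a': 1}) is True                     # a single argument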
4,569
def get_full_recirc_content(self, published=True):
    q = self.get_query()
    search = custom_search_model(Content, q, published=published,
                                 field_map={
                                     "feature_type": "feature_type.slug",
                                     "tag": "tags.slug",
                                     "content-type": "_type"
                                 })
    return search
Performs an Elasticsearch search and returns all matching content objects.
4,570
def main(self):
    args = self.args
    parsed_pytree, pypackages = self.parse_py_tree(pytree=args.pytree)
    parsed_doctree = self.parse_doc_tree(doctree=args.doctree,
                                         pypackages=pypackages)
    return self.compare_trees(parsed_pytree=parsed_pytree,
                              parsed_doctree=parsed_doctree)
Parse package trees and report on any discrepancies.
4,571
def gradient_black(
        self, text=None, fore=None, back=None, style=None,
        start=None, step=1, reverse=False,
        linemode=True, movefactor=2, rgb_mode=False):
    # Keyword names below are reconstructed from the matching parameter names;
    # the original string literals were lost in extraction.
    gradargs = {
        'step': step,
        'fore': fore,
        'back': back,
        'style': style,
        'reverse': reverse,
        'rgb_mode': rgb_mode,
    }
    if linemode:
        gradargs['movefactor'] = 2 if movefactor is None else movefactor
        method = self._gradient_black_lines
    else:
        method = self._gradient_black_line

    if text:
        return self.__class__(
            ''.join((
                self.data or '',
                method(
                    text,
                    start or (255 if reverse else 232),
                    **gradargs)
            ))
        )
    return self.__class__(
        method(
            self.stripped(),
            start or (255 if reverse else 232),
            **gradargs)
    )
Return a black and white gradient. Arguments: text : String to colorize. This will always be greater than 0. fore : Foreground color, background will be gradient. back : Background color, foreground will be gradient. style : Name of style to use for the gradient. start : Starting 256-color number. The `start` will be adjusted if it is not within bounds. This will always be > 15. This will be adjusted to fit within a 6-length gradient, or the 24-length black/white gradient. step : Number of characters to colorize per color. This allows a "wider" gradient. linemode : Colorize each line in the input. Default: True movefactor : Factor for offset increase on each line when using linemode. Minimum value: 0 Default: 2 rgb_mode : Use true color (rgb) method and codes.
4,572
def MakeDynamicPotentialFunc(kBT_Gamma, density, SpringPotnlFunc):
    def PotentialFunc(xdata, Radius):
        mass = ((4 / 3) * np.pi * ((Radius * 10**-9)**3)) * density
        yfit = (kBT_Gamma / mass)
        Y = yfit * SpringPotnlFunc(xdata)
        return Y
    return PotentialFunc
Creates the function that calculates the potential given the position (in volts) and the radius of the particle. Parameters ---------- kBT_Gamma : float Value of kB*T/Gamma density : float density of the nanoparticle SpringPotnlFunc : function Function which takes the value of position (in volts) and returns the spring potential Returns ------- PotentialFunc : function function that calculates the potential given the position (in volts) and the radius of the particle.
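A usage sketch of the returned closure; the spring function and every numerical constant below are made-up placeholders, not values from the source.
import numpy as np

spring = lambda x: 0.5 * (x * 1e3) ** 2   # toy spring potential as a function of position in volts
potential = MakeDynamicPotentialFunc(kBT_Gamma=1e-12, density=2200,
                                     SpringPotnlFunc=spring)
energies = potential(np.linspace(-0.1, 0.1, 5), Radius=50)  # radius in nm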
4,573
def build_damage_dt(dstore, mean_std=True):
    # The datastore key, attribute names and field-name format strings below
    # are reconstructions; the original literals were lost in extraction.
    oq = dstore['oqparam']
    damage_states = ['no_damage'] + list(
        dstore.get_attr('risk_model', 'limit_states'))
    dt_list = []
    for ds in damage_states:
        ds = str(ds)
        if mean_std:
            dt_list.append(('%s_mean' % ds, F32))
            dt_list.append(('%s_stdv' % ds, F32))
        else:
            dt_list.append((ds, F32))
    damage_dt = numpy.dtype(dt_list)
    loss_types = oq.loss_dt().names
    return numpy.dtype([(lt, damage_dt) for lt in loss_types])
:param dstore: a datastore instance :param mean_std: a flag (default True) :returns: a composite dtype loss_type -> (mean_ds1, stdv_ds1, ...) or loss_type -> (ds1, ds2, ...) depending on the flag mean_std
4,574
def _get_previous_mz(self, mzs):
    mzs = tuple(mzs)
    if mzs in self.lru_cache:
        return self.lru_cache[mzs]

    mz_hash = "%s-%s-%s" % (hash(mzs), sum(mzs), len(mzs))
    if mz_hash in self.hashes:
        for mz_data in self.hashes[mz_hash]:
            test_mz = self._read_mz(*mz_data)
            if mzs == test_mz:
                self.lru_cache[test_mz] = mz_data
                return mz_data

    mz_data = self._encode_and_write(mzs, self.mz_dtype, self.mz_compression)
    self.hashes[mz_hash].append(mz_data)
    self.lru_cache[mzs] = mz_data
    return mz_data
Given an m/z array, return the mz_data (disk location). If the m/z array was not previously written, write it to disk first.
4,575
def nonzero_monies(self):
    return [copy.copy(m) for m in self._money_obs if m.amount != 0]
Get a list of the underlying ``Money`` instances that are not zero Returns: ([Money]): A list of zero or more money instances. Currencies will be unique.
4,576
def get_db_uri(config, output_dir):
    db_config = config.get("results_database", {"db_uri": "default"})
    # the 'db_uri' key and 'default' sentinel are inferred from the fallback
    # dict above; the original literals were lost in extraction
    if db_config['db_uri'] == 'default':
        return os.path.join(output_dir, "results.sqlite")
    return db_config['db_uri']
Process results_database parameters in config to format them for set database function :param dict config: project configuration dict :param str output_dir: output directory for results :return: string for db uri
4,577
def year_origin_filter(year_predicate=None, origin_predicate=None):
    def accept(cable_id, predicate):
        year, origin = _YEAR_ORIGIN_PATTERN.match(
            canonicalize_id(cable_id)).groups()
        return predicate(year, origin)
    if year_predicate and origin_predicate:
        return partial(accept,
                       predicate=lambda y, o: year_predicate(y) and origin_predicate(o))
    elif year_predicate:
        return partial(accept, predicate=lambda y, o: year_predicate(y))
    elif origin_predicate:
        return partial(accept, predicate=lambda y, o: origin_predicate(o))
    return lambda cable_id: True
Returns a predicate for cable identifiers where `year_predicate` and `origin_predicate` must hold true. If both `year_predicate` and `origin_predicate` are ``None``, the returned predicate always holds true. `year_predicate` A predicate which returns ``True`` or ``False`` for a cable year. `origin_predicate` A predicate which returns ``True`` or ``False`` for a given cable origin.
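A usage sketch, assuming the module's _YEAR_ORIGIN_PATTERN yields two-digit years and uppercase origins; the cable identifiers below are made up.
# Hypothetical predicates: keep only 2010 cables from Moscow.
only_2010_moscow = year_origin_filter(
    year_predicate=lambda y: y == '10',
    origin_predicate=lambda o: o == 'MOSCOW')
cable_ids = ['10MOSCOW123', '09PARIS45']
matching = [c for c in cable_ids if only_2010_moscow(c)]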
4,578
def collect_fragment(event, agora_host):
    agora = Agora(agora_host)
    # The format and message strings in this function were lost in extraction;
    # those used below are plausible reconstructions, not the originals.
    graph_pattern = ""
    for tp in __triple_patterns:
        graph_pattern += '{} '.format(tp)
    fragment, _, graph = agora.get_fragment_generator(
        '{ %s }' % graph_pattern, stop_event=event, workers=4)
    __extract_pattern_nodes(graph)
    log.info('Collecting fragment for pattern: %s' % graph_pattern)
    for (t, s, p, o) in fragment:
        collectors = __triple_patterns[str(__plan_patterns[t])]
        for c, args in collectors:
            log.debug('{} {} {} -> {}'.format(
                s.n3(graph.namespace_manager), graph.qname(p),
                o.n3(graph.namespace_manager), c))
            c((s, p, o))
            if event.isSet():
                raise Exception('Fragment collection was stopped')
            yield (c.func_name, (t, s, p, o))
Execute a search plan for the declared graph pattern and send all obtained triples to the corresponding registered collector functions.
4,579
def parse_data_shape(data_shape_str):
    ds = data_shape_str.strip().split()
    if len(ds) == 1:
        data_shape = (int(ds[0]), int(ds[0]))
    elif len(ds) == 2:
        data_shape = (int(ds[0]), int(ds[1]))
    else:
        raise ValueError("Unexpected data_shape: %s", data_shape_str)
    return data_shape
Parse string to tuple or int
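A usage sketch, assuming whitespace-separated dimensions (the original separator literal may have differed).
parse_data_shape('224')       # -> (224, 224)
parse_data_shape('300 150')   # -> (300, 150)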
4,580
def combine_first(self, other):
    out = ops.fillna(self, other, join="outer", dataset_join="outer")
    return out
Combine two Datasets, default to data_vars of self. The new coordinates follow the normal broadcasting and alignment rules of ``join='outer'``. Vacant cells in the expanded coordinates are filled with np.nan. Parameters ---------- other : DataArray Used to fill all matching missing values in this array. Returns ------- DataArray
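A user-level sketch of the documented behaviour using public xarray objects; the values are made up for illustration.
import numpy as np
import xarray as xr

a = xr.Dataset({'t': ('x', [1.0, np.nan])}, coords={'x': [0, 1]})
b = xr.Dataset({'t': ('x', [5.0, 6.0])}, coords={'x': [1, 2]})
merged = a.combine_first(b)   # x expands to [0, 1, 2]; NaN and vacant cells filled from b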
4,581
def delete_all(self, filter, timeout=-1, force=False):
    return self._helper.delete_all(filter=filter, force=force, timeout=timeout)
Deletes all Server Profile objects from the appliance that match the provided filter. Filters are supported only for the following profile attributes: name, description, serialnumber, uuid, mactype, wwntype, serialnumbertype, status, and state. Examples: >>> server_profile_client.delete_all(filter="name='Exchange Server'") # Remove all profiles that match the name "Exchange Server" >>> server_profile_client.delete_all(filter="name matches'%25Database%25'") # Remove all profiles that have the word "Database" in its name The filter function here operates similarly to the function defined for GET Server Profiles. It allows for both actual and partial matches of data in the profile. Any requests that use a wildcard match must include a %25 as illustrated in the previous example. This is how you encode that character for transmission to the appliance. Args: filter (dict): Object to delete. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView; it just stops waiting for its completion. Returns: bool: Indicates whether the server profile was successfully deleted.
4,582
def guggenheim_katayama(target, K2, n, temperature='pore.temperature',
                        critical_temperature='pore.critical_temperature',
                        critical_pressure='pore.critical_pressure'):
    # Default dictionary keys follow the usual OpenPNM 'pore.*' naming; the
    # original default literals were lost in extraction.
    T = target[temperature]
    Pc = target[critical_pressure]
    Tc = target[critical_temperature]
    sigma_o = K2*Tc**(1/3)*Pc**(2/3)
    value = sigma_o*(1 - T/Tc)**n
    return value
Missing description Parameters ---------- target : OpenPNM Object The object for which these values are being calculated. This controls the length of the calculated array, and also provides access to other necessary thermofluid properties. K2 : scalar Fluid specific constant n : scalar Fluid specific constant temperature : string The dictionary key containing the temperature values (K) critical_temperature : string The dictionary key containing the critical temperature values (K) critical_pressure : string The dictionary key containing the critical pressure values (K)
4,583
def _auth(profile=None, api_version=1, **connection_args):
    # Most string literals in this function were lost in extraction; the key
    # names and defaults below are reconstructions inferred from the
    # surrounding variable names, not verified originals.
    if profile:
        prefix = profile + ':keystone.'
    else:
        prefix = 'keystone.'

    def get(key, default=None):
        return connection_args.get('connection_' + key,
                                   __salt__['config.get'](prefix + key, default))

    user = get('user', 'admin')
    password = get('password', None)
    tenant = get('tenant', 'admin')
    tenant_id = get('tenant_id')
    auth_url = get('auth_url', 'http://127.0.0.1:35357/v2.0')
    insecure = get('insecure', False)
    admin_token = get('token')
    region_name = get('region_name', None)

    if admin_token and api_version != 1 and not password:
        raise SaltInvocationError('Only can use keystone admin token '
                                  'with Heat API v1')
    elif password:
        if insecure:
            kwargs['insecure'] = True
    elif api_version == 1 and admin_token:
        kwargs = {'token': admin_token, 'auth_url': auth_url}
    else:
        raise SaltInvocationError('No password or admin token available')

    token = __salt__['keystone.token_get'](profile)
    kwargs['token'] = token['id']
    # A statement referencing (api_version, heat_endpoint, kwargs), most
    # likely a log.debug call, and the lookup defining heat_endpoint appear
    # to have been lost here in extraction.
    return heatclient.client.Client(api_version, endpoint=heat_endpoint,
                                    **kwargs)
Set up heat credentials, returns `heatclient.client.Client`. Optional parameter "api_version" defaults to 1. Only intended to be used within heat-enabled modules
4,584
def generate_routes(config):
    routes = []

    # Config key names are reconstructed from the docstring below; the
    # original string literals were lost in extraction.
    for name, config in iteritems(config):
        pattern = r'^%s' % re.escape(config['prefix'].lstrip('/'))

        proxy = generate_proxy(
            prefix=config['prefix'],
            base_url=config['base_url'],
            verify_ssl=config.get('verify_ssl', True),
            middleware=config.get('middleware'),
            append_middleware=config.get('append_middleware'),
            cert=config.get('cert'),
            timeout=config.get('timeout'))

        proxy_view_function = proxy.as_view()
        proxy_view_function.csrf_exempt = config.get('csrf_exempt', True)

        routes.append(url(pattern, proxy_view_function, name=name))

    return routes
Generate a list of urls that map to generated proxy views. generate_routes({ 'test_proxy': { 'base_url': 'https://google.com/', 'prefix': '/test_prefix/', 'verify_ssl': False, 'csrf_exempt: False', 'middleware': ['djproxy.proxy_middleware.AddXFF'], 'append_middleware': ['djproxy.proxy_middleware.AddXFF'], 'timeout': 3.0, 'cert': None } }) Required configuration keys: * `base_url` * `prefix` Optional configuration keys: * `verify_ssl`: defaults to `True`. * `csrf_exempt`: defaults to `True`. * `cert`: defaults to `None`. * `timeout`: defaults to `None`. * `middleware`: Defaults to `None`. Specifying `None` causes djproxy to use the default middleware set. If a list is passed, the default middleware list specified by the HttpProxy definition will be replaced with the provided list. * `append_middleware`: Defaults to `None`. `None` results in no changes to the default middleware set. If a list is specified, the list will be appended to the default middleware list specified in the HttpProxy definition or, if provided, the middleware key specificed in the config dict. Returns: [ url(r'^test_prefix/', GeneratedProxy.as_view(), name='test_proxy')), ]
4,585
def users_getPresence(self, *, user: str, **kwargs) -> SlackResponse:
    kwargs.update({"user": user})
    return self.api_call("users.getPresence", http_verb="GET", params=kwargs)
Gets user presence information. Args: user (str): User to get presence info on. Defaults to the authed user. e.g. 'W1234567890'
4,586
def lnprior(self, X):
    if np.any(X < self._lower_left) or np.any(X > self._upper_right):
        return -np.inf
    else:
        return 0.0
Use a uniform, bounded prior.
4,587
def persist(arg, depth=Ellipsis, on_mutable=None):
    from .immutable import (is_imm, imm_copy)
    if on_mutable is None:
        on_mutable = lambda x: x
    elif on_mutable == 'error':
        # the error-message wording is a reconstruction; the original string
        # literal was lost in extraction
        def _raise(x):
            raise ValueError('cannot persist mutable object %s' % x)
        on_mutable = _raise
    if depth in (None, Ellipsis):
        depth_next = depth
    elif depth < 0:
        return arg
    else:
        depth_next = depth - 1
    precur = lambda x: persist(x, depth=depth_next, on_mutable=on_mutable)
    if is_imm(arg):
        return imm_copy(arg)
    if is_quantity(arg):
        (m, u) = (mag(arg), unit(arg))
        mm = precur(m)
        if mm is m:
            return arg
        else:
            return quant(mm, u)
    elif isinstance(arg, np.ndarray):
        return imm_array(arg)
    elif isinstance(arg, np.generic):
        x = type(arg)(arg)
        x.setflags(write=False)
        return x
    elif is_str(arg) or is_number(arg):
        return arg
    elif isinstance(arg, ps.PVector):
        if depth is Ellipsis or depth == 0:
            return arg
        for (k, v0) in zip(range(len(arg)), arg):
            v = precur(v0)
            if v0 is not v:
                arg = arg.set(k, v)
        return arg
    elif isinstance(arg, ps.PSet):
        if depth is Ellipsis or depth == 0:
            return arg
        for v0 in arg:
            v = precur(v0)
            if v0 is not v:
                arg = arg.discard(v0).add(v)
        return arg
    elif is_pmap(arg):
        if depth is Ellipsis or depth == 0:
            return arg
        return key_map(precur, value_map(precur, arg))
    elif is_map(arg):
        if not is_pmap(arg):
            arg = ps.pmap(arg)
        if depth == 0:
            return arg
        return key_map(precur, value_map(precur, arg))
    elif isinstance(arg, frozenset):
        if depth is Ellipsis or depth == 0:
            return frozenset(arg)
        a = [x for x in arg]
        q = [precur(x) for x in a]
        if all(ai is qi for (ai, qi) in zip(a, q)):
            return arg
        return frozenset(q)
    elif isinstance(arg, set):
        if depth == 0:
            return frozenset(arg)
        a = [x for x in arg]
        q = [precur(x) for x in a]
        if isinstance(arg, frozenset) and all(ai is qi for (ai, qi) in zip(a, q)):
            return arg
        return frozenset(q)
    elif hasattr(arg, '__iter__'):
        if depth == 0 or (depth is Ellipsis and isinstance(arg, tuple)):
            return tuple(arg)
        q = tuple(precur(x) for x in arg)
        if isinstance(arg, tuple) and all(ai is qi for (ai, qi) in zip(arg, q)):
            return arg
        else:
            return q
    elif isinstance(arg, (types.FunctionType, partial)):
        return arg
    else:
        return on_mutable(arg)
persist(x) yields a persistent version of x if possible, or yields x itself. The transformations performed by persist(x) are as follows: * If x is an immutable object, yields x.persist() * If x is a set, yield a frozenset of persist(u) for all u in x. * If x is a numpy array, yield imm_array(x). * If x is a map, yields a persistent version of x with all keys and values replaced with their persist()'ed form; note that this respects laziness and itables. * If x is a list/tuple type, yields a tuple of persist()'ed contents. * Otherwise, if the type of x is not recognized, yields x. The depth to which persist() searches the argument's elements is controlled by the depth option; the default behavior is to persist objects down to the point that a persistent object is found, at which point its elements are not checked for persistence. Note that persist() is not guaranteed to recognize a particular object; it is intended as a utility function for basic functional-style and immutable data code in Python. In particular, it is useful for pimms's immutable objects; dicts and mappings; pimms's lazy-maps and itables; pyrsistent's sets, vectors, and maps; sets and frozensets (which are both commuted to frozensets); and anything implementing __iter__ (which are commuted to tuples). Objects that are not numbers or strings are considered potentially-mutable and will trigger the on_mutable case. The optional arguments may be passed to persist: * depth (default: Ellipsis) specifies the depth to which the persist() function should search when persisting objects. The given argument is considered depth 0, so persist(arg, 0) will persist only arg and not its contents, if it is a collection. If None is given, then it goes to any depth; if Ellipsis is given, then it searches until a persistent object is found, but does not attempt to persist the elements of already-persistent containers (this is the default). * on_mutable (default: None) specifies what to do when a non-persistable object is encountered in the search. If None, then the object is left; if 'error', then an error is raised; otherwise, this must be a function that is passed the object--the return value of this function is the replacement used in the object returned from persist().
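A usage sketch of the documented behaviour, assuming persist is importable from pimms; the input values are made up.
# Nested mutable containers come back as persistent ones.
frozen = persist({'xs': [1, 2, 3], 'tags': {'a', 'b'}})
# frozen is a persistent map: the list became a tuple, the set a frozenset.
shallow = persist({'xs': [1, 2, 3]}, depth=0)   # only the outer mapping is frozen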
4,588
def render_to_message(self, extra_context=None, **kwargs):
    if extra_context is None:
        extra_context = {}

    # the 'headers' key is inferred from the self.headers update; the original
    # literal was lost in extraction
    kwargs.setdefault('headers', {}).update(self.headers)

    context = self.get_context_data(**extra_context)
    return self.message_class(
        subject=self.render_subject(context),
        body=self.render_body(context),
        **kwargs)
Renders and returns an unsent message with the provided context. Any extra keyword arguments passed will be passed through as keyword arguments to the message constructor. :param extra_context: Any additional context to use when rendering the templated content. :type extra_context: :class:`dict` :returns: A message instance. :rtype: :attr:`.message_class`
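A usage sketch; the view subclass name, context keys and address are hypothetical placeholders.
# Assuming a message-view subclass named WelcomeEmail defined elsewhere.
message = WelcomeEmail().render_to_message(
    extra_context={'user': 'alice'},
    to=['alice@example.com'])
message.send()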
4,589
def parse(text, encoding='utf8'):
    # the default encoding literal was lost in extraction; 'utf8' is assumed
    if isinstance(text, six.binary_type):
        text = text.decode(encoding)
    return Query(text, split_segments(text))
Parse the querystring into a normalized form.
4,590
def get_url(self):
    # The error-message wording and the 'event' key below are reconstructions;
    # the original string literals were lost in extraction.
    if not self.data[self.execute_name]:
        raise InvalidConfig(
            extra_body='A webhook key is required for the {} service.'.format(self.name))
    if not self.data.get('event'):
        raise InvalidConfig(
            extra_body='An event name is required for the {} service.'.format(self.name))
    url = self.url_pattern.format(event=self.data['event'],
                                  key=self.data[self.execute_name])
    return url
IFTTT Webhook url :return: url :rtype: str
4,591
def ball_pick(n, d, rng=None):
    def valid(r):
        return vector_mag_sq(r) < 1.0
    return rejection_pick(L=2.0, n=n, d=d, valid=valid, rng=rng)
Return cartesian vectors uniformly picked on the unit ball in an arbitrary number of dimensions. The unit ball is the space enclosed by the unit sphere. The picking is done by rejection sampling in the unit cube. In 3-dimensional space, a fraction `\pi / 6 \sim 0.52` of the points is valid. Parameters ---------- n: integer Number of points to return. d: integer Number of dimensions of the space in which the ball lives Returns ------- r: array, shape (n, d) Sample cartesian vectors.
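A usage sketch, assuming vector_mag_sq and rejection_pick from the same module are available; the sample size is arbitrary.
import numpy as np

pts = ball_pick(n=1000, d=3, rng=np.random.RandomState(0))
assert pts.shape == (1000, 3)
assert np.all(np.sum(pts**2, axis=1) < 1.0)   # every sample lies inside the unit ball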
4,592
def get_records(self, name):
    if name in self._cache:
        return self._cache[name].values()
    else:
        return []
Return all the records for the given name in the cache. Args: name (string): The name which the required models are stored under. Returns: list: A list of :class:`cinder_data.model.CinderModel` models.
4,593
def publish(self, message_type, message_payload):
    payload = json.dumps(jsonpickle.Pickler(unpicklable=False).flatten(message_payload))
    message = amqp.Message(payload)
    message.properties["delivery_mode"] = 2  # persistent message
    # The routing-key format string was lost in extraction; a dot-separated
    # form is assumed here.
    name = '%s.%s.%s' % (settings.ENVIRONMENT_STAGE,
                         self.service_name.lower(),
                         message_type.lower())
    self.channel.queue_declare(queue=name, durable=True, exclusive=False,
                               auto_delete=False)
    self.channel.exchange_declare(exchange=name, type="direct", durable=True,
                                  auto_delete=False)
    self.channel.queue_bind(queue=name, exchange=name, routing_key=name)
    self.channel.basic_publish(message, exchange=name, routing_key=name)
Publish the specified object that the function automatically converts into a JSON string representation. This function use the lowered class name of the service as the AMQP routing key. For instance, if the class ``ExampleService`` inherits from the base class ``BaseService``, the methods of an instance of this class will publish messages using the routing key named ``exampleservice``. @param message_type: string representing the type of the event, more likely ``on_something_happened`. @param message_payload: an object to convert into a JSON string representation and to publish.
4,594
def is_defined(self, objtxt, force_import=False):
    return isdefined(objtxt, force_import=force_import, namespace=self.locals)
Return True if object is defined
4,595
def findRequirements():
    requirementsPath = os.path.join(REPO_DIR, "requirements.txt")
    requirements = parse_file(requirementsPath)

    if nupicPrereleaseInstalled():
        requirements = [req for req in requirements if "nupic" not in req]

    if htmresearchCorePrereleaseInstalled():
        requirements = [req for req in requirements
                        if "htmresearch-core" not in req]

    return requirements
Read the requirements.txt file and parse into requirements for setup's install_requirements option.
4,596
def force_move(source, destination):
    if not os.path.exists(destination):
        # the error-message wording is a reconstruction; the original string
        # literal was lost in extraction
        raise RuntimeError(
            'The destination folder {destination} does not exist'.format(
                destination=destination))
    destination_folder = os.path.join(destination, os.path.split(source)[-1])
    if os.path.exists(destination_folder):
        shutil.rmtree(destination_folder)
    shutil.move(source, destination)
Force the move of the source inside the destination even if the destination has already a folder with the name inside. In the case, the folder will be replaced. :param string source: path of the source to move. :param string destination: path of the folder to move the source to.
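A usage sketch with throwaway temporary directories; the prefixes are arbitrary.
import os
import tempfile

src = tempfile.mkdtemp(prefix='payload_')
dst = tempfile.mkdtemp(prefix='target_')
os.makedirs(os.path.join(dst, os.path.basename(src)))  # simulate a stale copy in dst
force_move(src, dst)   # the stale copy inside dst is replaced by src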
4,597
def get_by_username(cls, username):
    return cls.query().filter(cls.username == username).first()
Return a User by username
4,598
def __collectGarbage(self, ignored=None):
    del ignored
    collected = []

    level0, level1, level2 = gc.get_count()
    if level0 > 0:
        collected.append(gc.collect(0))
    if level1 > 0:
        collected.append(gc.collect(1))
    if level2 > 0:
        collected.append(gc.collect(2))

    if self.__where == GCPluginConfigDialog.SILENT:
        return

    message = ""
    if collected:
        for index in range(len(collected)):
            if collected[index] == 0:
                continue
            if message:
                message += ", "
            message += "generation " + str(index) + ": " + \
                       str(collected[index])
        if message:
            message = "GC objects: " + message
    else:
        message = "No GC objects"

    if not message:
        return
    if self.__where == GCPluginConfigDialog.STATUS_BAR:
        self.ide.showStatusBarMessage(message)
    else:
        logging.info(message)
Collects garbage
4,599
def _listChunks(self):
    chunks = []
    for fileName in os.listdir(self.dataDir):
        index = ChunkedFileStore._fileNameToChunkIndex(fileName)
        if index is not None:
            chunks.append(index)
    return sorted(chunks)
Lists stored chunks :return: sorted list of available chunk indices