Dataset columns: "Unnamed: 0" (int64, values 0 to 389k), "code" (string, lengths 26 to 79.6k), "docstring" (string, lengths 1 to 46.9k).
16,200
def traverse_rootdistorder(self, ascending=True, leaves=True, internal=True): for node in self.root.traverse_rootdistorder(ascending=ascending, leaves=leaves, internal=internal): yield node
Perform a traversal of the ``Node`` objects in this ``Tree`` in either ascending (``ascending=True``) or descending (``ascending=False``) order of distance from the root. Args: ``ascending`` (``bool``): ``True`` to perform traversal in ascending distance from the root, otherwise ``False`` for descending ``leaves`` (``bool``): ``True`` to include leaves, otherwise ``False`` ``internal`` (``bool``): ``True`` to include internal nodes, otherwise ``False``
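A minimal usage sketch for this traversal, assuming a TreeSwift-style API (the ``read_tree_newick`` loader is an assumption, not part of this record):

    from treeswift import read_tree_newick  # assumed loader for this sketch

    tree = read_tree_newick("((A:1,B:2):1,C:3);")
    # Visit leaves only, nearest to the root first; each yielded item
    # corresponds to a node ordered by its distance from the root
    for item in tree.traverse_rootdistorder(ascending=True, leaves=True, internal=False):
        print(item)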
16,201
def build(self, recipe, plugin=None): if recipe not in self.recipes.keys(): raise RecipeMissingException("Recipe %s unknown." % recipe) recipe_obj = self.recipes[recipe] if plugin is not None: if recipe_obj.plugin != plugin: raise RecipeWrongPluginException("The requested recipe does not belong to the given plugin. Use" "the app object, to retrieve the requested recipe: " "my_app.recipes.get(%s)" % recipe) recipe_obj.build()
Executes a recipe and creates new folders and files. :param recipe: Name of the recipe :param plugin: Name of the plugin to which the recipe must belong.
16,202
def get_log_nodes(self, log_id, ancestor_levels, descendant_levels, include_siblings): return objects.LogNode(self.get_log_node_ids( log_id=log_id, ancestor_levels=ancestor_levels, descendant_levels=descendant_levels, include_siblings=include_siblings)._my_map, runtime=self._runtime, proxy=self._proxy)
Gets a portion of the hierarchy for the given log. arg: log_id (osid.id.Id): the ``Id`` to query arg: ancestor_levels (cardinal): the maximum number of ancestor levels to include. A value of 0 returns no parents in the node. arg: descendant_levels (cardinal): the maximum number of descendant levels to include. A value of 0 returns no children in the node. arg: include_siblings (boolean): ``true`` to include the siblings of the given node, ``false`` to omit the siblings return: (osid.logging.LogNode) - a log node raise: NotFound - ``log_id`` is not found raise: NullArgument - ``log_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
16,203
def getComicData(self, comic): if comic not in self.data: if os.path.exists(self.jsonFn(comic)): with codecs.open(self.jsonFn(comic), , self.encoding) as f: self.data[comic] = json.load(f) else: self.data[comic] = {:{}} return self.data[comic]
Return dictionary with comic info.
16,204
def create_module_item(self, course_id, module_id, module_item_type, module_item_content_id, module_item_completion_requirement_min_score=None, module_item_completion_requirement_type=None, module_item_external_url=None, module_item_indent=None, module_item_new_tab=None, module_item_page_url=None, module_item_position=None, module_item_title=None): path = {} data = {} params = {} path["course_id"] = course_id path["module_id"] = module_id if module_item_title is not None: data["module_item[title]"] = module_item_title self._validate_enum(module_item_type, ["File", "Page", "Discussion", "Assignment", "Quiz", "SubHeader", "ExternalUrl", "ExternalTool"]) data["module_item[type]"] = module_item_type data["module_item[content_id]"] = module_item_content_id if module_item_position is not None: data["module_item[position]"] = module_item_position if module_item_indent is not None: data["module_item[indent]"] = module_item_indent if module_item_page_url is not None: data["module_item[page_url]"] = module_item_page_url if module_item_external_url is not None: data["module_item[external_url]"] = module_item_external_url if module_item_new_tab is not None: data["module_item[new_tab]"] = module_item_new_tab if module_item_completion_requirement_type is not None: self._validate_enum(module_item_completion_requirement_type, ["must_view", "must_contribute", "must_submit"]) data["module_item[completion_requirement][type]"] = module_item_completion_requirement_type if module_item_completion_requirement_min_score is not None: data["module_item[completion_requirement][min_score]"] = module_item_completion_requirement_min_score self.logger.debug("POST /api/v1/courses/{course_id}/modules/{module_id}/items with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("POST", "/api/v1/courses/{course_id}/modules/{module_id}/items".format(**path), data=data, params=params, single_item=True)
Create a module item. Create and return a new module item
16,205
def getContext(self, context_name = ): if context_name == and not in self.contexts: self() return self.contexts[context_name]
Get a context by name, create the default context if it does not exist Params: context_name (string): Context name Raises: KeyError: If the context name does not exist Returns: bubbler.Bubbler: Named context
16,206
def delete_intel_notifications(self, ids, timeout=None): if not isinstance(ids, list): raise TypeError("ids must be a list") data = json.dumps(ids) try: response = requests.post( self.base + + self.api_key, data=data, proxies=self.proxies, timeout=timeout) except requests.RequestException as e: return dict(error=str(e)) return _return_response_and_status_code(response)
Programmatically delete notifications via the Intel API. :param ids: A list of IDs to delete from the notification feed. :returns: The post response.
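A hedged usage sketch; ``api`` stands in for an already-constructed client exposing this method (the constructor is not shown in this record):

    # `api` is assumed to wrap the Intel API with base URL, api_key and proxies set
    response = api.delete_intel_notifications(["12345", "67890"], timeout=30)
    if "error" in response:
        print("request failed:", response["error"])

    # Passing anything but a list raises TypeError:
    # api.delete_intel_notifications("12345")  # TypeError: ids must be a list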
16,207
def _prepare_put_or_patch(self, kwargs): requests_params = self._handle_requests_params(kwargs) update_uri = self._meta_data['uri'] session = self._meta_data['bigip']._meta_data['icr_session'] read_only = self._meta_data.get('read_only_attributes', []) return requests_params, update_uri, session, read_only
Retrieve the appropriate request items for put or patch calls.
16,208
def cleanup(self): if self._root_pipeline_key is None: raise UnexpectedPipelineError('Could not cleanup Pipeline with unknown root pipeline ID.') if not self.is_root: return task = taskqueue.Task( params=dict(root_pipeline_key=self._root_pipeline_key), url=self.base_path + '/cleanup', headers={'X-Ae-Pipeline-Key': self._root_pipeline_key}) taskqueue.Queue(self.queue_name).add(task)
Clean up this Pipeline and all Datastore records used for coordination. Only works when called on a root pipeline. Child pipelines will ignore calls to this method. After this method is called, Pipeline.from_id() and related status methods will return inconsistent or missing results. This method is fire-and-forget and asynchronous.
16,209
def demultiplex_cells(fastq, out_dir, readnumber, prefix, cb_histogram, cb_cutoff): annotations = detect_fastq_annotations(fastq) re_string = construct_transformed_regex(annotations) parser_re = re.compile(re_string) readstring = "" if not readnumber else "_R{}".format(readnumber) filestring = "{prefix}{sample}{readstring}.fq" cb_set = set() if cb_histogram: cb_set = get_cb_depth_set(cb_histogram, cb_cutoff) sample_set = set() batch = collections.defaultdict(list) parsed = 0 safe_makedir(out_dir) for read in read_fastq(fastq): parsed += 1 match = parser_re.search(read).groupdict() sample = match['CB'] if cb_set and sample not in cb_set: continue sample_set.add(sample) batch[sample].append(read) if not parsed % 10000000: for sample, reads in batch.items(): out_file = os.path.join(out_dir, filestring.format(**locals())) with open(out_file, "a") as out_handle: for read in reads: out_handle.write(read) batch = collections.defaultdict(list) for sample, reads in batch.items(): out_file = os.path.join(out_dir, filestring.format(**locals())) with open(out_file, "a") as out_handle: for read in reads: out_handle.write(read)
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for each cell.
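A sketch of how this might be invoked; all file names and the cutoff value are placeholders, not values from the record:

    demultiplex_cells(
        fastq="sample_transformed.fq",    # fastqtransformed input
        out_dir="demultiplexed",          # one FASTQ per cell barcode written here
        readnumber=1,                     # appended to file names as "_R1"
        prefix="run1_",
        cb_histogram="cb-histogram.txt",  # optional: barcode depth table
        cb_cutoff=100,                    # barcodes below this depth are skipped
    )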
16,210
def read_crl(crl): text = _text_or_file(crl) text = get_pem_entry(text, pem_type='X509 CRL') crltempfile = tempfile.NamedTemporaryFile() crltempfile.write(salt.utils.stringutils.to_str(text)) crltempfile.flush() crlparsed = _parse_openssl_crl(crltempfile.name) crltempfile.close() return crlparsed
Returns a dict containing details of a certificate revocation list. Input can be a PEM string or file path. :depends: - OpenSSL command line tool crl: A path or PEM encoded string containing the CRL to read. CLI Example: .. code-block:: bash salt '*' x509.read_crl /etc/pki/mycrl.crl
16,211
def reader(path_or_f): if hasattr(path_or_f, "read"): return path_or_f else: path = path_or_f path = normalize_path(path) _, extension = extract_extension(path) reader_func = FILE_READERS[extension] return reader_func(path)
Turns a path to a compressed file into a file-like object of (decompressed) data. :Parameters: path : `str` the path to the dump file to read
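A short usage sketch; the extension-to-reader mapping (``FILE_READERS``) is assumed to cover compressed formats such as ``.bz2``:

    # Paths to compressed dumps are opened through the matching reader
    f = reader("dump.xml.bz2")
    first_line = f.readline()

    # Already-open file-like objects pass through unchanged
    with open("plain.xml") as handle:
        same_handle = reader(handle)
        assert same_handle is handle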
16,212
async def ttl(self, key, param=None): identity = self._gen_identity(key, param) return await self.client.ttl(identity)
get time to live of a specific identity
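A hedged async usage sketch; ``cache`` stands in for whatever object exposes this coroutine, and Redis-style TTL semantics are an assumption:

    import asyncio

    async def show_ttl(cache):
        seconds = await cache.ttl("user", param=42)  # identity built from key + param
        print(seconds)  # remaining seconds, or a negative sentinel if unset

    # asyncio.run(show_ttl(cache))  # `cache` must be constructed elsewhere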
16,213
def save_token(self, access_token): key = self._generate_cache_key(access_token.token) self.mc.set(key, access_token.__dict__) unique_token_key = self._unique_token_key(access_token.client_id, access_token.grant_type, access_token.user_id) self.mc.set(self._generate_cache_key(unique_token_key), access_token.__dict__) if access_token.refresh_token is not None: rft_key = self._generate_cache_key(access_token.refresh_token) self.mc.set(rft_key, access_token.__dict__)
Stores the access token and additional data in memcache. See :class:`oauth2.store.AccessTokenStore`.
16,214
def stringify(self, value): if ISuperModel.providedBy(value): return str(value) elif isinstance(value, (DateTime)): return value.ISO8601() elif safe_hasattr(value, "filename"): return value.filename elif isinstance(value, dict): return {k: self.stringify(v) for k, v in value.iteritems()} if isinstance(value, (list, tuple, LazyMap)): return map(self.stringify, value) elif safe_callable(value): return self.stringify(value()) elif isinstance(value, unicode): value = value.encode("utf8") try: return str(value) except (AttributeError, TypeError, ValueError): logger.warn("Could not convert {} to string".format(repr(value))) return None
Convert value to string This method is used to generate a simple JSON representation of the object (without dereferencing objects etc.)
16,215
def show(cmap, var, vmin=None, vmax=None): lat, lon, z, data = read(var) fig = plt.figure(figsize=(16, 12)) ax = fig.add_subplot(3, 1, 1) map1 = ax.scatter(lon, -z, c=data, cmap='gray', s=10, linewidths=0., vmin=vmin, vmax=vmax) plt.colorbar(map1, ax=ax) ax = fig.add_subplot(3, 1, 2) map1 = ax.scatter(lon, -z, c=data, cmap='jet', s=10, linewidths=0., vmin=vmin, vmax=vmax) plt.colorbar(map1, ax=ax) ax = fig.add_subplot(3, 1, 3) map1 = ax.scatter(lon, -z, c=data, cmap=cmap, s=10, linewidths=0., vmin=vmin, vmax=vmax) ax.set_xlabel('lon') ax.set_ylabel('-z') plt.colorbar(map1, ax=ax) plt.suptitle(var)
Show a colormap for a chosen input variable var side by side with black and white and jet colormaps. :param cmap: Colormap instance :param var: Variable to plot. :param vmin=None: Min plot value. :param vmax=None: Max plot value.
16,216
def distance(self, i, j): a, b = i, j if a.name() > b.name(): a, b = b, a return self._matrix[self._nodes[a.name()]][self._nodes[b.name()]]
Returns the distance between node i and node j. Parameters ---------- i : Node First node. j : Node Second node. Returns ------- float Distance between node i and node j.
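A small sketch of the symmetric lookup; ``matrix`` and the node objects are assumed to come from the surrounding class and expose ``name()``:

    # Node names are ordered internally before indexing, so the lookup is symmetric
    d_ab = matrix.distance(node_a, node_b)
    d_ba = matrix.distance(node_b, node_a)
    assert d_ab == d_ba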
16,217
def read(self, *, level=0, alignment=1) -> bytes: return self.mglo.read(level, alignment)
Read the content of the texture into a buffer. Keyword Args: level (int): The mipmap level. alignment (int): The byte alignment of the pixels. Returns: bytes
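The ``mglo`` attribute suggests a moderngl-style texture; a minimal sketch under that assumption:

    import moderngl

    ctx = moderngl.create_standalone_context()
    tex = ctx.texture((4, 4), components=4)  # 4x4 RGBA texture, 1 byte per component
    raw = tex.read(level=0, alignment=1)     # full mipmap level 0 as bytes
    assert len(raw) == 4 * 4 * 4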
16,218
def wait_for_notification(self, notification_class=BaseNotification): if notification_class: if notification_class is BaseNotification: message = "No notification was shown." else: message = "{0} was not shown.".format(notification_class.__name__) self.wait.until( lambda _: isinstance(self.notification, notification_class), message=message, ) return self.notification else: self.wait.until( lambda _: self.notification is None, message="Unexpected notification shown.", )
Wait for the specified notification to be displayed. Args: notification_class (:py:class:`BaseNotification`, optional): The notification class to wait for. If `None` is specified it will wait for any notification to be closed. Defaults to `BaseNotification`. Returns: :py:class:`BaseNotification`: Firefox notification.
16,219
def namedb_accounts_vest(cur, block_height): sql = args = (block_height,) vesting_rows = namedb_query_execute(cur, sql, args) rows = [] for row in vesting_rows: tmp = {} tmp.update(row) rows.append(tmp) for row in rows: addr = row[] token_type = row[] token_amount = row[] log.debug("Vest {} with {} {}".format(addr, token_amount, token_type)) fake_txid = namedb_vesting_txid(addr, token_type, token_amount, block_height) res = namedb_account_credit(cur, addr, token_type, token_amount, block_height, 0, fake_txid) if not res: traceback.print_stack() log.fatal(.format(token_amount, token_type, addr)) os.abort() return True
Vest tokens at this block to all recipients. Goes through the vesting table and debits each account that should vest on this block.
16,220
def _CheckWindowsRegistryKeyPath( self, filename, artifact_definition, key_path): result = True key_path_segments = key_path.lower().split() if key_path_segments[0] == : result = False logging.warning(( ).format( artifact_definition.name, filename)) for segment_index, key_path_segment in enumerate(key_path_segments): if key_path_segment.startswith() and key_path_segment.endswith(): if (segment_index == 1 and key_path_segment == and key_path_segments[0] == ): continue if key_path_segment.startswith(): result = False logging.warning(( ).format( artifact_definition.name, filename, key_path_segment)) elif key_path_segment.startswith(): result = False logging.warning(( ).format( artifact_definition.name, filename, key_path_segment)) return result
Checks if a path is a valid Windows Registry key path. Args: filename (str): name of the artifacts definition file. artifact_definition (ArtifactDefinition): artifact definition. key_path (str): Windows Registry key path to validate. Returns: bool: True if the Windows Registry key path is valid.
16,221
def install(name=None, refresh=False, pkgs=None, sources=None, **kwargs): **["foo", "bar"]*[{"foo": "salt://foo.deb"},{"bar": "salt://bar.deb"}]<package>old<old-version>new<new-version> refreshdb = salt.utils.data.is_true(refresh) pkg_to_install = [] old = list_pkgs() if name and not (pkgs or sources): if in name: pkg_to_install = name.split() else: pkg_to_install = [name] if pkgs: ) return ret
Install the passed package, add refresh=True to update the apk database. name The name of the package to be installed. Note that this parameter is ignored if either "pkgs" or "sources" is passed. Additionally, please note that this option can only be used to install packages from a software repository. To install a package file manually, use the "sources" option. CLI Example: .. code-block:: bash salt '*' pkg.install <package name> refresh Whether or not to refresh the package database before installing. Multiple Package Installation Options: pkgs A list of packages to install from a software repository. Must be passed as a python list. CLI Example: .. code-block:: bash salt '*' pkg.install pkgs='["foo", "bar"]' sources A list of IPK packages to install. Must be passed as a list of dicts, with the keys being package names, and the values being the source URI or local path to the package. Dependencies are automatically resolved and marked as auto-installed. CLI Example: .. code-block:: bash salt '*' pkg.install sources='[{"foo": "salt://foo.deb"},{"bar": "salt://bar.deb"}]' install_recommends Whether to install the packages marked as recommended. Default is True. Returns a dict containing the new package names and versions:: {'<package>': {'old': '<old-version>', 'new': '<new-version>'}}
16,222
def check_bed_coords(in_file, data): if dd.get_ref_file(data): contig_sizes = {} for contig in ref.file_contigs(dd.get_ref_file(data)): contig_sizes[contig.name] = contig.size with utils.open_gzipsafe(in_file) as in_handle: for line in in_handle: if not line.startswith(("#", "track", "browser")): parts = line.split() if len(parts) > 3: try: end = int(parts[2]) except ValueError: continue contig = parts[0] check_size = contig_sizes.get(contig) if check_size and end > check_size: raise ValueError("Found BED coordinate off the end of the chromosome:\n%s%s\n" "Is the input BED from the right genome build?" % (line, in_file))
Ensure BED file coordinates match reference genome. Catches errors like using a hg38 BED file for an hg19 genome run.
16,223
def run_init(args): root = args.root if root is None: root = root = os.path.abspath(root) project_data = _get_package_data() project_name = project_data[] directories = [os.path.join(root, ), os.path.join(root, ), os.path.join(root, ), os.path.join(root, project_name), ] for dir_path in directories: if not os.path.exists(dir_path): os.makedirs(dir_path) script_paths = [os.path.join(root, ), os.path.join(root, ), ] for script_path in script_paths: if not os.path.exists(script_path): os.mknod(script_path) copy_samples = [(resource_filename(, ), os.path.join(root, )), (resource_filename(, ), os.path.join(root, )), (resource_filename(, ), os.path.join(root, )), (resource_filename(, ), os.path.join(root, project_name + )), (resource_filename(, ), os.path.join(root, )), (resource_filename(, ), os.path.join(root, project_name + )), ] translate = {: project_data[], : project_data[], : project_data[], : project_data[], } for source, destination in copy_samples: if not os.path.exists(destination): copyfile(source, destination) _adjust_template(destination, translate)
Run project initialization. This will ask the user for input. Parameters ---------- args : argparse named arguments
16,224
def _process(self, envelope, session, mode, **kwargs): if mode == WMessengerOnionPackerLayerProto.Mode.pack: return self.pack(envelope, session, **kwargs) else: return self.unpack(envelope, session, **kwargs)
:meth:`.WMessengerOnionLayerProto.process` implementation
16,225
def grant_permission_to_users(self, permission, **kwargs): kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.grant_permission_to_users_with_http_info(permission, **kwargs) else: (data) = self.grant_permission_to_users_with_http_info(permission, **kwargs) return data
Grants a specific permission to multiple users. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.grant_permission_to_users(permission, async_req=True) >>> result = thread.get() :param async_req bool :param str permission: Permission to grant to the users. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required) :param list[str] body: list of users to which the specified permission should be granted :return: UserModel If the method is called asynchronously, returns the request thread.
16,226
def read_file(self, file: Union[IO, asyncio.StreamWriter]=None): if file: file_is_async = hasattr(file, 'drain') while True: data = yield from self._connection.read(4096) if not data: break if file: file.write(data) if file_is_async: yield from file.drain() self._data_event_dispatcher.notify_read(data)
Read from connection to file. Args: file: A file object or a writer stream.
16,227
def upgrade(yes, dry_run, patches): patcher = _get_mongopatcher() if dry_run: patcher.discover_and_apply(directory=patches, dry_run=dry_run) else: if (yes or prompt_bool("Are you sure you want to alter %s" % green(patcher.db))): patcher.discover_and_apply(patches) else: raise SystemExit()
Upgrade the datamodel by recursively applying the available patches.
16,228
def contains_point(self, pt): return (self.l < pt.x and self.r > pt.x and self.t < pt.y and self.b > pt.y)
Is the point inside this rect?
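Note that the comparisons are strict, so points on the border are not contained. A sketch, assuming ``l``/``r`` are the x-bounds and ``t``/``b`` the y-bounds:

    # `rect` and `Point` are hypothetical stand-ins for the surrounding classes
    assert rect.contains_point(Point(5, 5))           # strictly inside
    assert not rect.contains_point(Point(rect.l, 5))  # on the left edge: excluded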
16,229
def urlForViewState(self, person, viewState): organizerURL = self._webTranslator.linkTo(self.storeID) return url.URL( netloc=, scheme=, pathsegs=organizerURL.split()[1:], querysegs=((, person.name), (, viewState)))
Return a url for L{OrganizerFragment} which will display C{person} in state C{viewState}. @type person: L{Person} @type viewState: L{ORGANIZER_VIEW_STATES} constant. @rtype: L{url.URL}
16,230
def iter_statuses(self, number=-1, etag=None): i = self._iter(int(number), self.statuses_url, DeploymentStatus, etag=etag) i.headers = Deployment.CUSTOM_HEADERS return i
Iterate over the deployment statuses for this deployment. :param int number: (optional), the number of statuses to return. Default: -1, returns all statuses. :param str etag: (optional), the ETag header value from the last time you iterated over the statuses. :returns: generator of :class:`DeploymentStatus`\ es
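A hedged github3.py-era usage sketch; the login and deployment-listing calls are assumptions from that library's 0.x API:

    import github3

    gh = github3.login(token="...")
    repo = gh.repository("owner", "name")
    for deployment in repo.iter_deployments():
        for status in deployment.iter_statuses(number=5):
            print(status)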
16,231
def reindex(args): p = OptionParser(reindex.__doc__) p.add_option("--nogaps", default=False, action="store_true", help="Remove all gap lines [default: %default]") p.add_option("--inplace", default=False, action="store_true", help="Replace input file [default: %default]") opts, args = p.parse_args(args) if len(args) != 1: sys.exit(p.print_help()) agpfile, = args inplace = opts.inplace agp = AGP(agpfile, validate=False) pf = agpfile.rsplit(".", 1)[0] newagpfile = pf + ".reindexed.agp" fw = open(newagpfile, "w") agp.transfer_header(fw) for chr, chr_agp in groupby(agp, lambda x: x.object): chr_agp = list(chr_agp) object_beg = 1 for i, b in enumerate(chr_agp): b.object_beg = object_beg b.part_number = i + 1 if opts.nogaps and b.is_gap: continue if b.is_gap: b.object_end = object_beg + b.gap_length - 1 else: b.object_end = object_beg + b.component_span - 1 object_beg = b.object_end + 1 print(str(b), file=fw) fw.close() agp = AGP(newagpfile, validate=True) if inplace: shutil.move(newagpfile, agpfile) logging.debug("Rename file `{0}` to `{1}`".format(newagpfile, agpfile)) newagpfile = agpfile return newagpfile
%prog agpfile Assumes the component line order is correct and recomputes the object coordinates. This is necessary mostly due to manual edits (insert/delete) that disrupt the object coordinates.
16,232
def build_from_job_list(scheme_files, templates, base_output_dir): queue = Queue() for scheme in scheme_files: queue.put(scheme) if len(scheme_files) < 40: thread_num = len(scheme_files) else: thread_num = 40 threads = [] for _ in range(thread_num): thread = Thread(target=build_single_worker, args=(queue, templates, base_output_dir)) thread.start() threads.append(thread) queue.join() for _ in range(thread_num): queue.put(None) for thread in threads: thread.join()
Use $scheme_files as a job lists and build base16 templates using $templates (a list of TemplateGroup objects).
16,233
def bin_pkg_info(path, saltenv=): ** if __salt__[](path): newpath = __salt__[](path, saltenv) if not newpath: raise CommandExecutionError( {1}\ .format(path, saltenv) ) path = newpath else: if not os.path.exists(path): raise CommandExecutionError( .format(path) ) elif not os.path.isabs(path): raise SaltInvocationError( .format(path) ) cmd = [, , path] result = __salt__[](cmd, output_loglevel=) if result[] != 0: msg = + path if result[]: msg += + result[] raise CommandExecutionError(msg) ret = {} for line in result[].splitlines(): line = line.strip() if line.startswith(): ret[] = line.split()[-1] elif line.startswith(): ret[] = line.split()[-1] elif line.startswith(): ret[] = line.split()[-1] missing = [x for x in (, , ) if x not in ret] if missing: raise CommandExecutionError( .format(.join(missing), path) ) if __grains__.get(, ) == : osarch = __grains__.get(, ) arch = ret[] if arch != and osarch == and osarch != arch: ret[] += .format(arch) return ret
.. versionadded:: 2015.8.0 Parses RPM metadata and returns a dictionary of information about the package (name, version, etc.). path Path to the file. Can either be an absolute path to a file on the minion, or a salt fileserver URL (e.g. ``salt://path/to/file.rpm``). If a salt fileserver URL is passed, the file will be cached to the minion so that it can be examined. saltenv : base Salt fileserver environment from which to retrieve the package. Ignored if ``path`` is a local file path on the minion. CLI Example: .. code-block:: bash salt '*' lowpkg.bin_pkg_info /root/foo-1.2.3-1ubuntu1_all.deb salt '*' lowpkg.bin_pkg_info salt://foo-1.2.3-1ubuntu1_all.deb
16,234
def activate_view(self, request, user_id, token): try: user = self.user_model.objects.get(id=user_id, is_active=False) except self.user_model.DoesNotExist: raise Http404(_("Your URL may have expired.")) if not RegistrationTokenGenerator().check_token(user, token): raise Http404(_("Your URL may have expired.")) form = self.get_form( data=request.POST or None, files=request.FILES or None, instance=user ) if form.is_valid(): form.instance.is_active = True user = form.save() user.set_password(form.cleaned_data["password"]) user.save() self.activate_organizations(user) user = authenticate( username=form.cleaned_data["username"], password=form.cleaned_data["password"], ) login(request, user) return redirect(self.get_success_url()) return render(request, self.registration_form_template, {"form": form})
View function that activates the given User by setting `is_active` to true if the provided information is verified.
16,235
def update(self, enabled): data = values.of({'Enabled': enabled, }) payload = self._version.update( 'POST', self._uri, data=data, ) return InstalledAddOnExtensionInstance( self._version, payload, installed_add_on_sid=self._solution['installed_add_on_sid'], sid=self._solution['sid'], )
Update the InstalledAddOnExtensionInstance :param bool enabled: A Boolean indicating if the Extension will be invoked :returns: Updated InstalledAddOnExtensionInstance :rtype: twilio.rest.preview.marketplace.installed_add_on.installed_add_on_extension.InstalledAddOnExtensionInstance
16,236
def call_rpc(self, address, rpc_id, payload=b""): if rpc_id < 0 or rpc_id > 0xFFFF: raise RPCInvalidIDError("Invalid RPC ID: {}".format(rpc_id)) if address not in self._rpc_overlays and address not in self._tiles: raise TileNotFoundError("Unknown tile address, no registered handler", address=address) overlay = self._rpc_overlays.get(address, None) tile = self._tiles.get(address, None) if overlay is not None and overlay.has_rpc(rpc_id): return overlay.call_rpc(rpc_id, payload) elif tile is not None and tile.has_rpc(rpc_id): return tile.call_rpc(rpc_id, payload) raise RPCNotFoundError("Could not find RPC 0x%X at address %d" % (rpc_id, address))
Call an RPC by its address and ID. Args: address (int): The address of the mock tile this RPC is for rpc_id (int): The number of the RPC payload (bytes): A byte string of payload parameters up to 20 bytes Returns: bytes: The response payload from the RPC
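A small sketch of dispatch and error handling; the address and RPC ID are illustrative only, and ``device`` stands in for the object exposing this method:

    try:
        response = device.call_rpc(address=8, rpc_id=0x0004, payload=b"")
    except RPCNotFoundError:
        response = None  # neither an overlay nor a tile handles this RPC
    except TileNotFoundError:
        response = None  # nothing registered at that address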
16,237
def save(self, saveModelDir): logger = self._getLogger() logger.debug("(%s) Creating local checkpoint in %r...", self, saveModelDir) modelPickleFilePath = self._getModelPickleFilePath(saveModelDir) if os.path.exists(saveModelDir): if not os.path.isdir(saveModelDir): raise Exception(("Existing filesystem entry <%s> is not a model" " checkpoint -- refusing to delete (not a directory)") \ % saveModelDir) if not os.path.isfile(modelPickleFilePath): raise Exception(("Existing filesystem entry <%s> is not a model" " checkpoint -- refusing to delete"\ " (%s missing or not a file)") % \ (saveModelDir, modelPickleFilePath)) shutil.rmtree(saveModelDir) self.__makeDirectoryFromAbsolutePath(saveModelDir) with open(modelPickleFilePath, 'wb') as modelPickleFile: logger.debug("(%s) Pickling Model instance...", self) pickle.dump(self, modelPickleFile, protocol=pickle.HIGHEST_PROTOCOL) logger.debug("(%s) Finished pickling Model instance", self) self._serializeExtraData(extraDataDir=self._getModelExtraDataDir(saveModelDir)) logger.debug("(%s) Finished creating local checkpoint", self) return
Save the model in the given directory. :param saveModelDir: (string) Absolute directory path for saving the model. This directory should only be used to store a saved model. If the directory does not exist, it will be created automatically and populated with model data. A pre-existing directory will only be accepted if it contains previously saved model data. If such a directory is given, the full contents of the directory will be deleted and replaced with current model data.
16,238
def driver_name(self): (self._driver_name, value) = self.get_cached_attr_string(self._driver_name, 'driver_name') return value
Returns the name of the driver that provides this tacho motor device.
16,239
def is_driver(self): if not hasattr(self, ): self.parse_data_directories(directories=[ DIRECTORY_ENTRY[]]) return True return False
Check whether the file is a Windows driver. This will return true only if there are reliable indicators of the image being a driver.
16,240
def _get_simple_score(self, profile: List[str], negated_classes: List[str], bg_mean_pic: float, bg_mean_max_pic: float, bg_mean_sum_pic: float, negation_weight: Optional[float] = .25, ic_map: Optional[Dict[str, float]] = None) -> float: if ic_map is None: ic_map = self.ic_store.get_profile_ic(profile + negated_classes) pos_map = {cls: ic for cls, ic in ic_map.items() if cls in profile} neg_map = {cls: ic for cls, ic in ic_map.items() if cls in negated_classes} mean_ic = mean(pos_map.values()) if len(profile) > 0 else 0 max_ic = max(pos_map.values()) if len(profile) > 0 else 0 sum_ic = sum(pos_map.values()) if len(profile) > 0 else 0 if len(negated_classes) > 0: weighted_ic = [ic * negation_weight for ic in neg_map.values()] mean_ic = max([np.average([mean_ic, mean(neg_map.values())], weights=[1, negation_weight]), mean_ic]) max_ic = max([max_ic] + weighted_ic) sum_ic = sum_ic + sum(weighted_ic) return mean([ min([mean_ic / bg_mean_pic, 1.0]), min([max_ic / bg_mean_max_pic, 1.0]), min([sum_ic / bg_mean_sum_pic, 1.0]) ])
Simple score is the average of the relative mean ic, max ic, and sum ic (relative to global stats) :param profile: list of class ids in the profile :param negated_classes: list of negated class ids :param bg_mean_pic: the average of the average IC in the background profile annotations :param bg_mean_max_pic: the average of the max IC in the background set of profiles :param bg_mean_sum_pic: the average of the profile sum IC in the background set :param negation_weight: weight applied to the IC of negated classes :param ic_map: dictionary of class - information content mappings :return: simple score (float)
16,241
def _kalman_prediction_step(k, p_m , p_P, p_dyn_model_callable, calc_grad_log_likelihood=False, p_dm = None, p_dP = None): A = p_dyn_model_callable.Ak(k,p_m,p_P) Q = p_dyn_model_callable.Qk(k) m_pred = p_dyn_model_callable.f_a(k, p_m, A) P_pred = A.dot(p_P).dot(A.T) + Q if calc_grad_log_likelihood: dA_all_params = p_dyn_model_callable.dAk(k) dQ_all_params = p_dyn_model_callable.dQk(k) param_number = p_dP.shape[2] dm_pred = np.empty(p_dm.shape) dP_pred = np.empty(p_dP.shape) for j in range(param_number): dA = dA_all_params[:,:,j] dQ = dQ_all_params[:,:,j] dP = p_dP[:,:,j] dm = p_dm[:,:,j] dm_pred[:,:,j] = np.dot(dA, p_m) + np.dot(A, dm) dP_pred[:,:,j] = np.dot( dA ,np.dot(p_P, A.T)) dP_pred[:,:,j] += dP_pred[:,:,j].T dP_pred[:,:,j] += np.dot( A ,np.dot(dP, A.T)) + dQ dP_pred[:,:,j] = 0.5*(dP_pred[:,:,j] + dP_pred[:,:,j].T) else: dm_pred = None dP_pred = None return m_pred, P_pred, dm_pred, dP_pred
Discrete prediction function Input: k: int Iteration No. Starts at 0. Total number of iterations equals the number of measurements. p_m: matrix of size (state_dim, time_series_no) Mean value from the previous step. For "multiple time series mode" it is a matrix, the second dimension of which corresponds to different time series. p_P: Covariance matrix from the previous step. p_dyn_model_callable: class calc_grad_log_likelihood: boolean Whether to calculate the gradient of the marginal likelihood of the state-space model. If true then the next parameters must provide the extra data for gradient calculation. p_dm: 3D array (state_dim, time_series_no, parameters_no) Mean derivatives from the previous step. For "multiple time series mode" it is a 3D array, the second dimension of which corresponds to different time series. p_dP: 3D array (state_dim, state_dim, parameters_no) Covariance derivatives from the previous step Output: ---------------------------- m_pred, P_pred, dm_pred, dP_pred: matrices, 3D objects Results of the prediction step.
16,242
def ipaddress(): try: import socket s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) s.connect(("gmail.com", 80)) result = s.getsockname()[0] s.close() return result except Exception: return ""
Determine our own IP address. This seems to be far more complicated than you would think.
16,243
def convert_geojson_to_shapefile(geojson_path): layer = QgsVectorLayer(geojson_path, 'geojson', 'ogr') if not layer.isValid(): return False shapefile_path = os.path.splitext(geojson_path)[0] + '.shp' QgsVectorFileWriter.writeAsVectorFormat( layer, shapefile_path, 'UTF-8', layer.crs(), 'ESRI Shapefile') if os.path.exists(shapefile_path): return True return False
Convert geojson file to shapefile. It will create a necessary file next to the geojson file. It will not affect another files (e.g. .xml, .qml, etc). :param geojson_path: The path to geojson file. :type geojson_path: basestring :returns: True if shapefile layer created, False otherwise. :rtype: bool
16,244
def threadsafe_generator(generator_func): def decoration(*args, **keyword_args): return ThreadSafeIter(generator_func(*args, **keyword_args)) return decoration
A decorator that takes a generator function and makes it thread-safe.
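A usage sketch; ``ThreadSafeIter`` (referenced by the decorator) is assumed to serialize ``next()`` calls with a lock:

    @threadsafe_generator
    def counter():
        i = 0
        while True:
            i += 1
            yield i

    c = counter()   # one iterator, safe to share across worker threads
    print(next(c))  # -> 1; concurrent next() calls no longer race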
16,245
def is_set(name): val = os.environ.get(name, '0') assert val == '0' or val == '1', f"env var {name} has value {val}, expected 0 or 1" return val == '1'
Helper method to check if given property is set
16,246
def _set_ldp(self, v, load=False): if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=ldp.ldp, is_container=, presence=False, yang_name="ldp", rest_name="ldp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u: {u: u, u: None}}, namespace=, defining_module=, yang_type=, is_config=False) except (TypeError, ValueError): raise ValueError({ : , : "container", : , }) self.__ldp = t if hasattr(self, ): self._set()
Setter method for ldp, mapped from YANG variable /mpls_state/ldp (container) If this variable is read-only (config: false) in the source YANG file, then _set_ldp is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ldp() directly. YANG Description: LDP Operational Information
16,247
def register(): signals.article_generator_finalized.connect(link_source_files) signals.page_generator_finalized.connect(link_source_files) signals.page_writer_finalized.connect(write_source_files)
Calls the shots, based on signals
16,248
def write(self): if self.text is None: return None if self._last_text == self.text: char_delay = 0 else: char_delay = self.char_delay self._last_text = self.text with self.lock: ctl = Control().move_column(1).pos_save().erase_line() if char_delay == 0: ctl.text(str(self)).write(file=self.file) else: self.write_char_delay(ctl, char_delay) ctl.delay(self.delay) return None
Writes a single frame of the progress spinner to the terminal. This function updates the current frame before returning.
16,249
def post(method, hmc, uri, uri_parms, body, logon_required, wait_for_completion): assert wait_for_completion is True cpc_oid = uri_parms[0] try: cpc = hmc.cpcs.lookup_by_oid(cpc_oid) except KeyError: raise InvalidResourceError(method, uri) check_required_fields(method, uri, body, ['power-saving']) power_saving = body['power-saving'] if power_saving not in ['high-performance', 'low-power', 'custom']: raise BadRequestError(method, uri, reason=7, message="Invalid power-saving value: %r" % power_saving) cpc.properties['cpc-power-saving'] = power_saving cpc.properties['cpc-power-saving-state'] = power_saving cpc.properties['zcpc-power-saving'] = power_saving cpc.properties['zcpc-power-saving-state'] = power_saving
Operation: Set CPC Power Save (any CPC mode).
16,250
def connect(self, (host, port)): super(GeventTransport, self).connect((host, port), klass=socket.socket)
Connect using a host,port tuple
16,251
def _validate_config(self): if len(cfg.CONF.ml2_arista.get('eapi_host')) < 1: msg = _('Required option eapi_host is not set') LOG.exception(msg) raise arista_exc.AristaConfigError(msg=msg)
Ensure at least one switch is configured
16,252
def __get_enabled_heuristics(self, url): if url in self.__sites_heuristics: return self.__sites_heuristics[url] site = self.__sites_object[url] heuristics = dict(self.cfg_heuristics["enabled_heuristics"]) if "overwrite_heuristics" in site: for heuristic, value in site["overwrite_heuristics"].items(): if value is False and heuristic in heuristics: del heuristics[heuristic] else: heuristics[heuristic] = value self.__sites_heuristics[site["url"]] = heuristics self.log.debug( "Enabled heuristics for %s: %s", site["url"], heuristics ) return heuristics
Get the enabled heuristics for a site, merging the default and the overwrite together. The config will only be read once and the merged site-config will be cached. :param str url: The url to get the heuristics for.
16,253
def process_event(self, module_name, event, default_event=False): module_info = self.output_modules.get(module_name) if module_info["type"] == "py3status": module = module_info["module"] module.click_event(event) if self.config["debug"]: self.py3_wrapper.log("dispatching event {}".format(event)) if not module.prevent_refresh: self.py3_wrapper.refresh_modules(module_name) default_event = False if default_event: if self.config["debug"]: self.py3_wrapper.log("dispatching default event {}".format(event)) self.py3_wrapper.refresh_modules(module_name) module_groups = self.py3_config[".module_groups"] containers = module_groups.get(module_name, []) for container in containers: self.process_event(container, event)
Process the event for the named module. Events may have been declared in i3status.conf, modules may have on_click() functions. There is a default middle click event etc.
16,254
def edit_pool(self, id): p = Pool.get(int(id)) if 'name' in request.json: p.name = validate_string(request.json, 'name') if 'description' in request.json: p.description = validate_string(request.json, 'description') if 'default_type' in request.json: p.default_type = validate_string(request.json, 'default_type') if 'ipv4_default_prefix_length' in request.json: p.ipv4_default_prefix_length = request.json['ipv4_default_prefix_length'] if 'ipv6_default_prefix_length' in request.json: p.ipv6_default_prefix_length = request.json['ipv6_default_prefix_length'] if 'tags' in request.json: p.tags = request.json['tags'] if 'avps' in request.json: p.avps = request.json['avps'] try: p.save() except NipapError, e: return json.dumps({'error': 1, 'message': e.args, 'type': type(e).__name__}) return json.dumps(p, cls=NipapJSONEncoder)
Edit a pool.
16,255
def validated_element(x, tags=None, attrs=None): ele = to_ele(x) if tags: if isinstance(tags, (str, bytes)): tags = [tags] if ele.tag not in tags: raise XMLError("Element [%s] does not meet requirement" % ele.tag) if attrs: for req in attrs: if isinstance(req, (str, bytes)): req = [req] for alt in req: if alt in ele.attrib: break else: raise XMLError("Element [%s] does not have required attributes" % ele.tag) return ele
Checks if the root element of an XML document or Element meets the supplied criteria. *tags* if specified is either a single allowable tag name or sequence of allowable alternatives *attrs* if specified is a sequence of required attributes, each of which may be a sequence of several allowable alternatives Raises :exc:`XMLError` if the requirements are not met.
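A brief sketch based on the stated contract; ``to_ele`` and ``XMLError`` come from the surrounding module:

    xml = '<rpc-reply message-id="101"><ok/></rpc-reply>'
    ele = validated_element(xml, tags="rpc-reply", attrs=["message-id"])
    # Raises XMLError if the root tag is not among `tags`, or if none of the
    # allowable alternatives for a required attribute is present.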
16,256
def _back_compatible_gemini(conf_files, data): if vcfanno.is_human(data, builds=["37"]): for f in conf_files: if f and os.path.basename(f) == "gemini.conf" and os.path.exists(f): with open(f) as in_handle: for line in in_handle: if line.startswith("file"): fname = line.strip().split("=")[-1].replace('"', '').strip() if fname.find(".tidy.") > 0: return install.get_gemini_dir(data) return None
Provide old install directory for configuration with GEMINI supplied tidy VCFs. Handles new style (bcbio installed) and old style (GEMINI installed) configuration and data locations.
16,257
def _connect_db(self): db_args = {} db_args[] = self._cfg.get(, ) db_args[] = self._cfg.get(, ) db_args[] = self._cfg.get(, ) db_args[] = self._cfg.get(, ) db_args[] = self._cfg.get(, ) db_args[] = self._cfg.get(, ) if db_args[] is not None and db_args[] == : db_args[] = None for key in db_args.copy(): if db_args[key] is None: del(db_args[key]) while True: try: self._con_pg = psycopg2.connect(**db_args) self._con_pg.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT) self._curs_pg = self._con_pg.cursor(cursor_factory=psycopg2.extras.DictCursor) self._register_inet() psycopg2.extras.register_hstore(self._con_pg, globally=True, unicode=True) except psycopg2.Error as exc: if re.search("database.*does not exist", unicode(exc)): raise NipapDatabaseNonExistentError("Database does not exist" % db_args[]) if self._auto_install_db: self._db_install(db_args[]) continue raise exc except NipapError as exc: self._logger.error(unicode(exc)) raise exc if current_db_version != nipap.__db_version__: if self._auto_upgrade_db: self._db_upgrade(db_args[]) continue raise NipapDatabaseWrongVersionError("NIPAP PostgreSQL database is outdated. Schema version %s is required to run but you are using %s" % (nipap.__db_version__, current_db_version)) break
Open database connection
16,258
def set_of(*generators): class SetOfGenerators(ArbitraryInterface): @classmethod def arbitrary(cls): arbitrary_set = set() for generator in generators: arbitrary_set |= { arbitrary(generator) for _ in range(arbitrary(int) % 100) } return arbitrary_set SetOfGenerators.__name__ = .join([ , .join(generator.__name__ for generator in generators), ]) return SetOfGenerators
Generates a set consisting solely of the specified generators. This is a class factory, it makes a class which is a closure around the specified generators.
16,259
def get_wyu_news(self, page): if page <= 0: return [] res = WyuNews.__wyu_news(page) soup = BeautifulSoup(res, from_encoding=) tag_a = soup.find_all(self.__get_tag_a) tag_td = soup.find_all(self.__get_tag_td) result = [] for index, item in enumerate(tag_a): result.append({ : .join((, item.attrs[])) , : item.string , : self.__get_news_type(tag_td[index]) , : self.__get_news_from(tag_td[index]) , : self.__get_news_posttime(tag_td[index]) }) return result
Get the news list. :param page: Page number :return: json
16,260
def make_screenshot(self, screenshot_name=None): if not self.screenshot_path: raise Exception('screenshot_path is not configured') if not screenshot_name: screenshot_name = str(time.time()) self.get_screenshot_as_file('{}/{}.png'.format(self.screenshot_path, screenshot_name))
Shortcut for ``get_screenshot_as_file`` but with configured path. If you are using base :py:class:`~webdriverwrapper.unittest.testcase.WebdriverTestCase`. or pytest, ``screenshot_path`` is passed to driver automatically. If ``screenshot_name`` is not passed, current timestamp is used. .. versionadded:: 2.2
16,261
def gen_pypirc(username=None, password=None): path = join(conf.getenv('HOME'), '.pypirc') username = username or conf.getenv('PYPI_USER', None) password = password or conf.getenv('PYPI_PASS', None) if username is None or password is None: log.err("You must provide $PYPI_USER and $PYPI_PASS") sys.exit(1) log.info("Generating {}".format(path)) fs.write_file(path, util.remove_indent('[pypi]\nusername: {username}\npassword: {password}'.format( username=username, password=password )))
Generate ~/.pypirc with the given credentials. Useful for CI builds. Can also get credentials through env variables ``PYPI_USER`` and ``PYPI_PASS``. Args: username (str): pypi username. If not given it will try to take it from the `` PYPI_USER`` env variable. password (str): pypi password. If not given it will try to take it from the `` PYPI_PASS`` env variable.
16,262
def parse_csv_file(csv_filepath, expect_negative_correlation = False, STDev_cutoff = 1.0, headers_start_with = 'ID', comments_start_with = None, separator = ','): assert (os.path.exists(csv_filepath)) return parse_csv(get_file_lines(csv_filepath), expect_negative_correlation = expect_negative_correlation, STDev_cutoff = STDev_cutoff, headers_start_with = headers_start_with, comments_start_with = comments_start_with, separator = separator)
Analyzes a CSV file. Expects a CSV file with a header line starting with headers_start_with e.g. "ID,experimental value, prediction 1 value, prediction 2 value," Record IDs are expected in the first column. Experimental values are expected in the second column. Predicted values are expected in the subsequent columns. :param csv_filepath: The path to a CSV file containing experimental and predicted data for some dataset. :param expect_negative_correlation: See parse_csv. :param STDev_cutoff: See parse_csv. :param headers_start_with: See parse_csv. :param comments_start_with: See parse_csv. :param separator: See parse_csv.
16,263
def map_arguments(self, arguments): if self.argument_mappings is None: return arguments return [f(a) for f, a in zip(self.argument_mappings, arguments)]
Returns the mapped function arguments. If no mapping functions are defined, the arguments are returned as they were supplied. :param arguments: List of arguments for bound function as strings. :return: Mapped arguments.
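A sketch of both branches; ``cmd`` stands in for the surrounding command object, and setting its mappings directly is a hypothetical setup step:

    # With mappings, each string argument is converted positionally
    cmd.argument_mappings = [int, float]
    assert cmd.map_arguments(["3", "2.5"]) == [3, 2.5]

    # Without mappings, arguments pass through untouched
    cmd.argument_mappings = None
    assert cmd.map_arguments(["3", "2.5"]) == ["3", "2.5"]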
16,264
def register(coordinator): fetch_queue = Queue.Queue() coordinator.register(FetchItem, fetch_queue) for i in xrange(FLAGS.fetch_threads): coordinator.worker_threads.append( FetchThread(fetch_queue, coordinator.input_queue))
Registers this module as a worker with the given coordinator.
16,265
def add_errback(self, errback, *errback_args, **errback_kwargs): return self.add_callbacks(None, errback=errback, errback_args=errback_args, errback_kwargs=errback_kwargs)
Add a errback without an associated callback.
16,266
def getFragment(self): fragment = self.fragmentClass() self.openElements[0].reparentChildren(fragment) return fragment
Return the final fragment
16,267
def layout_item(layout, item_id, item_class): item = layout.itemById(item_id) if item is None: return item if issubclass(item_class, QgsLayoutMultiFrame): frame = sip.cast(item, QgsLayoutFrame) multi_frame = frame.multiFrame() return sip.cast(multi_frame, item_class) else: return sip.cast(item, item_class)
Fetch a specific item according to its type in a layout. There's some sip casting conversion issues with QgsLayout::itemById. Don't use it, and use this function instead. See https://github.com/inasafe/inasafe/issues/4271 :param layout: The layout to look in. :type layout: QgsLayout :param item_id: The ID of the item to look for. :type item_id: basestring :param item_class: The expected class name. :type item_class: cls :return: The layout item, inherited class of QgsLayoutItem.
16,268
def get_airport_stats(self, iata, page=1, limit=100): url = AIRPORT_DATA_BASE.format(iata, str(self.AUTH_TOKEN), page, limit) return self._fr24.get_airport_stats(url)
Retrieve the performance statistics at an airport Given the IATA code of an airport, this method returns the performance statistics for the airport. Args: iata (str): The IATA code for an airport, e.g. HYD page (int): Optional page number; for users who are on a plan with flightradar24 they can pass in higher page numbers to get more data limit (int): Optional limit on number of records returned Returns: A list of dicts with the data; one dict for each row of data from flightradar24 Example:: from pyflightdata import FlightData f=FlightData() #optional login f.login(myemail,mypassword) f.get_airport_stats('HYD') f.get_airport_stats('HYD',page=1,limit=10)
16,269
def standardize_strings(arg, strtype=settings.MODERNRPC_PY2_STR_TYPE, encoding=settings.MODERNRPC_PY2_STR_ENCODING): assert six.PY2, "This function should be used with Python 2 only" if not strtype: return arg if strtype == six.binary_type or strtype == 'str': return _generic_convert_string(arg, six.text_type, six.binary_type, encoding) elif strtype == six.text_type or strtype == 'unicode': return _generic_convert_string(arg, six.binary_type, six.text_type, encoding) raise TypeError('Invalid strtype: {}'.format(repr(strtype)))
Python 2 only. Lookup given *arg* and convert its str or unicode value according to MODERNRPC_PY2_STR_TYPE and MODERNRPC_PY2_STR_ENCODING settings.
16,270
def enable( self, cmd="enable", pattern="password", re_flags=re.IGNORECASE, default_username="manager", ): if self.check_enable_mode(): return "" output = self.send_command_timing(cmd) if ( "username" in output.lower() or "login name" in output.lower() or "user name" in output.lower() ): output += self.send_command_timing(default_username) if "password" in output.lower(): output += self.send_command_timing(self.secret) log.debug("{}".format(output)) self.clear_buffer() return output
Enter enable mode
16,271
def tas53(msg): d = hex2bin(data(msg)) if d[33] == '0': return None tas = bin2int(d[34:46]) * 0.5 return round(tas, 1)
Aircraft true airspeed, BDS 5,3 message Args: msg (String): 28 bytes hexadecimal message Returns: float: true airspeed in knots
16,272
def add_listener(self, callback, mask=EVENT_ALL): self._scheduler.add_listener(callback, mask)
Add a listener for scheduler events. When a matching event occurs, ``callback`` is executed with the event object as its sole argument. If the ``mask`` parameter is not provided, the callback will receive events of all types. For further info: https://apscheduler.readthedocs.io/en/latest/userguide.html#scheduler-events :param callback: any callable that takes one argument :param int mask: bitmask that indicates which events should be listened to
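A hedged APScheduler-style sketch; the event constants come from ``apscheduler.events``, and ``scheduler`` stands in for the wrapping object:

    from apscheduler.events import EVENT_JOB_ERROR, EVENT_JOB_EXECUTED

    def on_job_event(event):
        status = "errored" if event.exception else "ran"
        print("job", event.job_id, status)

    # Only job-execution events reach the callback with this mask
    scheduler.add_listener(on_job_event, EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)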
16,273
def _next_method(self): queue = self.queue put = self._quick_put read_frame = self.source.read_frame while not queue: try: frame_type, channel, payload = read_frame() except Exception as exc: put(exc) break self.bytes_recv += 1 if frame_type not in (self.expected_types[channel], 8): put(( channel, UnexpectedFrame( 'Received frame type {0} while expecting type: {1}'.format( frame_type, self.expected_types[channel])))) elif frame_type == 1: self._process_method_frame(channel, payload) elif frame_type == 2: self._process_content_header(channel, payload) elif frame_type == 3: self._process_content_body(channel, payload) elif frame_type == 8: self._process_heartbeat(channel, payload)
Read the next method from the source, once one complete method has been assembled it is placed in the internal queue.
16,274
def get_cleaned_data_for_step(self, step): if step in self.form_list: form_obj = self.get_form(step=step, data=self.storage.get_step_data(step), files=self.storage.get_step_files(step)) if form_obj.is_valid(): return form_obj.cleaned_data return None
Returns the cleaned data for a given `step`. Before returning the cleaned data, the stored values are being revalidated through the form. If the data doesn't validate, None will be returned.
16,275
def _write(self, s, s_length=None, flush=False, ignore_overflow=False, err_msg=None): if not ignore_overflow: s_length = len(s) if s_length is None else s_length if err_msg is None: err_msg = ( "Terminal has {} columns; attempted to write " "a string {} of length {}.".format( self.columns, repr(s), s_length) ) ensure(s_length <= self.columns, WidthOverflowError, err_msg) self.cursor.write(s) if flush: self.cursor.flush()
Write ``s`` :type s: str|unicode :param s: String to write :param s_length: Custom length of ``s`` :param flush: Set this to flush the terminal stream after writing :param ignore_overflow: Set this to ignore if s will exceed the terminal's width :param err_msg: The error message given to WidthOverflowError if it is triggered
16,276
def record_diff(old, new): return '\n'.join(difflib.ndiff( ['%s: %s' % (k, v) for op in old for k, v in op.items()], ['%s: %s' % (k, v) for op in new for k, v in op.items()], ))
Generate a human-readable diff of two performance records.
16,277
def _decode(cls, value): value = cls._DEC_RE.sub(lambda x: '%c' % int(x.group(1), 16), value) return json.loads(value)
Decode the given value, reverting '%'-encoded groups.
16,278
def connect(self, host=None, port=None): host = self.host if host is None else host port = self.port if port is None else port self.socket.connect(host, port)
Connects to given host address and port.
16,279
def graph_to_svg(graph): import tempfile import subprocess with tempfile.NamedTemporaryFile() as dot_file: nx.drawing.nx_agraph.write_dot(graph, dot_file.name) svg = subprocess.check_output(['dot', dot_file.name, '-Tsvg']) return svg
Turn a networkx graph into an SVG string, using graphviz dot. Parameters ---------- graph: networkx graph Returns --------- svg: string, pictoral layout in SVG format
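A usage sketch; ``write_dot`` requires pygraphviz, and the ``dot`` binary must be on PATH:

    import networkx as nx

    g = nx.path_graph(4)
    svg_bytes = graph_to_svg(g)  # subprocess.check_output returns bytes
    with open("graph.svg", "wb") as fh:
        fh.write(svg_bytes)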
16,280
def validate_types(schemas_and_tables): all_types = get_types() if not (all(sn in all_types for sn, tn in schemas_and_tables)): bad_types = [sn for sn, tn in schemas_and_tables if sn not in all_types] msg = 'Unknown annotation types: {}'.format(bad_types) raise UnknownAnnotationTypeException(msg)
Checks that the schema names in a list of (schema_name, table_name) pairs are known annotation types. Parameters ---------- schemas_and_tables : list[tuple[str, str]] Pairs of (schema_name, table_name) to validate. Raises ------ UnknownAnnotationTypeException If any schema name is not a known annotation type.
16,281
def makeplantloop(idf, loopname, sloop, dloop, testing=None): testn = 0 newplantloop = idf.newidfobject("PLANTLOOP", Name=loopname) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() fields = SomeFields.p_fields flnames = [field.replace(, ) for field in fields] fields1 = [field.replace(, ) for field in fields] fields1 = [field.replace(, ) for field in fields1] fields1 = [field[:field.find() - 1] for field in fields1] fields1 = [field.replace(, ) for field in fields1] fields1 = [field.replace(, ) for field in fields1] fieldnames = [ % (loopname, field) for field in fields1] for fieldname, thefield in zip(fieldnames, flnames): newplantloop[thefield] = fieldname testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() sbranchlist = idf.newidfobject( "BRANCHLIST", Name=newplantloop.Plant_Side_Branch_List_Name) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() dbranchlist = idf.newidfobject( "BRANCHLIST", Name=newplantloop.Demand_Side_Branch_List_Name) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() sbranchnames = flattencopy(sloop) for branchname in sbranchnames: sbranchlist.obj.append(branchname) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() dbranchnames = flattencopy(dloop) for branchname in dbranchnames: dbranchlist.obj.append(branchname) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() sbranchs = [] for bname in sbranchnames: branch = makepipebranch(idf, bname) sbranchs.append(branch) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() anode = "Component_1_Inlet_Node_Name" sameinnode = "Plant_Side_Inlet_Node_Name" sbranchs[0][anode] = newplantloop[sameinnode] anode = "Component_1_Outlet_Node_Name" sameoutnode = "Plant_Side_Outlet_Node_Name" sbranchs[-1][anode] = newplantloop[sameoutnode] testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() pname = sbranchs[0][] apipe = idf.getobject(.upper(), pname) apipe.Inlet_Node_Name = newplantloop[sameinnode] pname = sbranchs[-1][] apipe = idf.getobject(.upper(), pname) apipe.Outlet_Node_Name = newplantloop[sameoutnode] testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() dbranchs = [] for bname in dbranchnames: branch = makepipebranch(idf, bname) dbranchs.append(branch) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() anode = "Component_1_Inlet_Node_Name" sameinnode = "Demand_Side_Inlet_Node_Name" dbranchs[0][anode] = newplantloop[sameinnode] anode = "Component_1_Outlet_Node_Name" sameoutnode = "Demand_Side_Outlet_Node_Name" dbranchs[-1][anode] = newplantloop[sameoutnode] testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() pname = dbranchs[0][] apipe = idf.getobject(.upper(), pname) apipe.Inlet_Node_Name = newplantloop[sameinnode] pname = dbranchs[-1][] apipe = idf.getobject(.upper(), pname) apipe.Outlet_Node_Name = newplantloop[sameoutnode] testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() sconnlist = idf.newidfobject( "CONNECTORLIST", Name=newplantloop.Plant_Side_Connector_List_Name) sconnlist.Connector_1_Object_Type = "Connector:Splitter" sconnlist.Connector_1_Name = "%s_supply_splitter" % (loopname,) sconnlist.Connector_2_Object_Type = "Connector:Mixer" sconnlist.Connector_2_Name = "%s_supply_mixer" % (loopname,) dconnlist = idf.newidfobject( "CONNECTORLIST", 
Name=newplantloop.Demand_Side_Connector_List_Name) dconnlist.Connector_1_Object_Type = "Connector:Splitter" dconnlist.Connector_1_Name = "%s_demand_splitter" % (loopname,) dconnlist.Connector_2_Object_Type = "Connector:Mixer" dconnlist.Connector_2_Name = "%s_demand_mixer" % (loopname,) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() s_splitter = idf.newidfobject( "CONNECTOR:SPLITTER", Name=sconnlist.Connector_1_Name) s_splitter.obj.extend([sloop[0]] + sloop[1]) s_mixer = idf.newidfobject( "CONNECTOR:MIXER", Name=sconnlist.Connector_2_Name) s_mixer.obj.extend([sloop[-1]] + sloop[1]) d_splitter = idf.newidfobject( "CONNECTOR:SPLITTER", Name=dconnlist.Connector_1_Name) d_splitter.obj.extend([dloop[0]] + dloop[1]) d_mixer = idf.newidfobject( "CONNECTOR:MIXER", Name=dconnlist.Connector_2_Name) d_mixer.obj.extend([dloop[-1]] + dloop[1]) testn = doingtesting(testing, testn, newplantloop) if testn == None: returnnone() return newplantloop
Make plant loop with pipe components.
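A minimal usage sketch, not from the source: it assumes eppy's IDF class alongside the makeplantloop helper above, and the idd/idf paths and branch names are hypothetical.

from eppy.modeleditor import IDF

IDF.setiddname("Energy+.idd")  # hypothetical path to the EnergyPlus idd file
idf = IDF("empty.idf")         # hypothetical starting idf file
# each side is [inlet branch, [parallel branches], outlet branch]
sloop = ['sb0', ['sb1', 'sb2', 'sb3'], 'sb4']
dloop = ['db0', ['db1', 'db2', 'db3'], 'db4']
loop = makeplantloop(idf, "p_loop", sloop, dloop)
print(loop.Name)  # -> p_loop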
16,282
def get_matches(expr_lst, ts): logger_ts.info("enter get_matches") new_ts = [] idxs = [] match = False try: for idx, ts_data in enumerate(ts): for expr in expr_lst: try: val = ts_data[expr[0]] # NOTE: the comparison literal was lost in extraction; the simple # equality case is assumed here, with expr shaped as [key, operator, value] if expr[1] == '==' and val == expr[2]: match = True else: match = False break except KeyError as e: logger_ts.warn("get_matches: KeyError: getting value from TimeSeries object, {}, {}".format(expr, e)) match = False except IndexError as e: logger_ts.warn("get_matches: IndexError: getting value from TimeSeries object, {}, {}".format(expr, e)) match = False if match: idxs.append(idx) new_ts.append(ts_data) except AttributeError as e: logger_ts.debug("get_matches: AttributeError: unable to get expression matches, {}, {}".format(type(ts), e)) print("Error: Timeseries is an invalid data type") if not new_ts: print("No matches found for that expression") else: print("Found {} matches from {} columns".format(len(new_ts), len(ts))) logger_ts.info("exit get_matches") return new_ts, idxs
Get a list of TimeSeries objects that match the given expression. :param list expr_lst: Expression :param list ts: TimeSeries :return list new_ts: Matched time series objects :return list idxs: Indices of matched objects
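A quick usage sketch of the function above; the keys and values are made up, and the [key, operator, value] expression shape follows the equality case assumed in the body.

ts = [
    {"archiveType": "marine sediment"},
    {"archiveType": "glacier ice"},
]
expr_lst = [["archiveType", "==", "marine sediment"]]
matches, idxs = get_matches(expr_lst, ts)
print(idxs)  # -> [0]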
16,283
def GET_account_history(self, path_info, account_addr): if not check_account_address(account_addr): # error text assumed; the original literal was elided return self._reply_json({'error': 'Invalid account address'}, status_code=400) qs_values = path_info['qs_values'] page = qs_values.get('page', None) if page is None: page = "0" try: assert len(page) < 10 page = int(page) assert page >= 0 except: return self._reply_json({'error': 'Invalid page'}, status_code=400) blockstackd_url = get_blockstackd_url() res = blockstackd_client.get_account_history_page(account_addr, page, hostport=blockstackd_url) if json_is_error(res): log.error("Failed to list account history for {} at page {}: {}".format(account_addr, page, res['error'])) return self._reply_json({'error': 'Failed to list account history for {} at page {}'.format(account_addr, page)}, status_code=res.get('http_status', 500)) self._reply_json(res) return
Get the history of an account at a given page. Returns [{...}]
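The page handling above is the interesting part; here is the same validation as a standalone sketch (the helper name is made up).

def parse_page(raw):
    # accept only short, non-negative integer strings; None signals a bad value
    try:
        assert len(raw) < 10
        page = int(raw)
        assert page >= 0
        return page
    except (AssertionError, ValueError, TypeError):
        return None

print(parse_page("3"))   # -> 3
print(parse_page("-1"))  # -> None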
16,284
def _swclock_to_hwclock(): res = __salt__['cmd.run_all'](['hwclock', '--systohc'], python_shell=False) if res['retcode'] != 0: # error text assumed; the original literal was elided msg = 'swclock to hwclock failed: {0}'.format(res['stderr']) raise CommandExecutionError(msg) return True
Set hardware clock to value of software clock.
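Outside of Salt, the same effect can be sketched with subprocess; the hwclock flag mirrors the fill above and is an assumption.

import subprocess

res = subprocess.run(["hwclock", "--systohc"], capture_output=True, text=True)
if res.returncode != 0:
    raise RuntimeError("hwclock failed: %s" % res.stderr)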
16,285
def commits(self, drop_collections=True): base_df = self._data if drop_collections is True: out_df = self._drop_collections(base_df) else: out_df = base_df return out_df
Returns a table of git log data, with "commits" as rows/observations. :param bool drop_collections: Defaults to True. Indicates whether columns with lists/dicts/sets will be dropped. :return: pandas.DataFrame
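A hypothetical usage sketch; `log` stands in for an instance of the owning class, which is not shown in this snippet.

df = log.commits()                             # collection-valued columns dropped
df_full = log.commits(drop_collections=False)  # keep list/dict/set columns
print(df.shape)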
16,286
def get_vnetwork_portgroups_output_vnetwork_pgs_name(self, **kwargs): config = ET.Element("config") get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups") config = get_vnetwork_portgroups output = ET.SubElement(get_vnetwork_portgroups, "output") vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs") name = ET.SubElement(vnetwork_pgs, "name") name.text = kwargs.pop('name') callback = kwargs.pop('callback', self._callback) return callback(config)
Auto Generated Code
16,287
def replace(self, rdata, filtered=False): rdata_ex = pexdoc.exh.addai("rdata") rows_ex = pexdoc.exh.addex( ValueError, "Number of rows mismatch between input and replacement data" ) cols_ex = pexdoc.exh.addex( ValueError, "Number of columns mismatch between input and replacement data" ) rdata_ex(any([len(item) != len(rdata[0]) for item in rdata])) col_num = len(self._data[0]) - 1 odata = self._apply_filter(filtered) # use the column filter when a column-filtered replace was requested cfilter = ( self._cfilter if filtered in [True, "B", "b", "C", "c"] else self._header ) col_index = [ self._header_upper.index(col_id.upper()) if isinstance(col_id, str) else col_id for col_id in cfilter ] rows_ex(len(odata) != len(rdata)) cols_ex(len(odata[0]) != len(rdata[0])) df_tuples = self._format_rfilter(self._rfilter) rnum = 0 for row in self._data: if (not filtered) or ( filtered and all([row[col_num] in col_value for col_num, col_value in df_tuples]) ): for col_num, new_data in zip(col_index, rdata[rnum]): row[col_num] = new_data rnum = rnum + 1
r""" Replace data. :param rdata: Replacement data :type rdata: list of lists :param filtered: Filtering type :type filtered: :ref:`CsvFiltered` .. [[[cog cog.out(exobj.get_sphinx_autodoc(width=63)) ]]] .. Auto-generated exceptions documentation for .. pcsv.csv_file.CsvFile.replace :raises: * RuntimeError (Argument \`filtered\` is not valid) * RuntimeError (Argument \`rdata\` is not valid) * ValueError (Number of columns mismatch between input and replacement data) * ValueError (Number of rows mismatch between input and replacement data) .. [[[end]]]
16,288
def init_virtualenv(self): if 'VIRTUAL_ENV' not in os.environ: # not running inside a virtualenv return if sys.executable.startswith(os.environ['VIRTUAL_ENV']): # the interpreter itself belongs to the virtualenv; nothing to do return # the computation of `virtual_env` was lost in extraction; the standard # POSIX site-packages layout is assumed here virtual_env = os.path.join( os.environ['VIRTUAL_ENV'], 'lib', 'python%d.%d' % sys.version_info[:2], 'site-packages') import site sys.path.insert(0, virtual_env) site.addsitedir(virtual_env)
Add a virtualenv to sys.path so the user can import modules from it. This isn't perfect: it doesn't use the Python interpreter with which the virtualenv was built, and it ignores the --no-site-packages option. A warning will appear suggesting the user installs IPython in the virtualenv, but for many cases, it probably works well enough. Adapted from code snippets online. http://blog.ufsoft.org/2009/1/29/ipython-and-virtualenv
16,289
import numpy as np

def cdf(arr, pos=None): r = (arr.min(), arr.max()) hist, bin_edges = np.histogram(arr, bins=2 * int(r[1] - r[0]), range=r) # normalize the histogram so it sums to 1 hist = np.asfarray(hist) / hist.sum() cdf = np.cumsum(hist) if pos is None: return cdf # first bin whose cumulative density exceeds `pos` i = np.argmax(cdf > pos) return bin_edges[i]
Return the cumulative density function of a given array or its intensity at a given position (0-1)
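A short usage sketch of cdf above with synthetic data.

import numpy as np

arr = np.random.randint(0, 256, size=(64, 64))  # synthetic 8-bit image
full = cdf(arr)          # full cumulative density function
thresh = cdf(arr, 0.5)   # intensity below which ~half of the values fall
print(thresh)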
16,290
def basic(username, password): # none() is defined elsewhere in this module and, by all appearances, # clears any previously configured authentication first none() _config.username = username _config.password = password
Add basic authentication to the requests of the clients.
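Usage is a single call before issuing requests; the credentials here are placeholders.

basic("alice", "s3cret")  # subsequent client requests authenticate as alice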
16,291
def _notf(ins): # fetch the 48-bit float operand onto the stack output = _float_oper(ins.quad[2]) # the exact literals were lost in extraction; 'call __NOTF' / 'push af' # are assumed from the sibling float operations in this backend output.append('call __NOTF') output.append('push af') REQUIRES.add('notf.asm') return output
Negates top of the stack (48 bits)
16,292
def short_codes(self): if self._short_codes is None: self._short_codes = ShortCodeList(self._version, account_sid=self._solution['account_sid'], ) return self._short_codes
Access the short_codes :returns: twilio.rest.api.v2010.account.short_code.ShortCodeList :rtype: twilio.rest.api.v2010.account.short_code.ShortCodeList
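A hypothetical usage sketch with the standard Twilio Python client; the SID and token are placeholders.

from twilio.rest import Client

client = Client("ACxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", "your_auth_token")
for sc in client.api.account.short_codes.list():
    print(sc.short_code)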
16,293
def iter_values(self): yVal = self._element.yVal if yVal is None: return for idx in range(yVal.ptCount_val): yield yVal.pt_v(idx)
Generate each float Y value in this series, in the order they appear on the chart. A value of `None` represents a missing Y value (corresponding to a blank Excel cell).
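A brief usage sketch; `series` is assumed to be one of these chart series objects.

ys = list(series.iter_values())  # e.g. [1.2, None, 3.4]; None marks a blank cell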
16,294
def visit_list(self, node, *args, **kwargs): rv = self.visit(node, *args, **kwargs) if not isinstance(rv, list): rv = [rv] return rv
Transformers may return lists in some places, so this method can be used to enforce a list as the return value.
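The pattern is easy to show standalone (the names below are made up):

class Normalizer:
    def visit(self, node):
        # a visitor may return a single node or a list of nodes
        return [node, node] if node == "dup" else node

    def visit_list(self, node):
        rv = self.visit(node)
        if not isinstance(rv, list):
            rv = [rv]
        return rv

n = Normalizer()
print(n.visit_list("x"))    # -> ['x']
print(n.visit_list("dup"))  # -> ['dup', 'dup']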
16,295
def _extract_cookies(self, response: Response): self._cookie_jar.extract_cookies( response, response.request, self._get_cookie_referrer_host() )
Load the cookie headers from the Response.
16,296
def format_price(self, price): price = api.to_float(price, default=0.0) dm = self.get_decimal_mark() cur = self.get_currency_symbol() price = "%s %.2f" % (cur, price) return price.replace(".", dm)
Formats the price with the set decimal mark and currency
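The formatting rule is simple enough to sketch standalone (the currency and decimal mark are made-up settings):

def format_price(price, cur="EUR", dm=","):
    return ("%s %.2f" % (cur, price)).replace(".", dm)

print(format_price(1234.5))  # -> EUR 1234,50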
16,297
def is_list(self, key): data = self.model.get_data() return isinstance(data[key], (tuple, list))
Return True if variable is a list or a tuple
16,298
def listThirdPartyLibs(self, configuration='Development'): # default configuration name assumed; the original literal was elided interrogator = self._getUE4BuildInterrogator() return interrogator.list(self.getPlatformIdentifier(), configuration, self._getLibraryOverrides())
Lists the supported Unreal-bundled third-party libraries
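A hypothetical usage sketch, assuming ue4cli's manager factory:

from ue4cli import UnrealManagerFactory

manager = UnrealManagerFactory.create()
print(manager.listThirdPartyLibs())  # uses the default configuration assumed above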
16,299
def keyPressEvent(self, event): if event.key() in (Qt.Key_Enter, Qt.Key_Return): self.queryEntered.emit(self.query()) super(XOrbQuickFilterWidget, self).keyPressEvent(event)
Listens for the Enter/Return key press and emits the current query.
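A hypothetical usage sketch: connect to the signal emitted on Enter/Return (widget construction details omitted).

widget = XOrbQuickFilterWidget()
widget.queryEntered.connect(lambda q: print("query committed:", q))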