Columns: Unnamed: 0 (int64, values 0 to 389k), code (string, lengths 26 to 79.6k), docstring (string, lengths 1 to 46.9k)
382,200
def vcf2cytosure(store, institute_id, case_name, individual_id):
    institute_obj, case_obj = institute_and_case(store, institute_id, case_name)
    for individual in case_obj['individuals']:
        if individual['individual_id'] == individual_id:
            individual_obj = individual
    return (individual_obj['display_name'], individual_obj['vcf2cytosure'])
vcf2cytosure CGH file for individual.
382,201
def get_load_time(self, request_type=None, content_type=None, status_code=None,
                  asynchronous=True, **kwargs):
    entries = self.filter_entries(
        request_type=request_type,
        content_type=content_type,
        status_code=status_code
    )
    if "async" in kwargs:
        asynchronous = kwargs["async"]
    if not asynchronous:
        time = 0
        for entry in entries:
            time += entry["time"]
        return time
    else:
        return len(self.parser.create_asset_timeline(entries))
This method can return the TOTAL load time for the assets or the ACTUAL load time, the difference being that the actual load time takes asynchronous transactions into account. So, if you want the total load time, set asynchronous=False. EXAMPLE: I want to know the load time for images on a page that has two images, each of which took 2 seconds to download, but the browser downloaded them at the same time. self.get_load_time(content_type='image') (returns 2) self.get_load_time(content_type='image', asynchronous=False) (returns 4)
382,202
def _invoke(self, arguments, autoescape):
    rv = self._func(*arguments)
    if autoescape:
        rv = Markup(rv)
    return rv
This method is being swapped out by the async implementation.
382,203
def _match_item(item, any_all=any, ignore_case=False, normalize_values=False, **kwargs):
    it = get_item_tags(item)
    return any_all(
        _match_field(
            get_field(it, field), pattern,
            ignore_case=ignore_case, normalize_values=normalize_values
        )
        for field, patterns in kwargs.items()
        for pattern in patterns
    )
Match items by metadata. Note: Metadata values are lowercased when ``normalize_values`` is ``True``, so ``ignore_case`` is automatically set to ``True``. Parameters: item (~collections.abc.Mapping, str, os.PathLike): Item dict or filepath. any_all (callable): A callable to determine if any or all filters must match to match item. Expected values :obj:`any` (default) or :obj:`all`. ignore_case (bool): Perform case-insensitive matching. Default: ``False`` normalize_values (bool): Normalize metadata values to remove common differences between sources. Default: ``False`` kwargs (list): Lists of values to match the given metadata field. Returns: bool: True if matched, False if not.
382,204
def to_dict(self, prefix=None):
    conf_obj = dict(self)
    return self.__dictify__(conf_obj, prefix)
Recursively converts the Config object into a valid dictionary. :param prefix: A string to optionally prefix all key elements in the returned dictionary.
382,205
async def set_max_ch_setpoint(self, temperature, timeout=OTGW_DEFAULT_TIMEOUT):
    cmd = OTGW_CMD_SET_MAX
    status = {}
    ret = await self._wait_for_cmd(cmd, temperature, timeout)
    if ret is None:
        return
    ret = float(ret)
    status[DATA_MAX_CH_SETPOINT] = ret
    self._update_status(status)
    return ret
Set the maximum central heating setpoint. This command is only available with boilers that support this function. Return the newly accepted setpoint, or None on failure. This method is a coroutine.
382,206
def show_tip(self, point, tip, wrapped_tiplines):
    # Don't attempt to show it if it's already visible and the text
    # to be displayed is the same as the one displayed before.
    if self.isVisible():
        if self.tip == tip:
            return True
        else:
            self.hide()

    # Attempt to find the cursor position at which to show the call tip.
    text_edit = self._text_edit
    cursor = text_edit.textCursor()
    search_pos = cursor.position() - 1
    self._start_position, _ = self._find_parenthesis(search_pos, forward=False)
    if self._start_position == -1:
        return False

    if self.hide_timer_on:
        self._hide_timer.stop()
        # Decide how long to show the calltip based on the amount of text.
        if len(wrapped_tiplines) == 1:
            args = wrapped_tiplines[0].split('(')[1]
            nargs = len(args.split(','))
            if nargs == 1:
                hide_time = 1400
            elif nargs == 2:
                hide_time = 1600
            else:
                hide_time = 1800
        elif len(wrapped_tiplines) == 2:
            args1 = wrapped_tiplines[1].strip()
            nargs1 = len(args1.split(','))
            if nargs1 == 1:
                hide_time = 2500
            else:
                hide_time = 2800
        else:
            hide_time = 3500
        self._hide_timer.start(hide_time, self)

    # Set the text and resize the widget accordingly.
    self.tip = tip
    self.setText(tip)
    self.resize(self.sizeHint())

    # Locate and show the widget. Place the tip below the current line
    # unless it would be off the screen.
    padding = 3
    cursor_rect = text_edit.cursorRect(cursor)
    screen_rect = self.app.desktop().screenGeometry(text_edit)
    point.setY(point.y() + padding)
    tip_height = self.size().height()
    tip_width = self.size().width()

    vertical = 'bottom'
    horizontal = 'Right'
    if point.y() + tip_height > screen_rect.height() + screen_rect.y():
        point_ = text_edit.mapToGlobal(cursor_rect.topRight())
        # If the tip is still off screen, check whether the point is in the
        # top or bottom half of the screen.
        if point_.y() - tip_height < padding:
            if 2*point.y() < screen_rect.height():
                vertical = 'bottom'
            else:
                vertical = 'top'
        else:
            vertical = 'top'
    if point.x() + tip_width > screen_rect.width() + screen_rect.x():
        point_ = text_edit.mapToGlobal(cursor_rect.topRight())
        # If the tip is still off screen, check whether the point is in the
        # right or left half of the screen.
        if point_.x() - tip_width < padding:
            if 2*point.x() < screen_rect.width():
                horizontal = 'Right'
            else:
                horizontal = 'Left'
        else:
            horizontal = 'Left'
    pos = getattr(cursor_rect, '%s%s' % (vertical, horizontal))
    adjusted_point = text_edit.mapToGlobal(pos())
    if vertical == 'top':
        point.setY(adjusted_point.y() - tip_height - padding)
    if horizontal == 'Left':
        point.setX(adjusted_point.x() - tip_width - padding)

    self.move(point)
    self.show()
    return True
Attempts to show the specified tip at the current cursor location.
382,207
def absent(name, auth=None):
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': ''}

    __salt__['neutronng.setup_clouds'](auth)

    subnet = __salt__['neutronng.subnet_get'](name=name)

    if subnet:
        if __opts__['test'] is True:
            ret['result'] = None
            ret['changes'] = {'id': subnet.id}
            ret['comment'] = 'Subnet will be deleted.'
            return ret

        __salt__['neutronng.subnet_delete'](name=subnet)
        ret['changes']['id'] = name
        ret['comment'] = 'Deleted subnet'

    return ret
Ensure a subnet does not exist name Name of the subnet
382,208
def as_select(self, _items=None, **kwargs):
    attrs = self.extra.copy()
    attrs.update(kwargs)
    attrs['name'] = self.name
    if not self.optional:
        attrs['required'] = True
    html = [u'<select %s>' % get_html_attrs(attrs)]
    values = [self.to_string(**attrs)] or []
    items = _items or self.items
    for item in items:
        if isinstance(item, list):
            html.extend(self._render_optgroup(item, values))
        else:
            html.append(self._render_option(item, values))
    html.append(u'</select>')
    return Markup(u'\n'.join(html))
Render the field as a `<select>` element. :param **kwargs: Named parameters used to generate the HTML attributes of each item. It follows the same rules as `get_html_attrs`
382,209
def get_thunk_env(self, k):
    if k not in self.__items:
        # error message reconstructed; the original string was lost in extraction
        raise exceptions.EvaluationError('Unknown key %r in tuple %r' % (k, self))
    x = self.__items[k]
    env = self.env(self)
    if isinstance(x, framework.BindableThunk):
        return x.bind(self.__parent_env), env
    return x, env
Return the thunk AND the environment for validating it, for the given key. There might be different envs in case the thunk comes from a different (composed) tuple. If the thunk needs its environment bound on retrieval, that will be done here.
382,210
def update_module(self, modname, underlined=None):
    try:
        pymodule = self.project.get_module(modname)
        self._add_names(pymodule, modname, underlined)
    except exceptions.ModuleNotFoundError:
        pass
Update the cache for global names in `modname` module. `modname` is the name of a module.
382,211
def check_model(depth, res, aniso, epermH, epermV, mpermH, mpermV, xdirect,
                verb):
    global _min_res

    # Check depth
    if depth is None:
        depth = []
    depth = _check_var(depth, float, 1, 'depth')

    # Add -infinity at the beginning of depth, if not present
    if depth.size == 0:
        depth = np.array([-np.infty, ])
    elif depth[0] != -np.infty:
        depth = np.insert(depth, 0, -np.infty)

    # Ensure depth is increasing
    if np.any(depth[1:] - depth[:-1] < 0):
        print('* ERROR   :: <depth> must be increasing; ' +
              'provided: ' + _strvar(depth))
        raise ValueError('depth')

    # Check if res is a dict (for user-defined eta/zeta functions)
    if isinstance(res, dict):
        res_dict, res = res, res['res']
    else:
        res_dict = False

    # Cast and check resistivity
    res = _check_var(res, float, 1, 'res', depth.shape)
    res = _check_min(res, _min_res, 'res', 'Ohm.m', verb)

    def check_inp(var, name, min_val):
        # Parameter check; defaults to an array of ones
        if var is None:
            return np.ones(depth.size)
        else:
            param = _check_var(var, float, 1, name, depth.shape)
            if name == 'aniso':  # convert aniso into vertical resistivity
                param = param**2*res
            param = _check_min(param, min_val, 'Parameter ' + name, '', verb)
            if name == 'aniso':  # convert vertical resistivity back to aniso
                param = np.sqrt(param/res)
            return param

    # Check anisotropy and permittivities/permeabilities
    aniso = check_inp(aniso, 'aniso', _min_res)
    epermH = check_inp(epermH, 'epermH', 0.0)
    epermV = check_inp(epermV, 'epermV', 0.0)
    mpermH = check_inp(mpermH, 'mpermH', 0.0)
    mpermV = check_inp(mpermV, 'mpermV', 0.0)

    if verb > 2:
        print("   depth  [m] : ", _strvar(depth[1:]))
        print("   res    [Ohm.m] : ", _strvar(res))
        print("   aniso  [-] : ", _strvar(aniso))
        print("   epermH [-] : ", _strvar(epermH))
        print("   epermV [-] : ", _strvar(epermV))
        print("   mpermH [-] : ", _strvar(mpermH))
        print("   mpermV [-] : ", _strvar(mpermV))

    # Check if the medium is a homogeneous fullspace
    isores = (res - res[0] == 0).all()*(aniso - aniso[0] == 0).all()
    isoep = (epermH - epermH[0] == 0).all()*(epermV - epermV[0] == 0).all()
    isomp = (mpermH - mpermH[0] == 0).all()*(mpermV - mpermV[0] == 0).all()
    isfullspace = isores*isoep*isomp

    if res_dict:
        isfullspace = False
        for key, value in res_dict.items():
            if key not in ['res', 'func_eta', 'func_zeta']:
                res_dict[key] = check_inp(value, key, None)
        res_dict['res'] = res
        res = res_dict

    if verb > 2 and isfullspace:
        if xdirect:
            print("\n> MODEL IS A FULLSPACE; returning analytical " +
                  "frequency-domain solution")
        else:
            print("\n> MODEL IS A FULLSPACE")

    if verb > 2:
        if xdirect is None:
            print("   direct field : Not calculated (secondary field)")
        elif xdirect:
            print("   direct field : Calc. in frequency domain")
        else:
            print("   direct field : Calc. in wavenumber domain")

    return depth, res, aniso, epermH, epermV, mpermH, mpermV, isfullspace
r"""Check the model: depth and corresponding layer parameters. This check-function is called from one of the modelling routines in :mod:`model`. Consult these modelling routines for a detailed description of the input parameters. Parameters ---------- depth : list Absolute layer interfaces z (m); #depth = #res - 1 (excluding +/- infinity). res : array_like Horizontal resistivities rho_h (Ohm.m); #res = #depth + 1. aniso : array_like Anisotropies lambda = sqrt(rho_v/rho_h) (-); #aniso = #res. epermH, epermV : array_like Relative horizontal/vertical electric permittivities epsilon_h/epsilon_v (-); #epermH = #epermV = #res. mpermH, mpermV : array_like Relative horizontal/vertical magnetic permeabilities mu_h/mu_v (-); #mpermH = #mpermV = #res. xdirect : bool, optional If True and source and receiver are in the same layer, the direct field is calculated analytically in the frequency domain, if False it is calculated in the wavenumber domain. verb : {0, 1, 2, 3, 4} Level of verbosity. Returns ------- depth : array Depths of layer interfaces, adds -infty at beginning if not present. res : array As input, checked for size. aniso : array As input, checked for size. If None, defaults to an array of ones. epermH, epermV : array_like As input, checked for size. If None, defaults to an array of ones. mpermH, mpermV : array_like As input, checked for size. If None, defaults to an array of ones. isfullspace : bool If True, the model is a fullspace (res, aniso, epermH, epermV, mpermM, and mpermV are in all layers the same).
382,212
def option(self, section, option):
    if self.config.has_section(section):
        if self.config.has_option(section, option):
            return (True, self.config.get(section, option))
        # error strings reconstructed; originals lost in extraction
        return (False, 'No option "' + option + '" found')
    return (False, 'No section "' + section + '" found')
Returns the value of the option
382,213
def get_input_info_dict(self, signature=None):
    return self._spec.get_input_info_dict(signature=signature, tags=self._tags)
Describes the inputs required by a signature. Args: signature: A string with the signature to get inputs information for. If None, the default signature is used if defined. Returns: The result of ModuleSpec.get_input_info_dict() for the given signature, and the graph variant selected by `tags` when this Module was initialized. Raises: KeyError: if there is no such signature.
382,214
def parse_footnote(document, container, elem):
    "Parse the footnote element."
    _rid = elem.attrib[_name('{{{w}}}id')]
    foot = doc.Footnote(_rid)
    container.elements.append(foot)
Parse the footnote element.
382,215
def get_relationship(self, from_object, relation_type):
    for rel in self.relationships.get(relation_type, ()):
        if rel.from_object is from_object:
            return rel
    raise KeyError(relation_type)
Return a relationship, or raise KeyError if none matches.
382,216
def compact(paths):
    sep = os.path.sep
    short_paths = set()
    for path in sorted(paths, key=len):
        should_skip = any(
            path.startswith(shortpath.rstrip("*")) and
            path[len(shortpath.rstrip("*").rstrip(sep))] == sep
            for shortpath in short_paths
        )
        if not should_skip:
            short_paths.add(path)
    return short_paths
Compact a path set to contain the minimal number of paths necessary to contain all paths in the set. If /a/path/ and /a/path/to/a/file.txt are both in the set, leave only the shorter path.
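A quick sketch of the behavior (the sample paths are hypothetical):

compact({'/a/path/', '/a/path/to/a/file.txt', '/b/other'})
# -> {'/a/path/', '/b/other'}; the nested file path is dropped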
382,217
def _prepare_find(cls, *args, **kw):
    cls, collection, query, options = cls._prepare_query(
        cls.FIND_MAPPING,
        cls.FIND_OPTIONS,
        *args,
        **kw
    )

    if 'await' in options:
        raise TypeError("Await is hard-deprecated as reserved keyword in Python 3.7, use wait instead.")

    if 'cursor_type' in options and {'tail', 'wait'} & set(options):
        raise TypeError("Can not combine cursor_type and tail/wait arguments.")

    elif options.pop('tail', False):
        options['cursor_type'] = CursorType.TAILABLE_AWAIT if options.pop('wait', True) else CursorType.TAILABLE

    elif 'wait' in options:
        raise TypeError("Wait option only applies to tailing cursors.")

    modifiers = options.get('modifiers', dict())
    if 'max_time_ms' in options:
        modifiers['$maxTimeMS'] = options.pop('max_time_ms')
    if modifiers:
        options['modifiers'] = modifiers

    return cls, collection, query, options
Execute a find and return the resulting queryset using combined plain and parametric query generation. Additionally, performs argument case normalization, refer to the `_prepare_query` method's docstring.
382,218
def _ensure_someone_took_responsability(self, state, _responses):
    if not state.so_took_responsability:
        # debug message reconstructed; the original string was lost in extraction
        self.debug('nobody took responsability for %r, restarting ourselves',
                   state.factory.descriptor_type)
        return self._restart_yourself()
Called as a callback for sending *died* notifications to all the partners. Check if someone has offered to restart the agent. If yes, setup expiration call and wait for report. If no, initiate doing it on our own.
382,219
def put(self, id):
    try:
        object_ = json_util.loads(self.request.body)
        toa = self.request.headers.get("Caesium-TOA", None)
        obj_check = yield self.client.find_one_by_id(id)
        if not obj_check:
            self.raise_error(404, "Resource not found: %s" % id)
            self.finish()
            return

        if toa:
            stack = AsyncSchedulableDocumentRevisionStack(self.client.collection_name, self.settings, master_id=id)
            revision_id = yield stack.push(object_, int(toa), meta=self._get_meta_data())

            if isinstance(revision_id, str):
                self.set_header("Caesium-TOA", toa)
                object_["id"] = id
                self.return_resource(object_)
            else:
                self.raise_error(404, "Revision not scheduled for object: %s" % id)
        else:
            if object_.get("_id"):
                del object_["_id"]

            response = yield self.client.update(id, object_)

            if response.get("updatedExisting"):
                object_ = yield self.client.find_one_by_id(id)
                self.return_resource(object_)
            else:
                self.raise_error(404, "Resource not found: %s" % id)

    except ValidationError as vex:
        self.logger.error("%s validation error" % self.object_name, vex)
        self.raise_error(400, "Your %s cannot be updated because it is missing required fields, see docs" % self.object_name)
    except ValueError as ex:
        self.raise_error(400, "Invalid JSON Body, check formatting. %s" % ex[0])
    except InvalidId as ex:
        self.raise_error(message="Your ID is malformed: %s" % id)
    except Exception as ex:
        self.logger.error(ex)
        self.raise_error()
Update a resource by bson ObjectId :returns: json string representation :rtype: JSON
382,220
def flavor_delete(self, flavor_id):
    nt_ks = self.compute_conn
    nt_ks.flavors.delete(flavor_id)
    return 'Flavor deleted: {0}'.format(flavor_id)
Delete a flavor
382,221
def show_network(kwargs=None, call=None):
    if call != 'function':
        raise SaltCloudSystemExit(
            'The show_network function must be called with -f or --function.'
        )
    if not kwargs or 'name' not in kwargs:
        log.error(
            'Must specify name of network.'
        )
        return False

    conn = get_conn()
    return _expand_item(conn.ex_get_network(kwargs['name']))
Show the details of an existing network. CLI Example: .. code-block:: bash salt-cloud -f show_network gce name=mynet
382,222
def maintain_leases(self):
    while self._manager.is_active and not self._stop_event.is_set():
        # Determine the appropriate duration for the lease.
        p99 = self._manager.ack_histogram.percentile(99)
        _LOGGER.debug("The current p99 value is %d seconds.", p99)

        # Drop any leases that are beyond the max lease time.
        leased_messages = copy.copy(self._leased_messages)
        cutoff = time.time() - self._manager.flow_control.max_lease_duration
        to_drop = [
            requests.DropRequest(ack_id, item.size)
            for ack_id, item in six.iteritems(leased_messages)
            if item.added_time < cutoff
        ]
        if to_drop:
            _LOGGER.warning(
                "Dropping %s items because they were leased too long.", len(to_drop)
            )
            self._manager.dispatcher.drop(to_drop)

        for item in to_drop:
            leased_messages.pop(item.ack_id)

        ack_ids = leased_messages.keys()
        if ack_ids:
            _LOGGER.debug("Renewing lease for %d ack IDs.", len(ack_ids))
            self._manager.dispatcher.modify_ack_deadline(
                [requests.ModAckRequest(ack_id, p99) for ack_id in ack_ids]
            )

        # Sleep for a bit (with jitter) before renewing the leases again.
        snooze = random.uniform(0.0, p99 * 0.9)
        _LOGGER.debug("Snoozing lease management for %f seconds.", snooze)
        self._stop_event.wait(timeout=snooze)

    _LOGGER.info("%s exiting.", _LEASE_WORKER_NAME)
Maintain all of the leases being managed. This method modifies the ack deadline for all of the managed ack IDs, then waits for most of that time (but with jitter), and repeats.
382,223
def p_IndexTypes(self, p):
    n = len(p)
    if n == 4:
        p[0] = p[1] + [p[3]]
    elif n == 2:
        p[0] = [p[1]]
IndexTypes : IndexTypes ',' IndexType | IndexType
382,224
async def rcpt(
    self,
    recipient: str,
    options: Iterable[str] = None,
    timeout: DefaultNumType = _default,
) -> SMTPResponse:
    await self._ehlo_or_helo_if_needed()

    if options is None:
        options = []
    options_bytes = [option.encode("ascii") for option in options]
    to = b"TO:" + quote_address(recipient).encode("ascii")

    async with self._command_lock:
        response = await self.execute_command(
            b"RCPT", to, *options_bytes, timeout=timeout
        )

    success_codes = (SMTPStatus.completed, SMTPStatus.will_forward)
    if response.code not in success_codes:
        raise SMTPRecipientRefused(response.code, response.message, recipient)

    return response
Send an SMTP RCPT command, which specifies a single recipient for the message. This command is sent once per recipient and must be preceded by 'MAIL'. :raises SMTPRecipientRefused: on unexpected server response code
382,225
def load(fileobj):
    with gzip.GzipFile(fileobj=fileobj, mode='rb') as z:
        submission = Submission(metadata=json.loads(z.readline()))
        for line in z:
            token_id, token = json.loads(line)
            # key reconstructed; the original string was lost in extraction
            submission['tokens'][token_id] = token
    return submission
Load the submission from a file-like object :param fileobj: File-like object :return: the loaded submission
382,226
def _gei8(ins):
    output = _8bit_oper(ins.quad[2], ins.quad[3], reversed_=True)
    output.append('call __LEI8')
    output.append('push af')
    REQUIRES.add('lei8.asm')
    return output
Compares & pops top 2 operands out of the stack, and checks if the 1st operand >= 2nd operand (top of the stack). Pushes 0 if False, 1 if True. 8 bit signed version
382,227
def update_subtask(client, subtask_id, revision, title=None, completed=None):
    if title is not None:
        _check_title_length(title, client.api)
    data = {
        'revision': int(revision),
        'title': title,
        'completed': completed,
    }
    data = {key: value for key, value in data.items() if value is not None}
    endpoint = '/'.join([client.api.Endpoints.SUBTASKS, str(subtask_id)])
    response = client.authenticated_request(endpoint, 'PATCH', data=data)
    return response.json()
Updates the subtask with the given ID See https://developer.wunderlist.com/documentation/endpoints/subtask for detailed parameter information
382,228
def _handle_func_decl(self, node, scope, ctxt, stream):
    self._dlog("handling func decl")

    if node.args is not None:
        for param in node.args.params:
            param.is_func_param = True
        params = self._handle_node(node.args, scope, ctxt, stream)
    else:
        params = functions.ParamListDef([], node.coord)

    func_type = self._handle_node(node.type, scope, ctxt, stream)

    func = functions.Function(func_type, params, scope)

    return func
Handle FuncDecl nodes :node: TODO :scope: TODO :ctxt: TODO :stream: TODO :returns: TODO
382,229
def dinfdistdown(np, ang, fel, slp, src, statsm, distm, edgecontamination, wg,
                 dist, workingdir=None, mpiexedir=None, exedir=None,
                 log_file=None, runtime_file=None, hostfile=None):
    in_params = {'-m': '%s %s' % (TauDEM.convertstatsmethod(statsm),
                                  TauDEM.convertdistmethod(distm))}
    if StringClass.string_match(edgecontamination, 'false') or edgecontamination is False:
        in_params['-nc'] = None
    fname = TauDEM.func_name('dinfdistdown')
    return TauDEM.run(FileClass.get_executable_fullpath(fname, exedir),
                      {'-fel': fel, '-slp': slp, '-ang': ang,
                       '-src': src, '-wg': wg},
                      workingdir,
                      in_params,
                      {'-dd': dist},
                      {'mpipath': mpiexedir, 'hostfile': hostfile, 'n': np},
                      {'logfile': log_file, 'runtimefile': runtime_file})
Run D-inf distance down to stream
382,230
def update_volumes(self):
    # log messages reconstructed; the original strings were lost in extraction
    self.log.debug('Updating EBSVolumes for {}/{}'.format(
        self.account.account_name, self.region))
    ec2 = self.session.resource('ec2', region_name=self.region)

    try:
        existing_volumes = EBSVolume.get_all(self.account, self.region)
        volumes = {x.id: x for x in ec2.volumes.all()}

        for data in list(volumes.values()):
            if data.id in existing_volumes:
                vol = existing_volumes[data.id]
                if vol.update(data):
                    self.log.debug('Change detected for EBSVolume {}/{}/{}'.format(
                        self.account.account_name, self.region, vol.resource.resource_id
                    ))
            else:
                properties = {
                    'create_time': data.create_time,
                    'encrypted': data.encrypted,
                    'iops': data.iops or 0,
                    'kms_key_id': data.kms_key_id,
                    'size': data.size,
                    'state': data.state,
                    'snapshot_id': data.snapshot_id,
                    'volume_type': data.volume_type,
                    'attachments': sorted([x['InstanceId'] for x in data.attachments])
                }
                tags = {t['Key']: t['Value'] for t in data.tags or {}}
                vol = EBSVolume.create(
                    data.id,
                    account_id=self.account.account_id,
                    location=self.region,
                    properties=properties,
                    tags=tags
                )
                self.log.debug('Added new EBSVolume {}/{}/{}'.format(
                    self.account.account_name, self.region, vol.resource.resource_id
                ))
        db.session.commit()

        vk = set(list(volumes.keys()))
        evk = set(list(existing_volumes.keys()))
        try:
            for volumeID in evk - vk:
                db.session.delete(existing_volumes[volumeID].resource)
                self.log.debug('Deleted EBSVolume {} from {}/{}'.format(
                    volumeID, self.account.account_name, self.region
                ))
            db.session.commit()
        except:
            self.log.exception('Failed removing deleted volumes')
            db.session.rollback()
    finally:
        del ec2
Update list of EBS Volumes for the account / region Returns: `None`
382,231
def get_unicode_from_response(r):
    warnings.warn((
        'In requests 3.0, get_unicode_from_response will be removed. For '
        'more information, please see the discussion on issue #2266. (This'
        ' warning should only appear once.)'),
        DeprecationWarning)

    tried_encodings = []

    # Try charset from content-type
    encoding = get_encoding_from_headers(r.headers)

    if encoding:
        try:
            return str(r.content, encoding)
        except UnicodeError:
            tried_encodings.append(encoding)

    # Fall back:
    try:
        return str(r.content, encoding, errors='replace')
    except TypeError:
        return r.content
Returns the requested content back in unicode. :param r: Response object to get unicode content from. Tried: 1. charset from content-type 2. fall back and replace all unicode characters :rtype: str
382,232
def all_to_public(self):
    if "private" not in self.modifiers:
        def public_collection(attribute):
            for key in self.collection(attribute):
                if key not in self.publics:
                    self.publics[key.lower()] = 1
        public_collection("members")
        public_collection("types")
        public_collection("executables")
Sets all members, types and executables in this module as public as long as it doesn't already have the 'private' modifier.
382,233
def output(self, _filename):
    txt = ""
    for c in self.contracts:
        (name, _inheritance, _var, func_summaries, _modif_summaries) = c.get_summary()
        txt += blue("\n+ Contract %s\n"%name)
        public = [(elem[0], (elem[1], elem[2])) for elem in func_summaries]

        collect = collections.defaultdict(list)
        for a,b in public:
            collect[a].append(b)
        public = list(collect.items())

        for contract, functions in public:
            txt += blue("  - From {}\n".format(contract))

            functions = sorted(functions)

            for (function, visi) in functions:
                if visi in ['public', 'external']:
                    txt += green("    - {} ({})\n".format(function, visi))
            for (function, visi) in functions:
                if visi in ['internal', 'private']:
                    txt += magenta("    - {} ({})\n".format(function, visi))
            for (function, visi) in functions:
                if visi not in ['public', 'external', 'internal', 'private']:
                    txt += "    - {}  ({})\n".format(function, visi)

    self.info(txt)
_filename is not used Args: _filename(string)
382,234
def get_handler_classes(self):
    handler_classes = [import_string(handler_cls)
                       for handler_cls in settings.MODERNRPC_HANDLERS]
    if self.protocol == ALL:
        return handler_classes
    else:
        return [cls for cls in handler_classes
                if cls.protocol in ensure_sequence(self.protocol)]
Return the list of handlers to use when receiving RPC requests.
382,235
def set_shared_config(cls, config):
    assert isinstance(config, dict)
    cls._sharedInstance.config.update(config)
    # if an instance already exists, drop it so it is recreated
    # with the updated config
    if cls._sharedInstance.instance:
        cls._sharedInstance.instance = None
This allows setting a config that will be used when calling ``shared_blockchain_instance``, so the configuration can be defined without requiring an instance to actually be created
382,236
def _create_simulated_annealing_expander(schedule):
    def _expander(fringe, iteration, viewer):
        T = schedule(iteration)
        current = fringe[0]
        neighbors = current.expand(local_search=True)

        if viewer:
            viewer.event('expanded', [current], [neighbors])

        if neighbors:
            succ = random.choice(neighbors)
            delta_e = succ.value - current.value
            if delta_e > 0 or random.random() < math.exp(delta_e / T):
                fringe.pop()
                fringe.append(succ)

                if viewer:
                    viewer.event('chosen_node', succ)

    return _expander
Creates an expander that has a random chance to choose a node that is worse than the current (first) node, but that chance decreases with time.
382,237
def generateRecords(self, records):
    # Python 2 codebase
    if self.verbosity > 0:
        print 'Generating', len(records), 'records...'
    for record in records:
        self.generateRecord(record)
Generate multiple records. Refer to definition for generateRecord
382,238
def calculate_checksum_on_stream(
    f,
    algorithm=d1_common.const.DEFAULT_CHECKSUM_ALGORITHM,
    chunk_size=DEFAULT_CHUNK_SIZE,
):
    checksum_calc = get_checksum_calculator_by_dataone_designator(algorithm)
    while True:
        chunk = f.read(chunk_size)
        if not chunk:
            break
        checksum_calc.update(chunk)
    return checksum_calc.hexdigest()
Calculate the checksum of a stream. Args: f: file-like object Only requirement is a ``read()`` method that returns ``bytes``. algorithm: str Checksum algorithm, ``MD5`` or ``SHA1`` / ``SHA-1``. chunk_size : int Number of bytes to read from the file and add to the checksum at a time. Returns: str : Checksum as a hexadecimal string, with length decided by the algorithm.
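Since only a read() method is required, an in-memory stream works; a minimal usage sketch (the sample bytes are hypothetical):

import io
hexdigest = calculate_checksum_on_stream(io.BytesIO(b'hello'))  # MD5 hex digest by default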
382,239
def _issubclass_Union_rec(subclass, superclass, bound_Generic, bound_typevars,
                          bound_typevars_readonly, follow_fwd_refs, _recursion_check):
    super_args = get_Union_params(superclass)
    if super_args is None:
        return is_Union(subclass)
    elif is_Union(subclass):
        sub_args = get_Union_params(subclass)
        if sub_args is None:
            return False
        return all(_issubclass(c, superclass, bound_Generic, bound_typevars,
                               bound_typevars_readonly, follow_fwd_refs, _recursion_check)
                   for c in (sub_args))
    elif isinstance(subclass, TypeVar):
        if subclass in super_args:
            return True
        if subclass.__constraints__:
            return _issubclass(Union[subclass.__constraints__], superclass,
                               bound_Generic, bound_typevars,
                               bound_typevars_readonly, follow_fwd_refs, _recursion_check)
        return False
    else:
        return any(_issubclass(subclass, t, bound_Generic, bound_typevars,
                               bound_typevars_readonly, follow_fwd_refs, _recursion_check)
                   for t in super_args)
Helper for _issubclass_Union.
382,240
def compare_version(value):
    import re
    # pattern reconstructed (comparison operator followed by a dotted
    # version); the original regex string was lost in extraction
    res = re.match(r'^([<>=]+)\s*([\d.]+)(.*)$', str(value).strip())
    if not res:
        return False
    operator, value, _ = res.groups()
    value = tuple(int(x) for x in str(value).split('.'))
    if len(value) < 3:
        value += (0,)
    version = __version_info__
    if operator in ('<', '<='):
        if version < value:
            return True
        if operator != '<=':
            return False
    elif operator in ('>', '>='):
        if version > value:
            return True
        if operator != '>=':
            return False
    return value == version
Determines if the provided version value compares with program version. `value` Version comparison string (e.g. ==1.0, <=1.0, >1.1) Supported operators: <, <=, ==, >, >=
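A short trace of the operator handling, assuming a hypothetical __version_info__ of (1, 1, 0):

compare_version('>=1.0')  # True: (1, 1, 0) > (1, 0, 0)
compare_version('<1.1')   # False: not less, and the operator is strict
compare_version('==1.1')  # True: '1.1' is padded to (1, 1, 0) before comparison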
382,241
def format_float(value):
    string = "{:g}".format(value).replace("e+", "e")
    # remove leading zeros from the exponent, e.g. 1e-05 -> 1e-5
    string = re.sub(r"e(-?)0*(\d+)", r"e\1\2", string)
    return string
Modified form of the 'g' format specifier.
382,242
def get_assessment_offered_bank_session(self, proxy):
    if not self.supports_assessment_offered_bank():
        raise errors.Unimplemented()
    return sessions.AssessmentOfferedBankSession(proxy=proxy, runtime=self._runtime)
Gets the session for retrieving offered assessments to bank mappings. arg: proxy (osid.proxy.Proxy): a proxy return: (osid.assessment.AssessmentOfferedBankSession) - an ``AssessmentOfferedBankSession`` raise: NullArgument - ``proxy`` is ``null`` raise: OperationFailed - unable to complete request raise: Unimplemented - ``supports_assessment_offered_bank()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_assessment_offered_bank()`` is ``true``.*
382,243
def all(cls, include_deactivated=False):
    if include_deactivated:
        resources = yield cls.view.get(include_docs=True)
    else:
        resources = yield cls.active_view.get(include_docs=True)

    result = []
    for resource in resources['rows']:
        # keys reconstructed; the original strings were lost in extraction
        parent = cls.parent_resource(**resource['doc'])
        result.append(cls(parent=parent, **resource['doc']))

    raise Return(result)
Get all sub-resources :param include_deactivated: Include deactivated resources in response :returns: list of SubResource instances :raises: SocketError, CouchException
382,244
def config_logging(debug):
    # format strings reconstructed; the originals were lost in extraction
    if debug:
        logging.basicConfig(level=logging.DEBUG,
                            format='%(asctime)s %(levelname)s %(message)s')
        logging.debug("Debug mode activated")
    else:
        logging.basicConfig(level=logging.INFO,
                            format='%(asctime)s %(levelname)s %(message)s')
Configure logging level and output format
382,245
def update_from(
    self,
    obj=None,
    yaml_env=None,
    yaml_file=None,
    json_env=None,
    json_file=None,
    env_namespace=None,
):
    if obj:
        self.update_from_object(obj)
    if yaml_env:
        self.update_from_yaml_env(yaml_env)
    if yaml_file:
        self.update_from_yaml_file(yaml_file)
    if json_env:
        self.update_from_json_env(json_env)
    if json_file:
        self.update_from_json_file(json_file)
    if env_namespace:
        self.update_from_env_namespace(env_namespace)
Update dict from several sources at once. This is simply a convenience method that can be used as an alternative to making several calls to the various :meth:`~ConfigLoader.update_from_*` methods. Updates will be applied in the order that the parameters are listed below, with each source taking precedence over those before it. :arg obj: Object or name of object, e.g. 'myapp.settings'. :arg yaml_env: Name of an environment variable containing the path to a YAML config file. :arg yaml_file: Path to a YAML config file, or a file-like object. :arg json_env: Name of an environment variable containing the path to a JSON config file. :arg json_file: Path to a JSON config file, or a file-like object. :arg env_namespace: Common prefix of the environment variables containing the desired config.
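A brief usage sketch (the module path and file name are hypothetical); later sources override earlier ones:

loader.update_from(
    obj='myapp.settings',         # lowest precedence
    yaml_file='/etc/myapp.yaml',  # overrides values from obj
    env_namespace='MYAPP',        # highest precedence
)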
382,246
def parse_file_args(file_obj, file_type, resolver=None, **kwargs):
    metadata = {}
    opened = False
    if ('metadata' in kwargs and
            isinstance(kwargs['metadata'], dict)):
        metadata.update(kwargs['metadata'])

    if util.is_file(file_obj) and file_type is None:
        raise ValueError('file_type must be set for file objects!')
    if util.is_string(file_obj):
        try:
            # ... (the path / URL / JSON-string handling that belongs here
            # was truncated in the source dump)
            if len(file_obj.name) > 0:
                resolver = visual.resolvers.FilePathResolver(file_obj.name)
        except BaseException:
            pass

    return file_obj, file_type, metadata, opened, resolver
Given a file_obj and a file_type try to turn them into a file-like object and a lowercase string of file type. Parameters ----------- file_obj: str: if string represents a file path, returns ------------------------------------------- file_obj: an 'rb' opened file object of the path file_type: the extension from the file path str: if string is NOT a path, but has JSON-like special characters ------------------------------------------- file_obj: the same string passed as file_obj file_type: set to 'json' str: string is a valid URL ------------------------------------------- file_obj: an open 'rb' file object with retrieved data file_type: from the extension str: string is not an existing path or a JSON-like object ------------------------------------------- ValueError will be raised as we can't do anything with input file like object: we cannot grab information on file_type automatically ------------------------------------------- ValueError will be raised if file_type is None file_obj: same as input file_type: same as input other object: like a shapely.geometry.Polygon, etc: ------------------------------------------- file_obj: same as input file_type: if None initially, set to the class name (in lower case), otherwise passed through file_type: str, type of file and handled according to above Returns ----------- file_obj: loadable object file_type: str, lower case of the type of file (eg 'stl', 'dae', etc) metadata: dict, any metadata opened: bool, did we open the file or not
382,247
def sequence_content_plot(self):
    data = OrderedDict()
    for s_name in sorted(self.fastqc_data.keys()):
        try:
            data[s_name] = {
                self.avg_bp_from_range(d['base']): d
                for d in self.fastqc_data[s_name]['per_base_sequence_content']
            }
        except KeyError:
            pass
        # Old versions of FastQC give counts instead of percentages
        for b in data[s_name]:
            tot = sum([data[s_name][b][base] for base in ['g', 'a', 't', 'c']])
            if tot == 100.0:
                break
            else:
                for base in ['g', 'a', 't', 'c']:
                    data[s_name][b][base] = (float(data[s_name][b][base]) / float(tot)) * 100.0
    if len(data) == 0:
        log.debug('sequence_content not found in FastQC reports')
        return None

    # the inline HTML template for the heatmap was lost in extraction;
    # it embeds the per-base data as JSON for the client-side plot
    html = '{d}'.format(d=json.dumps(data))

    self.add_section(
        name='Per Base Sequence Content',
        anchor='fastqc_per_base_sequence_content',
        description='The proportion of each base position for which each of the '
                    'four normal DNA bases has been called.',
        helptext='''
        It's worth noting that some types of library will always produce biased sequence
        composition, normally at the start of the read. Libraries produced by priming using
        random hexamers (including nearly all RNA-Seq libraries) and those which were
        fragmented using transposases inherit an intrinsic bias in the positions at which
        reads start. This bias does not concern an absolute sequence, but instead provides
        enrichement of a number of different K-mers at the 5' end of the reads. Whilst this
        is a true technical bias, it isn't something which can be corrected by trimming and
        in most cases doesn't seem to adversely affect the downstream analysis.
        ''',
        content=html
    )
Create the epic HTML for the FastQC sequence content heatmap
382,248
async def emitters(self, key, value):
    while True:
        # keys reconstructed; the original strings were lost in extraction
        await asyncio.sleep(value['schedule'].total_seconds())
        await self.channel_layer.send(key, {
            "type": value['type'],
            "message": value['message']
        })
Single-channel emitter
382,249
def add_edge(self, x, y, label=None):
    self.adjacency_list[x].append((y, label))
    if x not in self.reverse_list[y]:
        self.reverse_list[y].append(x)
Add an edge from distribution *x* to distribution *y* with the given *label*. :type x: :class:`distutils2.database.InstalledDistribution` or :class:`distutils2.database.EggInfoDistribution` :type y: :class:`distutils2.database.InstalledDistribution` or :class:`distutils2.database.EggInfoDistribution` :type label: ``str`` or ``None``
382,250
def dump_simulation(simulation, directory):
    parent_directory = os.path.abspath(os.path.join(directory, os.pardir))
    if not os.path.isdir(parent_directory):
        os.mkdir(parent_directory)
    if not os.path.isdir(directory):
        os.mkdir(directory)
    if os.listdir(directory):
        raise ValueError("Directory '{}' is not empty".format(directory))
    entities_dump_dir = os.path.join(directory, "__entities__")
    os.mkdir(entities_dump_dir)
    for entity in simulation.populations.values():
        _dump_entity(entity, entities_dump_dir)
        for holder in entity._holders.values():
            _dump_holder(holder, directory)
Write simulation data to directory, so that it can be restored later.
382,251
def received_char_count(self, count):
    # split the count into three decimal digits
    n1 = count // 100
    n2 = (count - (n1 * 100)) // 10
    n3 = count - ((n1 * 100) + (n2 * 10))
    # the command prefix string was lost in extraction
    self.send('' + chr(n1) + chr(n2) + chr(n3))
Set received char count limit Args: count: the amount of received characters you want to stop at. Returns: None Raises: None
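As a concrete trace of the digit split (the count is hypothetical):

# count == 345: n1 == 3, n2 == 4, n3 == 5,
# so the device receives chr(3) + chr(4) + chr(5) after the command prefix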
382,252
def requestAvatar(self, avatarId, mind, *interfaces):
    if IBoxReceiver in interfaces:
        return (IBoxReceiver, Adder(avatarId), lambda: None)
    raise NotImplementedError()
Create Adder avatars for any IBoxReceiver request.
382,253
def sign(self, pkey, digest):
    if pkey._only_public:
        raise ValueError("Key has only public part")

    if not pkey._initialized:
        raise ValueError("Key is uninitialized")

    digest_obj = _lib.EVP_get_digestbyname(_byte_string(digest))
    if digest_obj == _ffi.NULL:
        raise ValueError("No such digest method")

    sign_result = _lib.NETSCAPE_SPKI_sign(
        self._spki, pkey._pkey, digest_obj
    )
    _openssl_assert(sign_result > 0)
Sign the certificate request with this key and digest type. :param pkey: The private key to sign with. :type pkey: :py:class:`PKey` :param digest: The message digest to use. :type digest: :py:class:`bytes` :return: ``None``
382,254
def __patch_write_method(tango_device_klass, attribute):
    write_method = getattr(attribute, "fset", None)
    if write_method:
        method_name = "__write_{0}__".format(attribute.attr_name)
        attribute.write_method_name = method_name
    else:
        method_name = attribute.write_method_name
        write_method = getattr(tango_device_klass, method_name)

    write_attr = _get_wrapped_write_method(attribute, write_method)
    setattr(tango_device_klass, method_name, write_attr)
Checks if method given by it's name for the given DeviceImpl class has the correct signature. If a read/write method doesn't have a parameter (the traditional Attribute), then the method is wrapped into another method which has correct parameter definition to make it work. :param tango_device_klass: a DeviceImpl class :type tango_device_klass: class :param attribute: the attribute data information :type attribute: AttrData
382,255
def fix_text_segment(text, *,
                     fix_entities='auto',
                     remove_terminal_escapes=True,
                     fix_encoding=True,
                     fix_latin_ligatures=True,
                     fix_character_width=True,
                     uncurl_quotes=True,
                     fix_line_breaks=True,
                     fix_surrogates=True,
                     remove_control_chars=True,
                     remove_bom=True,
                     normalization='NFC'):
    if isinstance(text, bytes):
        raise UnicodeError(fixes.BYTES_ERROR_TEXT)

    if fix_entities == 'auto' and '<' in text and '>' in text:
        fix_entities = False
    while True:
        origtext = text
        if remove_terminal_escapes:
            text = fixes.remove_terminal_escapes(text)
        if fix_encoding:
            text = fixes.fix_encoding(text)
        if fix_entities:
            text = fixes.unescape_html(text)
        if fix_latin_ligatures:
            text = fixes.fix_latin_ligatures(text)
        if fix_character_width:
            text = fixes.fix_character_width(text)
        if uncurl_quotes:
            text = fixes.uncurl_quotes(text)
        if fix_line_breaks:
            text = fixes.fix_line_breaks(text)
        if fix_surrogates:
            text = fixes.fix_surrogates(text)
        if remove_control_chars:
            text = fixes.remove_control_chars(text)
        if remove_bom and not remove_control_chars:
            text = fixes.remove_bom(text)
        if normalization is not None:
            text = unicodedata.normalize(normalization, text)
        if text == origtext:
            return text
Apply fixes to text in a single chunk. This could be a line of text within a larger run of `fix_text`, or it could be a larger amount of text that you are certain is in a consistent encoding. See `fix_text` for a description of the parameters.
382,256
def _validate_certificate_url(self, cert_url):
    parsed_url = urlparse(cert_url)
    protocol = parsed_url.scheme
    if protocol.lower() != CERT_CHAIN_URL_PROTOCOL.lower():
        raise VerificationException(
            "Signature Certificate URL has invalid protocol: {}. "
            "Expecting {}".format(protocol, CERT_CHAIN_URL_PROTOCOL))

    hostname = parsed_url.hostname
    if (hostname is None or
            hostname.lower() != CERT_CHAIN_URL_HOSTNAME.lower()):
        raise VerificationException(
            "Signature Certificate URL has invalid hostname: {}. "
            "Expecting {}".format(hostname, CERT_CHAIN_URL_HOSTNAME))

    normalized_path = os.path.normpath(parsed_url.path)
    if not normalized_path.startswith(CERT_CHAIN_URL_STARTPATH):
        raise VerificationException(
            "Signature Certificate URL has invalid path: {}. "
            "Expecting the path to start with {}".format(
                normalized_path, CERT_CHAIN_URL_STARTPATH))

    port = parsed_url.port
    if port is not None and port != CERT_CHAIN_URL_PORT:
        raise VerificationException(
            "Signature Certificate URL has invalid port: {}. "
            "Expecting {}".format(str(port), str(CERT_CHAIN_URL_PORT)))
Validate the URL containing the certificate chain. This method validates if the URL provided adheres to the format mentioned here : https://developer.amazon.com/docs/custom-skills/host-a-custom-skill-as-a-web-service.html#cert-verify-signature-certificate-url :param cert_url: URL for retrieving certificate chain :type cert_url: str :raises: :py:class:`VerificationException` if the URL is invalid
382,257
def get_jwt_value(self, request):
    from django.utils.encoding import smart_text
    from django.utils.translation import ugettext as _
    from rest_framework import exceptions

    auth = self.get_authorization(request).split()
    auth_header_prefix = self.prefix.lower() or ''

    if not auth:
        if self.cookie:
            return request.COOKIES.get(self.cookie)
        return None

    if auth_header_prefix is None or len(auth_header_prefix) < 1:
        auth.append('')
        auth.reverse()

    if smart_text(auth[0].lower()) != auth_header_prefix:
        return None

    if len(auth) == 1:
        msg = _('Invalid Authorization header. No credentials provided.')
        raise exceptions.AuthenticationFailed(msg)
    elif len(auth) > 2:
        msg = _('Invalid Authorization header. Credentials string '
                'should not contain spaces.')
        raise exceptions.AuthenticationFailed(msg)

    return auth[1]
This function has been overloaded and it returns the proper JWT auth string. Parameters ---------- request: HttpRequest This is the request that is received by DJango in the view. Returns ------- str This returns the extracted JWT auth token string.
382,258
def delete(filething):
    t = OggSpeex(filething)
    filething.fileobj.seek(0)
    t.delete(filething)
delete(filething) Arguments: filething (filething) Raises: mutagen.MutagenError Remove tags from a file.
382,259
def from_github(user_repo_pair, file='plashfile'):
    "build and use a file (default 'plashfile') from github repo"
    from urllib.request import urlopen
    url = 'https://raw.githubusercontent.com/{}/master/{}'.format(
        user_repo_pair, file)
    with utils.catch_and_die([Exception], debug=url):
        resp = urlopen(url)
        plashstr = resp.read()
    return utils.run_write_read(
        ['plash', 'build', '--eval-stdin'], plashstr).decode().rstrip('\n')
build and use a file (default 'plashfile') from github repo
382,260
def get_trend(timeseries):
    last = timeseries['value'][len(timeseries['value']) - 1]
    prev = timeseries['value'][len(timeseries['value']) - 2]
    trend = last - prev
    trend_percentage = None

    if last == 0:
        if prev > 0:
            trend_percentage = -100
        else:
            trend_percentage = 0
    else:
        trend_percentage = int((trend / last) * 100)
    return (last, trend_percentage)
Using the values returned by get_timeseries(), compare the current Metric value with it's previous period's value :param timeseries: data returned from the get_timeseries() method :returns: the last period value and relative change
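A worked example of the relative change (the series is hypothetical):

last, pct = get_trend({'value': [50, 80, 100]})
# last == 100; trend == 100 - 80 == 20; pct == int((20 / 100) * 100) == 20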
382,261
def dt_month_name(x):
    import pandas as pd
    return pd.Series(_pandas_dt_fix(x)).dt.month_name().values.astype(str)
Returns the month names of a datetime sample in English. :returns: an expression containing the month names extracted from a datetime column. Example: >>> import vaex >>> import numpy as np >>> date = np.array(['2009-10-12T03:31:00', '2016-02-11T10:17:34', '2015-11-12T11:34:22'], dtype=np.datetime64) >>> df = vaex.from_arrays(date=date) >>> df # date 0 2009-10-12 03:31:00 1 2016-02-11 10:17:34 2 2015-11-12 11:34:22 >>> df.date.dt.month_name Expression = dt_month_name(date) Length: 3 dtype: str (expression) --------------------------------- 0 October 1 February 2 November
382,262
def badge(left_text: str, right_text: str, left_link: Optional[str] = None,
          right_link: Optional[str] = None, whole_link: Optional[str] = None,
          logo: Optional[str] = None, left_color: str = '#555',
          right_color: str = '#007ec6',
          measurer: Optional[text_measurer.TextMeasurer] = None,
          embed_logo: bool = False) -> str:
    if measurer is None:
        measurer = (
            precalculated_text_measurer.PrecalculatedTextMeasurer
            .default())

    if (left_link or right_link) and whole_link:
        raise ValueError(
            'whole_link may not be set with left_link or right_link')
    template = _JINJA2_ENVIRONMENT.get_template('badge-template-full.svg')

    if logo and embed_logo:
        logo = _embed_image(logo)

    svg = template.render(
        left_text=left_text,
        right_text=right_text,
        left_text_width=measurer.text_width(left_text) / 10.0,
        right_text_width=measurer.text_width(right_text) / 10.0,
        left_link=left_link,
        right_link=right_link,
        whole_link=whole_link,
        logo=logo,
        left_color=_NAME_TO_COLOR.get(left_color, left_color),
        right_color=_NAME_TO_COLOR.get(right_color, right_color),
    )
    xml = minidom.parseString(svg)
    _remove_blanks(xml)
    xml.normalize()
    return xml.documentElement.toxml()
Creates a github-style badge as an SVG image. >>> badge(left_text='coverage', right_text='23%', right_color='red') '<svg...</svg>' >>> badge(left_text='build', right_text='green', right_color='green', ... whole_link="http://www.example.com/") '<svg...</svg>' Args: left_text: The text that should appear on the left-hand-side of the badge e.g. "coverage". right_text: The text that should appear on the right-hand-side of the badge e.g. "23%". left_link: The URL that should be redirected to when the left-hand text is selected. right_link: The URL that should be redirected to when the right-hand text is selected. whole_link: The link that should be redirected to when the badge is selected. If set then left_link and right_right may not be set. logo: A url representing a logo that will be displayed inside the badge. Can be a data URL e.g. "data:image/svg+xml;utf8,<svg..." left_color: The color of the part of the badge containing the left-hand text. Can be an valid CSS color (see https://developer.mozilla.org/en-US/docs/Web/CSS/color) or a color name defined here: https://github.com/badges/shields/blob/master/lib/colorscheme.json right_color: The color of the part of the badge containing the right-hand text. Can be an valid CSS color (see https://developer.mozilla.org/en-US/docs/Web/CSS/color) or a color name defined here: https://github.com/badges/shields/blob/master/lib/colorscheme.json measurer: A text_measurer.TextMeasurer that can be used to measure the width of left_text and right_text. embed_logo: If True then embed the logo image directly in the badge. This can prevent an HTTP request and some browsers will not render external image referenced. When True, `logo` must be a HTTP/HTTPS URI or a filesystem path. Also, the `badge` call may raise an exception if the logo cannot be loaded, is not an image, etc.
382,263
def is_module_reloadable(self, module, modname):
    if self.has_cython:
        # don't reload Cython modules
        return False
    else:
        if (self.is_module_in_pathlist(module) or
                self.is_module_in_namelist(modname)):
            return False
        else:
            return True
Decide if a module is reloadable or not.
382,264
def get_args(stream_spec, overwrite_output=False):
    nodes = get_stream_spec_nodes(stream_spec)
    args = []
    sorted_nodes, outgoing_edge_maps = topo_sort(nodes)
    input_nodes = [node for node in sorted_nodes if isinstance(node, InputNode)]
    output_nodes = [node for node in sorted_nodes if isinstance(node, OutputNode)]
    global_nodes = [node for node in sorted_nodes if isinstance(node, GlobalNode)]
    filter_nodes = [node for node in sorted_nodes if isinstance(node, FilterNode)]
    stream_name_map = {(node, None): str(i) for i, node in enumerate(input_nodes)}
    filter_arg = _get_filter_arg(filter_nodes, outgoing_edge_maps, stream_name_map)
    args += reduce(operator.add, [_get_input_args(node) for node in input_nodes])
    if filter_arg:
        args += ['-filter_complex', filter_arg]
    args += reduce(operator.add,
                   [_get_output_args(node, stream_name_map) for node in output_nodes])
    args += reduce(operator.add, [_get_global_args(node) for node in global_nodes], [])
    if overwrite_output:
        args += ['-y']
    return args
Build command-line arguments to be passed to ffmpeg.
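A usage sketch via the ffmpeg-python public API, which builds the stream_spec graph this function consumes (the file names are hypothetical):

import ffmpeg
spec = ffmpeg.input('in.mp4').hflip().output('out.mp4')
ffmpeg.get_args(spec)
# -> ['-i', 'in.mp4', '-filter_complex', '[0]hflip[s0]', '-map', '[s0]', 'out.mp4']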
382,265
def ids(self):
    if config().identifiers() == 'text':
        ids = self._id_todo_map.keys()
    else:
        ids = [str(i + 1) for i in range(self.count())]
    return set(ids)
Returns set with all todo IDs.
382,266
def prt_gos_flat(self, prt):
    # kwarg key reconstructed; the original string was lost in extraction
    prtfmt = self.datobj.kws['fmtgo']
    _go2nt = self.sortobj.grprobj.go2nt
    go2nt = {go: _go2nt[go] for go in self.go2nt}
    prt.write("\n{N} GO IDs:\n".format(N=len(go2nt)))
    _sortby = self._get_sortgo()
    for ntgo in sorted(go2nt.values(), key=_sortby):
        prt.write(prtfmt.format(**ntgo._asdict()))
Print flat GO list.
382,267
def leave_module(self, node):
    assert len(self._to_consume) == 1
    not_consumed = self._to_consume.pop().to_consume
    # attempt to check for __all__ if defined
    if "__all__" in node.locals:
        self._check_all(node, not_consumed)

    # check globals
    self._check_globals(not_consumed)

    # don't check unused imports in __init__ files
    if not self.config.init_import and node.package:
        return

    self._check_imports(not_consumed)
leave module: check globals
382,268
def insert_image(filename, extnum_filename, auximage, extnum_auximage):
    # read the new image
    with fits.open(auximage) as hdulist:
        newimage = hdulist[extnum_auximage].data

    # open the destination file and replace the image in place
    hdulist = fits.open(filename, mode='update')
    oldimage_shape = hdulist[extnum_filename].data.shape
    if oldimage_shape == newimage.shape:
        hdulist[extnum_filename].data = newimage
        hdulist.flush()
    else:
        # messages reconstructed; the original strings were lost in extraction
        print('>>> shape of old image:', oldimage_shape)
        print('>>> shape of new image:', newimage.shape)
        print("ERROR: new image doesn't have the same shape")
    hdulist.close()
Replace image in filename by another image (same size) in newimage. Parameters ---------- filename : str File name where the new image will be inserted. extnum_filename : int Extension number in filename where the new image will be inserted. Note that the first extension is 1 (and not zero). auximage : str File name of the new image. extnum_auximage : int Extension number where the new image is located in auximage. Note that the first extension is 1 (and not zero).
382,269
def cli(obj, role, scopes, delete):
    client = obj['client']
    if delete:
        client.delete_perm(delete)
    else:
        # usage/error strings reconstructed; originals lost in extraction
        if not role:
            raise click.UsageError('Missing option "--role".')
        if not scopes:
            raise click.UsageError('Missing option "--scope".')
        try:
            perm = client.create_perm(role, scopes)
        except Exception as e:
            click.echo('ERROR: {}'.format(e))
            sys.exit(1)
        click.echo(perm.id)
Add or delete role-to-permission lookup entry.
382,270
def tags(self):
    if self._tags is None:
        config = global_config()
        self._tags = self.create(Tags, config=config)
    return self._tags
Return a thread local :class:`dossier.web.Tags` client.
382,271
def _get_content_type(self, content_type, filename=None):
    if content_type is None:
        content_type = self.content_type

    if content_type is None and filename is not None:
        content_type, _ = mimetypes.guess_type(filename)

    if content_type is None:
        content_type = _DEFAULT_CONTENT_TYPE

    return content_type
Determine the content type from the current object. The return value will be determined in order of precedence: - The value passed in to this method (if not :data:`None`) - The value stored on the current blob - The default value ('application/octet-stream') :type content_type: str :param content_type: (Optional) type of content. :type filename: str :param filename: (Optional) The name of the file where the content is stored. :rtype: str :returns: Type of content gathered from the object.
382,272
def copy_memory(self, address, size):
    start_time = time.time()
    map_bytes = self._cpu._raw_read(address, size)
    self._emu.mem_write(address, map_bytes)
    if time.time() - start_time > 3:
        logger.info(f"Copying {hr_size(size)} map at {hex(address)} took {time.time() - start_time} seconds")
Copy the bytes from address to address+size into Unicorn. Used primarily for copying memory maps. :param address: start of buffer to copy :param size: How many bytes to copy
382,273
def create_marker_table(self):
    if self.marker_table is None:
        self.marker_table = luigi.configuration.get_config().get(
            'sqlalchemy', 'marker-table', 'table_updates')

    engine = self.engine

    with engine.begin() as con:
        metadata = sqlalchemy.MetaData()
        if not con.dialect.has_table(con, self.marker_table):
            self.marker_table_bound = sqlalchemy.Table(
                self.marker_table, metadata,
                sqlalchemy.Column("update_id", sqlalchemy.String(128), primary_key=True),
                sqlalchemy.Column("target_table", sqlalchemy.String(128)),
                sqlalchemy.Column("inserted", sqlalchemy.DateTime, default=datetime.datetime.now()))
            metadata.create_all(engine)
        else:
            metadata.reflect(only=[self.marker_table], bind=engine)
            self.marker_table_bound = metadata.tables[self.marker_table]
Create marker table if it doesn't exist. Using a separate connection since the transaction might have to be reset.
382,274
def list2key(self, keyList):
    # error messages reconstructed; the original strings were lost in extraction
    for keyCombo in keyList:
        if not (isinstance(keyCombo, list) or isinstance(keyCombo, tuple)):
            msg = ('Every element in <keyList> must be a list or tuple.')
            raise QtmacsKeysequenceError(msg)
        if len(keyCombo) != 2:
            msg = 'Every element in <keyList> must have '
            msg += 'exactly two entries.'
            raise QtmacsKeysequenceError(msg)
        try:
            key_event = QtGui.QKeyEvent(QtCore.QEvent.KeyPress,
                                        keyCombo[1], keyCombo[0])
            err = False
        except TypeError:
            err = True
        if err:
            msg = ('Could not convert the key combination into a QKeyEvent.')
            raise QtmacsKeysequenceError(msg)
        else:
            self.appendQKeyEvent(key_event)
Convert a list of (``QtModifier``, ``QtCore.Qt.Key_*``) tuples into a key sequence. If no error is raised, then the list was accepted. |Args| * ``keyList`` (**list**): eg. (QtCore.Qt.ControlModifier, QtCore.Qt.Key_F). |Returns| **None** |Raises| * **QtmacsKeysequenceError** if the provided ``keysequence`` could not be parsed.
382,275
def rev_reg_id2cred_def_id__tag(rr_id: str) -> (str, str):
    return (
        ':'.join(rr_id.split(':')[2:-2]),  # credential definition identifier
        str(rr_id.split(':')[-1])  # tag is the last component
    )
Given a revocation registry identifier, return its corresponding credential definition identifier and (stringified int) tag. :param rr_id: revocation registry identifier :return: credential definition identifier and tag
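A decomposition sketch (the identifier is hypothetical but follows the expected colon-delimited layout):

rr_id = 'LjgpST2rjsoxYegQDRm7EL:4:LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag:CL_ACCUM:1'
rev_reg_id2cred_def_id__tag(rr_id)
# -> ('LjgpST2rjsoxYegQDRm7EL:3:CL:20:tag', '1')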
382,276
def export_widgets(self_or_cls, obj, filename, fmt=None, template=None,
                   json=False, json_path='', **kwargs):
    if fmt not in list(self_or_cls.widgets.keys())+['auto', None]:
        raise ValueError("Renderer.export_widget may only export "
                         "registered widget types.")

    if not isinstance(obj, NdWidget):
        if not isinstance(filename, (BytesIO, StringIO)):
            filedir = os.path.dirname(filename)
            current_path = os.getcwd()
            html_path = os.path.abspath(filedir)
            rel_path = os.path.relpath(html_path, current_path)
            save_path = os.path.join(rel_path, json_path)
        else:
            save_path = json_path
        kwargs['json_save_path'] = save_path
        kwargs['json_load_path'] = json_path
        widget = self_or_cls.get_widget(obj, fmt, **kwargs)
    else:
        widget = obj

    html = self_or_cls.static_html(widget, fmt, template)
    encoded = self_or_cls.encode((html, {'mime_type': 'text/html'}))
    if isinstance(filename, (BytesIO, StringIO)):
        filename.write(encoded)
        filename.seek(0)
    else:
        with open(filename, 'wb') as f:
            f.write(encoded)
Render and export object as a widget to a static HTML file. Allows supplying a custom template formatting string with fields to interpolate 'js', 'css' and the main 'html' containing the widget. Also provides options to export widget data to a json file in the supplied json_path (defaults to current path).
382,277
def node_transmit(node_id):
    exp = Experiment(session)
    what = request_parameter(parameter="what", optional=True)
    to_whom = request_parameter(parameter="to_whom", optional=True)

    # check the node exists
    node = models.Node.query.get(node_id)
    if node is None:
        return error_response(error_type="/node/transmit, node does not exist")

    # create what
    if what is not None:
        try:
            what = int(what)
            what = models.Info.query.get(what)
            if what is None:
                return error_response(
                    error_type="/node/transmit POST, info does not exist",
                    participant=node.participant,
                )
        except Exception:
            try:
                what = exp.known_classes[what]
            except KeyError:
                msg = "/node/transmit POST, {} not in experiment.known_classes"
                return error_response(
                    error_type=msg.format(what), participant=node.participant
                )

    # create to_whom
    if to_whom is not None:
        try:
            to_whom = int(to_whom)
            to_whom = models.Node.query.get(to_whom)
            if to_whom is None:
                return error_response(
                    error_type="/node/transmit POST, recipient Node does not exist",
                    participant=node.participant,
                )
        except Exception:
            try:
                to_whom = exp.known_classes[to_whom]
            except KeyError:
                msg = "/node/transmit POST, {} not in experiment.known_classes"
                return error_response(
                    error_type=msg.format(to_whom), participant=node.participant
                )

    # execute the request
    try:
        transmissions = node.transmit(what=what, to_whom=to_whom)
        for t in transmissions:
            assign_properties(t)
        session.commit()
        # ping the experiment
        exp.transmission_post_request(node=node, transmissions=transmissions)
        session.commit()
    except Exception:
        return error_response(
            error_type="/node/transmit POST, server error", participant=node.participant
        )

    return success_response(transmissions=[t.__json__() for t in transmissions])
Transmit to another node. The sender's node id must be specified in the url. As with node.transmit() the key parameters are what and to_whom. However, the values these accept are more limited than for the back end due to the necessity of serialization. If what and to_whom are not specified they will default to None. Alternatively you can pass an int (e.g. '5') or a class name (e.g. 'Info' or 'Agent'). Passing an int will get that info/node, passing a class name will pass the class. Note that if the class you are specifying is a custom class it will need to be added to the dictionary of known_classes in your experiment code. You may also pass the values property1, property2, property3, property4, property5 and details. If passed this will fill in the relevant values of the transmissions created with the values you specified. For example, to transmit all infos of type Meme to the node with id 10: dallinger.post( "/node/" + my_node_id + "/transmit", {what: "Meme", to_whom: 10} );
382,278
def save_files(self, nodes):
    metrics = {"Opened": 0, "Cached": 0}
    for node in nodes:
        file = node.file
        if self.__container.get_editor(file):
            if self.__container.save_file(file):
                metrics["Opened"] += 1
                self.__uncache(file)
        else:
            cache_data = self.__files_cache.get_content(file)
            if cache_data is None:
                LOGGER.warning(
                    "!> {0} | '{1}' file doesn't exists in files cache!".format(
                        self.__class__.__name__, file))
                continue
            # ... (the branch writing cached content back to disk and
            # incrementing metrics["Cached"] was truncated in the source dump)

    self.__container.engine.notifications_manager.notify(
        "{0} | '{1}' opened file(s) and '{2}' cached file(s) saved!".format(
            self.__class__.__name__, metrics["Opened"], metrics["Cached"]))
Saves user defined files using given nodes. :param nodes: Nodes. :type nodes: list :return: Method success. :rtype: bool
382,279
def add_extensions(self, extensions):
    stack = _lib.sk_X509_EXTENSION_new_null()
    _openssl_assert(stack != _ffi.NULL)

    stack = _ffi.gc(stack, _lib.sk_X509_EXTENSION_free)

    for ext in extensions:
        if not isinstance(ext, X509Extension):
            raise ValueError("One of the elements is not an X509Extension")

        _lib.sk_X509_EXTENSION_push(stack, ext._extension)

    add_result = _lib.X509_REQ_add_extensions(self._req, stack)
    _openssl_assert(add_result == 1)
Add extensions to the certificate signing request. :param extensions: The X.509 extensions to add. :type extensions: iterable of :py:class:`X509Extension` :return: ``None``
382,280
def build_response(content, code=200): # Status code is taken from the payload when present, else the default. response = make_response( jsonify(content), content.get('code', code) ) response.headers['Content-Type'] = 'application/json' response.headers['Access-Control-Allow-Origin'] = '*' return response
Build a JSON response with the given status code and add standard headers
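A minimal usage sketch inside a Flask app; the route and payload are made up, and build_response is assumed importable from its module:

from flask import Flask

app = Flask(__name__)

@app.route("/health")
def health():
    # build_response attaches the JSON/CORS headers shown above
    return build_response({"status": "ok"}, code=200)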
382,281
def save_conf(fn=None): if fn is None: fn = cfile() try: os.makedirs(os.path.dirname(fn)) except (OSError, IOError): pass with open(fn, 'w') as f: yaml.dump(conf, f)
Save current configuration to file as YAML. If no file is given, uses the default file in the current config directory, ``confdir``, which can be set by INTAKE_CONF_DIR.
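Example calls; the explicit path is illustrative:

save_conf()                         # write to the default location under confdir
save_conf("/tmp/intake_conf.yaml")  # write to an explicit path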
382,282
def pause(self, duration_seconds=0, force=False, force_regen_rospec=False): logger.debug('pause(%s)', duration_seconds) if self.state != LLRPClient.STATE_INVENTORYING: if not force: logger.info('ignoring pause(); not inventorying (state: %s)', self.getStateName(self.state)) return None else: logger.info('forcing pause()') if duration_seconds: logger.info('pausing for %d seconds', duration_seconds) rospec = self.getROSpec(force_new=force_regen_rospec)['ROSpec'] self.sendMessage({ 'DISABLE_ROSPEC': { 'Ver': 1, 'Type': 25, 'ID': 0, 'ROSpecID': rospec['ROSpecID'] }}) self.setState(LLRPClient.STATE_PAUSING) d = defer.Deferred() d.addCallback(self._setState_wrapper, LLRPClient.STATE_PAUSED) d.addErrback(self.complain, 'pause') self._deferreds['DISABLE_ROSPEC'].append(d) if duration_seconds > 0: startAgain = task.deferLater(reactor, duration_seconds, lambda: None) startAgain.addCallback(lambda _: self.resume()) return d
Pause an inventory operation for a set amount of time.
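A usage sketch, assuming ``client`` is a connected LLRPClient that is currently inventorying:

from twisted.python import log

d = client.pause(duration_seconds=5)  # returns None if not inventorying and not forced
if d is not None:
    d.addCallback(lambda _: log.msg("inventory paused; will resume automatically"))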
382,283
def center(self): try: return self._center except AttributeError: pass self._center = Point() return self._center
Center point of the ellipse, equidistant from foci, Point class. Defaults to the origin.
382,284
def write(self, notifications): "Connect to the APNS service and send notifications" if not self.factory: log.msg('APNSService write (connecting)') server, port = ((APNS_SERVER_SANDBOX_HOSTNAME if self.environment == 'sandbox' else APNS_SERVER_HOSTNAME), APNS_SERVER_PORT) self.factory = self.clientProtocolFactory() context = self.getContextFactory() reactor.connectSSL(server, port, self.factory, context) client = self.factory.clientProtocol if client: return client.sendMessage(notifications) else: d = self.factory.deferred timeout = reactor.callLater(self.timeout, lambda: d.called or d.errback( Exception('Notification timed out after %i seconds' % self.timeout))) def cancel_timeout(r): try: timeout.cancel() except Exception: pass return r d.addCallback(lambda p: p.sendMessage(notifications)) d.addErrback(log_errback('apns-service-write')) d.addBoth(cancel_timeout) return d
Connect to the APNS service and send notifications
382,285
def redraw_canvas(self): from xdot_parser import XdotAttrParser xdot_parser = XdotAttrParser() canvas = self._component_default() # Parse every Xdot drawing attribute and add the resulting # components to the canvas. for node in self.nodes: for attr in ("_draw_", "_ldraw_"): components = xdot_parser.parse_xdot_data(getattr(node, attr)) canvas.add(*components) for edge in self.edges: for attr in ("_draw_", "_ldraw_", "_hdraw_", "_tdraw_", "_hldraw_", "_tldraw_"): components = xdot_parser.parse_xdot_data(getattr(edge, attr)) canvas.add(*components) self.component = canvas self.vp.request_redraw()
Parses the Xdot attributes of all graph components and adds the components to a new canvas.
382,286
def _load_candidate_wrapper(self, source_file=None, source_config=None, dest_file=None, file_system=None): return_status = False msg = '' if source_file and source_config: raise ValueError("Cannot simultaneously set source_file and source_config") if source_config: if self.inline_transfer: (return_status, msg) = self._inline_tcl_xfer(source_config=source_config, dest_file=dest_file, file_system=file_system) else: # Use a temporary file on disk so the config can be SCP'd over tmp_file = self._create_tmp_file(source_config) (return_status, msg) = self._scp_file(source_file=tmp_file, dest_file=dest_file, file_system=file_system) if tmp_file and os.path.isfile(tmp_file): os.remove(tmp_file) if source_file: if self.inline_transfer: (return_status, msg) = self._inline_tcl_xfer(source_file=source_file, dest_file=dest_file, file_system=file_system) else: (return_status, msg) = self._scp_file(source_file=source_file, dest_file=dest_file, file_system=file_system) if not return_status: if msg == '': msg = "Transfer to remote device failed" return (return_status, msg)
Transfer file to remote device for either merge or replace operations Returns (return_status, msg)
382,287
def add_info(self, entry): # Strip the leading '##INFO=<' and the trailing '>' entry = entry[8:-1] info = entry.split(',') if len(info) < 4: return False for v in info: key, value = v.split('=', 1) if key == 'ID': self.info[value] = {} id_ = value elif key == 'Number': if value == '.' or value == 'A': value = -1 self.info[id_]['number'] = value elif key == 'Type': self.info[id_]['type'] = self.type_map[value] elif key == 'Description': self.info[id_]['description'] = value if len(info) > 4: # Re-join description text that itself contained commas self.info[id_]['description'] += ','.join(info[4:]) break return True
Parse and store the info field
382,288
def parse_gtf( filepath_or_buffer, chunksize=1024 * 1024, features=None, intern_columns=["seqname", "source", "strand", "frame"], fix_quotes_columns=["attribute"]): if features is not None: features = set(features) def parse_frame(s): if s == ".": return 0 else: return int(s) chunk_iterator = pd.read_csv( filepath_or_buffer, sep="\t", comment="#", names=REQUIRED_COLUMNS, skipinitialspace=True, skip_blank_lines=True, error_bad_lines=True, warn_bad_lines=True, chunksize=chunksize, engine="c", dtype={ "start": np.int64, "end": np.int64, "score": np.float32, "seqname": str, }, na_values=".", converters={"frame": parse_frame}) dataframes = [] try: for df in chunk_iterator: for intern_column in intern_columns: df[intern_column] = [intern(str(s)) for s in df[intern_column]] if features is not None: df = df[df["feature"].isin(features)] for fix_quotes_column in fix_quotes_columns: # Catch mistaken semicolons inside quoted attribute values df[fix_quotes_column] = [ s.replace(';\"', '\"').replace(";-", "-") for s in df[fix_quotes_column] ] dataframes.append(df) except Exception as e: raise ParsingError(str(e)) df = pd.concat(dataframes) return df
Parameters ---------- filepath_or_buffer : str or buffer object chunksize : int features : set or None Drop entries which aren't one of these features intern_columns : list These columns are short strings which should be interned fix_quotes_columns : list Most commonly the 'attribute' column which had broken quotes on some Ensembl release GTF files.
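A usage sketch; the file path is illustrative:

df = parse_gtf(
    "annotations.gtf",
    features={"gene", "transcript"})  # drop all other feature types
print(df[["seqname", "feature", "start", "end"]].head())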
382,289
def remove_not_allowed_chars(savepath): split_savepath = os.path.splitdrive(savepath) # Strip characters that are invalid in Windows file names; # the drive portion (and its colon) is preserved by splitdrive. savepath_without_invalid_chars = re.sub(r'[<>:"|?*]', '', split_savepath[1]) return split_savepath[0] + savepath_without_invalid_chars
Removes invalid filepath characters from the savepath. :param str savepath: the savepath to work on :return str: the savepath without invalid filepath characters
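An example on Windows, assuming the character set reconstructed above:

print(remove_not_allowed_chars('C:\\music\\what?.mp3'))
# C:\music\what.mp3  -- the drive colon survives because splitdrive keeps it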
382,290
def _get_scsi_controller_key(bus_number, scsi_ctrls): # list of all SCSI controller keys on the given bus keys = [ctrl.key for ctrl in scsi_ctrls if scsi_ctrls and ctrl.busNumber == bus_number] if not keys: raise salt.exceptions.VMwareVmCreationError( 'SCSI controller with bus number {0} doesn\'t exist'.format(bus_number)) return keys[0]
Returns key number of the SCSI controller keys bus_number Controller bus number from the adapter scsi_ctrls List of SCSI Controller objects (old+newly created)
382,291
def quantize(image, bits_per_channel=None): if bits_per_channel is None: bits_per_channel = 6 assert image.dtype == np.uint8 shift = 8-bits_per_channel halfbin = (1 << shift) >> 1 return ((image.astype(int) >> shift) << shift) + halfbin
Reduces the number of bits per channel in the given image.
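A quick check of the behavior: every output value is snapped to the center of one of 2**bits_per_channel bins.

import numpy as np

img = np.random.randint(0, 256, size=(8, 8, 3), dtype=np.uint8)
q = quantize(img, bits_per_channel=4)
# at most 2**4 = 16 distinct values remain, each centered in its bin
assert len(np.unique(q)) <= 16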
382,292
def peek(init, exposes, debug=False): def _peek(store, container, _stack=None): args = [ store.peek(objname, container, _stack=_stack) \ for objname in exposes ] if debug: print(args) return init(*args) return _peek
Default deserializer factory. Arguments: init (callable): type constructor. exposes (iterable): attributes to be peeked and passed to `init`. Returns: callable: deserializer (`peek` routine).
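A self-contained sketch with a toy store object; ``DictStore`` is hypothetical, standing in for the real store/container types:

from collections import namedtuple

class DictStore(object):
    # hypothetical minimal store: peek() just reads from the container dict
    def peek(self, objname, container, _stack=None):
        return container[objname]

Point = namedtuple("Point", "x y")
deserialize_point = peek(Point, ("x", "y"))
p = deserialize_point(DictStore(), {"x": 1.0, "y": 2.0})
assert p == Point(1.0, 2.0)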
382,293
def get_activity_admin_session_for_objective_bank(self, objective_bank_id, proxy): if not self.supports_activity_admin(): raise errors.Unimplemented() return sessions.ActivityAdminSession(objective_bank_id, proxy, self._runtime)
Gets the ``OsidSession`` associated with the activity admin service for the given objective bank. arg: objective_bank_id (osid.id.Id): the ``Id`` of the objective bank arg: proxy (osid.proxy.Proxy): a proxy return: (osid.learning.ActivityAdminSession) - a ``ActivityAdminSession`` raise: NotFound - ``objective_bank_id`` not found raise: NullArgument - ``objective_bank_id`` or ``proxy`` is ``null`` raise: OperationFailed - ``unable to complete request`` raise: Unimplemented - ``supports_activity_admin()`` or ``supports_visible_federation()`` is ``false`` *compliance: optional -- This method must be implemented if ``supports_activity_admin()`` and ``supports_visible_federation()`` are ``true``.*
382,294
def pickle_dumps(inbox): gc.disable() str_ = cPickle.dumps(inbox[0], cPickle.HIGHEST_PROTOCOL) gc.enable() return str_
Serializes the first element of the input using the fastest binary pickle protocol.
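A round-trip check, using the Python 2 ``cPickle`` module that the function itself relies on:

import cPickle

payload = pickle_dumps([{"a": 1}, "ignored", "also ignored"])
assert cPickle.loads(payload) == {"a": 1}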
382,295
def validate_seal(cls, header: BlockHeader) -> None: check_pow( header.block_number, header.mining_hash, header.mix_hash, header.nonce, header.difficulty)
Validate the seal on the given header.
382,296
def extract_images(bs4, lazy_image_attribute=None): if lazy_image_attribute: images = [image[lazy_image_attribute] for image in bs4.select('img') if image.has_attr(lazy_image_attribute)] else: images = [image['src'] for image in bs4.select('img') if image.has_attr('src')] image_links = [link for link in extract_links(bs4) if link.endswith(('.jpg', '.jpeg', '.png', '.gif', '.bmp', '.svg'))] image_metas = [meta['content'] for meta in extract_metas(bs4) if 'content' in meta if meta['content'].endswith(('.jpg', '.jpeg', '.png', '.gif', '.bmp', '.svg'))] return list(set(images + image_links + image_metas))
Extracts image URLs from img tags, links and meta tags. If a lazy-loading attribute is supplied, the image URL is read from that attribute instead of src. :param bs4: BeautifulSoup document :param lazy_image_attribute: attribute name holding the lazily-loaded image URL :return: deduplicated list of image URLs
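A usage sketch; ``extract_links`` and ``extract_metas`` are the module helpers the function calls, assumed to be defined alongside it:

from bs4 import BeautifulSoup

html = '<img src="eager.png"><img data-src="lazy.jpg">'
soup = BeautifulSoup(html, "html.parser")
extract_images(soup)                                   # picks up eager.png
extract_images(soup, lazy_image_attribute="data-src")  # picks up lazy.jpg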
382,297
def assert_equal(first, second, msg_fmt="{msg}"): if isinstance(first, dict) and isinstance(second, dict): assert_dict_equal(first, second, msg_fmt) elif not first == second: msg = "{!r} != {!r}".format(first, second) fail(msg_fmt.format(msg=msg, first=first, second=second))
Fail unless first equals second, as determined by the '==' operator. >>> assert_equal(5, 5.0) >>> assert_equal("Hello World!", "Goodbye!") Traceback (most recent call last): ... AssertionError: 'Hello World!' != 'Goodbye!' The following msg_fmt arguments are supported: * msg - the default error message * first - the first argument * second - the second argument
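The ``msg_fmt`` placeholders in action:

assert_equal(4, 5, msg_fmt="expected {second}, got {first}")
# AssertionError: expected 5, got 4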
382,298
def cdssequencethreads(self): for i in range(self.cpus): threads = Thread(target=self.cdssequence, args=()) threads.setDaemon(True) threads.start() for sample in self.metadata.samples: sample[self.analysistype].coresequence = dict() self.sequencequeue.put(sample) self.sequencequeue.join()
Extracts the sequence of each gene for each strain
382,299
def get_stats_summary(start=None, end=None, **kwargs): return MonthlySummaryReader(start=start, end=end, **kwargs).fetch()
Stats Historical Summary Reference: https://iexcloud.io/docs/api/#stats-historical-summary Data Weighting: ``Free`` Parameters ---------- start: datetime.datetime, default None, optional Start of data retrieval period end: datetime.datetime, default None, optional End of data retrieval period kwargs: Additional Request Parameters (see base class)
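A usage sketch; IEX Cloud credentials are normally supplied via the environment or passed through kwargs:

from datetime import datetime

df = get_stats_summary(
    start=datetime(2019, 1, 1),
    end=datetime(2019, 6, 1))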