Dataset columns: Unnamed: 0 (int64, values 0–389k), code (string, lengths 26–79.6k), docstring (string, lengths 1–46.9k).
7,700
def _use_gl(objs): from ..models.plots import Plot return _any(objs, lambda obj: isinstance(obj, Plot) and obj.output_backend == "webgl")
Whether a collection of Bokeh objects contains a plot requesting WebGL Args: objs (seq[Model or Document]) : Returns: bool
7,701
def _on_remove_library(self, *event):
    self.view["library_tree_view"].grab_focus()
    if react_to_event(self.view, self.view["library_tree_view"], event):
        path = self.view["library_tree_view"].get_cursor()[0]
        if path is not None:
            library_name = self.library_list_store[int(path[0])][0]
            library_config = self.core_config_model.get_current_config_value("LIBRARY_PATHS", use_preliminary=True, default={})
            del library_config[library_name]
            self.core_config_model.set_preliminary_config_value("LIBRARY_PATHS", library_config)
            if len(self.library_list_store) > 0:
                self.view["library_tree_view"].set_cursor(min(path[0], len(self.library_list_store) - 1))
        return True
Callback method handling the removal of an existing library
7,702
def add_element(self, elt): if not isinstance(elt, Element): raise TypeError("argument should be a subclass of Element") self.elements[elt.get_name()] = elt return elt
Helper to add an element to the current section. The element's name will be used as an identifier.
7,703
def proxy_label_for(label: str) -> str: label_java = _VertexLabel(label).unwrap() proxy_label_java = k.jvm_view().SequenceBuilder.proxyLabelFor(label_java) return proxy_label_java.getQualifiedName()
>>> Sequence.proxy_label_for("foo") 'proxy_for.foo'
7,704
def best_policy(mdp, U): pi = {} for s in mdp.states: pi[s] = argmax(mdp.actions(s), lambda a:expected_utility(a, s, U, mdp)) return pi
Given an MDP and a utility function U, determine the best policy, as a mapping from state to action. (Equation 17.4)
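The ``argmax`` and ``expected_utility`` helpers used above belong to the same AIMA-style toolkit but are not shown in this row; a one-line sketch, assuming the MDP's transition model ``mdp.T(s, a)`` yields ``(probability, next_state)`` pairs:

def expected_utility(a, s, U, mdp):
    # Expected utility of doing action a in state s, according to the MDP and U.
    return sum(p * U[s1] for (p, s1) in mdp.T(s, a))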
7,705
def get_push_pop_stack():
    push = copy.deepcopy(PUSH_STACK)
    pop = copy.deepcopy(POP_STACK)
    anno.setanno(push, 'pop', pop)        # link the push node to its pop node
    anno.setanno(push, 'push_func', True)
    anno.setanno(pop, 'push', push)       # and the pop node back to the push node
    op_id = _generate_op_id()
    return push, pop, op_id
Create pop and push nodes for substacks that are linked. Returns: A push and pop node which have `push_func` and `pop_func` annotations respectively, identifying them as such. They also have a `pop` and `push` annotation respectively, which links the push node to the pop node and vice versa.
7,706
def _dry_message_received(self, msg): for callback in self._dry_wet_callbacks: callback(LeakSensorState.DRY) self._update_subscribers(0x11)
Report a dry state.
7,707
def _take_values(self, item: Node) -> DictBasicType:
    values = super()._take_values(item)
    values['_parent'] = None
    return values
Takes a snapshot of the object and replaces the _parent property value with None to avoid infinite recursion in GPflow tree traversing. :param item: GPflow node object. :return: dictionary snapshot of the node object.
7,708
def admin_log(instances, msg: str, who: User=None, **kw):
    from django.contrib.admin.models import LogEntry, CHANGE
    from django.contrib.admin.options import get_content_type_for_model
    from django.utils.encoding import force_text
    if not who:
        username = settings.DJANGO_SYSTEM_USER if hasattr(settings, 'DJANGO_SYSTEM_USER') else 'system'
        who, created = User.objects.get_or_create(username=username)
    att_str = ''
    for k, v in kw.items():
        if hasattr(v, 'pk'):
            v = v.pk  # log related objects by primary key
        att_str += '{}={}'.format(k, v) if not att_str else ', {}={}'.format(k, v)
    if att_str:
        att_str = ' [{}]'.format(att_str)
    msg = str(msg) + att_str
    if not isinstance(instances, list) and not isinstance(instances, tuple):
        instances = [instances]
    for instance in instances:
        if instance:
            LogEntry.objects.log_action(
                user_id=who.pk,
                content_type_id=get_content_type_for_model(instance).pk,
                object_id=instance.pk,
                object_repr=force_text(instance),
                action_flag=CHANGE,
                change_message=msg,
            )
Logs an entry to admin logs of model(s). :param instances: Model instance or list of instances :param msg: Message to log :param who: Who did the change :param kw: Optional key-value attributes to append to message :return: None
7,709
def zlist(self, name_start, name_end, limit=10):
    limit = get_positive_integer('limit', limit)
    return self.execute_command('zlist', name_start, name_end, limit)
Return a list of the top ``limit`` zset names between ``name_start`` and ``name_end`` in ascending order .. note:: The range is (``name_start``, ``name_end``]. ``name_start`` isn't in the range, but ``name_end`` is. :param string name_start: The lower bound (not included) of zset names to be returned; an empty string ``''`` means -inf :param string name_end: The upper bound (included) of zset names to be returned; an empty string ``''`` means +inf :param int limit: the number of elements to return. :return: a list of zset names :rtype: list >>> ssdb.zlist('zset_ ', 'zset_z', 10) ['zset_1', 'zset_2'] >>> ssdb.zlist('zset_ ', '', 3) ['zset_1', 'zset_2'] >>> ssdb.zlist('', 'aaa_not_exist', 10) []
7,710
def apply_pre_filters(instance, html):
    for pre_func in appsettings.PRE_FILTER_FUNCTIONS:
        html = pre_func(instance, html)
    return html
Perform optimizations in the HTML source code. :type instance: fluent_contents.models.ContentItem :raise ValidationError: when one of the filters detects a problem.
7,711
def visit_importfrom(self, node): if not self._analyse_fallback_blocks and utils.is_from_fallback_block(node): return name_parts = node.modname.split(".") try: module = node.do_import_module(name_parts[0]) except astroid.AstroidBuildingException: return module = self._check_module_attrs(node, module, name_parts[1:]) if not module: return for name, _ in node.names: if name == "*": continue self._check_module_attrs(node, module, name.split("."))
check modules attribute accesses
7,712
def p_recent(self, kind, cur_p=, with_catalog=True, with_date=True): if cur_p == : current_page_number = 1 else: current_page_number = int(cur_p) current_page_number = 1 if current_page_number < 1 else current_page_number pager_num = int(MPost.total_number(kind) / CMS_CFG[]) kwd = { : , : , : with_catalog, : with_date, : kind, : current_page_number, : MPost.get_counts(), : config.router_post[kind], } self.render(, kwd=kwd, view=MPost.query_recent(num=20, kind=kind), infos=MPost.query_pager_by_slug( kind=kind, current_page_num=current_page_number ), format_date=tools.format_date, userinfo=self.userinfo, cfg=CMS_CFG, )
List recently edited posts (rendered as a partial page).
7,713
def update_user(self, user_id, **kwargs): body = self._formdata(kwargs, FastlyUser.FIELDS) content = self._fetch("/user/%s" % user_id, method="PUT", body=body) return FastlyUser(self, content)
Update a user.
7,714
def is_child_of(self, node): return node.get_children().filter(pk=self.pk).exists()
:returns: ``True`` if the node is a child of the node given as an argument, otherwise ``False`` :param node: The node to be checked as a parent
7,715
def patch_namespaced_stateful_set_scale(self, name, namespace, body, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.patch_namespaced_stateful_set_scale_with_http_info(name, namespace, body, **kwargs)
    else:
        (data) = self.patch_namespaced_stateful_set_scale_with_http_info(name, namespace, body, **kwargs)
        return data
patch_namespaced_stateful_set_scale # noqa: E501 partially update scale of the specified StatefulSet # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_stateful_set_scale(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Scale (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param UNKNOWN_BASE_TYPE body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1Scale If the method is called asynchronously, returns the request thread.
7,716
def read_multi(flatten, cls, source, *args, **kwargs):
    verbose = kwargs.pop('verbose', False)
    # parse input as a list of files
    try:
        files = file_list(source)
    except ValueError:
        files = [source]
        path = None
    else:
        path = files[0] if files else None
    # cap the number of processes at the number of input files
    nproc = min(kwargs.pop('nproc', 1), len(files))
    # define the per-file worker, returning (file, result-or-exception)
    def _read_single_file(fobj):
        try:
            return fobj, io_read(cls, fobj, *args, **kwargs)
        except Exception as exc:
            if nproc == 1:
                raise
            if isinstance(exc, SAXException):  # SAXExceptions don't pickle
                return fobj, exc.getException()
            return fobj, exc
    if verbose is True:
        verbose = 'Reading ({})'.format(kwargs['format'])
    output = mp_utils.multiprocess_with_queues(
        nproc, _read_single_file, files, verbose=verbose, unit='files')
    # re-raise any exception returned by a worker
    for fobj, exc in output:
        if isinstance(exc, Exception):
            exc.args = ('Failed to read %s: %s' % (fobj, str(exc)),)
            raise exc
    # combine the chunked results into a single object
    _, out = zip(*output)
    return flatten(out)
Read sources into a `cls` with multiprocessing This method should be called by `cls.read` and uses the `nproc` keyword to enable and handle pool-based multiprocessing of multiple source files, using `flatten` to combine the chunked data into a single object of the correct type. Parameters ---------- flatten : `callable` a method to take a list of ``cls`` instances, and combine them into a single ``cls`` instance cls : `type` the object type to read source : `str`, `list` of `str`, ... the input data source, can be of in many different forms *args positional arguments to pass to the reader **kwargs keyword arguments to pass to the reader
7,717
async def delete_chat_photo(self, chat_id: typing.Union[base.Integer, base.String]) -> base.Boolean: payload = generate_payload(**locals()) result = await self.request(api.Methods.DELETE_CHAT_PHOTO, payload) return result
Use this method to delete a chat photo. Photos can't be changed for private chats. The bot must be an administrator in the chat for this to work and must have the appropriate admin rights. Note: In regular groups (non-supergroups), this method will only work if the ‘All Members Are Admins’ setting is off in the target group. Source: https://core.telegram.org/bots/api#deletechatphoto :param chat_id: Unique identifier for the target chat or username of the target channel :type chat_id: :obj:`typing.Union[base.Integer, base.String]` :return: Returns True on success :rtype: :obj:`base.Boolean`
7,718
def compare(ver1, ver2): v1, v2 = parse(ver1), parse(ver2) return _compare_by_keys(v1, v2)
Compare two versions :param ver1: version string 1 :param ver2: version string 2 :return: The return value is negative if ver1 < ver2, zero if ver1 == ver2 and strictly positive if ver1 > ver2 :rtype: int >>> import semver >>> semver.compare("1.0.0", "2.0.0") -1 >>> semver.compare("2.0.0", "1.0.0") 1 >>> semver.compare("2.0.0", "2.0.0") 0
7,719
def alphavsks(self, autozoom=True, **kwargs):
    pylab.plot(self._alpha_values, self._xmin_kstest, '.')
    pylab.errorbar(self._alpha, self._ks, xerr=self._alphaerr, fmt='+')
    ax = pylab.gca()
    if autozoom:
        ax.set_ylim(0.8 * (self._ks), 3 * (self._ks))
        ax.set_xlim((self._alpha) - 5 * self._alphaerr, (self._alpha) + 5 * self._alphaerr)
    ax.set_ylabel("KS statistic")
    ax.set_xlabel(r'$\alpha$')
    pylab.draw()
    return ax
Plot alpha versus the ks value for derived alpha. This plot can be used as a diagnostic of whether you have derived the 'best' fit: if there are multiple local minima, your data set may be well suited to a broken powerlaw or a different function.
7,720
def edit(self, entity, id, payload, sync=True):
    url = urljoin(self.host, entity.value + '/')
    url = urljoin(url, id + '/')
    params = {'sync': str(sync).lower()}
    url = Utils.add_url_parameters(url, params)
    r = requests.put(url, auth=self.auth, data=json.dumps(payload), headers=self.headers)
    if r.status_code == 500:
        error_message = r.json()['message']
        raise CoredataError('Error! {error}'.format(error=error_message))
Edit a document.
7,721
def merge_commit(commit):
    "Fetches the latest code and merges up the specified commit."
    with cd(env.path):
        run('git fetch')
        if '@' in commit:
            branch, commit = commit.split('@')
            run('git checkout {}'.format(branch))
        run('git merge {}'.format(commit))
Fetches the latest code and merges up the specified commit.
7,722
def calcRapRperi(self,*args,**kwargs): if isinstance(self._pot,list): thispot= [p.toPlanar() for p in self._pot if not isinstance(p,planarPotential)] thispot.extend([p for p in self._pot if isinstance(p,planarPotential)]) elif not isinstance(self._pot,planarPotential): thispot= self._pot.toPlanar() else: thispot= self._pot aAAxi= actionAngleAxi(*args,pot=thispot, gamma=self._gamma) return aAAxi.calcRapRperi(**kwargs)
NAME: calcRapRperi PURPOSE: calculate the apocenter and pericenter radii INPUT: Either: a) R,vR,vT,z,vz b) Orbit instance: initial condition used if that's it, orbit(t) if there is a time given as well OUTPUT: (rperi,rap) HISTORY: 2013-11-27 - Written - Bovy (IAS)
7,723
def subclass(cls, t): t.doc = None t.terms = [] t.__class__ = SectionTerm return t
Change a term into a Section Term
7,724
def save(self):
    self.cells = list(self.renumber())
    if not self.cells[-1].endswith('\n'):
        self.cells[-1] += '\n'
    with open(self.filename, 'w') as file_open:
        file_open.write('\n'.join(self.cells))
Format and save cells.
7,725
def _add_embedding_config(file_path, data_dir, has_metadata=False, label_img_shape=None):
    with open(os.path.join(file_path, 'projector_config.pbtxt'), 'a') as f:
        s = 'embeddings {\n'
        s += 'tensor_name: "{}"\n'.format(data_dir)
        s += 'tensor_path: "{}"\n'.format(os.path.join(data_dir, 'tensors.tsv'))
        if has_metadata:
            s += 'metadata_path: "{}"\n'.format(os.path.join(data_dir, 'metadata.tsv'))
        if label_img_shape is not None:
            if len(label_img_shape) != 4:
                logging.warning('expected 4D sprite image in the format NCHW, received ndim=%d,'
                                ' skipping sprite image info', len(label_img_shape))
            else:
                s += 'sprite {\n'
                s += 'image_path: "{}"\n'.format(os.path.join(data_dir, 'sprite.png'))
                s += 'single_image_dim: {}\n'.format(label_img_shape[3])
                s += 'single_image_dim: {}\n'.format(label_img_shape[2])
                s += '}\n'
        s += '}\n'
        f.write(s)
Creates a config file used by the embedding projector. Adapted from the TensorFlow function `visualize_embeddings()` at https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/tensorboard/plugins/projector/__init__.py
7,726
def fixtags(self, text): text = _guillemetLeftPat.sub(ur, text) text = _guillemetRightPat.sub(ur, text) return text
Clean up special characters, only run once, next-to-last before doBlockLevels
7,727
def _to_ascii(s):
    from six import text_type, binary_type
    if isinstance(s, text_type):
        ascii_ = s.encode('ascii', 'ignore')
    elif isinstance(s, binary_type):
        ascii_ = s.decode().encode('ascii', 'ignore')
    else:
        raise Exception('Unsupported type: {}'.format(type(s)))
    return ascii_
Converts given string to ascii ignoring non ascii. Args: s (text or binary): Returns: str:
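For illustration, non-ASCII characters are simply dropped (assuming the helper runs under Python 3, where ``encode`` returns ``bytes``):

>>> _to_ascii(u'café')
b'caf'
>>> _to_ascii(b'na\xc3\xafve')
b'nave'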
7,728
def generic_visit(self, node):
    if node.__class__.__name__ == 'Name':
        if node.ctx.__class__ == ast.Load and node.id not in self.names:
            self.names.append(node.id)
    ast.NodeVisitor.generic_visit(self, node)
Collect the names of variables read in a ``Load`` context while visiting the tree.
7,729
def findAnyBracketBackward(self, block, column):
    depth = {'()': 1, '[]': 1, '{}': 1}
    for foundBlock, foundColumn, char in self.iterateCharsBackwardFrom(block, column):
        if self._qpart.isCode(foundBlock.blockNumber(), foundColumn):
            for brackets in depth.keys():
                opening, closing = brackets
                if char == opening:
                    depth[brackets] -= 1
                    if depth[brackets] == 0:
                        return foundBlock, foundColumn
                elif char == closing:
                    depth[brackets] += 1
    else:
        raise ValueError('Not found')
Search for a needle and return (block, column) Raise ValueError, if not found NOTE this methods ignores strings and comments
7,730
def _helpful_failure(method):
    @wraps(method)
    def wrapper(self, val):
        try:
            return method(self, val)
        except:
            exc_cls, inst, tb = sys.exc_info()
            if hasattr(inst, '_RERAISE'):
                _, expr, _, inner_val = Q.__debug_info__
                Q.__debug_info__ = QDebug(self, expr, val, inner_val)
                raise
            if issubclass(exc_cls, KeyError):
                exc_cls = QKeyError
            prettyval = repr(val)
            if len(prettyval) > 150:
                prettyval = "<%s instance>" % (type(val).__name__)
            msg = "{0}\n\n\tEncountered when evaluating {1}{2}".format(
                inst, prettyval, self)
            new_exc = exc_cls(msg)
            new_exc._RERAISE = True
            Q.__debug_info__ = QDebug(self, self, val, val)
            six.reraise(exc_cls, new_exc, tb)
    return wrapper
Decorator for eval_ that prints a helpful error message if an exception is generated in a Q expression
7,731
def get_version():
    "Returns a PEP 386-compliant version number from VERSION."
    assert len(VERSION) == 5
    assert VERSION[3] in ('alpha', 'beta', 'rc', 'final')
    # omit the third component when it is zero, e.g. (1, 9, 0, ...) -> '1.9'
    parts = 2 if VERSION[2] == 0 else 3
    main = '.'.join(str(x) for x in VERSION[:parts])
    sub = ''
    if VERSION[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
        sub = mapping[VERSION[3]] + str(VERSION[4])
    return str(main + sub)
Returns a PEP 386-compliant version number from VERSION.
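Assuming a Django-style five-part VERSION tuple, the mapping yields, for example:

VERSION = (1, 9, 0, 'alpha', 1)
get_version()  # '1.9a1'  (third component is 0, so only two parts are joined)
VERSION = (1, 9, 2, 'final', 0)
get_version()  # '1.9.2'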
7,732
def debug(self, value): self._debug = value if self._debug: logging.getLogger().setLevel(logging.DEBUG)
Turn on debug logging if necessary. :param value: Value of debug flag
7,733
def translate(self, body, params=None):
    if body in SKIP_IN_PATH:
        raise ValueError("Empty value passed for a required argument 'body'.")
    return self.transport.perform_request(
        "POST", "/_sql/translate", params=params, body=body
    )
`<Translate SQL into Elasticsearch queries>`_ :arg body: Specify the query in the `query` element.
7,734
def wait(self): "wait for a message, respecting timeout" data=self.getcon().recv(256) if not data: raise PubsubDisco if self.reset: self.reset=False raise PubsubDisco self.buf+=data msg,self.buf=complete_message(self.buf) return msg
wait for a message, respecting timeout
7,735
def generate(cls, curve=ec.SECP256R1(), progress_func=None, bits=None): if bits is not None: curve = cls._ECDSA_CURVES.get_by_key_length(bits) if curve is None: raise ValueError("Unsupported key length: {:d}".format(bits)) curve = curve.curve_class() private_key = ec.generate_private_key(curve, backend=default_backend()) return ECDSAKey(vals=(private_key, private_key.public_key()))
Generate a new private ECDSA key. This factory function can be used to generate a new host key or authentication key. :param progress_func: Not used for this type of key. :returns: A new private key (`.ECDSAKey`) object
7,736
def replay_position(position, result): assert position.n == len(position.recent), "Position history is incomplete" pos = Position(komi=position.komi) for player_move in position.recent: color, next_move = player_move yield PositionWithContext(pos, next_move, result) pos = pos.play_move(next_move, color=color)
Wrapper for a go.Position which replays its history. Assumes an empty start position! (i.e. no handicap, and history must be exhaustive.) Result must be passed in, since a resign cannot be inferred from position history alone. for position_w_context in replay_position(position): print(position_w_context.position)
7,737
def _iop(self, operation, other, *allowed): f = self._field if self._combining: return reduce(self._combining, (q._iop(operation, other, *allowed) for q in f)) if __debug__ and _complex_safety_check(f, {operation} | set(allowed)): raise NotImplementedError("{self!r} does not allow {op} comparison.".format( self=self, op=operation)) def _t(o): for value in o: yield None if value is None else f.transformer.foreign(value, (f, self._document)) other = other if len(other) > 1 else other[0] values = list(_t(other)) return Filter({self._name: {operation: values}})
An iterative operation operating on multiple values. Consumes iterators to construct a concrete list at time of execution.
7,738
def MergeAttributeContainers( self, callback=None, maximum_number_of_containers=0): if maximum_number_of_containers < 0: raise ValueError() if not self._cursor: self._Open() self._ReadStorageMetadata() self._container_types = self._GetContainerTypes() number_of_containers = 0 while self._active_cursor or self._container_types: if not self._active_cursor: self._PrepareForNextContainerType() if maximum_number_of_containers == 0: rows = self._active_cursor.fetchall() else: number_of_rows = maximum_number_of_containers - number_of_containers rows = self._active_cursor.fetchmany(size=number_of_rows) if not rows: self._active_cursor = None continue for row in rows: identifier = identifiers.SQLTableIdentifier( self._active_container_type, row[0]) if self._compression_format == definitions.COMPRESSION_FORMAT_ZLIB: serialized_data = zlib.decompress(row[1]) else: serialized_data = row[1] attribute_container = self._DeserializeAttributeContainer( self._active_container_type, serialized_data) attribute_container.SetIdentifier(identifier) if self._active_container_type == self._CONTAINER_TYPE_EVENT_TAG: event_identifier = identifiers.SQLTableIdentifier( self._CONTAINER_TYPE_EVENT, attribute_container.event_row_identifier) attribute_container.SetEventIdentifier(event_identifier) del attribute_container.event_row_identifier if callback: callback(self._storage_writer, attribute_container) self._add_active_container_method(attribute_container) number_of_containers += 1 if (maximum_number_of_containers != 0 and number_of_containers >= maximum_number_of_containers): return False self._Close() os.remove(self._path) return True
Reads attribute containers from a task storage file into the writer. Args: callback (function[StorageWriter, AttributeContainer]): function to call after each attribute container is deserialized. maximum_number_of_containers (Optional[int]): maximum number of containers to merge, where 0 represent no limit. Returns: bool: True if the entire task storage file has been merged. Raises: RuntimeError: if the add method for the active attribute container type is missing. OSError: if the task storage file cannot be deleted. ValueError: if the maximum number of containers is a negative value.
7,739
def parseWord(word):
    mapping = {'yes': True, 'true': True, 'no': False, 'false': False}
    _, key, value = word.split('=', 2)
    try:
        value = int(value)
    except ValueError:
        value = mapping.get(value, value)
    return (key, value)
Split given attribute word to key, value pair. Values are casted to python equivalents. :param word: API word. :returns: Key, value pair.
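Illustrative calls, assuming RouterOS-style API words of the form ``=key=value``:

>>> parseWord('=mtu=1500')
('mtu', 1500)
>>> parseWord('=disabled=true')
('disabled', True)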
7,740
def set_output_fields(self, output_fields): if isinstance(output_fields, dict) or isinstance(output_fields, list): self.output_fields = output_fields elif isinstance(output_fields, basestring): self.output_field = output_fields else: raise ValueError("set_output_fields requires a dictionary of " + "output fields to remap, a list of keys to filter, or a scalar string") return self
Defines where to put the dictionary output of the extractor in the doc, but renames the fields of the extracted output for the document or just filters the keys
7,741
def get_random(self): Statement = self.get_model() statement = Statement.objects.order_by().first() if statement is None: raise self.EmptyDatabaseException() return statement
Returns a random statement from the database
7,742
def get_prtfmt_list(self, flds, add_nl=True): fmts = [] for fld in flds: if fld[:2] == : fmts.append(.format(FLD=fld)) elif fld in self.default_fld2fmt: fmts.append(self.default_fld2fmt[fld]) else: raise Exception("UNKNOWN FORMAT: {FLD}".format(FLD=fld)) if add_nl: fmts.append("\n") return fmts
Get print format, given fields.
7,743
def _request_bulk(self, urls: List[str]) -> List: if not urls: raise Exception("No results were found") session: FuturesSession = FuturesSession(max_workers=len(urls)) self.log.info("Bulk requesting: %d" % len(urls)) futures = [session.get(u, headers=gen_headers(), timeout=3) for u in urls] done, incomplete = wait(futures) results: List = list() for response in done: try: results.append(response.result()) except Exception as err: self.log.warn("Failed result: %s" % err) return results
Batch the requests going out.
7,744
def remove(self, removeItems=False): if not self.prepareToRemove(): return False items = self.items() if self._scene._layers: new_layer = self._scene._layers[0] else: new_layer = None if removeItems: self.scene().removeItems(items) else: for item in items: item.setLayer(new_layer) if self in self._scene._layers: self._scene._layers.remove(self) if new_layer: new_layer.setCurrent() self._scene.setModified() return True
Removes this layer from the scene. If the removeItems flag is set to True, then all the items on this layer will be removed as well. Otherwise, they will be transferred to another layer from the scene. :param removeItems | <bool> :return <bool>
7,745
def update_house(self, complex: str, id: str, **kwargs): self.check_house(complex, id) self.put(.format( developer=self.developer, complex=complex, id=id, ), data=kwargs)
Update the existing house
7,746
def default(self, obj):
    if hasattr(obj, '__json__') and six.callable(obj.__json__):
        return obj.__json__()
    elif isinstance(obj, (date, datetime)):
        return str(obj)
    elif isinstance(obj, Decimal):
        return float(obj)
    elif is_saobject(obj):
        props = {}
        for key in obj.__dict__:
            if not key.startswith('_sa_'):
                props[key] = getattr(obj, key)
        return props
    elif isinstance(obj, ResultProxy):
        props = dict(rows=list(obj), count=obj.rowcount)
        if props['count'] < 0:
            props['count'] = len(props['rows'])
        return props
    elif isinstance(obj, RowProxy):
        return dict(obj)
    elif isinstance(obj, webob_dicts):
        return obj.mixed()
    else:
        return JSONEncoder.default(self, obj)
Converts an object and returns a ``JSON``-friendly structure. :param obj: object or structure to be converted into a ``JSON``-ifiable structure Considers the following special cases in order: * object has a callable __json__() attribute defined returns the result of the call to __json__() * date and datetime objects returns the object cast to str * Decimal objects returns the object cast to float * SQLAlchemy objects returns a copy of the object.__dict__ with internal SQLAlchemy parameters removed * SQLAlchemy ResultProxy objects Casts the iterable ResultProxy into a list of tuples containing the entire resultset data, returns the list in a dictionary along with the resultset "row" count. .. note:: {'count': 5, 'rows': [('Ed Jones',), ('Pete Jones',), ('Wendy Williams',), ('Mary Contrary',), ('Fred Smith',)]} * SQLAlchemy RowProxy objects Casts the RowProxy cursor object into a dictionary, probably losing its ordered dictionary behavior in the process but making it JSON-friendly. * webob_dicts objects returns webob_dicts.mixed() dictionary, which is guaranteed to be JSON-friendly.
7,747
def _solNa2SO4(T, mH2SO4, mNaCl): if T < 523.15 or T > 623.15 or mH2SO4 < 0 or mH2SO4 > 0.75 or \ mNaCl < 0 or mNaCl > 2.25: raise NotImplementedError("Incoming out of bound") A00 = -0.8085987*T+81.4613752+0.10537803*T*log(T) A10 = 3.4636364*T-281.63322-0.46779874*T*log(T) A20 = -6.0029634*T+480.60108+0.81382854*T*log(T) A30 = 4.4540258*T-359.36872-0.60306734*T*log(T) A01 = 0.4909061*T-46.556271-0.064612393*T*log(T) A02 = -0.002781314*T+1.722695+0.0000013319698*T*log(T) A03 = -0.014074108*T+0.99020227+0.0019397832*T*log(T) A11 = -0.87146573*T+71.808756+0.11749585*T*log(T) S = A00 + A10*mH2SO4 + A20*mH2SO4**2 + A30*mH2SO4**3 + A01*mNaCl + \ A02*mNaCl**2 + A03*mNaCl**3 + A11*mH2SO4*mNaCl return S
Equation for the solubility of sodium sulfate in aqueous mixtures of sodium chloride and sulfuric acid Parameters ---------- T : float Temperature, [K] mH2SO4 : float Molality of sulfuric acid, [mol/kg(water)] mNaCl : float Molality of sodium chloride, [mol/kg(water)] Returns ------- S : float Molal solubility of sodium sulfate, [mol/kg(water)] Notes ------ Raise :class:`NotImplementedError` if input isn't in limit: * 523.15 ≤ T ≤ 623.15 * 0 ≤ mH2SO4 ≤ 0.75 * 0 ≤ mNaCl ≤ 2.25 Examples -------- >>> _solNa2SO4(523.15, 0.25, 0.75) 2.68 References ---------- IAPWS, Solubility of Sodium Sulfate in Aqueous Mixtures of Sodium Chloride and Sulfuric Acid from Water to Concentrated Solutions, http://www.iapws.org/relguide/na2so4.pdf
7,748
def refresh(self, leave_clean=False):
    remote, merge = self._get_upstream()
    self._check_call(['fetch', '--tags', remote, merge], raise_type=Scm.RemoteException)
    try:
        self._check_call(['rebase', 'FETCH_HEAD'], raise_type=Scm.LocalException)
    except Scm.LocalException as e:
        if leave_clean:
            logger.debug('Cleaning up after failed rebase')
            try:
                self._check_call(['rebase', '--abort'], raise_type=Scm.LocalException)
            except Scm.LocalException as abort_exc:
                logger.debug('Failed to abort pending rebase')
                logger.debug(traceback.format_exc(abort_exc))
        raise e
Attempt to pull-with-rebase from upstream. This is implemented as fetch-plus-rebase so that we can distinguish between errors in the fetch stage (likely network errors) and errors in the rebase stage (conflicts). If leave_clean is true, then in the event of a rebase failure, the branch will be rolled back. Otherwise, it will be left in the conflicted state.
7,749
def operation_recorder_enabled(self, value): for recorder in self._operation_recorders: if value: recorder.enable() else: recorder.disable()
Setter method; for a description see the getter method.
7,750
def get_item(env, name, default=None):
    for key in name.split('.'):
        if isinstance(env, dict) and key in env:
            env = env[key]
        elif isinstance(env, types.ModuleType) and key in env.__dict__:
            env = env.__dict__[key]
        else:
            return default
    return env
Get an item from a dictionary, handling nested lookups with dotted notation. Args: env: the environment (dictionary) to use to look up the name. name: the name to look up, in dotted notation. default: the value to return if the name if not found. Returns: The result of looking up the name, if found; else the default.
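For example, a nested lookup walks one dotted segment at a time and falls back to the default on the first miss:

config = {'db': {'host': 'localhost', 'port': 5432}}
get_item(config, 'db.port')          # 5432
get_item(config, 'db.user', 'anon')  # 'anon' (missing key returns the default)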
7,751
def set_position(self, position): if position > self._duration(): return position_ns = position * _NANOSEC_MULT self._manager[ATTR_POSITION] = position self._player.seek_simple(_FORMAT_TIME, Gst.SeekFlags.FLUSH, position_ns)
Set media position.
7,752
def compute_Wp(self, Epmin=None, Epmax=None): if Epmin is None and Epmax is None: Wp = self.Wp else: if Epmax is None: Epmax = self.Epmax if Epmin is None: Epmin = self.Epmin log10Epmin = np.log10(Epmin.to("GeV").value) log10Epmax = np.log10(Epmax.to("GeV").value) Ep = ( np.logspace( log10Epmin, log10Epmax, int(self.nEpd * (log10Epmax - log10Epmin)), ) * u.GeV ) pdist = self.particle_distribution(Ep) Wp = trapz_loglog(Ep * pdist, Ep).to("erg") return Wp
Total energy in protons between energies Epmin and Epmax Parameters ---------- Epmin : :class:`~astropy.units.Quantity` float, optional Minimum proton energy for energy content calculation. Epmax : :class:`~astropy.units.Quantity` float, optional Maximum proton energy for energy content calculation.
7,753
def filter(args): p = OptionParser(filter.__doc__) opts, args = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) frgfile, idsfile = args assert frgfile.endswith(".frg") fp = open(idsfile) allowed = set(x.strip() for x in fp) logging.debug("A total of {0} allowed ids loaded.".format(len(allowed))) newfrgfile = frgfile.replace(".frg", ".filtered.frg") fp = open(frgfile) fw = open(newfrgfile, "w") nfrags, discarded_frags = 0, 0 nmates, discarded_mates = 0, 0 for rec in iter_records(fp): if rec.type == "FRG": readname = rec.get_field("acc") readname = readname.rstrip("ab") nfrags += 1 if readname not in allowed: discarded_frags += 1 continue if rec.type == "LKG": readname = rec.get_field("frg") readname = readname.rstrip("ab") nmates += 1 if readname not in allowed: discarded_mates += 1 continue print(rec, file=fw) survived_frags = nfrags - discarded_frags survived_mates = nmates - discarded_mates print("Survived fragments: {0}".\ format(percentage(survived_frags, nfrags)), file=sys.stderr) print("Survived mates: {0}".\ format(percentage(survived_mates, nmates)), file=sys.stderr)
%prog filter frgfile idsfile Removes the reads from frgfile that are indicated as duplicates in the clstrfile (generated by CD-HIT-454). `idsfile` includes a set of names to include in the filtered frgfile. See apps.cdhit.ids().
7,754
def pick(self, *props): result = Parameters() for prop in props: if self.contains_key(prop): result.put(prop, self.get(prop)) return result
Picks select parameters from this Parameters and returns them as a new Parameters object. :param props: keys to be picked and copied over to new Parameters. :return: a new Parameters object.
7,755
def check(self, topic, value): datatype_key = topic.meta.get(, ) self._datatypes[datatype_key].check(topic, value) validate_dt = topic.meta.get(, None) if validate_dt: self._datatypes[validate_dt].check(topic, value)
Check whether the value fits the given specification.
7,756
def run_iterations(cls, the_callable, iterations=1, label=None, schedule=, userdata = None, run_immediately=False, delay_until=None): task = task_with_callable(the_callable, label=label, schedule=schedule, userdata=userdata) task.iterations = iterations if delay_until is not None: if isinstance(delay_until, datetime): if delay_until > timezone.now(): task.start_running = delay_until else: raise ValueError("Task cannot start running in the past") else: raise ValueError("delay_until must be a datetime.datetime instance") if run_immediately: task.next_run = timezone.now() else: task.calc_next_run() task.save()
Class method to run a callable with a specified number of iterations
7,757
def exportable(self):
    if 'ExportableCertification' in self._signature.subpackets:
        return bool(next(iter(self._signature.subpackets['ExportableCertification'])))
    return True
``False`` if this signature is marked as being not exportable. Otherwise, ``True``.
7,758
def deploy(self, initial_instance_count, instance_type, accelerator_type=None, endpoint_name=None,
           use_compiled_model=False, update_endpoint=False, **kwargs):
    self._ensure_latest_training_job()
    endpoint_name = endpoint_name or self.latest_training_job.name
    self.deploy_instance_type = instance_type
    if use_compiled_model:
        family = '_'.join(instance_type.split('.')[:-1])
        if family not in self._compiled_models:
            raise ValueError("No compiled model for {}. "
                             "Please compile one with compile_model before deploying.".format(family))
        model = self._compiled_models[family]
    else:
        model = self.create_model(**kwargs)
    return model.deploy(
        instance_type=instance_type,
        initial_instance_count=initial_instance_count,
        accelerator_type=accelerator_type,
        endpoint_name=endpoint_name,
        update_endpoint=update_endpoint,
        tags=self.tags)
Deploy the trained model to an Amazon SageMaker endpoint and return a ``sagemaker.RealTimePredictor`` object. More information: http://docs.aws.amazon.com/sagemaker/latest/dg/how-it-works-training.html Args: initial_instance_count (int): Minimum number of EC2 instances to deploy to an endpoint for prediction. instance_type (str): Type of EC2 instance to deploy to an endpoint for prediction, for example, 'ml.c4.xlarge'. accelerator_type (str): Type of Elastic Inference accelerator to attach to an endpoint for model loading and inference, for example, 'ml.eia1.medium'. If not specified, no Elastic Inference accelerator will be attached to the endpoint. For more information: https://docs.aws.amazon.com/sagemaker/latest/dg/ei.html endpoint_name (str): Name to use for creating an Amazon SageMaker endpoint. If not specified, the name of the training job is used. use_compiled_model (bool): Flag to select whether to use compiled (optimized) model. Default: False. update_endpoint (bool): Flag to update the model in an existing Amazon SageMaker endpoint. If True, this will deploy a new EndpointConfig to an already existing endpoint and delete resources corresponding to the previous EndpointConfig. Default: False tags(List[dict[str, str]]): Optional. The list of tags to attach to this specific endpoint. Example: >>> tags = [{'Key': 'tagname', 'Value': 'tagvalue'}] For more information about tags, see https://boto3.amazonaws.com/v1/documentation\ /api/latest/reference/services/sagemaker.html#SageMaker.Client.add_tags **kwargs: Passed to invocation of ``create_model()``. Implementations may customize ``create_model()`` to accept ``**kwargs`` to customize model creation during deploy. For more, see the implementation docs. Returns: sagemaker.predictor.RealTimePredictor: A predictor that provides a ``predict()`` method, which can be used to send requests to the Amazon SageMaker endpoint and obtain inferences.
7,759
def _apply_advanced_config(config_spec, advanced_config, vm_extra_config=None):
    log.trace('Configuring advanced configuration parameters %s', advanced_config)
    if isinstance(advanced_config, str):
        raise salt.exceptions.ArgumentValueError(
            "The 'advanced_configs' parameter must be a dictionary")
    for key, value in six.iteritems(advanced_config):
        if vm_extra_config:
            for option in vm_extra_config:
                if option.key == key and option.value == str(value):
                    continue
        else:
            option = vim.option.OptionValue(key=key, value=value)
            config_spec.extraConfig.append(option)
Sets configuration parameters for the vm config_spec vm.ConfigSpec object advanced_config config key value pairs vm_extra_config Virtual machine vm_ref.config.extraConfig object
7,760
def render_image(self, rgbobj, dst_x, dst_y): pos = (0, 0) arr = self.viewer.getwin_array(order=self.rgb_order, alpha=1.0, dtype=np.uint8) self.gl_set_image(arr, pos)
Render the image represented by (rgbobj) at dst_x, dst_y in the pixel space.
7,761
def is_containerized() -> bool:
    try:
        cginfo = Path('/proc/self/cgroup').read_text()
        if 'docker' in cginfo or 'kubepods' in cginfo:
            return True
        return False
    except IOError:
        return False
Check if I am running inside a Linux container.
7,762
def format_row(self, row): assert all(isinstance(x, VTMLBuffer) for x in row) raw = (fn(x) for x, fn in zip(row, self.formatters)) for line in itertools.zip_longest(*raw): line = list(line) for i, col in enumerate(line): if col is None: line[i] = self._get_blank_cell(i) yield line
Apply overflow, justification and padding to a row. Returns lines (plural) of rendered text for the row.
7,763
def get_connections(self):
    path = Client.urls['all_connections']
    conns = self._call(path, 'GET')
    return conns
:returns: list of dicts, or an empty list if there are no connections.
7,764
def to_dict(self): return dict( variants=self.variants, distinct=self.distinct, sort_key=self.sort_key, sources=self.sources, source_to_metadata_dict=self.source_to_metadata_dict)
Since Collection.to_dict() returns a state dictionary with an 'elements' field we have to rename it to 'variants'.
7,765
def character_set(instance):
    char_re = re.compile(r'^[a-zA-Z0-9_\(\)-]+$')
    for key, obj in instance['objects'].items():
        if ('type' in obj and obj['type'] == 'file' and 'name_enc' in obj):
            if enums.char_sets():
                if obj['name_enc'] not in enums.char_sets():
                    yield JSONError("The 'name_enc' property of object '%s' "
                                    "('%s') must be an IANA registered "
                                    "character set." % (key, obj['name_enc']),
                                    instance['id'])
            else:
                info("Can't reach IANA website; using regex for character_set.")
                if not char_re.match(obj['name_enc']):
                    yield JSONError("The 'name_enc' property of object '%s' "
                                    "('%s') must be an IANA registered "
                                    "character set." % (key, obj['name_enc']),
                                    instance['id'])
Ensure certain properties of cyber observable objects come from the IANA Character Set list.
7,766
def _replace(self, feature, cursor): try: cursor.execute( constants._UPDATE, list(feature.astuple()) + [feature.id]) except sqlite3.ProgrammingError: cursor.execute( constants._INSERT, list(feature.astuple(self.default_encoding)) + [feature.id])
Insert a feature into the database.
7,767
def doQuery(self, url, method=, getParmeters=None, postParameters=None, files=None, extraHeaders={}, session={}): headers = {} if not postParameters: postParameters = {} for key, value in extraHeaders.iteritems(): if isinstance(value, basestring): headers[ + key] = value.encode() else: headers[ + key] = value for key, value in session.iteritems(): headers[ + key] = value if not in headers: headers[] = headers[] += key + + str(value) + if method == : if not files: r = requests.post(self.baseURI + + url, params=getParmeters, data=postParameters, stream=True, headers=headers) else: from poster.encode import multipart_encode, MultipartParam from poster.streaminghttp import register_openers import urllib2 import urllib register_openers() data = [] for x in postParameters: if isinstance(postParameters[x], list): for elem in postParameters[x]: data.append((x, elem)) else: data.append((x, postParameters[x])) for f in files: data.append((f, MultipartParam(f, fileobj=open(files[f].temporary_file_path(), ), filename=files[f].name))) datagen, headers_multi = multipart_encode(data) headers.update(headers_multi) if getParmeters: get_uri = + urllib.urlencode(getParmeters) else: get_uri = request = urllib2.Request(self.baseURI + + url + get_uri, datagen, headers) re = urllib2.urlopen(request) from requests import Response r = Response() r.status_code = re.getcode() r.headers = dict(re.info()) r.encoding = "application/json" r.raw = re.read() r._content = r.raw return r else: r = requests.request(method.upper(), self.baseURI + + url, params=getParmeters, stream=True, headers=headers, allow_redirects=True) return r
Send a request to the server and return the result
7,768
def add_dnc(self, obj_id, channel='email', reason=MANUAL, channel_id=None, comments=''):
    data = {
        'reason': reason,
        'channelId': channel_id,
        'comments': comments
    }
    response = self._client.session.post(
        '{url}/{id}/dnc/{channel}/add'.format(
            url=self.endpoint_url, id=obj_id, channel=channel
        ),
        data=data
    )
    return self.process_response(response)
Adds Do Not Contact :param obj_id: int :param channel: str :param reason: str :param channel_id: int :param comments: str :return: dict|str
7,769
def get_prinz_pot(nstep, x0=0., nskip=1, dt=0.01, kT=10.0, mass=1.0, damping=1.0):
    pw = PrinzModel(dt, kT, mass=mass, damping=damping)
    return pw.sample(x0, nstep, nskip=nskip)
r"""wrapper for the Prinz model generator
7,770
def fetch_table_names(self, include_system_table=False): self.check_connection() return self.schema_extractor.fetch_table_names(include_system_table)
:return: List of table names in the database. :rtype: list :raises simplesqlite.NullDatabaseConnectionError: |raises_check_connection| :raises simplesqlite.OperationalError: |raises_operational_error| :Sample Code: .. code:: python from simplesqlite import SimpleSQLite con = SimpleSQLite("sample.sqlite", "w") con.create_table_from_data_matrix( "hoge", ["attr_a", "attr_b"], [[1, "a"], [2, "b"]]) print(con.fetch_table_names()) :Output: .. code-block:: python ['hoge']
7,771
def recv(self, bufsiz, flags=None): buf = _no_zero_allocator("char[]", bufsiz) if flags is not None and flags & socket.MSG_PEEK: result = _lib.SSL_peek(self._ssl, buf, bufsiz) else: result = _lib.SSL_read(self._ssl, buf, bufsiz) self._raise_ssl_error(self._ssl, result) return _ffi.buffer(buf, result)[:]
Receive data on the connection. :param bufsiz: The maximum number of bytes to read :param flags: (optional) The only supported flag is ``MSG_PEEK``, all other flags are ignored. :return: The string read from the Connection
7,772
def p_const_expression_stringliteral(self, p): p[0] = StringConst(p[1], lineno=p.lineno(1)) p.set_lineno(0, p.lineno(1))
const_expression : stringliteral
7,773
def eventFilter(self, watchedObject, event): if self.comboBox.isEditable() and event.type() == QtCore.QEvent.KeyPress: key = event.key() if key in (Qt.Key_Delete, Qt.Key_Backspace): if (watchedObject == self._comboboxListView or (watchedObject == self.comboBox and event.modifiers() == Qt.ControlModifier)): index = self._comboboxListView.currentIndex() if index.isValid(): row = index.row() logger.debug("Removing item {} from the combobox: {}" .format(row, self._comboboxListView.model().data(index))) self.cti.removeValueByIndex(row) self.comboBox.removeItem(row) return True return super(ChoiceCtiEditor, self).eventFilter(watchedObject, event)
Deletes an item from an editable combobox when the delete or backspace key is pressed in the list of items, or when ctrl-delete or ctrl-back space is pressed in the line-edit. When the combobox is not editable the filter does nothing.
7,774
def insert(self, key, obj, future_expiration_minutes=15): expiration_time = self._calculate_expiration(future_expiration_minutes) self._CACHE[key] = (expiration_time, obj) return True
Insert item into cache. :param key: key to look up in cache. :type key: ``object`` :param obj: item to store in cache. :type obj: varies :param future_expiration_minutes: number of minutes item is valid :type param: ``int`` :returns: True :rtype: ``bool``
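The ``_calculate_expiration`` helper is not included in this row; a minimal sketch, assuming a wall-clock expiry computed from the current UTC time (the implementation below is an assumption, not the library's confirmed code):

import datetime

def _calculate_expiration(self, future_expiration_minutes):
    # Hypothetical helper: absolute expiry timestamp, N minutes from now.
    return datetime.datetime.utcnow() + datetime.timedelta(minutes=future_expiration_minutes)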
7,775
def present(name, vname=None, vdata=None, vtype='REG_SZ', use_32bit_registry=False, win_owner=None, win_perms=None, win_deny_perms=None, win_inheritance=True, win_perms_reset=False):
    ...
    ret = __utils__['dacl.check_perms'](
        obj_name='\\'.join([hive, key]),
        obj_type='registry32' if use_32bit_registry else 'registry',
        ret=ret,
        owner=win_owner,
        grant_perms=win_perms,
        deny_perms=win_deny_perms,
        inheritance=win_inheritance,
        reset=win_perms_reset)
    return ret
r''' Ensure a registry key or value is present. Args: name (str): A string value representing the full path of the key to include the HIVE, Key, and all Subkeys. For example: ``HKEY_LOCAL_MACHINE\\SOFTWARE\\Salt`` Valid hive values include: - HKEY_CURRENT_USER or HKCU - HKEY_LOCAL_MACHINE or HKLM - HKEY_USERS or HKU vname (str): The name of the value you'd like to create beneath the Key. If this parameter is not passed it will assume you want to set the ``(Default)`` value vdata (str, int, list, bytes): The value you'd like to set. If a value name (``vname``) is passed, this will be the data for that value name. If not, this will be the ``(Default)`` value for the key. The type of data this parameter expects is determined by the value type specified in ``vtype``. The correspondence is as follows: - REG_BINARY: Binary data (str in Py2, bytes in Py3) - REG_DWORD: int - REG_EXPAND_SZ: str - REG_MULTI_SZ: list of str - REG_QWORD: int - REG_SZ: str .. note:: When setting REG_BINARY, string data will be converted to binary automatically. To pass binary data, use the built-in yaml tag ``!!binary`` to denote the actual binary characters. For example, the following lines will both set the same data in the registry: - ``vdata: Salty Test`` - ``vdata: !!binary U2FsdHkgVGVzdA==\n`` For more information about the ``!!binary`` tag see `here <http://yaml.org/type/binary.html>`_ .. note:: The type for the ``(Default)`` value is always REG_SZ and cannot be changed. This parameter is optional. If not passed, the Key will be created with no associated item/value pairs. vtype (str): The value type for the data you wish to store in the registry. Valid values are: - REG_BINARY - REG_DWORD - REG_EXPAND_SZ - REG_MULTI_SZ - REG_QWORD - REG_SZ (Default) use_32bit_registry (bool): Use the 32bit portion of the registry. Applies only to 64bit windows. 32bit Windows will ignore this parameter. Default is False. win_owner (str): The owner of the registry key. If this is not passed, the account under which Salt is running will be used. .. note:: Owner is set for the key that contains the value/data pair. You cannot set ownership on value/data pairs themselves. .. versionadded:: 2019.2.0 win_perms (dict): A dictionary containing permissions to grant and their propagation. If not passed the 'Grant` permissions will not be modified. .. note:: Permissions are set for the key that contains the value/data pair. You cannot set permissions on value/data pairs themselves. For each user specify the account name, with a sub dict for the permissions to grant and the 'Applies to' setting. For example: ``{'Administrators': {'perms': 'full_control', 'applies_to': 'this_key_subkeys'}}``. ``perms`` must be specified. Registry permissions are specified using the ``perms`` key. You can specify a single basic permission or a list of advanced perms. The following are valid perms: Basic (passed as a string): - full_control - read - write Advanced (passed as a list): - delete - query_value - set_value - create_subkey - enum_subkeys - notify - create_link - read_control - write_dac - write_owner The 'Applies to' setting is optional. It is specified using the ``applies_to`` key. If not specified ``this_key_subkeys`` is used. Valid options are: Applies to settings: - this_key_only - this_key_subkeys - subkeys_only .. versionadded:: 2019.2.0 win_deny_perms (dict): A dictionary containing permissions to deny and their propagation. If not passed the `Deny` permissions will not be modified. .. 
note:: Permissions are set for the key that contains the value/data pair. You cannot set permissions on value/data pairs themselves. Valid options are the same as those specified in ``win_perms`` .. note:: 'Deny' permissions always take precedence over 'grant' permissions. .. versionadded:: 2019.2.0 win_inheritance (bool): ``True`` to inherit permissions from the parent key. ``False`` to disable inheritance. Default is ``True``. .. note:: Inheritance is set for the key that contains the value/data pair. You cannot set inheritance on value/data pairs themselves. .. versionadded:: 2019.2.0 win_perms_reset (bool): If ``True`` the existing DACL will be cleared and replaced with the settings defined in this function. If ``False``, new entries will be appended to the existing DACL. Default is ``False`` .. note:: Perms are reset for the key that contains the value/data pair. You cannot set permissions on value/data pairs themselves. .. versionadded:: 2019.2.0 Returns: dict: A dictionary showing the results of the registry operation. Example: The following example will set the ``(Default)`` value for the ``SOFTWARE\\Salt`` key in the ``HKEY_CURRENT_USER`` hive to ``2016.3.1``: .. code-block:: yaml HKEY_CURRENT_USER\\SOFTWARE\\Salt: reg.present: - vdata: 2016.3.1 Example: The following example will set the value for the ``version`` entry under the ``SOFTWARE\\Salt`` key in the ``HKEY_CURRENT_USER`` hive to ``2016.3.1``. The value will be reflected in ``Wow6432Node``: .. code-block:: yaml HKEY_CURRENT_USER\\SOFTWARE\\Salt: reg.present: - vname: version - vdata: 2016.3.1 In the above example the path is interpreted as follows: - ``HKEY_CURRENT_USER`` is the hive - ``SOFTWARE\\Salt`` is the key - ``vname`` is the value name ('version') that will be created under the key - ``vdata`` is the data that will be assigned to 'version' Example: Binary data can be set in two ways. The following two examples will set a binary value of ``Salty Test`` .. code-block:: yaml no_conversion: reg.present: - name: HKLM\SOFTWARE\SaltTesting - vname: test_reg_binary_state - vdata: Salty Test - vtype: REG_BINARY conversion: reg.present: - name: HKLM\SOFTWARE\SaltTesting - vname: test_reg_binary_state_with_tag - vdata: !!binary U2FsdHkgVGVzdA==\n - vtype: REG_BINARY Example: To set a ``REG_MULTI_SZ`` value: .. code-block:: yaml reg_multi_sz: reg.present: - name: HKLM\SOFTWARE\Salt - vname: reg_multi_sz - vdata: - list item 1 - list item 2 Example: To ensure a key is present and has permissions: .. code-block:: yaml set_key_permissions: reg.present: - name: HKLM\SOFTWARE\Salt - vname: version - vdata: 2016.3.1 - win_owner: Administrators - win_perms: jsnuffy: perms: full_control sjones: perms: - read_control - enum_subkeys - query_value applies_to: - this_key_only - win_deny_perms: bsimpson: perms: full_control applies_to: this_key_subkeys - win_inheritance: True - win_perms_reset: True
7,776
def generate_scalar_constant(output_name, tensor_name, scalar): t = onnx.helper.make_tensor(tensor_name, data_type=TensorProto.FLOAT, dims=[1], vals=[scalar]) c = onnx.helper.make_node("Constant", [], [output_name], value=t) return c
Convert a scalar value to a Constant buffer. This is mainly used for xxScalar operators.
7,777
def binaryEntropy(x): entropy = - x*x.log2() - (1-x)*(1-x).log2() entropy[x*(1 - x) == 0] = 0 return entropy, entropy.sum()
Calculate entropy for a list of binary random variables :param x: (torch tensor) the probability of the variable to be 1. :return: entropy: (torch tensor) entropy, sum(entropy)
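A quick check of the helper on a few probabilities (PyTorch):

import torch
p = torch.tensor([0.0, 0.25, 0.5, 1.0])
per_var, total = binaryEntropy(p)
# per_var -> tensor([0.0000, 0.8113, 1.0000, 0.0000]); total -> tensor(1.8113)
# the mask zeroes the NaN produced by 0*log2(0) at p = 0 and p = 1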
7,778
def copy(self, dest, symlinks=False):
    if isinstance(dest, Directory):
        dest = dest.get_name()
    shutil.copytree(self.dirname, dest, symlinks)
Copy to destination directory recursively. If symlinks is true, symbolic links in the source tree are represented as symbolic links in the new tree, but the metadata of the original links is NOT copied; if false or omitted, the contents and metadata of the linked files are copied to the new tree.
7,779
def _l2rgb(self, mode): self._check_modes(("L", "LA")) self.channels.append(self.channels[0].copy()) self.channels.append(self.channels[0].copy()) if self.fill_value is not None: self.fill_value = self.fill_value[:1] * 3 + self.fill_value[1:] if self.mode == "LA": self.channels[1], self.channels[3] = \ self.channels[3], self.channels[1] self.mode = mode
Convert from L (black and white) to RGB.
7,780
def filtany(entities, **kw): ret = set() for k,v in kw.items(): for entity in entities: if getattr(entity, k)() == v: ret.add(entity) return ret
Filter a set of entities based on method return. Use keyword arguments. Example: filtany(entities, id='123') filtany(entities, name='bart') Multiple filters are 'OR'.
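Because the filter calls each attribute as a method (``getattr(entity, k)()``), entities need callable accessors; a minimal illustration with a hypothetical ``Stop`` class:

class Stop(object):
    def __init__(self, stop_id):
        self._id = stop_id
    def id(self):
        return self._id

stops = {Stop('123'), Stop('456')}
matches = filtany(stops, id='123')  # set containing only the Stop whose id() is '123'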
7,781
def objwalk(obj, path=(), memo=None):
    if len(path) > MAX_DEPTH + 1:
        yield path, obj
    if memo is None:
        memo = set()
    iterator = None
    if isinstance(obj, Mapping):
        iterator = iteritems
    elif isinstance(obj, (Sequence, Set)) and not isinstance(obj, string_types):
        iterator = enumerate
    elif hasattr(obj, '__class__') and hasattr(obj, '__dict__') and type(obj) not in primitives:
        iterator = class_iterator
    elif hasattr(obj, '__iter__') or isinstance(obj, types.GeneratorType):
        obj = [o for o in obj]
    else:
        pass
    if iterator:
        if id(obj) not in memo:
            memo.add(id(obj))
            for path_component, value in iterator(obj):
                for result in objwalk(value, path + (path_component,), memo):
                    yield result
            memo.remove(id(obj))
    else:
        yield path, obj
Walks an arbitrary Python object. :param mixed obj: Any python object :param tuple path: A tuple of the set attributes representing the path to the value :param set memo: The list of attributes traversed thus far :rtype <tuple<tuple>, <mixed>>: The path to the value on the object, the value.
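Walking a small nested structure with the reconstruction above yields (path, leaf) pairs; this sketch assumes the module-level helpers referenced by objwalk (iteritems, primitives, class_iterator, MAX_DEPTH) are available:

data = {'a': [1, 2], 'b': {'c': 3}}
print(sorted(objwalk(data)))
# [(('a', 0), 1), (('a', 1), 2), (('b', 'c'), 3)]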
7,782
def nb_r_deriv(r, data_row): n = len(data_row) d = sum(digamma(data_row + r)) - n*digamma(r) + n*np.log(r/(r+np.mean(data_row))) return d
Derivative of log-likelihood wrt r (formula from wikipedia) Args: r (float): the r parameter in the NB distribution data_row (array): 1d array of length cells
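Evaluating the derivative directly; assumes numpy and scipy, which the snippet references via digamma:

import numpy as np
from scipy.special import digamma  # noqa: F401 (used inside nb_r_deriv)

counts = np.array([3.0, 0.0, 5.0, 2.0, 1.0])
# The maximum-likelihood estimate of r is the root of this derivative,
# so a sign change between two candidate values brackets the optimum.
print(nb_r_deriv(1.0, counts), nb_r_deriv(20.0, counts))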
7,783
def delete_webhook(self):
    result = self.do("deleteWebhook")
    if self.return_python_objects:
        logger.debug("Trying to parse {data}".format(data=repr(result)))
        try:
            return from_array_list(bool, result, list_level=0, is_builtin=True)
        except TgApiParseException:
            logger.debug("Failed parsing as primitive bool", exc_info=True)
        raise TgApiParseException("Could not parse result.")
    return result
Use this method to remove webhook integration if you decide to switch back to getUpdates. Returns True on success. Requires no parameters. https://core.telegram.org/bots/api#deletewebhook Returns: :return: Returns True on success :rtype: bool
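Hypothetical usage against an instantiated client; the class this method belongs to is not shown in the snippet:

if bot.delete_webhook():  # `bot` is an assumed instance name
    print("Webhook removed; the bot can poll getUpdates again.")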
7,784
def generate_report(
        self,
        components,
        output_folder=None,
        iface=None,
        ordered_layers_uri=None,
        legend_layers_uri=None,
        use_template_extent=False):
    if not iface:
        iface = iface_object
    # NOTE: the bulk of the report-generation body is missing from the source
    # snippet; `filename`, `error_code` and `message` are defined in the
    # elided part.
    write_json(report_urls(self), filename)
    return error_code, message
Generate Impact Report independently by the Impact Function.

:param components: Report components to be generated.
:type components: list

:param output_folder: The output folder.
:type output_folder: str

:param iface: A QGIS app interface.
:type iface: QgsInterface

:param ordered_layers_uri: A list of layer URIs for the map.
:type ordered_layers_uri: list

:param legend_layers_uri: A list of layer URIs for the map legend.
:type legend_layers_uri: list

:param use_template_extent: A condition for using template extent.
:type use_template_extent: bool

:returns: Tuple of error code and message
:rtype: tuple

.. versionadded:: 4.3
7,785
def literal_eval(node_or_string):
    # NOTE: the dictionary keys and string literals below were stripped in the
    # source; they are reconstructed from the mapped values.
    _safe_names = {'None': None, 'True': True, 'False': False,
                   'dict': dict, 'list': list, 'sorted': sorted}
    if isinstance(node_or_string, basestring):
        node_or_string = parse(node_or_string, mode='eval')
    if isinstance(node_or_string, ast.Expression):
        node_or_string = node_or_string.body

    def _convert(node):
        if isinstance(node, ast.Str):
            return node.s
        elif isinstance(node, ast.Num):
            return node.n
        elif isinstance(node, ast.Tuple):
            return tuple(map(_convert, node.elts))
        elif isinstance(node, ast.List):
            return list(map(_convert, node.elts))
        elif isinstance(node, ast.Dict):
            return dict((_convert(k), _convert(v))
                        for k, v in zip(node.keys, node.values))
        elif isinstance(node, ast.Name):
            if node.id in _safe_names:
                return _safe_names[node.id]
        elif isinstance(node, ast.BinOp):
            left = _convert(node.left)
            right = _convert(node.right)
            op = {ast.Add: operator.add,
                  ast.Sub: operator.sub,
                  ast.Mult: operator.mul,
                  ast.Div: operator.div,  # Python 2, consistent with `basestring` above
                  ast.Mod: operator.mod}.get(type(node.op), None)
            if op:
                return op(left, right)
        elif isinstance(node, ast.Call):
            func = _convert(node.func)
            args = map(_convert, node.args)
            kwargs = dict((kw.arg, _convert(kw.value)) for kw in node.keywords)
            if node.starargs:
                args.extend(_convert(node.starargs))
            if node.kwargs:
                kwargs.update(_convert(node.kwargs))
            return func(*args, **kwargs)
        elif isinstance(node, ast.Attribute):
            # Assumed guard: block access to private attributes.
            if not node.attr.startswith('_'):
                return getattr(_convert(node.value), node.attr)
        raise ValueError('malformed or unsafe expression: %r' % node)
    return _convert(node_or_string)
Safely evaluate an expression node or a string containing a Python expression. The string or node provided may only consist of the following Python literal structures: strings, numbers, tuples, lists, dicts, booleans, and None. Unlike the stdlib ast.literal_eval, this variant additionally accepts simple binary arithmetic, calls to a small whitelist of names (dict, list, sorted), and non-private attribute access.
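With the reconstructed literals above, this Python 2 variant (note basestring, operator.div and node.starargs) behaves like the stdlib function plus the documented extensions:

print(literal_eval("[1, (2, 3), {'k': None}]"))  # [1, (2, 3), {'k': None}]
print(literal_eval("1 + 2 * 3"))                 # 7, via the BinOp branch
print(literal_eval("sorted([3, 1, 2])"))         # [1, 2, 3], whitelisted call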
7,786
def _apply_odf_properties(df, headers, model): df.headers = headers df.model = model
Attach properties to the Dataframe to carry along ODF metadata :param df: The dataframe to be modified :param headers: The ODF header lines :param model: The ODF model type
7,787
def get_times_from_utterance(utterance: str,
                             char_offset_to_token_index: Dict[int, int],
                             indices_of_approximate_words: Set[int]) -> Dict[str, List[int]]:
    # NOTE: the regex and string literals below were stripped in the source;
    # these are plausible reconstructions.
    pm_linking_dict = _time_regex_match(r'\d+pm',
                                        utterance,
                                        char_offset_to_token_index,
                                        pm_map_match_to_query_value,
                                        indices_of_approximate_words)
    am_linking_dict = _time_regex_match(r'\d+am',
                                        utterance,
                                        char_offset_to_token_index,
                                        am_map_match_to_query_value,
                                        indices_of_approximate_words)
    oclock_linking_dict = _time_regex_match(r"\d+ o'clock",
                                            utterance,
                                            char_offset_to_token_index,
                                            lambda match: [int(match.rstrip(" o'clock"))],
                                            indices_of_approximate_words)
    hours_linking_dict = _time_regex_match(r"\d+ hours",
                                           utterance,
                                           char_offset_to_token_index,
                                           lambda match: [int(match.rstrip(" hours"))],
                                           indices_of_approximate_words)

    times_linking_dict: Dict[str, List[int]] = defaultdict(list)
    linking_dicts = [pm_linking_dict, am_linking_dict,
                     oclock_linking_dict, hours_linking_dict]
    for linking_dict in linking_dicts:
        for key, value in linking_dict.items():
            times_linking_dict[key].extend(value)
    return times_linking_dict
Given an utterance, we get the numbers that correspond to times and convert them to values that may appear in the query. For example: convert ``7pm`` to ``1900``.
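The pm_map_match_to_query_value helper is referenced but not included in the snippet; a plausible sketch consistent with the 7pm -> 1900 example in the docstring (the helper's behavior here is an assumption):

from typing import List

def pm_map_match_to_query_value(match: str) -> List[int]:
    # '7pm' -> [1900]; '12pm' -> [1200] (hypothetical reconstruction)
    hour = int(match.rstrip('pm'))
    return [hour * 100] if hour == 12 else [hour * 100 + 1200]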
7,788
def changiling(self, infile):
    gf = infile[31:]  # keep the first 31 items untouched (a header, presumably)
    baby, fetch = (self.word_toaster() for _ in range(2))  # two values from word_toaster
    gf = [g.replace(baby, fetch) for g in gf]  # swap every occurrence of one for the other
    return infile[:31] + gf
Changiling: replace arbitrary byte characters with other arbitrary byte characters.
7,789
def patch_project(self, owner, id, **kwargs):
    # NOTE: the key names below were stripped in the source; they follow the
    # usual swagger-codegen pattern for sync/async dispatch.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.patch_project_with_http_info(owner, id, **kwargs)
    else:
        (data) = self.patch_project_with_http_info(owner, id, **kwargs)
        return data
Update a project Update an existing project. Note that only elements, files or linked datasets included in the request will be updated. All omitted elements, files or linked datasets will remain untouched. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.patch_project(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a project. For example, in the URL: [https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), government is the unique identifier of the owner. (required) :param str id: Project unique identifier. For example, in the URL:[https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs](https://data.world/government/how-to-add-depth-to-your-data-with-the-us-census-acs), how-to-add-depth-to-your-data-with-the-us-census-acs is the unique identifier of the project. (required) :param ProjectPatchRequest body: :return: SuccessMessage If the method is called asynchronously, returns the request thread.
7,790
def findScopedPar(theDict, scope, name): if len(scope): theDict = theDict[scope] return theDict, theDict[name]
Find the given par. Return tuple: (its own (sub-)dict, its value).
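For example; an empty scope string falls through to the top-level dict:

pars = {'outer': {'x': 1}, 'z': 3}
sub, val = findScopedPar(pars, 'outer', 'x')  # ({'x': 1}, 1)
top, z = findScopedPar(pars, '', 'z')         # (pars, 3)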
7,791
def trace(self, name, chain=-1): trace = copy.copy(self._traces[name]) trace._chain = chain return trace
Return the trace of a tallyable object stored in the database. :Parameters: name : string The name of the tallyable object. chain : int The trace index. Setting `chain=i` will return the trace created by the ith call to `sample`.
7,792
def get_line(thing):
    try:
        return inspect.getsourcelines(thing)[1]
    except TypeError:
        # Properties are not directly inspectable; fall back to their getter.
        return inspect.getsourcelines(thing.fget)[1]
Get the line number for something. Parameters ---------- thing : function, class, module Returns ------- int Line number in the source file
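For example, assuming inspect is imported in the defining module; the property case takes the TypeError/fget fallback path:

import inspect  # required by get_line

class Widget(object):
    @property
    def size(self):
        return 0

print(get_line(Widget.size))  # line number of the `size` definition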
7,793
def _process(self, project, build_system, job_priorities):
    jobs = []
    # NOTE: the cache-key format string was stripped in the source; this is a
    # plausible reconstruction.
    cache_key = '{}-{}-ref_data_names_cache'.format(project, build_system)
    ref_data_names_map = cache.get(cache_key)
    if not ref_data_names_map:
        ref_data_names_map = self._build_ref_data_names(project, build_system)
        cache.set(cache_key, ref_data_names_map, SETA_REF_DATA_NAMES_CACHE_TIMEOUT)
    for jp in job_priorities:
        if not valid_platform(jp.platform):
            continue
        if is_job_blacklisted(jp.testtype):
            continue
        key = jp.unique_identifier()
        if key in ref_data_names_map:
            jobs.append(ref_data_names_map[key])
        else:
            # Message text reconstructed; the original literal was stripped.
            logger.warning('Job priority %s not found in accepted jobs list', jp)
    return jobs
Return list of ref_data_name for job_priorities
7,794
def vote_count(self):
    # NOTE: the aggregated field name was stripped in the source; 'score' is
    # an assumed reconstruction.
    return Vote.objects.filter(
        content_type=ContentType.objects.get_for_model(self),
        object_id=self.id
    ).aggregate(Sum('score'))['score__sum'] or 0
Returns the total number of votes cast for this poll option.
7,795
def get_symbol_train(network, num_classes, from_layers, num_filters, strides,
                     pads, sizes, ratios, normalizations=-1, steps=[],
                     min_filter=128, nms_thresh=0.5, force_suppress=False,
                     nms_topk=400, **kwargs):
    # NOTE: the variable name and the two `normalization=` literals below were
    # stripped in the source; 'label' and 'valid' follow the MXNet SSD example.
    label = mx.sym.Variable('label')
    body = import_module(network).get_symbol(num_classes, **kwargs)
    layers = multi_layer_feature(body, from_layers, num_filters, strides, pads,
                                 min_filter=min_filter)

    loc_preds, cls_preds, anchor_boxes = multibox_layer(
        layers, num_classes, sizes=sizes, ratios=ratios,
        normalization=normalizations, num_channels=num_filters, clip=False,
        interm_layer=0, steps=steps)

    tmp = mx.symbol.contrib.MultiBoxTarget(
        *[anchor_boxes, label, cls_preds], overlap_threshold=.5,
        ignore_label=-1, negative_mining_ratio=3, minimum_negative_samples=0,
        negative_mining_thresh=.5, variances=(0.1, 0.1, 0.2, 0.2),
        name="multibox_target")
    loc_target = tmp[0]
    loc_target_mask = tmp[1]
    cls_target = tmp[2]

    cls_prob = mx.symbol.SoftmaxOutput(
        data=cls_preds, label=cls_target, ignore_label=-1, use_ignore=True,
        grad_scale=1., multi_output=True, normalization='valid',
        name="cls_prob")
    loc_loss_ = mx.symbol.smooth_l1(
        name="loc_loss_", data=loc_target_mask * (loc_preds - loc_target),
        scalar=1.0)
    loc_loss = mx.symbol.MakeLoss(loc_loss_, grad_scale=1.,
                                  normalization='valid', name="loc_loss")

    cls_label = mx.symbol.MakeLoss(data=cls_target, grad_scale=0,
                                   name="cls_label")
    det = mx.symbol.contrib.MultiBoxDetection(
        *[cls_prob, loc_preds, anchor_boxes], name="detection",
        nms_threshold=nms_thresh, force_suppress=force_suppress,
        variances=(0.1, 0.1, 0.2, 0.2), nms_topk=nms_topk)
    det = mx.symbol.MakeLoss(data=det, grad_scale=0, name="det_out")

    out = mx.symbol.Group([cls_prob, loc_loss, cls_label, det])
    return out
Build network symbol for training SSD

Parameters
----------
network : str
    base network symbol name
num_classes : int
    number of object classes not including background
from_layers : list of str
    feature extraction layers, use '' to add extra layers
    For example:
    from_layers = ['relu4_3', 'fc7', '', '', '', '']
    which means extract feature from relu4_3 and fc7, adding 4 extra layers
    on top of fc7
num_filters : list of int
    number of filters for extra layers, you can use -1 for extracted features,
    however, if normalization and scale is applied, the number of filters for
    that layer must be provided.
    For example:
    num_filters = [512, -1, 512, 256, 256, 256]
strides : list of int
    strides for the 3x3 convolution appended, -1 can be used for extracted
    feature layers
pads : list of int
    paddings for the 3x3 convolution, -1 can be used for extracted layers
sizes : list or list of list
    [min_size, max_size] for all layers or [[], [], []...] for specific layers
ratios : list or list of list
    [ratio1, ratio2...] for all layers or [[], [], ...] for specific layers
normalizations : int or list of int
    use normalizations value for all layers or [...] for specific layers,
    -1 indicates no normalization and scale
steps : list
    specify steps for each MultiBoxPrior layer; leave empty and it will be
    calculated according to layer dimensions
min_filter : int
    minimum number of filters used in 1x1 convolution
nms_thresh : float
    non-maximum suppression threshold
force_suppress : boolean
    whether to suppress objects of different classes
nms_topk : int
    apply NMS to top K detections

Returns
-------
mx.Symbol
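A hypothetical invocation mirroring the parameter docs above; the values are illustrative, in the style of the MXNet SSD VGG16 example config, not taken from the snippet:

net = get_symbol_train(
    'vgg16_reduced', num_classes=20,
    from_layers=['relu4_3', 'fc7', '', '', '', ''],
    num_filters=[512, -1, 512, 256, 256, 256],
    strides=[-1, -1, 2, 2, 2, 2],
    pads=[-1, -1, 1, 1, 1, 1],
    sizes=[[0.1, 0.141], [0.2, 0.272], [0.37, 0.447],
           [0.54, 0.619], [0.71, 0.79], [0.88, 0.961]],
    ratios=[[1, 2, 0.5]] * 6,
    normalizations=[20, -1, -1, -1, -1, -1])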
7,796
def average_patterson_f3(acc, aca, acb, blen, normed=True): T, B = patterson_f3(acc, aca, acb) if normed: f3 = np.nansum(T) / np.nansum(B) else: f3 = np.nanmean(T) if normed: T_bsum = moving_statistic(T, statistic=np.nansum, size=blen) B_bsum = moving_statistic(B, statistic=np.nansum, size=blen) vb = T_bsum / B_bsum _, se, vj = jackknife((T_bsum, B_bsum), statistic=lambda t, b: np.sum(t) / np.sum(b)) else: vb = moving_statistic(T, statistic=np.nanmean, size=blen) _, se, vj = jackknife(vb, statistic=np.mean) z = f3 / se return f3, se, z, vb, vj
Estimate F3(C; A, B) and standard error using the block-jackknife. Parameters ---------- acc : array_like, int, shape (n_variants, 2) Allele counts for the test population (C). aca : array_like, int, shape (n_variants, 2) Allele counts for the first source population (A). acb : array_like, int, shape (n_variants, 2) Allele counts for the second source population (B). blen : int Block size (number of variants). normed : bool, optional If False, use un-normalised f3 values. Returns ------- f3 : float Estimated value of the statistic using all data. se : float Estimated standard error. z : float Z-score (number of standard errors from zero). vb : ndarray, float, shape (n_blocks,) Value of the statistic in each block. vj : ndarray, float, shape (n_blocks,) Values of the statistic from block-jackknife resampling. Notes ----- See Patterson (2012), main text and Appendix A. See Also -------- allel.stats.admixture.patterson_f3
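A toy call with synthetic allele counts; this assumes the scikit-allel helpers the snippet references (patterson_f3, moving_statistic, jackknife) are importable alongside it:

import numpy as np

acc = np.random.randint(0, 10, size=(100, 2))
aca = np.random.randint(0, 10, size=(100, 2))
acb = np.random.randint(0, 10, size=(100, 2))
f3, se, z, vb, vj = average_patterson_f3(acc, aca, acb, blen=10)
print(f3, se, z)  # |z| >> 2 would indicate f3 significantly non-zero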
7,797
def plot_entropy(self, tmin, tmax, ntemp, ylim=None, **kwargs): temperatures = np.linspace(tmin, tmax, ntemp) if self.structure: ylabel = r"$S$ (J/K/mol)" else: ylabel = r"$S$ (J/K/mol-c)" fig = self._plot_thermo(self.dos.entropy, temperatures, ylabel=ylabel, ylim=ylim, **kwargs) return fig
Plots the vibrational entropy in a temperature range. Args: tmin: minimum temperature tmax: maximum temperature ntemp: number of steps ylim: tuple specifying the y-axis limits. kwargs: kwargs passed to the matplotlib function 'plot'. Returns: matplotlib figure
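Hypothetical usage on a plotter instance exposing this method (pymatgen-style; `plotter` wrapping a phonon DOS is an assumption):

fig = plotter.plot_entropy(tmin=0, tmax=1000, ntemp=100)
fig.savefig('entropy.png')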
7,798
def nl_send_iovec(sk, msg, iov, _): hdr = msghdr(msg_name=sk.s_peer, msg_iov=iov) dst = nlmsg_get_dst(msg) if dst.nl_family == socket.AF_NETLINK: hdr.msg_name = dst creds = nlmsg_get_creds(msg) if creds: raise NotImplementedError return nl_sendmsg(sk, msg, hdr)
Transmit Netlink message. https://github.com/thom311/libnl/blob/libnl3_2_25/lib/nl.c#L342 This function is identical to nl_send(). This function triggers the `NL_CB_MSG_OUT` callback. Positional arguments: sk -- Netlink socket (nl_sock class instance). msg -- Netlink message (nl_msg class instance). iov -- data payload to be sent (bytearray). Returns: Number of bytes sent on success or a negative error code.
7,799
def bucket_exists(self, bucket_name):
    is_valid_bucket_name(bucket_name)
    try:
        # NOTE: the HTTP method literal was stripped in the source; a HEAD
        # request is the conventional bucket-existence check.
        self._url_open('HEAD', bucket_name=bucket_name)
    except NoSuchBucket:
        return False
    except ResponseError:
        raise
    return True
Check if the bucket exists and if the user has access to it. :param bucket_name: To test the existence and user access. :return: True on success.
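Hypothetical usage with a MinIO-style client instance; make_bucket is assumed from the same API:

if not client.bucket_exists('my-bucket'):
    client.make_bucket('my-bucket')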