Dataset columns:
- Unnamed: 0 (int64): row index, 0 to 389k
- code (string): lengths 26 to 79.6k
- docstring (string): lengths 1 to 46.9k
382,600
def val_to_edge(edges, x):
    edges = np.array(edges)
    w = edges[1:] - edges[:-1]
    w = np.insert(w, 0, w[0])
    ibin = np.digitize(np.array(x, ndmin=1), edges - 0.5 * w) - 1
    ibin[ibin < 0] = 0
    return ibin
Convert axis coordinate to bin index.
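A minimal usage sketch (assuming numpy is imported as np and the function above is in scope): for edges [0, 1, 2, 3] the shifted bin boundaries are [-0.5, 0.5, 1.5, 2.5], so a coordinate of 1.2 lands in bin 1 and out-of-range values are clipped to bin 0.

edges = [0.0, 1.0, 2.0, 3.0]
val_to_edge(edges, 1.2)    # -> array([1])
val_to_edge(edges, -5.0)   # clipped to the first bin -> array([0])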
382,601
def propose_live(self):
    i = self.rstate.randint(self.nlive)
    u = self.live_u[i, :]
    ell_idxs = self.mell.within(u)
    nidx = len(ell_idxs)
    if nidx > 0:
        # pick a random ellipsoid that the point lies within
        ell_idx = ell_idxs[self.rstate.randint(nidx)]
        ax = self.mell.ells[ell_idx].paxes
    else:
        ax = np.identity(self.npdim)
    return u, ax
Return a live point/axes to be used by other sampling methods.
382,602
def createNoiseExperimentArgs():
    experimentArguments = []
    n = 6000
    for a in [128]:
        noisePct = 0.75
        while noisePct <= 0.85:
            noise = int(round(noisePct * a, 0))
            experimentArguments.append(
                ("./sdr_calculations2",
                 "results_noise_10m/temp_" + str(n) + "_" + str(a) + "_" + str(noise) + "_30.csv",
                 "200000", str(n), str(a), str(noise))
            )
            noisePct += 0.05
    return experimentArguments
Run the probability of false negatives with noise experiment.
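As a sanity check (hypothetical call, assuming the snippet above is importable as-is), the first tuple produced corresponds to noisePct = 0.75, i.e. noise = round(0.75 * 128) = 96:

createNoiseExperimentArgs()[0]
# -> ('./sdr_calculations2', 'results_noise_10m/temp_6000_128_96_30.csv',
#     '200000', '6000', '128', '96')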
382,603
def grey_erosion(image, radius=None, mask=None, footprint=None):
    if footprint is None:
        if radius is None:
            footprint = np.ones((3, 3), bool)
            radius = 1
        else:
            footprint = strel_disk(radius) == 1
    else:
        radius = max(1, np.max(np.array(footprint.shape) // 2))
    iradius = int(np.ceil(radius))
    # pad the image so the erosion has valid context at the borders
    big_image = np.ones(np.array(image.shape) + iradius * 2)
    big_image[iradius:-iradius, iradius:-iradius] = image
    if mask is not None:
        not_mask = np.logical_not(mask)
        big_image[iradius:-iradius, iradius:-iradius][not_mask] = 1
    processed_image = scind.grey_erosion(big_image, footprint=footprint)
    final_image = processed_image[iradius:-iradius, iradius:-iradius]
    if mask is not None:
        final_image[not_mask] = image[not_mask]
    return final_image
Perform a grey erosion with masking
382,604
def _dispatch_call_args(cls=None, bound_call=None, unbound_call=None, attr='_call'):
    py3 = (sys.version_info.major > 2)
    specs = ['_call(self, x)',
             '_call(self, x, out)',
             '_call(self, x, out=None)']
    if py3:
        specs += ['_call(self, x, *, out=None)']
    spec_msg = "\nPossible signatures are ('*' means optional):\n\n"
    spec_msg += '\n'.join(specs)
    spec_msg += '\n\nStatic or class methods are not allowed.'
    if sum(arg is not None for arg in (cls, bound_call, unbound_call)) != 1:
        raise ValueError('exactly one of cls, bound_call or unbound_call '
                         'must be given')
    if cls is not None:
        # walk the MRO to find the first definition of the attribute
        for parent in cls.mro():
            call = parent.__dict__.get(attr, None)
            if call is not None:
                break
        if isinstance(call, staticmethod):
            raise TypeError("'{}.{}' is a static method. "
                            "".format(cls.__name__, attr) + spec_msg)
        elif isinstance(call, classmethod):
            raise TypeError("'{}.{}' is a class method. "
                            "".format(cls.__name__, attr) + spec_msg)
    elif bound_call is not None:
        call = bound_call
        if not inspect.ismethod(call):
            raise TypeError('{} is not a bound method'.format(call))
    else:
        call = unbound_call
    if py3:
        spec = inspect.getfullargspec(call)
        kw_only = spec.kwonlyargs
        kw_only_defaults = spec.kwonlydefaults
    else:
        spec = inspect.getargspec(call)
        kw_only = ()
        kw_only_defaults = {}
    signature = _function_signature(call)
    pos_args = spec.args
    if unbound_call is not None:
        pos_args.insert(0, 'self')
    pos_defaults = spec.defaults
    varargs = spec.varargs
    if varargs is not None:
        raise ValueError("bad signature '{}': variable arguments not allowed"
                         "".format(signature) + spec_msg)
    if len(pos_args) not in (2, 3):
        raise ValueError("bad signature '{}'".format(signature) + spec_msg)
    true_pos_args = pos_args[1:]
    if len(true_pos_args) == 1:
        if 'out' in true_pos_args:
            raise ValueError("bad signature '{}': `out` cannot be the only "
                             "positional argument"
                             "".format(signature) + spec_msg)
        else:
            if 'out' not in kw_only:
                has_out = out_optional = False
            elif kw_only_defaults['out'] is not None:
                raise ValueError(
                    "bad signature '{}': `out` can only default to "
                    "`None`, got '{}'"
                    "".format(signature, kw_only_defaults['out']) + spec_msg)
            else:
                has_out = True
                out_optional = True
    elif len(true_pos_args) == 2:
        if true_pos_args[0] == 'out':
            py3_txt = (' (or a keyword-only argument)' if py3 else '')
            raise ValueError("bad signature '{}': `out` can only be the "
                             "second positional argument".format(signature) +
                             py3_txt + spec_msg)
        elif true_pos_args[1] != 'out':
            raise ValueError("bad signature '{}': output parameter must "
                             "be called 'out', got '{}'"
                             "".format(signature, true_pos_args[1]) + spec_msg)
        else:
            has_out = True
            out_optional = bool(pos_defaults)
            if pos_defaults and pos_defaults[-1] is not None:
                raise ValueError("bad signature '{}': `out` can only "
                                 "default to `None`, got '{}'"
                                 "".format(signature, pos_defaults[-1]) + spec_msg)
    else:
        raise ValueError("bad signature '{}': too many positional arguments"
                         "".format(signature) + spec_msg)
    return has_out, out_optional, spec
Check the arguments of ``_call()`` or similar for conformity.

The ``_call()`` method of `Operator` is allowed to have the following signatures:

Python 2 and 3:
- ``_call(self, x)``
- ``_call(self, vec, out)``
- ``_call(self, x, out=None)``

Python 3 only:
- ``_call(self, x, *, out=None)`` (``out`` as keyword-only argument)

For disambiguation, the instance name (the first argument) **must** be 'self'. The name of the ``out`` argument **must** be 'out', the second argument may have any name. Additional variable ``**kwargs`` and keyword-only arguments (Python 3 only) are also allowed.

Not allowed:
- ``_call(self)`` -- no arguments except instance
- ``_call(x)`` -- 'self' missing, i.e. ``@staticmethod``
- ``_call(cls, x)`` -- 'self' missing, i.e. ``@classmethod``
- ``_call(self, out, x)`` -- ``out`` as second argument
- ``_call(self, *x)`` -- variable arguments
- ``_call(self, x, y, out=None)`` -- more positional arguments
- ``_call(self, x, out=False)`` -- default other than None for ``out``

In particular, static or class methods are not allowed.

Parameters
----------
cls : `class`, optional
    The ``_call()`` method of this class is checked. If omitted, provide ``unbound_call`` instead to check directly.
bound_call : callable, optional
    Check this bound method instead of ``cls``.
unbound_call : callable, optional
    Check this unbound function instead of ``cls``.
attr : string, optional
    Check this attribute instead of ``_call``, e.g. ``__call__``.

Returns
-------
has_out : bool
    Whether the call has an ``out`` argument.
out_is_optional : bool
    Whether the ``out`` argument is optional.
spec : `inspect.ArgSpec` or `inspect.FullArgSpec`
    Argument specification of the checked call function.

Raises
------
ValueError
    If the signature of the function is malformed.
382,605
def is_value_type_valid_for_exact_conditions(self, value):
    if isinstance(value, string_types) or isinstance(value, (numbers.Integral, float)):
        return True
    return False
Method to validate if the value is valid for exact match type evaluation. Args: value: Value to validate. Returns: Boolean: True if value is a string, boolean, or number. Otherwise False.
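A quick illustration (hedged: string_types comes from the six compatibility library, and since the method does not touch self it can be exercised unbound):

is_value_type_valid_for_exact_conditions(None, 'abc')   # True
is_value_type_valid_for_exact_conditions(None, 10.5)    # True
is_value_type_valid_for_exact_conditions(None, True)    # True (bool is an Integral)
is_value_type_valid_for_exact_conditions(None, [1, 2])  # False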
382,606
def unhandled(self, key):
    self.key = key
    self.size = self.tui.get_cols_rows()
    if self.search is True:
        if self.enter is False and self.no_matches is False:
            if len(key) == 1 and key.isprintable():
                self.search_string += key
                self._search()
        elif self.enter is True and not self.search_string:
            self.search = False
            self.enter = False
        return
    if not self.urls and key not in "Qq":
        return
    if self.help_menu is False:
        try:
            self.keys[key]()
        except KeyError:
            pass
Handle other keyboard actions not handled by the ListBox widget.
382,607
def setInstrumentParameters(self, instrpars):
    pri_header = self._image[0].header
    self.proc_unit = instrpars[]
    if self._isNotValid(instrpars[], instrpars[]):
        instrpars[] =
    if self._isNotValid(instrpars[], instrpars[]):
        instrpars[] = None
    if self._isNotValid(instrpars[], instrpars[]):
        instrpars[] =
    for chip in self.returnAllChips(extname=self.scienceExt):
        chip._gain = 5.4
        chip._rdnoise = self.getInstrParameter(instrpars[], pri_header,
                                               instrpars[])
        chip._exptime = self.getInstrParameter(instrpars[], pri_header,
                                               instrpars[])
        if chip._gain is None or self._exptime is None:
            print()
            raise ValueError
        if chip._rdnoise is None:
            chip._rdnoise = self._getDefaultReadnoise()
        chip._darkrate = self._getDarkRate()
        chip.darkcurrent = self.getdarkcurrent()
        chip._effGain = chip._gain
        self._assignSignature(chip._chip)
    self.doUnitConversions()
This method overrides the superclass to set default values into the parameter dictionary, in case empty entries are provided.
382,608
def matrix2lha(M):
    l = []
    ind = np.indices(M.shape).reshape(M.ndim, M.size).T
    for i in ind:
        # 1-based indices followed by the tensor value
        l.append([j + 1 for j in i] + [M[tuple(i)]])
    return l
Inverse function to lha2matrix: return a LHA-like list given a tensor.
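For example (assuming numpy as np), a 2x2 matrix flattens to rows of 1-based indices plus the value:

M = np.array([[0.1, 0.2],
              [0.3, 0.4]])
matrix2lha(M)
# -> [[1, 1, 0.1], [1, 2, 0.2], [2, 1, 0.3], [2, 2, 0.4]]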
382,609
def enable_mfa_device(self, user_name, serial_number, auth_code_1, auth_code_2):
    params = {'UserName': user_name,
              'SerialNumber': serial_number,
              'AuthenticationCode1': auth_code_1,
              'AuthenticationCode2': auth_code_2}
    return self.get_response('EnableMFADevice', params)
Enables the specified MFA device and associates it with the specified user. :type user_name: string :param user_name: The username of the user :type serial_number: string :param serial_number: The serial number which uniquely identifies the MFA device. :type auth_code_1: string :param auth_code_1: An authentication code emitted by the device. :type auth_code_2: string :param auth_code_2: A subsequent authentication code emitted by the device.
382,610
def receipt(df):
    mutated_df = df[[, ]].astype(str)
    mutated_df[] = (
        f"{mutated_df[]}/{mutated_df[]}"
    )
    return (
        mutated_df
        .set_index([])
    )
Return a dataframe to verify if an item has a receipt.
382,611
def control_surface_encode(self, target, idSurface, mControl, bControl):
    return MAVLink_control_surface_message(target, idSurface, mControl, bControl)
Control for surface; pending and order to origin. target : The system setting the commands (uint8_t) idSurface : ID control surface send 0: throttle 1: aileron 2: elevator 3: rudder (uint8_t) mControl : Pending (float) bControl : Order to origin (float)
382,612
def lock(self, timeout=10):
    logger.debug("Locking %s", self.lock_file)
    if not os.path.exists(self.lock_file):
        self.ensure_path(self.lock_file)
        # create the lock file and touch its timestamps
        with open(self.lock_file, "w"):
            os.utime(self.lock_file)
    self._lock.acquire(timeout=timeout)
Advisory lock. Use to ensure that only one LocalSyncClient is working on the Target at the same time.
382,613
def start_with(self, x):
    _args = []
    for arg in self.all:
        if is_collection(x):
            for _x in x:
                if arg.startswith(_x):
                    _args.append(arg)
                    break
        else:
            if arg.startswith(x):
                _args.append(arg)
    return Args(_args, no_argv=True)
Returns all arguments beginning with given string (or list thereof)
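A hypothetical usage sketch, assuming an Args instance whose .all is ['--verbose', '--quiet', 'build']:

args.start_with('--')            # Args(['--verbose', '--quiet'])
args.start_with(('bu', 'ver'))   # Args(['build'])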
382,614
def start_transports(self):
    self.transport = Transport(
        self.queue, self.batch_size, self.batch_interval, self.session_factory)
    thread = threading.Thread(target=self.transport.loop)
    self.threads.append(thread)
    thread.daemon = True
    thread.start()
Start the transport thread.
382,615
def scopus_url(self):
    scopus_url = self.coredata.find('link[@rel="scopus"]', ns)
    try:
        return scopus_url.get('href')
    except AttributeError:
        return None
URL to the abstract page on Scopus.
382,616
def known(self, words: List[str]) -> List[str]:
    return [w for w in words if w in self.__WORDS]
Return a list of the given words that are found in the spelling dictionary :param list[str] words: A list of words to check against the spelling dictionary
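For instance (hypothetical speller instance whose internal dictionary contains the lowercase word 'hello'):

speller.known(['hello', 'helo', 'HELLO'])   # -> ['hello']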
382,617
def makeCloneMap(columnsShape, outputCloningWidth, outputCloningHeight=-1):
    if outputCloningHeight < 0:
        outputCloningHeight = outputCloningWidth
    columnsHeight, columnsWidth = columnsShape
    numDistinctMasters = outputCloningWidth * outputCloningHeight
    a = numpy.empty((columnsHeight, columnsWidth), 'uint32')
    for row in xrange(columnsHeight):
        for col in xrange(columnsWidth):
            a[row, col] = (col % outputCloningWidth) + \
                          (row % outputCloningHeight) * outputCloningWidth
    return a, numDistinctMasters
Make a two-dimensional clone map mapping columns to clone master.

This makes a map that is (numColumnsHigh, numColumnsWide) big that can be used to figure out which clone master to use for each column. Here are a few sample calls:

>>> makeCloneMap(columnsShape=(10, 6), outputCloningWidth=4)
(array([[ 0,  1,  2,  3,  0,  1],
        [ 4,  5,  6,  7,  4,  5],
        [ 8,  9, 10, 11,  8,  9],
        [12, 13, 14, 15, 12, 13],
        [ 0,  1,  2,  3,  0,  1],
        [ 4,  5,  6,  7,  4,  5],
        [ 8,  9, 10, 11,  8,  9],
        [12, 13, 14, 15, 12, 13],
        [ 0,  1,  2,  3,  0,  1],
        [ 4,  5,  6,  7,  4,  5]], dtype=uint32), 16)

>>> makeCloneMap(columnsShape=(7, 8), outputCloningWidth=3)
(array([[0, 1, 2, 0, 1, 2, 0, 1],
        [3, 4, 5, 3, 4, 5, 3, 4],
        [6, 7, 8, 6, 7, 8, 6, 7],
        [0, 1, 2, 0, 1, 2, 0, 1],
        [3, 4, 5, 3, 4, 5, 3, 4],
        [6, 7, 8, 6, 7, 8, 6, 7],
        [0, 1, 2, 0, 1, 2, 0, 1]], dtype=uint32), 9)

>>> makeCloneMap(columnsShape=(7, 11), outputCloningWidth=5)
(array([[ 0,  1,  2,  3,  4,  0,  1,  2,  3,  4,  0],
        [ 5,  6,  7,  8,  9,  5,  6,  7,  8,  9,  5],
        [10, 11, 12, 13, 14, 10, 11, 12, 13, 14, 10],
        [15, 16, 17, 18, 19, 15, 16, 17, 18, 19, 15],
        [20, 21, 22, 23, 24, 20, 21, 22, 23, 24, 20],
        [ 0,  1,  2,  3,  4,  0,  1,  2,  3,  4,  0],
        [ 5,  6,  7,  8,  9,  5,  6,  7,  8,  9,  5]], dtype=uint32), 25)

>>> makeCloneMap(columnsShape=(7, 8), outputCloningWidth=3, outputCloningHeight=4)
(array([[ 0,  1,  2,  0,  1,  2,  0,  1],
        [ 3,  4,  5,  3,  4,  5,  3,  4],
        [ 6,  7,  8,  6,  7,  8,  6,  7],
        [ 9, 10, 11,  9, 10, 11,  9, 10],
        [ 0,  1,  2,  0,  1,  2,  0,  1],
        [ 3,  4,  5,  3,  4,  5,  3,  4],
        [ 6,  7,  8,  6,  7,  8,  6,  7]], dtype=uint32), 12)

The basic idea with this map is that, if you imagine things stretching off to infinity, every instance of a given clone master is seeing the exact same thing in all directions. That includes:

- All neighbors must be the same.
- The "meaning" of the input to each of the instances of the same clone master must be the same. If input is pixels and we have translation invariance, this is easy. At higher levels where input is the output of lower levels, this can be much harder.
- The "meaning" of the inputs to neighbors of a clone master must be the same for each instance of the same clone master.

The best way to think of this might be in terms of 'inputCloningWidth' and 'outputCloningWidth'.

- The 'outputCloningWidth' is the number of columns you'd have to move horizontally (or vertically) before you get back to the same clone that you started with. MUST BE INTEGRAL!
- The 'inputCloningWidth' is the 'outputCloningWidth' of the node below us. If we're getting input from a sensor where every element just represents a shift of every other element, this is 1. At a conceptual level, it means that if two different inputs are shown to the node and the only difference between them is that one is shifted horizontally (or vertically) by this many pixels, we are looking at the exact same real world input, but shifted by some number of pixels (doesn't have to be 1). MUST BE INTEGRAL!

At level 1, I think you could have this:
* inputCloningWidth = 1
* sqrt(coincToInputRatio^2) = 2.5
* outputCloningWidth = 5
...in this case, you'd end up with 25 masters.

Let's think about this case:
  input:   - - - 0 1 2 3 4 5 - - - - -
  columns: 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4

...in other words, input 0 is fed to both column 0 and column 1. Input 1 is fed to columns 2, 3, and 4, etc. Hopefully, you can see that you'll get the exact same output (except shifted) with:
  input:   - - - - - 0 1 2 3 4 5 - - -
  columns: 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4 0 1 2 3 4

...in other words, we've shifted the input 2 spaces and the output shifted 5 spaces.
*** The outputCloningWidth MUST ALWAYS be an integral multiple of the ***
*** inputCloningWidth in order for all of our rules to apply.         ***
*** NOTE: inputCloningWidth isn't passed here, so it's the caller's   ***
*** responsibility to ensure that this is true.                       ***

*** The outputCloningWidth MUST ALWAYS be an integral multiple of     ***
*** sqrt(coincToInputRatio^2), too.                                   ***

@param columnsShape         The shape (height, width) of the columns.
@param outputCloningWidth   See docstring above.
@param outputCloningHeight  If non-negative, can be used to make rectangular (instead of square) cloning fields.
@return cloneMap            An array (numColumnsHigh, numColumnsWide) that contains the clone index to use for each column.
@return numDistinctClones   The number of distinct clones in the map. This is just outputCloningWidth*outputCloningHeight.
382,618
def deserialize_header_auth(stream, algorithm, verifier=None):
    _LOGGER.debug("Starting header auth deserialization")
    format_string = ">{iv_len}s{tag_len}s".format(iv_len=algorithm.iv_len,
                                                  tag_len=algorithm.tag_len)
    return MessageHeaderAuthentication(*unpack_values(format_string, stream, verifier))
Deserializes a MessageHeaderAuthentication object from a source stream. :param stream: Source data stream :type stream: io.BytesIO :param algorithm: The AlgorithmSuite object type contained in the header :type algorithm: aws_encryption_sdk.identifiers.AlgorithmSuite :param verifier: Signature verifier object (optional) :type verifier: aws_encryption_sdk.internal.crypto.Verifier :returns: Deserialized MessageHeaderAuthentication object :rtype: aws_encryption_sdk.internal.structures.MessageHeaderAuthentication
382,619
def stats(request, server_name):
    server_name = server_name.strip()
    data = _context_data({
        : _() % server_name,
        : _get_cache_stats(server_name),
    }, request)
    return render_to_response(, data, RequestContext(request))
Show server statistics.
382,620
def _short_string_handler_factory():
    def before(c, ctx, is_field_name, is_clob):
        assert not (is_clob and is_field_name)
        is_string = not is_clob and not is_field_name
        if is_string:
            ctx.set_ion_type(IonType.STRING)
        val = ctx.value
        if is_field_name:
            assert not val
            ctx.set_pending_symbol()
            val = ctx.pending_symbol
        return val, is_string

    def on_close(ctx):
        ctx.set_self_delimiting(True)
        return ctx.event_transition(IonEvent, IonEventType.SCALAR,
                                    ctx.ion_type, ctx.value.as_text())

    def after(c, ctx, is_field_name):
        ctx.set_quoted_text(False).set_self_delimiting(True)
        return ctx.immediate_transition(
            ctx.whence if is_field_name else _clob_end_handler(c, ctx),
        )

    return _quoted_text_handler_factory(_DOUBLE_QUOTE,
                                        lambda c: c == _DOUBLE_QUOTE,
                                        before, after, append_first=False,
                                        on_close=on_close)
Generates the short string (double quoted) handler.
382,621
def populate(self, size, names_library=None, reuse_names=False,
             random_branches=False, branch_range=(0, 1),
             support_range=(0, 1)):
    NewNode = self.__class__
    if len(self.children) > 1:
        # detach existing children and hang them off a connector node
        connector = NewNode()
        for ch in self.get_children():
            ch.detach()
            connector.add_child(child=ch)
        root = NewNode()
        self.add_child(child=connector)
        self.add_child(child=root)
    else:
        root = self
    next_deq = deque([root])
    for i in range(size - 1):
        if random.randint(0, 1):
            p = next_deq.pop()
        else:
            p = next_deq.popleft()
        c1 = p.add_child()
        c2 = p.add_child()
        next_deq.extend([c1, c2])
        if random_branches:
            c1.dist = random.uniform(*branch_range)
            c2.dist = random.uniform(*branch_range)
            c1.support = random.uniform(*support_range)
            c2.support = random.uniform(*support_range)
        else:
            c1.dist = 1.0
            c2.dist = 1.0
            c1.support = 1.0
            c2.support = 1.0
    charset = "abcdefghijklmnopqrstuvwxyz"
    if names_library:
        names_library = deque(names_library)
    else:
        avail_names = itertools.combinations_with_replacement(charset, 10)
    for n in next_deq:
        if names_library:
            if reuse_names:
                tname = random.sample(names_library, 1)[0]
            else:
                tname = names_library.pop()
        else:
            tname = ''.join(next(avail_names))
        n.name = tname
Generates a random topology by populating current node. :argument None names_library: If provided, names library (list, set, dict, etc.) will be used to name nodes. :argument False reuse_names: If True, node names will not be necessarily unique, which makes the process a bit more efficient. :argument False random_branches: If True, branch distances and support values will be randomized. :argument (0,1) branch_range: If random_branches is True, this range of values will be used to generate random distances. :argument (0,1) support_range: If random_branches is True, this range of values will be used to generate random branch support values.
382,622
def add_translation(self, rna: Rna, protein: Protein) -> str:
    return self.add_unqualified_edge(rna, protein, TRANSLATED_TO)
Add a translation relation from an RNA to a protein. :param rna: An RNA node :param protein: A protein node
382,623
def _create_autostart_entry(autostart_data: AutostartSettings, autostart_file: Path):
    try:
        source_desktop_file = get_source_desktop_file(autostart_data.desktop_file_name)
    except FileNotFoundError:
        _logger.exception("Failed to find a usable .desktop file! Unable to find: {}".format(
            autostart_data.desktop_file_name))
    else:
        _logger.debug("Found source desktop file that will be placed into the autostart directory: {}".format(
            source_desktop_file))
        with open(str(source_desktop_file), "r") as opened_source_desktop_file:
            desktop_file_content = opened_source_desktop_file.read()
        desktop_file_content = "\n".join(_manage_autostart_desktop_file_launch_flags(
            desktop_file_content,
            autostart_data.switch_show_configure
        )) + "\n"
        with open(str(autostart_file), "w", encoding="UTF-8") as opened_autostart_file:
            opened_autostart_file.write(desktop_file_content)
        _logger.debug("Written desktop file: {}".format(autostart_file))
Create an autostart .desktop file in the autostart directory, if possible.
382,624
def post(self, value, addend, unit):
    value = value or dt.datetime.utcnow()
    if unit == "minutes":
        delta = dt.timedelta(minutes=addend)
    else:
        delta = dt.timedelta(days=addend)
    result = value + delta
    return {"result": result.isoformat()}
A date adder endpoint.
382,625
def delete_publisher_asset(self, publisher_name, asset_type=None):
    route_values = {}
    if publisher_name is not None:
        route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str')
    query_parameters = {}
    if asset_type is not None:
        query_parameters['assetType'] = self._serialize.query('asset_type', asset_type, 'str')
    self._send(http_method='DELETE',
               location_id=,
               version=,
               route_values=route_values,
               query_parameters=query_parameters)
DeletePublisherAsset. [Preview API] Delete publisher asset like logo :param str publisher_name: Internal name of the publisher :param str asset_type: Type of asset. Default value is 'logo'.
382,626
def does_external_program_run(prog, verbose):
    try:
        with open(os.devnull, 'w') as null:
            subprocess.call([prog, ], stdout=null, stderr=null)
        result = True
    except OSError:
        if verbose > 1:
            print("couldn't run {}".format(prog))
        result = False
    return result
Test to see if the external programs can be run.
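Hedged usage sketch (the flag passed alongside prog was elided in the snippet; any benign flag such as --version fits the pattern):

does_external_program_run('ls', verbose=0)         # True on most systems
does_external_program_run('no-such-tool', 2)       # prints a note, returns False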
382,627
def merge_data(*data_frames, **kwargs):
    from .specialized import build_merge_expr
    from ..utils import ML_ARG_PREFIX

    if len(data_frames) <= 1:
        raise ValueError()
    norm_data_pairs = []
    df_tuple = collections.namedtuple(, )
    for pair in data_frames:
        if isinstance(pair, tuple):
            if len(pair) == 2:
                df, cols = pair
                exclude = False
            else:
                df, cols, exclude = pair
            if isinstance(cols, six.string_types):
                cols = cols.split()
        else:
            df, cols, exclude = pair, None, False
        norm_data_pairs.append(df_tuple(df, cols, exclude))
    auto_rename = kwargs.get('auto_rename', False)
    sel_cols_dict = dict((idx, tp.cols) for idx, tp in enumerate(norm_data_pairs)
                         if tp.cols and not tp.exclude)
    ex_cols_dict = dict((idx, tp.cols) for idx, tp in enumerate(norm_data_pairs)
                        if tp.cols and tp.exclude)
    merge_expr = build_merge_expr(len(norm_data_pairs))
    arg_dict = dict(_params={: str(auto_rename)},
                    selected_cols=sel_cols_dict,
                    excluded_cols=ex_cols_dict)
    for idx, dp in enumerate(norm_data_pairs):
        arg_dict[ML_ARG_PREFIX + % (1 + idx)] = dp.df
    out_df = merge_expr(register_expr=True, _exec_id=uuid.uuid4(),
                        _output_name=, **arg_dict)
    out_df._ml_uplink = [dp.df for dp in norm_data_pairs]
    out_df._perform_operation(op.MergeFieldsOperation(auto_rename, sel_cols_dict, ex_cols_dict))
    out_df._rebuild_df_schema()
    return out_df
Merge DataFrames by column. Number of rows in tables must be the same. This method can be called both as a standalone function and as a DataFrame method. :param list[DataFrame] data_frames: DataFrames to be merged. :param bool auto_rename: if True, fields in source DataFrames will be renamed in the output. :return: merged data frame. :rtype: DataFrame :Example: >>> merged1 = merge_data(df1, df2) >>> merged2 = df1.merge_with(df2, auto_rename=True)
382,628
def assign_rates(self, mu=1.0, pi=None, W=None):
    n = len(self.alphabet)
    self.mu = np.copy(mu)
    if pi is not None and pi.shape[0] == n:
        self.seq_len = pi.shape[-1]
        Pi = np.copy(pi)
    else:
        if pi is not None and len(pi) != n:
            self.logger("length of equilibrium frequency vector does not match alphabet length", 4, warn=True)
            self.logger("Ignoring input equilibrium frequencies", 4, warn=True)
        Pi = np.ones(shape=(n, self.seq_len))
    self.Pi = Pi / np.sum(Pi, axis=0)
    if W is None or W.shape != (n, n):
        if (W is not None) and W.shape != (n, n):
            self.logger("Substitution matrix size does not match alphabet size", 4, warn=True)
            self.logger("Ignoring input substitution matrix", 4, warn=True)
        W = np.ones((n, n))
    else:
        # symmetrize the substitution matrix
        W = 0.5 * (np.copy(W) + np.copy(W).T)
    np.fill_diagonal(W, 0)
    avg_pi = self.Pi.mean(axis=-1)
    average_rate = W.dot(avg_pi).dot(avg_pi)
    self.W = W / average_rate
    self.mu *= average_rate
    self._eig()
Overwrite the GTR model given the provided data Parameters ---------- mu : float Substitution rate W : nxn matrix Substitution matrix pi : n vector Equilibrium frequencies
382,629
def _gwf_channel(path, series_class=TimeSeries, verbose=False):
    channels = list(io_gwf.iter_channel_names(file_path(path)))
    if issubclass(series_class, StateVector):
        regex = DQMASK_CHANNEL_REGEX
    else:
        regex = STRAIN_CHANNEL_REGEX
    found, = list(filter(regex.match, channels))
    if verbose:
        print("Using channel {0!r}".format(found))
    return found
Find the right channel name for a LOSC GWF file
382,630
def add_marccountry_tag(dom):
    marccountry = dom.find("mods:placeTerm", {"authority": "marccountry"})
    if marccountry:
        return
    marccountry_tag = dhtmlparser.HTMLElement(
        "mods:place",
        [
            dhtmlparser.HTMLElement(
                "mods:placeTerm",
                {"type": "code", "authority": "marccountry"},
                [dhtmlparser.HTMLElement("xr-")]
            )
        ]
    )
    insert_tag(
        marccountry_tag,
        dom.match("mods:mods", "mods:originInfo", "mods:place"),
        first(dom.find("mods:originInfo"))
    )
Add ``<mods:placeTerm>`` tag with proper content.
382,631
def setup_new_conf(self):
    super(Broker, self).setup_new_conf()
    with self.conf_lock:
        self.got_initial_broks = False
        for link_type in [, , ]:
            if link_type not in self.cur_conf[]:
                logger.error("No %s in the configuration!", link_type)
                continue
            my_satellites = getattr(self, link_type, {})
            received_satellites = self.cur_conf[][link_type]
            for link_uuid in received_satellites:
                rs_conf = received_satellites[link_uuid]
                logger.debug("- received %s - %s: %s", rs_conf[], rs_conf[], rs_conf[])
                already_got = rs_conf[] in my_satellites
                broks = []
                actions = {}
                wait_homerun = {}
                external_commands = {}
                running_id = 0
                if already_got:
                    logger.warning("I already got: %s", rs_conf[])
                    running_id = my_satellites[link_uuid].running_id
                    (broks, actions, wait_homerun, external_commands) = \
                        my_satellites[link_uuid].get_and_clear_context()
                    del my_satellites[link_uuid]
                new_link = SatelliteLink.get_a_satellite_link(link_type[:-1], rs_conf)
                my_satellites[new_link.uuid] = new_link
                logger.info("I got a new %s satellite: %s", link_type[:-1], new_link)
                new_link.running_id = running_id
                new_link.external_commands = external_commands
                new_link.broks = broks
                new_link.wait_homerun = wait_homerun
                new_link.actions = actions
        if not self.have_modules:
            try:
                self.modules = unserialize(self.cur_conf[], no_load=True)
            except AlignakClassLookupException as exp:
                logger.error(, exp)
            if self.modules:
                logger.info("I received some modules configuration: %s", self.modules)
                self.have_modules = True
                self.do_load_modules(self.modules)
                self.modules_manager.start_external_instances()
            else:
                logger.info("I do not have modules")
        logger.info("Initializing connection with my schedulers:")
        my_satellites = self.get_links_of_type(s_type=)
        for satellite in list(my_satellites.values()):
            logger.info("- %s/%s", satellite.type, satellite.name)
            if not self.daemon_connection_init(satellite):
                logger.error("Satellite connection failed: %s", satellite)
        logger.info("Initializing connection with my satellites:")
        for sat_type in [, , , ]:
            my_satellites = self.get_links_of_type(s_type=sat_type)
            for satellite in list(my_satellites.values()):
                logger.info("- %s/%s", satellite.type, satellite.name)
                if not self.daemon_connection_init(satellite):
                    logger.error("Satellite connection failed: %s", satellite)
        self.have_conf = True
Broker custom setup_new_conf method This function calls the base satellite treatment and manages the configuration needed for a broker daemon: - get and configure its pollers, reactionners and receivers relation - configure the modules :return: None
382,632
def _cleanup_closed(self) -> None:
    if self._cleanup_closed_handle:
        self._cleanup_closed_handle.cancel()
    for transport in self._cleanup_closed_transports:
        if transport is not None:
            transport.abort()
    self._cleanup_closed_transports = []
    if not self._cleanup_closed_disabled:
        self._cleanup_closed_handle = helpers.weakref_handle(
            self, '_cleanup_closed', self._cleanup_closed_period, self._loop)
Double confirmation for transport close. Some broken SSL servers may leave the socket open without a proper close.
382,633
def lwp_cookie_str(cookie):
    h = [(cookie.name, cookie.value),
         ("path", cookie.path),
         ("domain", cookie.domain)]
    if cookie.port is not None:
        h.append(("port", cookie.port))
    if cookie.path_specified:
        h.append(("path_spec", None))
    if cookie.port_specified:
        h.append(("port_spec", None))
    if cookie.domain_initial_dot:
        h.append(("domain_dot", None))
    if cookie.secure:
        h.append(("secure", None))
    if cookie.expires:
        h.append(("expires", time2isoz(float(cookie.expires))))
    if cookie.discard:
        h.append(("discard", None))
    if cookie.comment:
        h.append(("comment", cookie.comment))
    if cookie.comment_url:
        h.append(("commenturl", cookie.comment_url))
    keys = sorted(cookie._rest.keys())
    for k in keys:
        h.append((k, str(cookie._rest[k])))
    h.append(("version", str(cookie.version)))
    return join_header_words([h])
Return string representation of Cookie in the LWP cookie file format. Actually, the format is extended a bit -- see module docstring.
382,634
def get_draft_secret_key():
    draft_secret_key, created = Text.objects.get_or_create(
        name='DRAFT_SECRET_KEY',
        defaults=dict(
            value=get_random_string(50),
        ))
    return draft_secret_key.value
Return the secret key used to generate draft mode HMACs. It will be randomly generated on first access. Existing draft URLs can be invalidated by deleting or updating the ``DRAFT_SECRET_KEY`` setting.
382,635
def on_patch(self, req, resp, handler=None, **kwargs):
    self.handle(
        handler or self.create_bulk, req, resp, **kwargs
    )
    resp.status = falcon.HTTP_CREATED
Respond on PATCH HTTP request assuming resource bulk-creation flow. This request handler assumes that PATCH requests are associated with bulk resource creation. Thus default flow for such requests is: * Create new resource instances and prepare their representation by calling its bulk creation method handler. * Set response status code to ``201 Created``. **Note:** this handler does not set ``Location`` header by default as it would be valid only for single resource creation. Args: req (falcon.Request): request object instance. resp (falcon.Response): response object instance to be modified handler (method): creation method handler to be called. Defaults to ``self.create_bulk``. **kwargs: additional keyword arguments retrieved from url template.
382,636
def decrypt(source, dest=None, passphrase=None):
    if not os.path.exists(source):
        raise CryptoritoError("Encrypted file %s not found" % source)
    # cmd holds nested lists; flatten() below produces the final argv
    cmd = [gnupg_bin(), gnupg_verbose(), "--decrypt",
           gnupg_home(), passphrase_file(passphrase)]
    if dest:
        cmd.append(["--output", dest])
    cmd.append([source])
    stderr_output(flatten(cmd))
    return True
Attempts to decrypt a file
382,637
def _notify_fn(self):
    self._notifyrunning = True
    while self._notifyrunning:
        try:
            with IHCController._mutex:
                if self._newnotifyids:
                    self.client.enable_runtime_notifications(self._newnotifyids)
                    self._newnotifyids = []
            changes = self.client.wait_for_resource_value_changes()
            if changes is False:
                self.re_authenticate(True)
                continue
            for ihcid in changes:
                value = changes[ihcid]
                if ihcid in self._ihcevents:
                    for callback in self._ihcevents[ihcid]:
                        callback(ihcid, value)
        except Exception as exp:
            self.re_authenticate(True)
The notify thread function.
382,638
def _setsetting(setting, default):
    value = _getsetting(setting, default)
    setattr(_self, setting, value)
Dynamically sets the variable named in `setting` This method uses `_getsetting()` to either fetch the setting from Django's settings module, or else fallback to the default value; it then sets a variable in this module with the returned value.
382,639
def get_child_values(parent, names):
    vals = []
    for name in names:
        if parent.hasElement(name):
            vals.append(XmlHelper.as_value(parent.getElement(name)))
        else:
            vals.append(np.nan)
    return vals
Return a list of values for the specified child fields. If a field is not in the Element, replace it with nan.
382,640
def keep(self, diff):
    self._keepVol(diff.toVol)
    self._keepVol(diff.fromVol)
Mark this diff (or volume) to be kept in path.
382,641
def authenticate_credentials(self, payload):
    User = get_user_model()
    username = jwt_get_username_from_payload_handler(payload)
    if not username:
        msg = _()
        raise exceptions.AuthenticationFailed(msg)
    try:
        user = User.objects.get(email=username)
    except User.DoesNotExist:
        msg = _()
        raise exceptions.AuthenticationFailed(msg)
    return user
Returns an active user that matches the payload's user id and email.
382,642
def __load_child_classes(self, ac: AssetClass):
    db = self.__get_session()
    entities = (
        db.query(dal.AssetClass)
        .filter(dal.AssetClass.parentid == ac.id)
        .order_by(dal.AssetClass.sortorder)
        .all()
    )
    for entity in entities:
        child_ac = self.__map_entity(entity)
        child_ac.depth = ac.depth + 1
        ac.classes.append(child_ac)
        self.model.asset_classes.append(child_ac)
        # recurse into grandchildren
        self.__load_child_classes(child_ac)
Loads child classes/stocks
382,643
def dump(self, force=False):
    self._contents = self.chosen.dump(force=force)
    if self._header is None or force:
        self._header = b''
        if self.explicit is not None:
            for class_, tag in self.explicit:
                self._header = _dump_header(class_, 1, tag, self._header + self._contents) + self._header
    return self._header + self._contents
Encodes the value using DER :param force: If the encoded contents already exist, clear them and regenerate to ensure they are in DER format instead of BER format :return: A byte string of the DER-encoded value
382,644
def main():
    args = parser.parse_args()
    initialize_logging(args)
    host = args.host or config_or_none("server", "host")
    if not host:
        parser.error("IRC host must be specified if not in config file.")
    port = args.port or config_or_none("server", "port", integer=True) or 6667
    ssl = args.ssl or config_or_none("server", "ssl", boolean=True)
    password = args.password or config_or_none("server", "password")
    username = args.username or config_or_none("server", "username") or nick
    realname = args.realname or config_or_none("server", "realname") or username
    controller.start()
    client.connect(
        nick,
        host=host,
        port=port,
        username=username,
        realname=realname,
        password=password,
        ssl=ssl,
    )
    try:
        client.run()
    except KeyboardInterrupt:
        client.disconnect()
Run the bot.
382,645
def throttle(self, wait=True):
    with self.thread_start_lock:
        if not self.thread_started:
            self.thread.start()
            self.thread_started = True
    return self.semaphore.acquire(blocking=wait)
If the wait parameter is True, this method returns True after suspending the current thread as necessary to ensure that no less than the configured minimum interval passed since the most recent time an invocation of this method returned True in any thread. If the wait parameter is False, this method immediately returns True if at least the configured minimum interval has passed since the most recent time this method returned True in any thread, or False otherwise.
382,646
def confirm_user_avatar(self, user, cropping_properties):
    data = cropping_properties
    url = self._get_url('user/avatar')
    r = self._session.post(url, params={'username': user},
                           data=json.dumps(data))
    return json_loads(r)
Confirm the temporary avatar image previously uploaded with the specified cropping. After a successful registration with :py:meth:`create_temp_user_avatar`, use this method to confirm the avatar for use. The final avatar can be a subarea of the uploaded image, which is customized with the ``cropping_properties``: the return value of :py:meth:`create_temp_user_avatar` should be used for this argument. :param user: the user to confirm the avatar for :type user: str :param cropping_properties: a dict of cropping properties from :py:meth:`create_temp_user_avatar` :type cropping_properties: Dict[str,Any]
382,647
def binormalize(A, tol=1e-5, maxiter=10):
    if not isspmatrix(A):
        raise TypeError('expected sparse matrix')
    if A.dtype == complex:
        raise NotImplementedError('complex matrices not supported')
    n = A.shape[0]
    it = 0
    x = np.ones((n, 1)).ravel()
    B = A.multiply(A).tocsc()
    d = B.diagonal().ravel()
    beta = B * x
    betabar = (1.0 / n) * np.dot(x, beta)
    stdev = rowsum_stdev(x, beta)
    while stdev > tol and it < maxiter:
        for i in range(0, n):
            # coefficients of the local quadratic equation for x[i]
            c2 = (n - 1) * d[i]
            c1 = (n - 2) * (beta[i] - d[i] * x[i])
            c0 = -d[i] * x[i] * x[i] + 2 * beta[i] * x[i] - n
            if -c0 < 1e-14:
                # matrix is nearly un-binormalizable; return it unscaled
                return A
            else:
                xnew = (2 * c0) / (-c1 - np.sqrt(c1 * c1 - 4 * c0 * c2))
            dx = xnew - x[i]
            ii = B.indptr[i]
            iii = B.indptr[i + 1]
            dot_Bcol = np.dot(x[B.indices[ii:iii]], B.data[ii:iii])
            betabar = betabar + (1.0 / n) * dx * (dot_Bcol + beta[i] + d[i] * dx)
            beta[B.indices[ii:iii]] += dx * B.data[ii:iii]
            x[i] = xnew
        stdev = rowsum_stdev(x, beta)
        it += 1
    d = np.sqrt(x)
    D = spdiags(d.ravel(), [0], n, n)
    C = D * A * D
    C = C.tocsr()
    beta = C.multiply(C).sum(axis=1)
    scale = np.sqrt((1.0 / n) * np.sum(beta))
    return (1 / scale) * C
Binormalize matrix A. Attempt to create unit l_1 norm rows.

Parameters
----------
A : csr_matrix
    sparse matrix (n x n)
tol : float
    tolerance
x : array
    guess at the diagonal
maxiter : int
    maximum number of iterations to try

Returns
-------
C : csr_matrix
    diagonally scaled A, C=DAD

Notes
-----
- Goal: Scale A so that l_1 norm of the rows are equal to 1:
  - B = DAD
  - want row sum of B = 1
  - easily done with tol=0 if B=DA, but this is not symmetric
  - algorithm is O(N log (1.0/tol))

Examples
--------
>>> from pyamg.gallery import poisson
>>> from pyamg.classical import binormalize
>>> A = poisson((10,), format='csr')
>>> C = binormalize(A)

References
----------
.. [1] Livne, Golub, "Scaling by Binormalization" Tech Report SCCM-03-12, SCCM, Stanford, 2003 http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.3.1679
382,648
def update(self):
    from ambry.orm.exc import NotFoundError
    from requests.exceptions import ConnectionError, HTTPError
    from boto.exception import S3ResponseError

    d = {}
    try:
        for k, v in self.list(full=True):
            if not v:
                continue
            d[v[]] = {
                : v[],
                : v.get(),
                : v.get(),
                : v.get()
            }
        self.data[] = d
    except (NotFoundError, ConnectionError, S3ResponseError, HTTPError) as e:
        raise RemoteAccessError("Failed to update {}: {}".format(self.short_name, e))
Cache the list into the data section of the record
382,649
def vatm(model, x, logits, eps, num_iterations=1, xi=1e-6,
         clip_min=None, clip_max=None, scope=None):
    with tf.name_scope(scope, "virtual_adversarial_perturbation"):
        d = tf.random_normal(tf.shape(x), dtype=tf_dtype)
        for _ in range(num_iterations):
            d = xi * utils_tf.l2_batch_normalize(d)
            logits_d = model.get_logits(x + d)
            kl = utils_tf.kl_with_logits(logits, logits_d)
            Hd = tf.gradients(kl, d)[0]
            d = tf.stop_gradient(Hd)
        d = eps * utils_tf.l2_batch_normalize(d)
        adv_x = x + d
        if (clip_min is not None) and (clip_max is not None):
            adv_x = tf.clip_by_value(adv_x, clip_min, clip_max)
        return adv_x
Tensorflow implementation of the perturbation method used for virtual adversarial training: https://arxiv.org/abs/1507.00677 :param model: the model which returns the network unnormalized logits :param x: the input placeholder :param logits: the model's unnormalized output tensor (the input to the softmax layer) :param eps: the epsilon (input variation parameter) :param num_iterations: the number of iterations :param xi: the finite difference parameter :param clip_min: optional parameter that can be used to set a minimum value for components of the example returned :param clip_max: optional parameter that can be used to set a maximum value for components of the example returned :return: a tensor for the adversarial example
382,650
def register_provider(self, provider):
    if provider.get_vocabulary_id() in self.providers:
        raise RegistryException()
    self.providers[provider.get_vocabulary_id()] = provider
    if provider.concept_scheme.uri in self.concept_scheme_uri_map:
        raise RegistryException( % provider.concept_scheme.uri)
    self.concept_scheme_uri_map[provider.concept_scheme.uri] = provider.get_vocabulary_id()
Register a :class:`skosprovider.providers.VocabularyProvider`. :param skosprovider.providers.VocabularyProvider provider: The provider to register. :raises RegistryException: A provider with this id or uri has already been registered.
382,651
def _fill_schemas_from_definitions(self, obj):
    if obj.get('definitions'):
        self.schemas.clear()
        all_of_stack = []
        for name, definition in obj['definitions'].items():
            if 'allOf' in definition:
                # defer schemas that use allOf until the plain ones exist
                all_of_stack.append((name, definition))
            else:
                self.schemas.create_schema(
                    definition, name, SchemaTypes.DEFINITION, root=self)
        while all_of_stack:
            name, definition = all_of_stack.pop(0)
            self.schemas.create_schema(
                definition, name, SchemaTypes.DEFINITION, root=self)
At first create schemas without 'AllOf' :param obj: :return: None
382,652
def fa(arr, t, dist=, mode=):
    t = np.atleast_1d(t)
    dc = get_dist(dist)
    p = fit(arr, dist)
    if mode in ['max', 'high']:
        def func(x):
            return dc.isf(1. / t, *x)
    elif mode in ['min', 'low']:
        def func(x):
            return dc.ppf(1. / t, *x)
    else:
        raise ValueError("mode `{}` should be either 'max' or 'min'".format(mode))
    data = dask.array.apply_along_axis(func, p.get_axis_num('dparams'), p)
    coords = dict(p.coords.items())
    coords.pop('dparams')
    coords['return_period'] = t
    dims = list(p.dims)
    dims.remove('dparams')
    dims.insert(0, u'return_period')
    out = xr.DataArray(data=data, coords=coords, dims=dims)
    out.attrs = p.attrs
    out.attrs[] = .format(dist)
    out.attrs[] = .format(dist, getattr(arr, , ))
    out.attrs[] = (out.attrs.get(, ) + ).strip()
    out.attrs[] = arr.attrs.get(, )
    out.attrs[] = mode
    out.attrs[] = out.attrs.get(, ) + "Compute values corresponding to return periods."
    return out
Return the value corresponding to the given return period.

Parameters
----------
arr : xarray.DataArray
    Maximized/minimized input data with a `time` dimension.
t : int or sequence
    Return period. The period depends on the resolution of the input data. If the input array's resolution is yearly, then the return period is in years.
dist : str
    Name of the univariate distribution, such as beta, expon, genextreme, gamma, gumbel_r, lognorm, norm (see scipy.stats).
mode : {'min', 'max'}
    Whether we are looking for a probability of exceedance (max) or a probability of non-exceedance (min).

Returns
-------
xarray.DataArray
    An array of values with a 1/t probability of exceedance (if mode=='max').
382,653
def _post(self, url, data=None, json=None, params=None, headers=None):
    url = self.clean_url(url)
    response = requests.post(url, data=data, json=json, params=params,
                             headers=headers, timeout=self.timeout,
                             verify=self.verify)
    return response
Wraps a POST request with a url check
382,654
def list_relations(self):
    for node in self.iter_nodes():
        for relation, target in self.relations_of(node.obj, True):
            yield node.obj, relation, target
list every relation in the database as (src, relation, dst)
382,655
def get_attribute(self, attribute, value=None, features=False):
    if attribute in self.filters:
        valid_gff_objects = self.fast_attributes[attribute] if not value else \
            [i for i in self.fast_attributes[attribute]
             if i.attributes.get(attribute, False) == value]
        if features:
            valid_ids = [gff_object.attributes.get(self.id_tag, None)
                         for gff_object in valid_gff_objects]
            return [self.feature_map[gff_id] for gff_id in valid_ids if gff_id]
        else:
            return valid_gff_objects
    else:
        valid_gff_objects = [gff_object
                             for gff_feature in self.feature_map.values()
                             for gff_object in gff_feature.features
                             if gff_object.attributes.get(attribute, False)]
        valid_gff_objects = valid_gff_objects if not value else \
            [gff_object for gff_object in valid_gff_objects
             if gff_object.attributes[attribute] == value]
        if features:
            valid_ids = [gff_object.attributes.get(self.id_tag, None)
                         for gff_object in valid_gff_objects]
            return [self.feature_map[gff_id] for gff_id in valid_ids if gff_id]
        else:
            return valid_gff_objects
This returns a list of GFF objects (or GFF Features) with the given attribute and if supplied, those attributes with the specified value :param attribute: The 'info' field attribute we are querying :param value: Optional keyword, only return attributes equal to this value :param features: Optional keyword, return GFF Features instead of GFF Objects :return: A list of GFF objects (or GFF features if requested)
382,656
def tcp_ping(
    task: Task, ports: List[int], timeout: int = 2, host: Optional[str] = None
) -> Result:
    if isinstance(ports, int):
        ports = [ports]
    if isinstance(ports, list):
        if not all(isinstance(port, int) for port in ports):
            raise ValueError("Invalid value for 'ports'")
    else:
        raise ValueError("Invalid value for 'ports'")
    host = host or task.host.hostname
    result = {}
    for port in ports:
        s = socket.socket()
        s.settimeout(timeout)
        try:
            status = s.connect_ex((host, port))
            if status == 0:
                connection = True
            else:
                connection = False
        except (socket.gaierror, socket.timeout, socket.error):
            connection = False
        finally:
            s.close()
        result[port] = connection
    return Result(host=task.host, result=result)
Tests connection to a tcp port and tries to establish a three way handshake. To be used for network discovery or testing. Arguments: ports (list of int): tcp ports to ping timeout (int, optional): defaults to 2 host (string, optional): defaults to ``hostname`` Returns: Result object with the following attributes set: * result (``dict``): Contains port numbers as keys with True/False as values
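A usage sketch under the assumption that this is a Nornir task (the Task/Result types and the `nr` object follow Nornir conventions; the config file path is hypothetical):

from nornir import InitNornir

nr = InitNornir(config_file="config.yaml")
results = nr.run(task=tcp_ping, ports=[22, 443], timeout=1)
# each host's result maps port -> True/False, e.g. {22: True, 443: False}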
382,657
def getAccounts(self):
    pubkeys = self.getPublicKeys()
    accounts = []
    for pubkey in pubkeys:
        if pubkey[:len(self.prefix)] == self.prefix:
            accounts.extend(self.getAccountsFromPublicKey(pubkey))
    return accounts
Return all accounts installed in the wallet database
382,658
def visit_EnumeratorList(self, node):
    for type, enum in node.children():
        if enum.value is None:
            pass
        elif isinstance(enum.value, (c_ast.BinaryOp, c_ast.UnaryOp)):
            enum.value = c_ast.Constant("int", "...")
        elif hasattr(enum.value, "type"):
            enum.value = c_ast.Constant(enum.value.type, "...")
Replace enumerator expressions with '...' stubs.
382,659
def pencil3():
    repo_name = repo_dir = flo()
    print_msg()
    checkup_git_repo_legacy(url='https://github.com/evolus/pencil', name=repo_name)
    run(flo(), msg=)
    install_user_command_legacy(, pencil3_repodir=repo_dir)
    print_msg()
Install or update latest Pencil version 3, a GUI prototyping tool. While it is the newer one and the GUI is more fancy, it is the "more beta" version of pencil. For example, an svg export may fail to display from within a reveal.js presentation. More info: Homepage: http://pencil.evolus.vn/Next.html github repo: https://github.com/evolus/pencil
382,660
def open_with_external_spyder(self, text):
    match = get_error_match(to_text_string(text))
    if match:
        fname, lnb = match.groups()
        builtins.open_in_spyder(fname, int(lnb))
Load file in external Spyder's editor, if available This method is used only for embedded consoles (could also be useful if we ever implement the magic %edit command)
382,661
def value(self): return .join(map(str, self.evaluate(self.trigger.user)))
Return the current evaluation of a condition statement
382,662
def sync(self):
    for i in range(4):
        self.elk.send(ps_encode(i))
    self.get_descriptions(TextDescriptions.LIGHT.value)
Retrieve lights from ElkM1
382,663
def new_table(self, name, add_id=True, **kwargs):
    return self.dataset.new_table(name=name, add_id=add_id, **kwargs)
Create a new table, if it does not exist, or update an existing table if it does :param name: Table name :param add_id: If True, add an id field ( default is True ) :param kwargs: Other options passed to table object :return:
382,664
def bsp_father(node: tcod.bsp.BSP) -> Optional[tcod.bsp.BSP]:
    return node.parent
.. deprecated:: 2.0 Use :any:`BSP.parent` instead.
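Illustration with python-tcod (assuming the standard BSP API):

import tcod.bsp

root = tcod.bsp.BSP(x=0, y=0, width=80, height=50)
root.split_once(horizontal=False, position=40)
child = root.children[0]
bsp_father(child) is root   # True
bsp_father(root) is None    # True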
382,665
def _repr_pretty_(self, p, cycle):
    if cycle:
        p.text('Struct(...)')
    else:
        with p.group(7, 'Struct(', ')'):
            p.pretty(self._asdict())
method that defines ``Struct``'s pretty printing rules for iPython Args: p (IPython.lib.pretty.RepresentationPrinter): pretty printer object cycle (bool): is ``True`` if pretty detected a cycle
382,666
def retry_on_integrity_error(self):
    session = self.session
    assert session.info.get(_ATOMIC_FLAG_SESSION_INFO_KEY), \
        'retry_on_integrity_error() must be used within an atomic block'
    session.flush()
    try:
        yield
        session.flush()
    except IntegrityError:
        raise DBSerializationError
Re-raise :class:`~sqlalchemy.exc.IntegrityError` as `DBSerializationError`.

This is mainly useful to handle race conditions in atomic blocks. For example, even if prior to a database INSERT we have verified that there is no existing row with the given primary key, we still may get an :class:`~sqlalchemy.exc.IntegrityError` if another transaction inserted a row with this primary key in the meantime. But if we do (within an atomic block)::

    with db.retry_on_integrity_error():
        db.session.add(instance)

then if the before-mentioned race condition occurs, `DBSerializationError` will be raised instead of :class:`~sqlalchemy.exc.IntegrityError`, so that the transaction will be retried (by the atomic block), and the second time our prior-to-INSERT check will correctly detect a primary key collision.

Note: :meth:`retry_on_integrity_error` triggers a session flush.
382,667
def get_selection(self):
    Gdk.threads_enter()
    text = self.selection.wait_for_text()
    Gdk.threads_leave()
    if text is not None:
        return text
    else:
        raise Exception("No text found in X selection")
Read text from the X selection Usage: C{clipboard.get_selection()} @return: text contents of the mouse selection @rtype: C{str} @raise Exception: if no text was found in the selection
382,668
def _add_item(self, dim_vals, data, sort=True, update=True):
    sort = sort and self.sort
    if not isinstance(dim_vals, tuple):
        dim_vals = (dim_vals,)
    self._item_check(dim_vals, data)
    dim_types = zip([kd.type for kd in self.kdims], dim_vals)
    dim_vals = tuple(v if None in [t, v] else t(v) for t, v in dim_types)
    valid_vals = zip(self.kdims, dim_vals)
    for dim, val in valid_vals:
        if dim.values and val is not None and val not in dim.values:
            raise KeyError( % (dim, repr(val)))
    if (update and (dim_vals in self.data) and
            isinstance(self.data[dim_vals], (MultiDimensionalMapping, OrderedDict))):
        self.data[dim_vals].update(data)
    else:
        self.data[dim_vals] = data
    if sort:
        self._resort()
Adds item to the data, applying dimension types and ensuring key conforms to Dimension type and values.
382,669
def _validate(self, msg):
    if not isinstance(msg, self._message_type):
        raise TypeError(, self._message_type.__name__,
                        self._code_name or self._name)
Validate an Enum value. Raises: TypeError if the value is not an instance of self._message_type.
382,670
def _makeColorableInstance(self, clazz, args, kwargs):
    kwargs = dict(kwargs)
    fill = kwargs.get('fill', self._canvas.fillcolor)
    if not isinstance(fill, Color):
        fill = Color(fill, mode='rgb', color_range=1)
    kwargs['fill'] = fill
    stroke = kwargs.get('stroke', self._canvas.strokecolor)
    if not isinstance(stroke, Color):
        stroke = Color(stroke, mode='rgb', color_range=1)
    kwargs['stroke'] = stroke
    kwargs['strokewidth'] = kwargs.get('strokewidth', self._canvas.strokewidth)
    inst = clazz(self, *args, **kwargs)
    return inst
Create an object, if fill, stroke or strokewidth is not specified, get them from the _canvas :param clazz: :param args: :param kwargs: :return:
382,671
def GpuUsage(**kargs):
    usage = (False, None)
    gpu_status = {: {: [], : {}}}
    path_dirs = PathDirs(**kargs)
    path_dirs.host_config()
    template = Template(template=path_dirs.cfg_file)
    try:
        d_client = docker.from_env()
        c = d_client.containers.list(all=False, filters={: })
        for container in c:
            if ( in container.attrs[][] and
                    container.attrs[][][] == ):
                device = container.attrs[][][]
                if ( in container.attrs[][] and
                        container.attrs[][][] == ):
                    gpu_status[][].append(device)
                elif in container.attrs[][]:
                    if device not in gpu_status[][]:
                        gpu_status[][][device] = 0
                    gpu_status[][][device] += int(
                        container.attrs[][][])
    except Exception as e:
        logger.error( + str(e))
    port =
    host =
    result = template.option(, )
    if result[0]:
        port = result[1]
    result = template.option(, )
    if result[0]:
        host = result[1]
    else:
        try:
            route = check_output((, )).decode().split()
            default =
            for device in route:
                if in device:
                    default = device.split()[4]
                    break
            ip_addr = check_output((, default)).decode()
            ip_addr = ip_addr.split()[1].split()[1]
            host = ip_addr
        except Exception as e:
            logger.error( + str(e))
    nd_url = + host + + port +
    try:
        r = requests.get(nd_url)
        if r.status_code == 200:
            status = r.json()
            for i, device in enumerate(status[]):
                gm = int(round(math.log(int(device[][]), 2)))
                gpu_status[i] = {: 2**gm, : device[]}
        else:
            usage = (False, + str(r.status_code))
    except Exception as e:
        usage = (False, + str(e))
    nd_url = + host + + port +
    try:
        r = requests.get(nd_url)
        if r.status_code == 200:
            status = r.json()
            for i, device in enumerate(status[]):
                if i not in gpu_status:
                    gpu_status[i] = {}
                gpu_status[i][] = device[]
                gpu_status[i][] = device[]
                gpu_status[i][] = device[]
            usage = (True, gpu_status)
        else:
            usage = (False, + str(r.status_code))
    except Exception as e:
        usage = (False, + str(e))
    return usage
Get the current GPU usage of available GPUs
382,672
def set(self, id, translation, domain='messages'):
    assert isinstance(id, (str, unicode))
    assert isinstance(translation, (str, unicode))
    assert isinstance(domain, (str, unicode))
    self.add({id: translation}, domain)
Sets a message translation.
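A small usage sketch (the 'messages' default domain above is an assumption, borrowed from common translation-catalog conventions):

catalog.set('app.greeting', 'Hello', domain='messages')
catalog.set('app.farewell', 'Goodbye')   # falls back to the default domain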
382,673
def workers(self, pattern=None, negate=False, stats=True):
    request = clearly_pb2.FilterWorkersRequest(
        workers_filter=clearly_pb2.PatternFilter(pattern=pattern or ,
                                                 negate=negate),
    )
    for worker in about_time(ClearlyClient._fetched_callback,
                             self._stub.filter_workers(request)):
        ClearlyClient._display_worker(worker, stats)
Filters known workers and prints their current status. Args: Filter args: pattern (Optional[str]): a pattern to filter workers ex.: '^dispatch|^email' to filter names starting with that or 'dispatch.*123456' to filter that exact name and number or even '123456' to filter that exact number anywhere. negate (bool): if True, finds tasks that do not match criteria Display args: stats (bool): if True shows worker stats
382,674
def frames(self):
    f = 0
    if self.isVideo() or self.isAudio():
        if self.__dict__['nb_frames']:
            try:
                f = int(self.__dict__['nb_frames'])
            except Exception as e:
                print("Non-integer frame count")
    return f
Returns the length of a video stream in frames. Returns 0 if not a video stream.
382,675
def get_required_status_checks(self):
    headers, data = self._requester.requestJsonAndCheck(
        "GET", self.protection_url + "/required_status_checks"
    )
    return github.RequiredStatusChecks.RequiredStatusChecks(
        self._requester, headers, data, completed=True)
:calls: `GET /repos/:owner/:repo/branches/:branch/protection/required_status_checks <https://developer.github.com/v3/repos/branches>`_ :rtype: :class:`github.RequiredStatusChecks.RequiredStatusChecks`
382,676
def create_legacy_graph_tasks():
    return [
        transitive_hydrated_targets,
        transitive_hydrated_target,
        hydrated_targets,
        hydrate_target,
        find_owners,
        hydrate_sources,
        hydrate_bundles,
        RootRule(OwnersRequest),
    ]
Create tasks to recursively parse the legacy graph.
382,677
def _recomputeRecordFromKNN(self, record):
    inputs = {
        "categoryIn": [None],
        "bottomUpIn": self._getStateAnomalyVector(record),
    }
    outputs = {"categoriesOut": numpy.zeros((1,)),
               "bestPrototypeIndices": numpy.zeros((1,)),
               "categoryProbabilitiesOut": numpy.zeros((1,))}
    # only use points before this record and after the training wait period
    classifier_indexes = numpy.array(
        self._knnclassifier.getParameter('categoryRecencyList'))
    valid_idx = numpy.where(
        (classifier_indexes >= self.getParameter('trainRecords')) &
        (classifier_indexes < record.ROWID)
    )[0].tolist()
    if len(valid_idx) == 0:
        return None
    self._knnclassifier.setParameter('inferenceMode', None, True)
    self._knnclassifier.setParameter('learningMode', None, False)
    self._knnclassifier.compute(inputs, outputs)
    self._knnclassifier.setParameter('learningMode', None, True)
    classifier_distances = self._knnclassifier.getLatestDistances()
    valid_distances = classifier_distances[valid_idx]
    if valid_distances.min() <= self._classificationMaxDist:
        classifier_indexes_prev = classifier_indexes[valid_idx]
        rowID = classifier_indexes_prev[valid_distances.argmin()]
        indexID = numpy.where(classifier_indexes == rowID)[0][0]
        category = self._knnclassifier.getCategoryList()[indexID]
        return category
    return None
returns the classified labeling of record
382,678
def _GetArgsDescription(self, args_type):
    args = {}
    if args_type:
        for type_descriptor in args_type.type_infos:
            if not type_descriptor.hidden:
                args[type_descriptor.name] = {
                    "description": type_descriptor.description,
                    "default": type_descriptor.default,
                    "type": "",
                }
                if type_descriptor.type:
                    args[type_descriptor.name]["type"] = type_descriptor.type.__name__
    return args
Get a simplified description of the args_type for a flow.
382,679
def eventFilter(self, widget, event):
    if event.type() == QEvent.Wheel:
        modifiers = QApplication.keyboardModifiers()
        if modifiers == Qt.ControlModifier:
            if event.angleDelta().y() > 0:
                self.zoom_in()
            else:
                self.zoom_out()
            return True
        else:
            return False
    elif event.type() == QEvent.MouseButtonPress:
        if event.button() == Qt.LeftButton:
            QApplication.setOverrideCursor(Qt.ClosedHandCursor)
            self._ispanning = True
            self.xclick = event.globalX()
            self.yclick = event.globalY()
    elif event.type() == QEvent.MouseButtonRelease:
        QApplication.restoreOverrideCursor()
        self._ispanning = False
    elif event.type() == QEvent.MouseMove:
        if self._ispanning:
            dx = self.xclick - event.globalX()
            self.xclick = event.globalX()
            dy = self.yclick - event.globalY()
            self.yclick = event.globalY()
            scrollBarH = self.horizontalScrollBar()
            scrollBarH.setValue(scrollBarH.value() + dx)
            scrollBarV = self.verticalScrollBar()
            scrollBarV.setValue(scrollBarV.value() + dy)
    return QWidget.eventFilter(self, widget, event)
A filter to control the zooming and panning of the figure canvas.
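A filter like this only fires once it is installed on the target widget. A minimal wiring sketch; the containing class is not shown in the source, so the class and attribute names here are illustrative:

# Hypothetical wiring: routing the canvas's wheel/mouse events through
# the eventFilter() above. Assumes the owner is a QScrollArea subclass.
from PyQt5.QtWidgets import QScrollArea

class FigureScrollArea(QScrollArea):
    def __init__(self, canvas, parent=None):
        super().__init__(parent)
        self._ispanning = False
        self.setWidget(canvas)
        canvas.installEventFilter(self)  # canvas events now reach eventFilter()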
382,680
def build_schema(m, c_c):
    # The element name, namespace, and metamodel class key ('xs:schema',
    # 'S_DT') are assumed from XSD and xtUML/BridgePoint conventions; the
    # original literals were stripped from this listing.
    schema = ET.Element('xs:schema')
    schema.set('xmlns:xs', 'http://www.w3.org/2001/XMLSchema')

    global_filter = lambda selected: ooaofooa.is_global(selected)
    for s_dt in m.select_many('S_DT', global_filter):
        datatype = build_type(s_dt)
        if datatype is not None:
            schema.append(datatype)

    scope_filter = lambda selected: ooaofooa.is_contained_in(selected, c_c)
    for s_dt in m.select_many('S_DT', scope_filter):
        datatype = build_type(s_dt)
        if datatype is not None:
            schema.append(datatype)

    component = build_component(m, c_c)
    schema.append(component)
    return schema
Build an xsd schema from a bridgepoint component.
382,681
def metadata_path(self):
    xml_name = _granule_identifier_to_xml_name(self.granule_identifier)
    metadata_path = os.path.join(self.granule_path, xml_name)
    try:
        assert os.path.isfile(metadata_path) or \
            metadata_path in self.dataset._zipfile.namelist()
    except AssertionError:
        raise S2ReaderIOError(
            "Granule metadata XML does not exist:", metadata_path)
    return metadata_path
Determine the metadata path.
382,682
def grok_template_file(src):
    # The 'builtin:' prefix is assumed from the split-on-colon below; the
    # original literals were stripped from this listing.
    if not src.startswith('builtin:'):
        return abspath(src)
    builtin = src.split(':')[1]
    builtin = "templates/%s.j2" % builtin
    return resource_filename(__name__, builtin)
Determine the real deal template file
382,683
def add_options(self, parser, env=None):
    if env is None:
        env = os.environ
    try:
        self.options(parser, env)
        self.can_configure = True
    except OptionConflictError as e:
        warn("Plugin %s has conflicting option string: %s and will "
             "be disabled" % (self, e), RuntimeWarning)
        self.enabled = False
        self.can_configure = False
Non-camel-case version of func name for backwards compatibility.

.. warning :: DEPRECATED: Do not use this method,
    use :meth:`options <nose.plugins.base.IPluginInterface.options>`
    instead.
382,684
def set_token(self):
    super(ServicePrincipalCredentials, self).set_token()
    try:
        token = self._context.acquire_token_with_client_credentials(
            self.resource,
            self.id,
            self.secret
        )
        self.token = self._convert_token(token)
    except adal.AdalError as err:
        raise_with_traceback(AuthenticationError, "", err)
Get token using Client ID/Secret credentials.

:raises: AuthenticationError if credentials invalid, or call fails.
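A hedged usage sketch in the msrestazure style; set_token() runs as part of construction, and all identifiers are placeholders:

# Acquiring a token with service principal credentials.
from msrestazure.azure_active_directory import ServicePrincipalCredentials

credentials = ServicePrincipalCredentials(
    client_id='<app-id>',
    secret='<client-secret>',
    tenant='<tenant-id>',
)
print(credentials.token['access_token'][:16], '...')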
382,685
def customize_form_field(self, name, field):
    if isinstance(field, forms.fields.DateField) and isinstance(field.widget, forms.widgets.DateInput):
        field.widget = widgets.DatePickerWidget()
        field.input_formats = [field.widget.input_format[1]] + list(field.input_formats)
    if isinstance(field, forms.fields.ImageField) and isinstance(field.widget, forms.widgets.ClearableFileInput):
        field.widget = widgets.ImageThumbnailWidget()
    return field
Allows views to customize their form fields. By default, Smartmin replaces the plain textbox date input with its own DatePicker implementation.
382,686
def rename_state_fluent(name: str) -> str:
    # The separator literal was stripped from this listing; '/' is assumed
    # from the "functor/arity" canonical form.
    i = name.index('/')
    functor = name[:i]
    arity = name[i+1:]
    return "{}'/{}".format(functor, arity)
Returns the next state fluent canonical name.

Args:
    name (str): The current state fluent name.

Returns:
    str: The next state fluent name.
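A one-line sanity check of the renaming (the fluent name is illustrative):

# "p/1" (current state) maps to "p'/1" (next state).
assert rename_state_fluent("p/1") == "p'/1"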
382,687
def convert(self, request, response, data):
    result = []
    for conv, datum in zip(self.conversions, data):
        # The loop body was truncated in this listing; appending each
        # conversion's output and joining the pieces is inferred from the
        # docstring's contract of returning a single formatted string.
        result.append(conv.convert(request, response, datum))
    return ''.join(result)
Performs the desired formatting. :param request: The webob Request object describing the request. :param response: The webob Response object describing the response. :param data: The data dictionary list returned by the prepare() method. :returns: A string, the results of which are the desired conversion.
382,688
def update_detail(self, request):
    entity = request.context_params[self.detail_property_name]
    # The kwargs key below was lost from this listing.
    updated_entity = self.update_entity(
        request, entity, **request.context_params[])
    request.context_params[self.updated_property_name] = updated_entity
    return request
:param request: an apiv2 request object :return: request if successful with entities set on request
382,689
def alias(cls, typemap, base, *names):
    cls.parameter_alias[base] = (typemap, base)
    for i in names:
        cls.parameter_alias[i] = (typemap, base)
Declare an alternate (humane) name for a measurement protocol parameter
382,690
def send(x, inter=0, loop=0, count=None, verbose=None, realtime=None, *args, **kargs):
    __gen_send(conf.L3socket(*args, **kargs), x, inter=inter, loop=loop,
               count=count, verbose=verbose, realtime=realtime)
Send packets at layer 3

send(packets, [inter=0], [loop=0], [verbose=conf.verb]) -> None
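A typical call; the destination is a documentation address:

# One ICMP echo request routed at layer 3 with Scapy.
from scapy.all import IP, ICMP, send

send(IP(dst="192.0.2.1") / ICMP(), verbose=0)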
382,691
def get_queues(self, service_desk_id, include_count=False, start=0, limit=50):
    # The endpoint path and query-parameter names are assumed from the
    # Jira Service Desk REST API; the original literals were stripped
    # from this listing.
    url = 'rest/servicedeskapi/servicedesk/{}/queue'.format(service_desk_id)
    params = {}
    if include_count is not None:
        params['includeCount'] = bool(include_count)
    if start is not None:
        params['start'] = int(start)
    if limit is not None:
        params['limit'] = int(limit)
    return self.get(url, headers=self.experimental_headers, params=params)
Returns a page of queues defined inside a service desk, for a given service desk ID.
The returned queues will include an issue count for each queue (represented in the
issueCount field) if the query param includeCount is set to true (defaults to false).

Permissions: The calling user must be an agent of the given service desk.

:param service_desk_id: str
:param include_count: bool
:param start: int
:param limit: int
:return: a page of queues
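A hedged call sketch in the atlassian-python-api style; the URL, credentials, and response keys are placeholders following the Jira Service Desk paging format:

# Listing queues for service desk "1", with per-queue issue counts.
from atlassian import ServiceDesk

sd = ServiceDesk(url="https://jira.example.com",
                 username="agent", password="<password>")
page = sd.get_queues("1", include_count=True)
for queue in page.get("values", []):
    print(queue["name"], queue.get("issueCount"))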
382,692
def scoreatpercentile(data, per, axis=0):
    a = np.sort(data, axis=axis)
    idx = per / 100. * (data.shape[axis] - 1)
    if idx % 1 == 0:
        return a[tuple(slice(None) if ii != axis else int(idx)
                       for ii in range(a.ndim))]
    else:
        # Linear interpolation between the two bracketing order statistics.
        lowerweight = 1 - (idx % 1)
        upperweight = idx % 1
        idx = int(np.floor(idx))
        return lowerweight * a[tuple(slice(None) if ii != axis else idx
                                     for ii in range(a.ndim))] \
            + upperweight * a[tuple(slice(None) if ii != axis else idx + 1
                                    for ii in range(a.ndim))]
Like the function in scipy.stats, but with an axis argument; works on arrays.
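Since the interpolation follows the same linear rule as NumPy's default percentile, a quick self-check is possible (random data, illustrative):

# The axis-aware percentile should agree with np.percentile's linear method.
import numpy as np

data = np.random.rand(5, 100)
assert np.allclose(scoreatpercentile(data, 90, axis=1),
                   np.percentile(data, 90, axis=1))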
382,693
def extend(self, itemseq):
    if isinstance(itemseq, ParseResults):
        self += itemseq
    else:
        self.__toklist.extend(itemseq)
Add sequence of elements to end of ParseResults list of elements.

Example::

    patt = OneOrMore(Word(alphas))

    # use a parse action to append the reverse of the matched strings, to make a palindrome
    def make_palindrome(tokens):
        tokens.extend(reversed([t[::-1] for t in tokens]))
        return ''.join(tokens)

    print(patt.addParseAction(make_palindrome).parseString("lskdj sdlkjf lksd"))
    # -> 'lskdjsdlkjflksddsklfjkldsjdksl'
382,694
def expected_number_of_transactions_in_first_n_periods(self, n):
    params = self._unload_params("alpha", "beta", "gamma", "delta")
    alpha, beta, gamma, delta = params

    x_counts = self.data.groupby("frequency")["weights"].sum()
    x = np.asarray(x_counts.index)

    p1 = binom(n, x) * exp(
        betaln(alpha + x, beta + n - x)
        - betaln(alpha, beta)
        + betaln(gamma, delta + n)
        - betaln(gamma, delta)
    )

    I = np.arange(x.min(), n)

    @np.vectorize
    def p2(j, x):
        i = I[int(j):]
        return np.sum(
            binom(i, x)
            * exp(
                betaln(alpha + x, beta + i - x)
                - betaln(alpha, beta)
                + betaln(gamma + 1, delta + i)
                - betaln(gamma, delta)
            )
        )

    p1 += np.fromfunction(p2, (x.shape[0],), x=x)

    idx = pd.Index(x, name="frequency")
    return DataFrame(p1 * x_counts.sum(), index=idx, columns=["model"])
r""" Return expected number of transactions in first n n_periods. Expected number of transactions occurring across first n transaction opportunities. Used by Fader and Hardie to assess in-sample fit. .. math:: Pr(X(n) = x| \alpha, \beta, \gamma, \delta) See (7) in Fader & Hardie 2010. Parameters ---------- n: float number of transaction opportunities Returns ------- DataFrame: Predicted values, indexed by x
382,695
def run(self, visitor):
    if __debug__:
        self.__log_run(visitor)
    visitor.prepare()
    if self.__root_is_sequence:
        if self._tgt_prx is not None:
            tgts = iter(self._tgt_prx)
        else:
            tgts = None
        if self._src_prx is not None:
            srcs = iter(self._src_prx)
        else:
            srcs = None
        self.traverse_many(None, srcs, tgts, visitor)
    else:
        self.traverse_one(None, self._src_prx, self._tgt_prx, visitor)
    visitor.finalize()
:param visitor: visitor to call with every node in the domain tree.
:type visitor: subclass of :class:`everest.entities.traversal.DomainVisitor`
382,696
def detach_all_classes(self):
    classes = list(self._observers.keys())
    for cls in classes:
        self.detach_class(cls)
Detach from all tracked classes.
382,697
def list_directories(dir_pathname, recursive=True, topdown=True, followlinks=False):
    for root, dirnames, filenames in walk(dir_pathname, recursive, topdown, followlinks):
        for dirname in dirnames:
            yield absolute_path(os.path.join(root, dirname))
Enlists all the directories using their absolute paths within the specified directory, optionally recursively.

:param dir_pathname: The directory to traverse.
:param recursive: ``True`` for walking recursively through the directory tree; ``False`` otherwise.
:param topdown: Please see the documentation for :func:`os.walk`
:param followlinks: Please see the documentation for :func:`os.walk`
382,698
def on_click_dispatcher(self, module_name, event, command):
    if command is None:
        return
    elif command == "refresh_all":
        self.py3_wrapper.refresh_modules()
    elif command == "refresh":
        self.py3_wrapper.refresh_modules(module_name)
    else:
        # Substitute the module's output into the command before dispatch.
        if "$OUTPUT" in command or "$OUTPUT_PART" in command:
            full_text, partial_text = self.get_module_text(module_name, event)
            command = command.replace("$OUTPUT_PART", shell_quote(partial_text))
            command = command.replace("$OUTPUT", shell_quote(full_text))
        self.wm_msg(module_name, command)
        self.py3_wrapper.refresh_modules(module_name)
Dispatch on_click config parameters to either:
- Our own methods for special py3status commands (listed below)
- The i3-msg program which is part of i3wm
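For reference, on_click settings as they would appear in a py3status module config (module name and shell command are illustrative); "refresh" and "$OUTPUT" are handled by the dispatcher above:

# i3status.conf snippet
time {
    on_click 1 = "refresh"
    on_click 3 = "exec echo $OUTPUT | xclip -i"
}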
382,699
def lazy_approximate_personalized_pagerank(s, r, w_i, a_i, out_degree, in_degree,
                                           seed_node, rho=0.2, epsilon=0.00001,
                                           laziness_factor=0.5):
    # All residual mass starts at the seed.
    r[seed_node] = 1.0
    pushable = deque()
    pushable.append(seed_node)

    # First push from the seed.
    push_node = pushable.popleft()
    pagerank_lazy_push(s, r, w_i[push_node], a_i[push_node], push_node,
                       rho, laziness_factor)
    number_of_push_operations = 1

    # Queue neighbors whose residual-to-in-degree ratio exceeds the threshold.
    i = np.where(np.divide(r[a_i[push_node]], in_degree[a_i[push_node]]) >= epsilon)[0]
    if i.size > 0:
        pushable.extend(a_i[push_node][i])
    while r[push_node] / in_degree[push_node] >= epsilon:
        pagerank_lazy_push(s, r, w_i[push_node], a_i[push_node], push_node,
                           rho, laziness_factor)
        number_of_push_operations += 1

    while len(pushable) > 0:
        push_node = pushable.popleft()
        if r[push_node] / in_degree[push_node] >= epsilon:
            pagerank_lazy_push(s, r, w_i[push_node], a_i[push_node], push_node,
                               rho, laziness_factor)
            number_of_push_operations += 1
            i = np.where(np.divide(r[a_i[push_node]], in_degree[a_i[push_node]]) >= epsilon)[0]
            if i.size > 0:
                pushable.extend(a_i[push_node][i])
            while r[push_node] / in_degree[push_node] >= epsilon:
                pagerank_lazy_push(s, r, w_i[push_node], a_i[push_node], push_node,
                                   rho, laziness_factor)
                number_of_push_operations += 1
    return number_of_push_operations
Calculates the approximate personalized PageRank starting from a seed node with self-loops. Introduced in: Andersen, R., Chung, F., & Lang, K. (2006, October). Local graph partitioning using pagerank vectors. In Foundations of Computer Science, 2006. FOCS'06. 47th Annual IEEE Symposium on (pp. 475-486). IEEE.
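The routine leans on a pagerank_lazy_push helper that is not shown. A minimal sketch of the lazy push rule from Andersen et al., under the assumption that w_i holds a node's normalized outgoing edge weights and a_i its neighbor indices:

# Hypothetical sketch of the lazy push step: move a rho-fraction of the
# node's residual into its PageRank estimate, keep a laziness_factor share
# of the remainder on the node, and spread the rest to its neighbors.
def pagerank_lazy_push(s, r, weights, neighbors, node, rho, laziness_factor):
    residual = r[node]
    s[node] += rho * residual
    leftover = (1.0 - rho) * residual
    r[node] = laziness_factor * leftover             # lazy self-loop share
    r[neighbors] += (1.0 - laziness_factor) * leftover * weights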