Column schema:
Unnamed: 0: int64 (0 to 389k)
code: string (lengths 26 to 79.6k)
docstring: string (lengths 1 to 46.9k)
380,400
def is_unary_operator(oper):
    # The operator symbol literals below are plausible reconstructions; the
    # original string literals were lost in extraction.
    symbols = ['!', '&', '~', '*', '+', '-', '++', '--']
    if not isinstance(oper, calldef_members.operator_t):
        return False
    if oper.symbol not in symbols:
        return False
    if isinstance(oper, calldef_members.member_operator_t):
        if len(oper.arguments) == 0:
            return True
        elif oper.symbol in ['++', '--'] and \
                isinstance(oper.arguments[0].decl_type, cpptypes.int_t):
            return True
        return False
    if len(oper.arguments) == 1:
        return True
    elif oper.symbol in ['++', '--'] \
            and len(oper.arguments) == 2 \
            and isinstance(oper.arguments[1].decl_type, cpptypes.int_t):
        return True
    return False
Returns True if the operator is a unary operator, otherwise False.
380,401
def Power(base: vertex_constructor_param_types,
          exponent: vertex_constructor_param_types,
          label: Optional[str] = None) -> Vertex:
    return Double(context.jvm_view().PowerVertex, label,
                  cast_to_double_vertex(base),
                  cast_to_double_vertex(exponent))
Raises a vertex to the power of another :param base: the base vertex :param exponent: the exponent vertex
380,402
def word(self, value):
    self._validate_word(value=value)
    _word = []
    for a in value:
        if not (isinstance(a, int) or isinstance(a, str) or isinstance(a, unicode)):
            # Error message assumed; the original literal was lost in extraction.
            raise ValueError(
                'DigitWord values must be int, str or unicode digits')
        if sys.version_info[0] == 2 and isinstance(a, unicode):
            _a = str(a)
        else:
            _a = a
        if self.wordtype == DigitWord.DIGIT:
            _digit = Digit(_a)
        elif self.wordtype == DigitWord.HEXDIGIT:
            _digit = HexDigit(_a)
        else:
            raise TypeError()
        _word.append(_digit)
    self._word = _word
Property of the DigitWord returning (or setting) the DigitWord as a list of integers (or string representations) of DigitModel. The property is called during instantiation, as it validates the value passed and ensures that all digits are valid. The values can be passed as ANY iterable.
380,403
def set(self, id, value):
    id = self.key_name(id)
    self.redis.set(id, json_dumps(value))
Write data for the given id. :param id: the id to write to :param value: the data to write; may be a ``dict`` object
380,404
def _make_stream_handler_nodes(self, dsk_graph, array, iteration_order, masked):
    nodes = {}
    handler = array.streams_handler(masked)
    input_iteration_order = handler.input_iteration_order(iteration_order)

    def input_keys_transform(input_array, keys):
        # Attribute-name literals are plausible reconstructions; the
        # originals were lost in extraction.
        if hasattr(input_array, 'streams_handler'):
            handler = input_array.streams_handler(masked)
            input_transformer = getattr(handler, 'input_keys_transform', None)
            if input_transformer is not None:
                keys = input_transformer(keys)
        return keys

    sources_keys = []
    sources_chunks = []
    for input_array in array.sources:
        source_chunks_by_key = {}
        sources_chunks.append(source_chunks_by_key)
        source_keys = []
        sources_keys.append(source_keys)
        # NOTE: the loop body that populates source_keys/source_chunks_by_key
        # and defines t_keys, dependencies, all_chunks and pivoted was lost in
        # extraction; the remainder is kept as found.
    if not all(t_keys == normalize_keys(keys, array.shape)
               for keys in source_keys):
        combined = self.collect(array[t_keys], masked, chunk=True)
        new_task = (combined, ) + dependencies
        new_id = ('combine {}-{}'.format(array[t_keys].shape, uuid.uuid4()))
        dsk_graph[new_id] = new_task
        dependencies = (new_id, )
    all_chunks.append(dependencies)
    pivoted = all_chunks
    sub_array = array[t_keys]
    handler = sub_array.streams_handler(masked)
    name = getattr(handler, 'name', handler.__class__.__name__)
    if hasattr(handler, 'axis'):
        name += ' (axis={})'.format(handler.axis)
    if hasattr(handler, 'operator'):
        name = handler.operator.__name__
    n_sources = len(array.sources)
    handler_of_chunks_fn = self.create_chunks_handler_fn(handler, n_sources, name)
    shape = sub_array.shape
    if all(key == slice(None) for key in t_keys):
        subset = ''
    else:
        pretty_index = ', '.join(map(slice_repr, t_keys))
        subset = '[{}]'.format(pretty_index)
    source_chunks = [item for sublist in pivoted for item in sublist]
    task = tuple([handler_of_chunks_fn, t_keys] + source_chunks)
    shape_repr = 'x'.join(map(str, shape))
    chunk_id = '{} {}{} {}'.format(name, shape_repr, subset, uuid.uuid4())
    assert chunk_id not in dsk_graph
    dsk_graph[chunk_id] = task
    nodes[chunk_id] = task
    return nodes
Produce task graph entries for an array that comes from a biggus StreamsHandler. This is essentially every type of array that isn't already a thing on disk/in-memory. StreamsHandler arrays include all aggregations and elementwise operations.
380,405
def upload(self, local_fn: str, remote_fn: str = '',
           dont_overwrite: bool = False):
    raise NotImplementedError()
Uploads given file to the task. If remote_fn is not specified, dumps it into task current directory with the same name. Args: local_fn: location of file locally remote_fn: location of file on task dont_overwrite: if True, will be no-op if target file exists
380,406
def context_teardown(func: Callable):
    @wraps(func)
    async def wrapper(*args, **kwargs) -> None:
        async def teardown_callback(exception: Optional[Exception]):
            try:
                await generator.asend(exception)
            except StopAsyncIteration:
                pass
            finally:
                await generator.aclose()

        try:
            ctx = next(arg for arg in args[:2] if isinstance(arg, Context))
        except StopIteration:
            # Message reconstructed from the asphalt source; a plausible wording.
            raise RuntimeError(
                'the first positional argument to {}() has to be a Context '
                'instance'.format(callable_name(func))) from None

        generator = func(*args, **kwargs)
        try:
            await generator.asend(None)
        except StopAsyncIteration:
            pass
        except BaseException:
            await generator.aclose()
            raise
        else:
            ctx.add_teardown_callback(teardown_callback, True)

    if iscoroutinefunction(func):
        func = async_generator(func)
    elif not isasyncgenfunction(func):
        raise TypeError('{} must be an async generator function'.format(
            callable_name(func)))

    return wrapper
Wrap an async generator function to execute the rest of the function at context teardown. This function returns an async function, which, when called, starts the wrapped async generator. The wrapped async function is run until the first ``yield`` statement (``await async_generator.yield_()`` on Python 3.5). When the context is being torn down, the exception that ended the context, if any, is sent to the generator. For example:: class SomeComponent(Component): @context_teardown async def start(self, ctx: Context): service = SomeService() ctx.add_resource(service) exception = yield service.stop() :param func: an async generator function :return: an async function
380,407
async def say(self, body, user, options):
    return await self.send_text_message_to_all_interfaces(
        recipient=user, text=body, options=options)
say something to user :param body: :param user: :param options: :return:
380,408
def write_observation_zone(self, num, **kw):
    # The kw key names and field prefixes follow the SeeYou CUP format as
    # documented in the aerofiles library; literals were lost in extraction.
    if not self.in_task_section:
        raise RuntimeError(
            u'Observation zones have to be written in task section')
    fields = [u'ObsZone=%d' % num]
    if 'style' in kw:
        fields.append(u'Style=%d' % kw['style'])
    if 'radius' in kw:
        fields.append(u'R1=' + self.format_distance(kw['radius']))
    if 'angle' in kw:
        fields.append(u'A1=' + self.format_angle(kw['angle']))
    if 'radius2' in kw:
        fields.append(u'R2=' + self.format_distance(kw['radius2']))
    if 'angle2' in kw:
        fields.append(u'A2=' + self.format_angle(kw['angle2']))
    if 'angle12' in kw:
        fields.append(u'A12=' + self.format_angle(kw['angle12']))
    if 'line' in kw:
        fields.append(u'Line=' + (u'1' if kw['line'] else u'0'))
    self.write_fields(fields)
Write observation zone information for a taskpoint:: writer.write_observation_zone( num=1, style=1, radius=(35, 'km'), angle=30, radius2=(12, 'km'), angle2=12, line=True, ) :param num: consecutive number of a waypoint (``0``: Start) :param style: direction (``0``: Fixed value, ``1``: Symmetrical, ``2``: To next point, ``3``: To previous point, ``4``: To start point) :param radius: radius 1 in meter or as ``(radius, unit)`` tuple :param angle: angle 1 in degrees :param radius2: radius 2 in meter or as ``(radius, unit)`` tuple :param angle2: angle 2 in degrees :param angle12: angle 12 in degrees :param line: should be ``True`` if start or finish line
380,409
async def update_champs(self):
    html = await self._get(self.URL)
    self.rune_links = utils.parse_rune_links(html)
A method which updates ``self.rune_links``. This is useful because runeforge.gg is frequently updating. Raises ------ RuneConnectionError If the request does not return with a status of 200.
380,410
def _handle_response(self, response):
    if not response.ok:
        raise ScrapydResponseError(
            "Scrapyd returned a {0} error: {1}".format(
                response.status_code, response.text))
    try:
        json = response.json()
    except ValueError:
        raise ScrapydResponseError("Scrapyd returned an invalid JSON "
                                   "response: {0}".format(response.text))
    if json['status'] == 'ok':
        json.pop('status')
        return json
    elif json['status'] == 'error':
        raise ScrapydResponseError(json['message'])
Handles the response received from Scrapyd.
380,411
def add_organism(self, common_name, directory, blatdb=None, genus=None,
                 species=None, public=False):
    # The payload key names and endpoint follow the Apollo API conventions;
    # the literals were lost in extraction.
    data = {
        'commonName': common_name,
        'directory': directory,
        'publicMode': public,
    }
    if blatdb is not None:
        data['blatdb'] = blatdb
    if genus is not None:
        data['genus'] = genus
    if species is not None:
        data['species'] = species
    response = self.post('addOrganism', data)
    if type(response) is not list:
        return response
    return [x for x in response if x['commonName'] == common_name][0]
Add an organism :type common_name: str :param common_name: Organism common name :type directory: str :param directory: Server-side directory :type blatdb: str :param blatdb: Server-side Blat directory for the organism :type genus: str :param genus: Genus :type species: str :param species: Species :type public: bool :param public: Should the organism be public :rtype: dict :return: a dictionary with information about the new organism
380,412
def get_best_fit_parameters_translated_grouped(self):
    result_dict = dict()
    # The two global and two per-circuit parameter names below are
    # assumptions; the original key literals were lost in extraction.
    result_dict['resistance'] = [parameters['resistance'] for parameters
                                 in self.best_fit_parameters_translated]
    result_dict['capacitance'] = [parameters['capacitance'] for parameters
                                  in self.best_fit_parameters_translated]
    for i in range(self.circuits):
        result_dict['resistance_' + str(i)] = [
            parameters['resistance_' + str(i)]
            for parameters in self.best_fit_parameters_translated]
        result_dict['capacitance_' + str(i)] = [
            parameters['capacitance_' + str(i)]
            for parameters in self.best_fit_parameters_translated]
    return result_dict
Returns the parameters as a dictionary of the 'real units' for the best fit.
380,413
def _send_command(self, cmd=""):
    self.connection.write_channel(cmd + "\n")
    time.sleep(1)
    output = self.connection._read_channel_timing()
    output = self.connection.strip_ansi_escape_codes(output)
    output = self.connection.strip_backspaces(output)
    return output
Handle reading/writing channel directly. It is also sanitizing the output received. Parameters ---------- cmd : str, optional The command to send to the remote device (default : "", just send a new line) Returns ------- output : str The output from the command sent
380,414
def find_matches(self, content, file_to_handle):
    groups = [match.groupdict() for match in
              self.match_expression.finditer(content)]
    # The group key and log message are plausible reconstructions.
    matches = [group['match'] for group in groups if group.get('match')]
    logger.info('%s matches found in %s', len(matches), file_to_handle)
    return list(set(matches))
Find all matches of an expression in a file
380,415
def backfill_fields(self, fields, forms):
    if forms and not fields:
        new_fields = [self.def_field]
    elif fields and self.def_field not in fields:
        new_fields = list(fields)
        if self.def_field not in fields:
            new_fields.append(self.def_field)
    elif not fields:
        new_fields = self.field_names
    else:
        new_fields = list(fields)
    return new_fields
Properly backfill fields to explicitly request specific keys. The issue is that >6.X servers *only* return requested fields so to improve backwards compatibility for PyCap clients, add specific fields when required. Parameters ---------- fields: list requested fields forms: list requested forms Returns ------- new fields
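A quick illustration of the backfill behavior described above (a sketch; assumes ``def_field`` is a record ID field named ``'record_id'``):

    # project.def_field == 'record_id' (assumed for illustration)
    project.backfill_fields(fields=['age'], forms=[])
    # -> ['age', 'record_id']  (def_field appended so >6.X servers return it)
    project.backfill_fields(fields=[], forms=['demographics'])
    # -> ['record_id']  (only forms requested, so just def_field is used)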
380,416
def replace_seqres(self, pdb, update_atoms=True):
    newpdb = PDB()
    inserted_seqres = False
    entries_before_seqres = set([
        "HEADER", "OBSLTE", "TITLE", "CAVEAT", "COMPND", "SOURCE", "KEYWDS",
        "EXPDTA", "AUTHOR", "REVDAT", "SPRSDE", "JRNL", "REMARK", "DBREF",
        "SEQADV"])
    mutated_resids = {}
    if update_atoms:
        old_seqs = ChainSequences()
        chainresnums = old_seqs.parse_atoms(pdb)
        assert self.keys() == old_seqs.keys()
        for chain in self.keys():
            assert len(self[chain]) == len(old_seqs[chain])
            for i in xrange(len(self[chain])):
                if self[chain][i] != old_seqs[chain][i]:
                    resid = chain + chainresnums[chain][i]
                    mutated_resids[resid] = self[chain][i]
    for line in pdb.lines:
        entry = line[0:6]
        if (not inserted_seqres) and entry not in entries_before_seqres:
            inserted_seqres = True
            newpdb.lines += self.seqres_lines()
        if update_atoms and entry == "ATOM  ":
            resid = line[21:27]
            atom = line[12:16].strip()
            if not mutated_resids.has_key(resid):
                newpdb.lines += [line]
            else:
                newpdb.lines += [line[:17] + mutated_resids[resid] + line[20:]]
        elif entry != "SEQRES":
            newpdb.lines += [line]
    if update_atoms:
        newpdb.remove_nonbackbone_atoms(mutated_resids.keys())
    return newpdb
Replace SEQRES lines with a new sequence, optionally removing mutated sidechains
380,417
def loguniform(low, high, random_state):
    assert low > 0, 'low should be positive'  # assertion message assumed
    return np.exp(uniform(np.log(low), np.log(high), random_state))
low: a float that represents the lower bound high: a float that represents the upper bound random_state: an object of numpy.random.RandomState
380,418
def read(self, filename):
    try:
        SafeConfigParser.read(self, filename)
    except SafeConfigParserError as exc:
        msg = ("%s: parsing error in eapi conf file: %s"
               % (type(exc).__name__, filename))
        debug(msg)
    self._add_default_connection()
    for name in self.sections():
        if name.startswith('connection:') and \
                'host' not in dict(self.items(name)):
            self.set(name, 'host', name.split(':')[1])
    self.generate_tags()
Reads the file specified by filename This method will load the eapi.conf file specified by filename into the instance object. It will also add the default connection localhost if it was not defined in the eapi.conf file Args: filename (str): The full path to the file to load
380,419
def get_output(self):
    self._closing = True
    if not self.has_finished():
        if self._debug:
            underrun_debug_timer = timeit.default_timer()
            logger.warning("NBSR underrun")
        self._thread.join()
        if not self.has_finished():
            if self._debug:
                logger.debug(
                    "NBSR join after %f seconds, underrun not resolved"
                    % (timeit.default_timer() - underrun_debug_timer)
                )
            raise Exception("thread did not terminate")
        if self._debug:
            logger.debug(
                "NBSR underrun resolved after %f seconds"
                % (timeit.default_timer() - underrun_debug_timer)
            )
    if self._closed:
        raise Exception("streamreader double-closed")
    self._closed = True
    data = self._buffer.getvalue()
    self._buffer.close()
    return data
Retrieve the stored data in full. This call may block if the reading thread has not yet terminated.
380,420
def periodic_service_rpcs(self):
    to_remove = []
    now = monotonic()
    for rpc_tag, rpc in self.in_flight_rpcs.items():
        expiry = rpc.sent_timestamp + rpc.timeout
        if now > expiry:
            to_remove.append(rpc_tag)
    for tag in to_remove:
        del self.in_flight_rpcs[tag]
Check if any RPC has expired and remove it from the in flight list. This function should be called periodically to expire any RPCs that never complete.
380,421
def setup_menu(self):
    # Action label and icon names are plausible reconstructions.
    self.copy_action = create_action(self, _('Copy'),
                                     shortcut=keybinding('Copy'),
                                     icon=ima.icon('editcopy'),
                                     triggered=self.copy,
                                     context=Qt.WidgetShortcut)
    menu = QMenu(self)
    add_actions(menu, [self.copy_action, ])
    return menu
Setup context menu
380,422
def transmit(self, payload, **kwargs):
    # Key/value literals reconstructed from the Degreed integrated-channel
    # conventions; the originals were lost in extraction.
    kwargs['app_label'] = 'degreed'
    kwargs['model_name'] = 'DegreedLearnerDataTransmissionAudit'
    kwargs['remote_user_id'] = 'degreed_user_email'
    super(DegreedLearnerTransmitter, self).transmit(payload, **kwargs)
Send a completion status call to Degreed using the client. Args: payload: The learner completion data payload to send to Degreed
380,423
def write_tabular(obj, filepath):
    _, fn, ext = splitext2(filepath)
    if ext == '.h5':
        _write_tabular_h5(obj, filepath)
    elif ext == '.pkl':
        _write_tabular_pickle(obj, filepath)
    else:
        raise NotImplementedError
Write tabular object in HDF5 or pickle format Args: obj (array or DataFrame): tabular object to write filepath (path-like): path to write to; must end in '.h5' or '.pkl'
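A minimal usage sketch of the writer above (the DataFrame and file names are illustrative):

    import pandas as pd
    df = pd.DataFrame({'a': [1, 2], 'b': [3.0, 4.0]})
    write_tabular(df, 'table.pkl')  # dispatches to _write_tabular_pickle
    write_tabular(df, 'table.h5')   # dispatches to _write_tabular_h5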
380,424
def include(prop):
    if isinstance(prop, QueryableAttribute):
        prop = prop.property
    assert isinstance(prop, (Column, ColumnProperty, RelationshipProperty))
    _included.add(prop)
Replicate property that is normally not replicated. Right now it's meaningful for one-to-many relations only.
380,425
def do(self):
    data = None
    if self.body is not None and self.body != b'':
        data = self.body
    return requests.request(self.method, str(self.url), data=data,
                            headers=self.header)
Executes the request represented by this object. The requests library will be used for this purpose. Returns an instance of requests.Response.
380,426
def deserialize(cls, target_class, array):
    installation = target_class.__new__(target_class)
    server_public_key_wrapped = array[cls._INDEX_SERVER_PUBLIC_KEY]
    installation.__dict__ = {
        cls._ATTRIBUTE_ID: converter.deserialize(
            core.Id, array[cls._INDEX_ID][cls._FIELD_ID]
        ),
        cls._ATTRIBUTE_TOKEN: converter.deserialize(
            core.SessionToken, array[cls._INDEX_TOKEN][cls._FIELD_TOKEN]
        ),
        cls._ATTRIBUTE_SERVER_PUBLIC_KEY: converter.deserialize(
            core.PublicKeyServer,
            server_public_key_wrapped[cls._FIELD_SERVER_PUBLIC_KEY]
        ),
    }
    return installation
:type target_class: core.Installation|type :type array: list :rtype: core.Installation
380,427
def train_epoch(self, epoch_info: EpochInfo, interactive=True):
    epoch_info.on_epoch_begin()
    if interactive:
        iterator = tqdm.trange(epoch_info.batches_per_epoch, file=sys.stdout,
                               desc="Training", unit="batch")
    else:
        iterator = range(epoch_info.batches_per_epoch)
    for batch_idx in iterator:
        batch_info = BatchInfo(epoch_info, batch_idx)
        batch_info.on_batch_begin()
        self.train_batch(batch_info)
        batch_info.on_batch_end()
    epoch_info.result_accumulator.freeze_results()
    epoch_info.on_epoch_end()
Train model on an epoch of a fixed number of batch updates
380,428
def clean_path_middleware(environ, start_response=None):
    path = environ['PATH_INFO']
    if path and '//' in path:
        url = re.sub("/+", '/', path)
        if not url.startswith('/'):
            url = '/%s' % url
        qs = environ['QUERY_STRING']
        if qs:
            url = '%s?%s' % (url, qs)
        raise HttpRedirect(url)
Clean url from double slashes and redirect if needed.
380,429
def crl(self):
    revoked_certs = self.get_revoked_certs()
    crl = crypto.CRL()
    now_str = timezone.now().strftime(generalized_time)
    for cert in revoked_certs:
        revoked = crypto.Revoked()
        revoked.set_serial(bytes_compat(cert.serial_number))
        revoked.set_reason(b'unspecified')  # reason literal assumed
        revoked.set_rev_date(bytes_compat(now_str))
        crl.add_revoked(revoked)
    return crl.export(self.x509, self.pkey, days=1, digest=b'sha256')
Returns up to date CRL of this CA
380,430
def resolve_page(self, request, context, is_staff):
    path = context['path']
    lang = context['lang']
    page = Page.objects.from_path(
        path, lang, exclude_drafts=(not is_staff))
    if page:
        return page
    if not settings.PAGE_USE_STRICT_URL:
        path = remove_slug(path)
        while path is not None:
            page = Page.objects.from_path(
                path, lang, exclude_drafts=(not is_staff))
            if page:
                if page.delegate_to:
                    return page
            path = remove_slug(path)
    return None
Return the appropriate page according to the path.
380,431
def start_engines(opts, proc_mgr, proxy=None):
    utils = salt.loader.utils(opts, proxy=proxy)
    if opts['__role'] == 'master':
        runners = salt.loader.runner(opts, utils=utils)
    else:
        runners = []
    funcs = salt.loader.minion_mods(opts, utils=utils, proxy=proxy)
    engines = salt.loader.engines(opts, funcs, runners, utils, proxy=proxy)
    engines_opt = opts.get('engines', [])
    if isinstance(engines_opt, dict):
        engines_opt = [{k: v} for k, v in engines_opt.items()]
    if salt.utils.platform.is_windows():
        # Loader instances cannot be pickled for multiprocessing on Windows.
        runners = None
        utils = None
        funcs = None
    for engine in engines_opt:
        if isinstance(engine, dict):
            engine, engine_opts = next(iter(engine.items()))
        else:
            engine_opts = None
        engine_name = None
        if engine_opts is not None and 'engine_module' in engine_opts:
            fun = '{0}.start'.format(engine_opts['engine_module'])
            engine_name = engine
            del engine_opts['engine_module']
        else:
            fun = '{0}.start'.format(engine)
        if fun in engines:
            start_func = engines[fun]
            if engine_name:
                name = '{0}.Engine({1}-{2})'.format(
                    __name__, start_func.__module__, engine_name)
            else:
                name = '{0}.Engine({1})'.format(
                    __name__, start_func.__module__)
            log.info('Starting Engine %s', name)
            proc_mgr.add_process(
                Engine,
                args=(
                    opts,
                    fun,
                    engine_opts,
                    funcs,
                    runners,
                    proxy
                ),
                name=name
            )
Fire up the configured engines!
380,432
def list_gebouwen_by_huisnummer(self, huisnummer):
    try:
        id = huisnummer.id
    except AttributeError:
        id = huisnummer

    def creator():
        res = crab_gateway_request(
            self.client, 'ListGebouwenByHuisnummerId', id
        )
        try:
            return [
                Gebouw(
                    r.IdentificatorGebouw,
                    r.AardGebouw,
                    r.StatusGebouw
                ) for r in res.GebouwItem
            ]
        except AttributeError:
            return []
    if self.caches['short'].is_configured:
        key = 'ListGebouwenByHuisnummerId#%s' % (id)
        gebouwen = self.caches['short'].get_or_create(key, creator)
    else:
        gebouwen = creator()
    for r in gebouwen:
        r.set_gateway(self)
    return gebouwen
List all `gebouwen` for a :class:`Huisnummer`. :param huisnummer: The :class:`Huisnummer` for which the \ `gebouwen` are wanted. :rtype: A :class:`list` of :class:`Gebouw`
380,433
def cmd(binary, subcommand, *args, **kwargs):
    ret = [binary, subcommand]
    for key, value in kwargs.items():
        key = '--' + key.replace('_', '-')
        ret.extend(_keyword_arguments(value, key))
    ret.extend(args)
    return ret
Construct a command line for a "modern UNIX" command. Modern UNIX command do a closely-related-set-of-things and do it well. Examples include :code:`apt-get` or :code:`git`. :param binary: the name of the command :param subcommand: the subcommand used :param args: positional arguments (put last) :param kwargs: options :returns: list of arguments that is suitable to be passed to :code:`subprocess.Popen` and friends. When specifying options, the following assumptions are made: * Option names begin with :code:`--` and any :code:`_` is assumed to be a :code:`-` * If the value is :code:`NO_VALUE`, this is a "naked" option. * If the value is a string or an int, these are presented as the value of the option. * If the value is a list, the option will be repeated multiple times. * If the value is a dict, the option will be repeated multiple times, and its values will be :code:`<KEY>=<VALUE>`.
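A short illustration of those conventions (a sketch; assumes ``_keyword_arguments`` renders an int option as ``['--depth', '1']``):

    cmd('git', 'clone', 'https://example.com/repo.git', depth=1)
    # -> ['git', 'clone', '--depth', '1', 'https://example.com/repo.git']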
380,434
def mouse_click(self, widget, event=None):
    if event.type == Gdk.EventType._2BUTTON_PRESS and event.get_button()[1] == 1:
        (model, row) = self.history_tree.get_selection().get_selected()
        if row is not None:
            histroy_item_path = self.history_tree_store.get_path(row)
            histroy_item_iter = self.history_tree_store.get_iter(histroy_item_path)
            # Double-click folds/unfolds items that have children.
            if histroy_item_path is not None and self.history_tree_store.iter_n_children(histroy_item_iter):
                if self.history_tree.row_expanded(histroy_item_path):
                    self.history_tree.collapse_row(histroy_item_path)
                else:
                    self.history_tree.expand_to_path(histroy_item_path)
            sm = self.get_history_item_for_tree_iter(histroy_item_iter).state_reference.get_state_machine()
            if sm:
                if sm.state_machine_id != self.model.selected_state_machine_id:
                    self.model.selected_state_machine_id = sm.state_machine_id
            else:
                logger.info("No state machine could be found for selected item's state reference and "
                            "therefore no selection is performed.")
                return
            active_sm_m = self.model.get_selected_state_machine_model()
            assert active_sm_m.state_machine is sm
            state_path = self.get_history_item_for_tree_iter(histroy_item_iter).state_reference.get_path()
            ref_state_m = active_sm_m.get_state_model_by_path(state_path)
            if ref_state_m and active_sm_m:
                active_sm_m.selection.set(ref_state_m)
        return True

    if event.type == Gdk.EventType.BUTTON_PRESS and event.get_button()[1] == 2:
        x = int(event.x)
        y = int(event.y)
        pthinfo = self.history_tree.get_path_at_pos(x, y)
        if pthinfo is not None:
            path, col, cellx, celly = pthinfo
            self.history_tree.grab_focus()
            self.history_tree.set_cursor(path, col, 0)
            self.open_selected_history_separately(None)

    if event.type == Gdk.EventType.BUTTON_PRESS and event.get_button()[1] == 3:
        x = int(event.x)
        y = int(event.y)
        time = event.time
        pthinfo = self.history_tree.get_path_at_pos(x, y)
        if pthinfo is not None:
            path, col, cellx, celly = pthinfo
            self.history_tree.grab_focus()
            self.history_tree.set_cursor(path, col, 0)
            popup_menu = Gtk.Menu()
            model, row = self.history_tree.get_selection().get_selected()
            history_item = model[row][self.HISTORY_ITEM_STORAGE_ID]
            if not isinstance(history_item, ScopedDataItem) or history_item.scoped_data is None:
                return
            scoped_data = history_item.scoped_data
            input_output_data = history_item.child_state_input_output_data
            state_reference = history_item.state_reference
            self.append_string_to_menu(popup_menu, "------------------------")
            self.append_string_to_menu(popup_menu, "Scoped Data: ")
            self.append_string_to_menu(popup_menu, "------------------------")
            for key, data in scoped_data.items():
                menu_item_string = "    %s (%s - %s):\t%s" % (
                    data.name.replace("_", "__"), key, data.value_type, data.value)
                self.append_string_to_menu(popup_menu, menu_item_string)
            if input_output_data:
                if isinstance(history_item, CallItem):
                    self.append_string_to_menu(popup_menu, "------------------------")
                    self.append_string_to_menu(popup_menu, "Input Data:")
                    self.append_string_to_menu(popup_menu, "------------------------")
                else:
                    self.append_string_to_menu(popup_menu, "------------------------")
                    self.append_string_to_menu(popup_menu, "Output Data:")
                    self.append_string_to_menu(popup_menu, "------------------------")
                for key, data in input_output_data.items():
                    menu_item_string = "    %s :\t%s" % (key.replace("_", "__"), data)
                    self.append_string_to_menu(popup_menu, menu_item_string)
            if state_reference:
                if history_item.outcome:
                    self.append_string_to_menu(popup_menu, "------------------------")
                    final_outcome_menu_item_string = "Final outcome: " + str(history_item.outcome)
                    self.append_string_to_menu(popup_menu, final_outcome_menu_item_string)
                    self.append_string_to_menu(popup_menu, "------------------------")
            popup_menu.show()
            popup_menu.popup(None, None, None, None, event.get_button()[1], time)
        return True
Triggered when a mouse click is performed in the history tree. The method shows all scoped data for an execution step as a tooltip, folds and unfolds the tree on double-click, and selects the respective state for the double-clicked element.
380,435
def info(self, page: 'Page') -> 'Page':
    # The parameter names and inprop values are plausible reconstructions
    # based on the MediaWiki query/info API; the literals were lost.
    params = {
        'action': 'query',
        'prop': 'info',
        'titles': page.title,
        'inprop': '|'.join([
            'protection', 'talkid', 'watched', 'watchers',
            'visitingwatchers', 'notificationtimestamp', 'subjectid',
            'url', 'readable', 'preload', 'displaytitle',
        ])
    }
    raw = self._query(page, params)
    self._common_attributes(raw['query'], page)
    pages = raw['query']['pages']
    for k, v in pages.items():
        if k == '-1':
            page._attributes['pageid'] = -1  # key name assumed
            return page
        else:
            return self._build_info(v, page)
    return page
https://www.mediawiki.org/w/api.php?action=help&modules=query%2Binfo https://www.mediawiki.org/wiki/API:Info
380,436
def hash_from_func(cls, func):
    new = cls._func_hash[func].new
    return new() if new else None
Return a hashlib-compatible object for the multihash `func`. If the `func` is registered but no hashlib-compatible constructor is available for it, `None` is returned. If the `func` is not registered, a `KeyError` is raised. >>> h = FuncReg.hash_from_func(Func.sha2_256) >>> h.name 'sha256'
380,437
def data(self, value):
    if value == self._defaults['data'] and 'data' in self._values:
        del self._values['data']
    else:
        self._values['data'] = value
The data property. Args: value (object): the property value.
380,438
def _ConvertFloat(value):
    # Reconstructed from the garbled extract; follows the protobuf
    # json_format implementation of this helper.
    if value == 'nan':
        raise ParseError('Couldn\'t parse float "nan", use "NaN" instead.')
    try:
        return float(value)
    except ValueError:
        if value == '-inf':
            return float('-inf')
        elif value == 'inf':
            return float('inf')
        elif value == 'nan':
            return float('nan')
        else:
            raise ParseError('Couldn\'t parse float: {0}.'.format(value))
Convert a floating point number.
380,439
def _unique_constrains(cls):
    unique = [{c.name for c in u.columns} for u in cls.__table_args__
              if isinstance(u, UniqueConstraint)]
    unique.extend({c.name} for c in cls.__table__.columns if c.unique)
    return unique
Get all (single column and multi column) unique constraints
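A sketch of what this reports for a model carrying both kinds of constraints (the model itself is illustrative):

    from sqlalchemy import Column, Integer, String, UniqueConstraint
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class User(Base):
        __tablename__ = 'users'
        __table_args__ = (UniqueConstraint('first', 'last'),)
        id = Column(Integer, primary_key=True)
        first = Column(String)
        last = Column(String)
        email = Column(String, unique=True)

    # User._unique_constrains() -> [{'first', 'last'}, {'email'}]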
380,440
def Subclasses(cls, sort_by=None, reverse=False):
    l = list()
    for attr, value in get_all_attributes(cls):
        try:
            if issubclass(value, Constant):
                l.append((attr, value))
        except:
            pass
    if sort_by is None:
        sort_by = "__creation_index__"
    l = list(
        sorted(l, key=lambda x: getattr(x[1], sort_by), reverse=reverse))
    return l
Get all nested Constant class and it's name pair. :param sort_by: the attribute name used for sorting. :param reverse: if True, return in descend order. :returns: [(attr, value),...] pairs. :: >>> class MyClass(Constant): ... a = 1 # non-class attributre ... b = 2 # non-class attributre ... ... class C(Constant): ... pass ... ... class D(Constant): ... pass >>> MyClass.Subclasses() [("C", MyClass.C), ("D", MyClass.D)] .. versionadded:: 0.0.3
380,441
def parse_uri(config_uri):
    if isinstance(config_uri, PlasterURL):
        return config_uri
    isabs = os.path.isabs(config_uri)
    if isabs:
        config_uri = 'file://' + config_uri
    parts = urlparse.urlparse(config_uri)
    path = urlparse.ParseResult(
        scheme='',
        netloc=parts.netloc,
        path=parts.path,
        params='',
        query='',
        fragment='',
    ).geturl()
    if path.startswith('//'):
        path = path[2:]
    if parts.scheme and not isabs:
        scheme = parts.scheme
    else:
        scheme = os.path.splitext(path)[1]
        if scheme.startswith('.'):
            scheme = scheme[1:]
        if scheme:
            scheme = 'file+' + scheme
    query = parts.query if parts.query else None
    options = OrderedDict()
    if query:
        options.update(urlparse.parse_qsl(query))
    fragment = parts.fragment if parts.fragment else None
    if not scheme:
        raise InvalidURI(config_uri, (
            'Could not determine the loader scheme for the uri "{0}".'
            .format(config_uri)))
    return PlasterURL(
        scheme=scheme,
        path=path,
        options=options,
        fragment=fragment,
    )
Parse the ``config_uri`` into a :class:`plaster.PlasterURL` object. ``config_uri`` can be a relative or absolute file path such as ``development.ini`` or ``/path/to/development.ini``. The file must have an extension that can be handled by a :class:`plaster.ILoader` registered with the system. Alternatively, ``config_uri`` may be a :rfc:`1738`-style string.
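For instance, with the extension-derived scheme described above:

    url = parse_uri('development.ini#main')
    # url.scheme   -> 'file+ini'  (derived from the file extension)
    # url.path     -> 'development.ini'
    # url.fragment -> 'main'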
380,442
def set_plain_text(self, text, is_code):
    # The dict keys and heading fragments follow Spyder's help plugin; the
    # original string literals were lost in extraction.
    if type(text) is dict:
        name = text['name']
        if name:
            rst_title = ''.join(['='*len(name), '\n', name, '\n',
                                 '='*len(name), '\n\n'])
        else:
            rst_title = ''
        if text['argspec']:
            definition = ''.join(['Definition: ', name, text['argspec'],
                                  '\n'])
        else:
            definition = ''
        if text['note']:
            note = ''.join(['Type: ', text['note'], '\n\n'])
        else:
            note = ''
        full_text = ''.join([rst_title, definition, note,
                             text['docstring']])
    else:
        full_text = text
    self.plain_text.set_text(full_text, is_code)
    self.save_text([self.plain_text.set_text, full_text, is_code])
Set plain text docs
380,443
def organisation_logo_path(feature, parent):
    _ = feature, parent
    organisation_logo_file = setting(
        inasafe_organisation_logo_path['setting_key'])  # key name assumed
    if os.path.exists(organisation_logo_file):
        return organisation_logo_file
    else:
        # Log message text is assumed; the original literal was lost. The
        # extract applied .format() to the logger call, which is corrected
        # here to format the message string itself.
        LOGGER.info(
            'Organisation logo not found at {logo_path}, falling back to '
            'the default logo.'.format(logo_path=organisation_logo_file))
        return inasafe_default_settings['organisation_logo_path']
Retrieve the full path of the user-specified organisation logo.
380,444
def warp(self, order):
    # The datetime format strings and '09:31:00'/'15:00:00' time literals
    # are plausible reconstructions; the originals were lost in extraction.
    # 'timedelta(day=1)'/'timedelta(minute=1)' in the extract are not valid
    # timedelta kwargs and are corrected to days=1/minutes=1.
    if order.order_model == ORDER_MODEL.MARKET:
        if order.frequence is FREQUENCE.DAY:
            order.date = order.datetime[0:10]
            order.datetime = '{} 09:31:00'.format(order.date)
        elif order.frequence in [FREQUENCE.ONE_MIN, FREQUENCE.FIVE_MIN,
                                 FREQUENCE.FIFTEEN_MIN, FREQUENCE.THIRTY_MIN,
                                 FREQUENCE.SIXTY_MIN]:
            exact_time = str(
                datetime.datetime
                .strptime(str(order.datetime), '%Y-%m-%d %H:%M:%S')
                + datetime.timedelta(minutes=1)
            )
            order.date = exact_time[0:10]
            order.datetime = exact_time
        self.market_data = self.get_market(order)
        if self.market_data is None:
            return order
        order.price = (
            float(self.market_data["high"]) + float(self.market_data["low"])
        ) * 0.5
    elif order.order_model == ORDER_MODEL.NEXT_OPEN:
        try:
            exact_time = str(
                datetime.datetime
                .strptime(str(order.datetime), '%Y-%m-%d %H:%M:%S')
                + datetime.timedelta(days=1)
            )
            order.date = exact_time[0:10]
            order.datetime = '{} 09:31:00'.format(order.date)
        except:
            order.datetime = '{} 09:31:00'.format(order.date)
        self.market_data = self.get_market(order)
        if self.market_data is None:
            return order
        order.price = float(self.market_data["close"])
    elif order.order_model == ORDER_MODEL.CLOSE:
        try:
            order.datetime = self.market_data.datetime
        except:
            if len(str(order.datetime)) == 19:
                pass
            else:
                order.datetime = '{} 15:00:00'.format(order.date)
        self.market_data = self.get_market(order)
        if self.market_data is None:
            return order
        order.price = float(self.market_data["close"])
    elif order.order_model == ORDER_MODEL.STRICT:
        if order.frequence is FREQUENCE.DAY:
            exact_time = str(
                datetime.datetime
                .strptime(order.datetime, '%Y-%m-%d %H:%M:%S')
                + datetime.timedelta(days=1)
            )
            order.date = exact_time[0:10]
            order.datetime = '{} 09:31:00'.format(order.date)
        elif order.frequence in [FREQUENCE.ONE_MIN, FREQUENCE.FIVE_MIN,
                                 FREQUENCE.FIFTEEN_MIN, FREQUENCE.THIRTY_MIN,
                                 FREQUENCE.SIXTY_MIN]:
            exact_time = str(
                datetime.datetime
                .strptime(order.datetime, '%Y-%m-%d %H:%M:%S')
                + datetime.timedelta(minutes=1)
            )
            order.date = exact_time[0:10]
            order.datetime = exact_time
        self.market_data = self.get_market(order)
        if self.market_data is None:
            return order
        if order.towards == 1:
            order.price = float(self.market_data["high"])
        else:
            order.price = float(self.market_data["low"])
    return order
Wrapper around order/market data: fills in the order price and datetime from market data. [description] Arguments: order {[type]} -- [description] Returns: [type] -- [description]
380,445
def disveclayers(self, x, y, layers, aq=None):
    if aq is None:
        aq = self.model.aq.find_aquifer_data(x, y)
    qxqy = self.disvec(x, y, aq)
    rv = np.sum(qxqy[:, np.newaxis, :] * aq.eigvec, 2)
    return rv[:, layers]
Returns two arrays of size len(layers). Only used in building equations.
380,446
def option_present(name, value, reload=False):
    # Salt execution-module keys and comment strings are plausible
    # reconstructions; the original literals were lost in extraction.
    ret = {'name': 'csf',
           'changes': {},
           'result': True,
           'comment': 'Option already present.'}
    option = name
    current_option = __salt__['csf.get_option'](option)
    if current_option:
        l = __salt__['csf.split_option'](current_option)
        option_value = l[1]
        if '"{0}"'.format(value) == option_value:
            return ret
        else:
            result = __salt__['csf.set_option'](option, value)
            ret['comment'] = 'Option modified.'
            ret['changes']['Option'] = 'Changed'
    else:
        result = __salt__['file.append'](
            '/etc/csf/csf.conf',
            args='{0} = "{1}"'.format(option, value))
        ret['comment'] = 'Option not present. Appended to csf.conf.'
        ret['changes']['Option'] = 'Changed.'
    if reload:
        if __salt__['csf.reload']():
            ret['comment'] += ' Csf reloaded.'
        else:
            ret['comment'] += ' Csf failed to reload.'
            ret['result'] = False
    return ret
Ensure the state of a particular option/setting in csf. name The option name in csf.conf value The value it should be set to. reload Boolean. If set to true, csf will be reloaded after.
380,447
def reader(ltsvfile, labels=None):
    label_pattern = re.compile(r"^[0-9A-Za-z_.-]+:")
    if labels is not None:
        prefixes = tuple(L + ':' for L in labels if label_pattern.match(L + ':'))
        for record in ltsvfile:
            record = record.rstrip('\r\n')
            yield [x.split(':', 1) for x in record.split('\t')
                   if x.startswith(prefixes)]
        return
    for record in ltsvfile:
        record = record.rstrip('\r\n')
        yield [x.split(':', 1) for x in record.split('\t')
               if label_pattern.match(x)]
Make LTSV Reader for reading selected labels. :param ltsvfile: iterable of lines. :param labels: sequence of labels. (optional) :return: generator of record in [[label, value], ...] form.
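For example, selecting two labels from a single LTSV line:

    import io
    lines = io.StringIO('host:127.0.0.1\tident:-\ttime:2024-01-01\n')
    print(list(reader(lines, labels=['host', 'time'])))
    # -> [[['host', '127.0.0.1'], ['time', '2024-01-01']]]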
380,448
def append(self, value):
    self._values.append(self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
        self._message_listener.Modified()
Appends an item to the list. Similar to list.append().
380,449
def get_cpds(self, node=None, time_slice=0):
    if node:
        if node not in super(DynamicBayesianNetwork, self).nodes():
            raise ValueError('Node not present in the model.')
        else:
            for cpd in self.cpds:
                if cpd.variable == node:
                    return cpd
    else:
        return [cpd for cpd in self.cpds
                if set(list(cpd.variables)).issubset(
                    self.get_slice_nodes(time_slice))]
Returns the CPDs that have been associated with the network. Parameters ---------- node: tuple (node_name, time_slice) The node should be in the following form (node_name, time_slice). Here, node_name is the node that is inserted while the time_slice is an integer value, which denotes the index of the time_slice that the node belongs to. time_slice: int The time_slice should be a positive integer greater than or equal to zero. Examples -------- >>> from pgmpy.models import DynamicBayesianNetwork as DBN >>> from pgmpy.factors.discrete import TabularCPD >>> dbn = DBN() >>> dbn.add_edges_from([(('D',0),('G',0)),(('I',0),('G',0)),(('D',0),('D',1)),(('I',0),('I',1))]) >>> grade_cpd = TabularCPD(('G',0), 3, [[0.3,0.05,0.9,0.5], ... [0.4,0.25,0.8,0.03], ... [0.3,0.7,0.02,0.2]], [('I', 0),('D', 0)],[2,2]) >>> dbn.add_cpds(grade_cpd) >>> dbn.get_cpds()
380,450
def snapshot_identifier(prefix, db_identifier):
    now = datetime.now()
    # Identifier and strftime format strings assumed; originals were lost.
    return '%s-%s-%s' % (prefix, db_identifier, now.strftime('%Y-%m-%d'))
Return an identifier for a snapshot of a database or cluster.
380,451
def refresh_access_information(self, refresh_token):
    if self.config.grant_type == 'password':
        data = {'grant_type': 'password',
                'username': self.config.user,
                'password': self.config.pswd}
    else:
        data = {'grant_type': 'refresh_token',
                'redirect_uri': self.redirect_uri,
                'refresh_token': refresh_token}
    retval = self._handle_oauth_request(data)
    return {'access_token': retval['access_token'],
            'refresh_token': refresh_token,
            'scope': set(retval['scope'].split(' '))}
Return updated access information for an OAuth2 authorization grant. :param refresh_token: the refresh token used to obtain the updated information :returns: A dictionary with the key/value pairs for access_token, refresh_token and scope. The refresh_token value will be None when the OAuth2 grant is not refreshable. The scope value will be a set containing the scopes the tokens are valid for. Password grants aren't refreshable, so use `get_access_information()` again, instead.
380,452
def move(self, u_function):
    if self.mesh:
        self.u = u_function
        delta = [u_function(p) for p in self.mesh.coordinates()]
        movedpts = self.mesh.coordinates() + delta
        self.polydata(False).GetPoints().SetData(numpy_to_vtk(movedpts))
        self.poly.GetPoints().Modified()
        self.u_values = delta
    else:
        colors.printc("Warning: calling move() but actor.mesh is", self.mesh, c=3)
    return self
Move a mesh by using an external function which prescribes the displacement at any point in space. Useful for manipulating ``dolfin`` meshes.
380,453
def output(self, message, color=None):
    output_to = stderr if color == "red" else stdout
    secho(self.indent(message), fg=color, file=output_to)
A helper used like print() or click's secho(), tunneling all output to sys.stdout or sys.stderr :param message: (str) :param color: (str) check click.secho() documentation :return: (None) prints to sys.stdout or sys.stderr
380,454
def copy_and_move_messages(from_channel, to_channel):
    # The query_dict key and the reset value for message.key are assumptions;
    # the original literals were lost in extraction.
    with BlockSave(Message, query_dict={'channel_id': to_channel.key}):
        for message in Message.objects.filter(channel=from_channel, typ=15):
            message.key = ''  # clear the key so a new copy is saved
            message.channel = to_channel
            message.save()
While splitting channel and moving chosen subscribers to new channel, old channel's messages are copied and moved to new channel. Args: from_channel (Channel object): move messages from channel to_channel (Channel object): move messages to channel
380,455
def get_domain_template(distro, libvirt_ver, **kwargs):
    # The package/template names are plausible reconstructions; the original
    # string literals were lost in extraction.
    env = Environment(
        loader=PackageLoader('lago', 'templates'),
        trim_blocks=True,
        lstrip_blocks=True,
    )
    template_name = 'dom_template-{0}.xml.j2'.format(distro)
    try:
        template = env.get_template(template_name)
    except TemplateNotFound:
        LOGGER.debug('Template %s not found, using base template',
                     template_name)
        template = env.get_template('dom_template-base.xml.j2')
    return template.render(libvirt_ver=libvirt_ver, **kwargs)
Get a rendered Jinja2 domain template Args: distro(str): domain distro libvirt_ver(int): libvirt version kwargs(dict): args for template render Returns: str: rendered template
380,456
def _from_dict(cls, _dict):
    args = {}
    if 'word' in _dict:
        args['word'] = _dict.get('word')
    else:
        raise ValueError(
            'Required property \'word\' not present in Word JSON')
    if 'sounds_like' in _dict:
        args['sounds_like'] = _dict.get('sounds_like')
    else:
        raise ValueError(
            'Required property \'sounds_like\' not present in Word JSON')
    if 'display_as' in _dict:
        args['display_as'] = _dict.get('display_as')
    else:
        raise ValueError(
            'Required property \'display_as\' not present in Word JSON')
    if 'count' in _dict:
        args['count'] = _dict.get('count')
    else:
        raise ValueError(
            'Required property \'count\' not present in Word JSON')
    if 'source' in _dict:
        args['source'] = _dict.get('source')
    else:
        raise ValueError(
            'Required property \'source\' not present in Word JSON')
    if 'error' in _dict:
        args['error'] = [
            WordError._from_dict(x) for x in (_dict.get('error'))
        ]
    return cls(**args)
Initialize a Word object from a json dictionary.
380,457
def merge_trd_mkt_stock_str(trd_mkt, partial_stock_str):
    mkt_qot = Market.NONE
    mkt = TRADE.REV_TRD_MKT_MAP[trd_mkt] if trd_mkt in TRADE.REV_TRD_MKT_MAP else TrdMarket.NONE
    if mkt == TrdMarket.HK:
        mkt_qot = Market.HK
    elif mkt == TrdMarket.US:
        mkt_qot = Market.US
    elif mkt == TrdMarket.HKCC or mkt == TrdMarket.CN:
        # Shanghai code prefixes '6'/'9' are assumed; the originals were lost.
        if partial_stock_str.startswith('6') or partial_stock_str.startswith('9'):
            mkt_qot = Market.SH
        else:
            mkt_qot = Market.SZ
    else:
        raise Exception("merge_trd_mkt_stock_str: unknown trd_mkt.")
    return merge_qot_mkt_stock_str(MKT_MAP[mkt_qot], partial_stock_str)
Merge the string of stocks :param trd_mkt: trade market code :param partial_stock_str: original stock code string. i.e. "AAPL","00700", "000001" :return: unified representation of a stock code. i.e. "US.AAPL", "HK.00700", "SZ.000001"
380,458
def gen_radio_list(sig_dic):
    # The HTML fragments below are assumptions; the original template
    # string literals were lost in extraction.
    view_zuoxiang = ''
    dic_tmp = sig_dic['dic']
    for key in dic_tmp.keys():
        tmp_str = '<input type="radio" name="{0}" value="{1}">{2}'.format(
            sig_dic['en'], key, dic_tmp[key])
        view_zuoxiang += tmp_str
    view_zuoxiang += '<br>'
    return view_zuoxiang
Generate the list-view HTML fragment for RADIO inputs, one per item.
380,459
def fileUpd(self, buffer=None, filename=None, ufilename=None, desc=None):
    CheckParent(self)
    return _fitz.Annot_fileUpd(self, buffer, filename, ufilename, desc)
Update annotation attached file.
380,460
def __add_images_to_manifest(self):
    xpath_expr = "//manifest:manifest[1]"
    for content_tree in self.content_trees:
        manifest_e = content_tree.xpath(
            xpath_expr, namespaces=self.namespaces
        )
        if not manifest_e:
            continue
        for identifier in self.images.keys():
            lxml.etree.SubElement(
                manifest_e[0],
                '{%s}file-entry' % self.namespaces['manifest'],
                attrib={
                    '{%s}full-path' % self.namespaces['manifest']: (
                        PY3O_IMAGE_PREFIX + identifier
                    ),
                    '{%s}media-type' % self.namespaces['manifest']: '',
                }
            )
Add entries for py3o images into the manifest file.
380,461
def line_to(self, x, y):
    cairo.cairo_line_to(self._pointer, x, y)
    self._check_status()
Adds a line to the path from the current point to position ``(x, y)`` in user-space coordinates. After this call the current point will be ``(x, y)``. If there is no current point before the call to :meth:`line_to` this method will behave as ``context.move_to(x, y)``. :param x: X coordinate of the end of the new line. :param y: Y coordinate of the end of the new line. :type x: float :type y: float
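A minimal cairocffi sketch of the move_to/line_to behavior described here:

    import cairocffi as cairo
    surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 100, 100)
    context = cairo.Context(surface)
    context.move_to(10, 10)
    context.line_to(90, 90)  # the current point is now (90, 90)
    context.stroke()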
380,462
def convert_tensor_float_to_float16(tensor):
    if not isinstance(tensor, onnx_proto.TensorProto):
        raise ValueError(
            'Expected input type is an ONNX TensorProto but got %s' % type(tensor))
    if tensor.data_type == onnx_proto.TensorProto.FLOAT:
        tensor.data_type = onnx_proto.TensorProto.FLOAT16
        if tensor.float_data:
            int_list = _npfloat16_to_int(np.float16(tensor.float_data))
            tensor.int32_data[:] = int_list
            tensor.float_data[:] = []
        if tensor.raw_data:
            float32_list = np.fromstring(tensor.raw_data, dtype='float32')
            float16_list = np.float16(float32_list)
            tensor.raw_data = float16_list.tostring()
    return tensor
Convert tensor float to float16. :param tensor: TensorProto object :return tensor_float16: converted TensorProto object Example: :: from onnxmltools.utils.float16_converter import convert_tensor_float_to_float16 new_tensor = convert_tensor_float_to_float16(tensor)
380,463
def delete_files_within_dir(directory: str, filenames: List[str]) -> None:
    for dirpath, dirnames, fnames in os.walk(directory):
        for f in fnames:
            if f in filenames:
                fullpath = os.path.join(dirpath, f)
                log.debug("Deleting {!r}", fullpath)
                os.remove(fullpath)
Delete files within ``directory`` whose filename *exactly* matches one of ``filenames``.
380,464
def process_rewards(self, rewards):
    min_reward, max_reward = self.reward_range
    rewards = np.clip(rewards, min_reward, max_reward)
    rewards = np.around(rewards, decimals=0).astype(np.int64)
    return rewards
Clips, rounds, and changes to integer type. Args: rewards: numpy array of raw (float) rewards. Returns: processed_rewards: numpy array of np.int64
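Concretely, with ``reward_range = (-1, 1)`` the processing reduces to:

    import numpy as np
    rewards = np.array([-2.7, 0.4, 0.6, 3.1])
    np.around(np.clip(rewards, -1, 1), decimals=0).astype(np.int64)
    # -> array([-1, 0, 1, 1])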
380,465
def stix_embedding_pred(self, parent, child, ns_mapping):
    # Several attribute and element-name literals below were lost in
    # extraction; the names used here are plausible reconstructions.
    def extract_typeinfo(child):
        grandchild = child.children
        type_info = None
        while grandchild is not None:
            try:
                grandchild_attrs = extract_attributes(grandchild, prefix_key_char='@')
                if '@xsi:type' in grandchild_attrs and grandchild.name == 'Properties':
                    type_info = grandchild_attrs['@xsi:type'].split(':')[0]
                else:
                    type_info = grandchild.ns().name
                break
            except:
                grandchild = grandchild.next
        if type_info:
            logger.debug("Found type info %s" % type_info)
            return type_info
        else:
            logger.debug("Embedding, but did not find type info")
            return True

    child_attributes = extract_attributes(child, prefix_key_char='@')
    parent_attrs = extract_attributes(parent, prefix_key_char='@')
    if parent.name == 'Test_Mechanism':
        if '@xsi:type' in parent_attrs:
            if 'OpenIOC' in parent_attrs['@xsi:type']:
                id_and_revision_info = OpenIOC_Importer.id_and_revision_extractor(child)
                id_and_revision_info['defer_processing'] = {'importer': 'OpenIOC'}
                logger.debug("XXX: Found OpenIOC with %s" % id_and_revision_info)
                return {'embedded_ns': child.ns().name,
                        'id_and_revision_info': id_and_revision_info}
    if parent.name == 'Kill_Chains' and child.name == 'Kill_Chain':
        logger.debug("Found killchain phase %s" % extract_typeinfo(child))
        return extract_typeinfo(child)
    if parent.name == 'Observable' and child.name == 'Object' and parent.get_parent().name == 'Observables':
        return {'embedded_ns': extract_typeinfo(child),
                'id_and_revision_info': {'top_level': True}}
    if ('@id' in child_attributes):
        return extract_typeinfo(child)
    if '@object_reference' in child_attributes:
        return extract_typeinfo(child)
    if child.name == 'Observable' and '@idref' not in child_attributes:
        return extract_typeinfo(child)
    return False
Predicate for recognizing inlined content in an XML; to be used for DINGO's xml-import hook 'embedded_predicate'. The question this predicate must answer is whether the child should be extracted into a separate object. The function returns either - False (the child is not to be extracted) - True (the child is extracted but nothing can be inferred about what kind of object is extracted) - a string giving some indication about the object type (if nothing else is known: the name of the element, often the namespace of the embedded object) - a dictionary, of the following form:: {'id_and_revision_info' : { 'id': something/None, 'ts': something/None, ... other information you want to record for this object for later usage, }, 'embedded_ns': False/True/some indication about object type as string} Note: the 'parent' and 'child' arguments are XMLNodes as defined by the Python libxml2 bindings. If you have never worked with these, have a look at - Mike Kneller's brief intro: http://mikekneller.com/kb/python/libxml2python/part1 - the functions in django-dingos core.xml_utils module
380,466
def summary(self, title, sentences=0, chars=0, auto_suggest=True, redirect=True):
    page_info = self.page(title, auto_suggest=auto_suggest, redirect=redirect)
    return page_info.summarize(sentences, chars)
Get the summary for the title in question Args: title (str): Page title to summarize sentences (int): Number of sentences to return in summary chars (int): Number of characters to return in summary auto_suggest (bool): Run auto-suggest on title before \ summarizing redirect (bool): Use page redirect on title before summarizing Returns: str: The summarized results of the page Note: Precedence for parameters: sentences then chars; if both are \ 0 then the entire first section is returned
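A usage sketch (the client instance and page title are illustrative):

    wiki = MediaWiki()  # hypothetical client exposing the summary() method above
    text = wiki.summary('Python (programming language)', sentences=2)
    # returns the first two sentences of the page's lead section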
380,467
def plot_cpu_mem_keypoints(self):
    # NOTE: the marker/color/legend-location/axis-label literals below are
    # assumptions; the original string literals were lost in extraction.
    plt.figure(1)
    plt.subplot(311)
    title = self._get_graph_title()
    plt.title(title, loc="center")
    mem_ins = plt.plot(self.time_axis, self.mem_axis, "-", label="Mem(MB)",
                       color="deepskyblue", linestyle="-", marker=",")
    plt.legend(mem_ins, ["Mem(MB)"], loc="upper right")
    plt.grid()
    plt.ylabel("Mem(MB)")
    plt.ylim(bottom=0)
    for method_exec in self.method_exec_info:
        start_date = datetime.fromtimestamp(method_exec["start_time"])
        end_date = datetime.fromtimestamp(method_exec["end_time"])
        plt.vlines(start_date, 0, self.max_mem, colors="c", linestyles="dashed")
        plt.vlines(end_date, 0, self.max_mem, colors="c", linestyles="dashed")
        x = datetime.fromtimestamp(method_exec["mem_max_time"])
        text = "%s: %d MB" % (method_exec["name"], method_exec["mem_max"])
        plt.text(x, method_exec["mem_max"], text, ha="center", va="bottom", fontsize=10)
        plt.plot(x, method_exec["mem_max"], "ro", label="point")

    plt.subplot(312)
    cpu_ins = plt.plot(self.time_axis, self.cpu_axis, "-", label="CPU(%)",
                       color="red", linestyle="-", marker=",")
    plt.legend(cpu_ins, ["CPU(%)"], loc="upper right")
    plt.grid()
    plt.xlabel("Time(s)")
    plt.ylabel("CPU(%)")
    plt.ylim(0, 120)
    for method_exec in self.method_exec_info:
        start_date = datetime.fromtimestamp(method_exec["start_time"])
        end_date = datetime.fromtimestamp(method_exec["end_time"])
        plt.vlines(start_date, 0, 100, colors="c", linestyles="dashed")
        plt.vlines(end_date, 0, 100, colors="c", linestyles="dashed")
        x = datetime.fromtimestamp(method_exec["cpu_max_time"])
        text = "%s: %d%%" % (method_exec["name"], method_exec["cpu_max"])
        plt.text(x, method_exec["cpu_max"], text, ha="center", va="bottom", fontsize=10)
        plt.plot(x, method_exec["cpu_max"], "ro", label="point")

    plt.subplot(313)
    plt.xlabel("Methods")
    plt.ylabel("Keypoints")
    method_list, method_pts_length_list, color_list = [], [], []
    for method_exec in self.method_exec_info:
        for item in ["kp_sch", "kp_src", "good"]:
            method_list.append("%s-%s" % (method_exec["name"], item))
            method_pts_length_list.append(method_exec[item])
            if method_exec["result"]:
                color_list.append(
                    ["palegreen", "limegreen", "deepskyblue"][["kp_sch", "kp_src", "good"].index(item)])
            else:
                color_list.append("tomato")
    method_x = np.arange(len(method_list)) + 1
    plt.bar(method_x, method_pts_length_list, width=0.35, align="center",
            color=color_list, alpha=0.8)
    plt.xticks(method_x, method_list, size="small", rotation=30)
    for x, y in zip(method_x, method_pts_length_list):
        plt.text(x, y + 10, "%d" % y, ha="center", va="bottom", fontsize=7)
    plt.ylim(0, max(method_pts_length_list) * 1.2)
    plt.show()
Plot CPU usage, memory usage and feature keypoint counts.
380,468
def best_periods(self):
    for (key, model) in self.models_.items():
        model.optimizer = self.optimizer
    return dict((filt, model.best_period)
                for (filt, model) in self.models_.items())
Compute the best period under each of the models. Returns ------- best_periods : dict Dictionary of best periods. Dictionary keys are the unique filter names passed to fit()
380,469
def hash(value, algorithm='sha512'):
    if six.PY3 and isinstance(value, six.string_types):
        value = value.encode(__salt_system_encoding__)
    if hasattr(hashlib, ALGORITHMS_ATTR_NAME) and algorithm in getattr(hashlib, ALGORITHMS_ATTR_NAME):
        hasher = hashlib.new(algorithm)
        hasher.update(value)
        out = hasher.hexdigest()
    elif hasattr(hashlib, algorithm):
        hasher = hashlib.new(algorithm)
        hasher.update(value)
        out = hasher.hexdigest()
    else:
        # Error message assumed; the original literal was lost in extraction.
        raise SaltInvocationError('You must specify a valid algorithm.')
    return out
.. versionadded:: 2014.7.0 Hashes a value with the specified algorithm. value The value to be hashed. algorithm : sha512 The algorithm to use. May be any valid algorithm supported by hashlib. CLI Example: .. code-block:: bash salt '*' random.hash 'I am a string' md5
380,470
def semcor_to_offset(sensekey):
    synset = wn.lemma_from_key(sensekey).synset
    # Format string inferred from the documented output '02614387-v'.
    offset = '%08d-%s' % (synset.offset, synset.pos)
    return offset
Converts SemCor sensekey IDs to synset offset. >>> print semcor_to_offset('live%2:42:06::') 02614387-v
380,471
def status_human(self):
    # Status strings reconstructed from the docstring; the mapping for
    # move == 2 is an assumption.
    res = None
    if self._deleted:
        return 'DELETED'
    if self.status == 1:
        res = 'DOWNLOADING'
    elif self.status == 2:
        if self.move == 0:
            res = 'BEING TRANSFERRED'
        elif self.move == 1:
            res = 'TRANSFERRED'
        elif self.move == 2:
            res = 'PARTIALLY TRANSFERRED'
    elif self.status == 4:
        res = 'SEARCHING RESOURCES'
    elif self.status == -1:
        res = 'FAILED'
    if res is not None:
        return res
    return 'UNKNOWN STATUS'
Human readable status :return: * `DOWNLOADING`: the task is downloading files * `BEING TRANSFERRED`: the task is being transferred * `TRANSFERRED`: the task has been transferred to downloads \ directory * `SEARCHING RESOURCES`: the task is searching resources * `FAILED`: the task is failed * `DELETED`: the task is deleted * `UNKNOWN STATUS` :rtype: str
380,472
def show_fields(self, block=None):
    mapping = self._mapping()
    if block is None:
        return mapping
    elif block == "top":
        blocks = set()
        for key in mapping.keys():
            blocks.add(key.split(".")[0])
        block_map = {}
        for b in blocks:
            block_map[b] = "object"
    else:
        block_map = {}
        for key, value in mapping.items():
            if key.startswith(block):
                block_map[key] = value
    return block_map
Retrieve and return the mapping for the given metadata block. Arguments: block (str): The top-level field to fetch the mapping for (for example, ``"mdf"``), or the special values ``None`` for everything or ``"top"`` for just the top-level fields. **Default:** ``None``. index (str): The Search index to map. **Default:** The current index. Returns: dict: ``field:datatype`` pairs.
380,473
def dataoneTypes(request):
    if is_v1_api(request):
        return d1_common.types.dataoneTypes_v1_1
    elif is_v2_api(request) or is_diag_api(request):
        return d1_common.types.dataoneTypes_v2_0
    else:
        # Error message text assumed; the original literal was lost.
        raise d1_common.types.exceptions.ServiceFailure(
            0, 'Unknown version designator in URL. url="{}"'.format(request.path)
        )
Return the PyXB binding to use when handling a request.
380,474
def tzname(self, dt):
    if self.__is_daylight_time(dt):
        return time.tzname[1]
    else:
        return time.tzname[0]
http://docs.python.org/library/datetime.html#datetime.tzinfo.tzname
380,475
def xlim(min, max):
    # Dictionary key names follow pytplot conventions; the original string
    # literals were lost in extraction.
    if not isinstance(min, (int, float, complex)):
        min = tplot_utilities.str_to_int(min)
    if not isinstance(max, (int, float, complex)):
        max = tplot_utilities.str_to_int(max)
    if 'x_range' in tplot_opt_glob:
        lim_info['xlast'] = tplot_opt_glob['x_range']
    else:
        lim_info['xfull'] = Range1d(min, max)
        lim_info['xlast'] = Range1d(min, max)
    tplot_opt_glob['x_range'] = [min, max]
    return
This function will set the x axis range for all time series plots Parameters: min : flt The time to start all time series plots. Can be given in seconds since epoch, or as a string in the format "YYYY-MM-DD HH:MM:SS" max : flt The time to end all time series plots. Can be given in seconds since epoch, or as a string in the format "YYYY-MM-DD HH:MM:SS" Returns: None Examples: >>> # Set the timespan to be 2017-07-17 00:00:00 plus 1 day >>> import pytplot >>> pytplot.xlim(1500249600, 1500249600 + 86400) >>> # The same as above, but using different inputs >>> pytplot.xlim("2017-07-17 00:00:00", "2017-07-18 00:00:00")
380,476
def memoize_single_arg(f):
    memo = {}

    @wraps(f)
    def memoized_f(arg):
        try:
            return memo[arg]
        except KeyError:
            result = memo[arg] = f(arg)
            return result
    return memoized_f
Decorator memoizing a single-argument function
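Usage sketch of the decorator:

    @memoize_single_arg
    def square(n):
        print('computing', n)
        return n * n

    square(4)  # computes and caches
    square(4)  # returned from the memo dict; no recomputation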
380,477
def route(self, uri, *args, **kwargs):
    # Default option keys reconstructed from Sanic's route signature.
    if len(args) == 0 and callable(uri):
        raise RuntimeError("Cannot use the @route decorator without "
                           "arguments.")
    kwargs.setdefault('methods', frozenset({'GET'}))
    kwargs.setdefault('host', None)
    kwargs.setdefault('strict_slashes', False)
    kwargs.setdefault('stream', False)
    kwargs.setdefault('name', None)
    kwargs['with_context'] = True

    def wrapper(handler_f):
        nonlocal self, uri, args, kwargs
        return super(Contextualize, self).route(
            uri, *args, **kwargs)(handler_f)
    return wrapper
Create a plugin route from a decorated function. :param uri: endpoint at which the route will be accessible. :type uri: str :param args: captures all of the positional arguments passed in :type args: tuple(Any) :param kwargs: captures the keyword arguments passed in :type kwargs: dict(Any) :return: The exception function to use as the decorator :rtype: fn
380,478
def variantcall_sample(data, region=None, align_bams=None, out_file=None):
    if out_file is None or not os.path.exists(out_file) or not os.path.lexists(out_file):
        utils.safe_makedir(os.path.dirname(out_file))
        ref_file = dd.get_ref_file(data)
        config = data["config"]
        caller_fns = get_variantcallers()
        caller_fn = caller_fns[config["algorithm"].get("variantcaller")]
        if len(align_bams) == 1:
            items = [data]
        else:
            items = multi.get_orig_items(data)
            assert len(items) == len(align_bams)
        assoc_files = tz.get_in(("genome_resources", "variation"), data, {})
        if not assoc_files:
            assoc_files = {}
        for bam_file in align_bams:
            bam.index(bam_file, data["config"], check_timestamp=False)
        out_file = caller_fn(align_bams, items, ref_file, assoc_files,
                             region, out_file)
    if region:
        data["region"] = region
    data["vrn_file"] = out_file
    return [data]
Parallel entry point for doing genotyping of a region of a sample.
380,479
def expanduser(self, filepath, ssh=False):
    if ssh:
        self._check_ssh()
        stdin, stdout, stderr = self.ssh.exec_command("cd; pwd")
        stdin.close()
        remotepath = filepath.replace("~", stdout.read().split()[0])
        return self._get_tramp_path(remotepath)
    else:
        return os.path.expanduser(filepath)
Replaces the user root ~ with the full path on the file system. Works for local disks and remote servers. For remote servers, set ssh=True.
380,480
def GetValues(self, table_names, column_names, condition):
    if not self._connection:
        raise RuntimeError('Cannot retrieve values: database not opened.')
    if condition:
        condition = ' WHERE {0:s}'.format(condition)
    # The SELECT format string is reconstructed to match the argument order
    # in the extract (table names first, then column names).
    sql_query = 'SELECT {1:s} FROM {0:s}{2:s}'.format(
        ', '.join(table_names), ', '.join(column_names), condition)
    self._cursor.execute(sql_query)
    for row in self._cursor:
        yield {
            column_name: row[column_index]
            for column_index, column_name in enumerate(column_names)}
Retrieves values from a table. Args: table_names (list[str]): table names. column_names (list[str]): column names. condition (str): query condition such as "log_source == 'Application Error'". Yields: sqlite3.row: row. Raises: RuntimeError: if the database is not opened.
380,481
def top_level(self):
    # The '{...}'/'[...]' placeholder literals are assumptions; the
    # originals were lost in extraction.
    output = {}
    if isinstance(self.obj, dict):
        for name, item in self.obj.items():
            if isinstance(item, dict):
                if item:
                    output[name] = StrReprWrapper('{...}')
                else:
                    output[name] = StrReprWrapper('{}')
            elif isinstance(item, list):
                if item:
                    output[name] = StrReprWrapper('[...]')
                else:
                    output[name] = StrReprWrapper('[]')
            else:
                output[name] = item
        return output
    else:
        return self.obj
Print just the top level of an object, being sure to show where it goes deeper
380,482
def _initialize_policy(self):
    with tf.device('/gpu:0' if self._use_gpu else '/cpu:0'):
        network = functools.partial(
            self._config.network, self._config, self._batch_env.action_space)
        self._network = tf.make_template('network', network)
        output = self._network(
            tf.zeros_like(self._batch_env.observ)[:, None],
            tf.ones(len(self._batch_env)))
    if output.policy.event_shape != self._batch_env.action.shape[1:]:
        # Message text assumed; the original literal was lost in extraction.
        message = 'Policy event shape {} does not match action shape {}.'
        message = message.format(
            output.policy.event_shape, self._batch_env.action.shape[1:])
        raise ValueError(message)
    self._policy_type = type(output.policy)
    is_tensor = lambda x: isinstance(x, tf.Tensor)
    policy_params = tools.nested.filter(is_tensor, output.policy.parameters)
    set_batch_dim = lambda x: utility.set_dimension(x, 0, len(self._batch_env))
    tools.nested.map(set_batch_dim, policy_params)
    if output.state is not None:
        tools.nested.map(set_batch_dim, output.state)
    return policy_params, output.state
Initialize the policy. Run the policy network on dummy data to initialize its parameters for later reuse and to analyze the policy distribution. Initializes the attributes `self._network` and `self._policy_type`. Raises: ValueError: Invalid policy distribution. Returns: Parameters of the policy distribution and policy state.
380,483
def __get_blob_dimensions(self, chunk_dim):
    if self.selection_shape[self.freq_axis] > chunk_dim[self.freq_axis] * MAX_BLOB_MB:
        freq_axis_size = self.selection_shape[self.freq_axis]
        time_axis_size = 1
    else:
        freq_axis_size = self.selection_shape[self.freq_axis]
        time_axis_size = np.min([
            chunk_dim[self.time_axis] * MAX_BLOB_MB * chunk_dim[self.freq_axis] / freq_axis_size,
            self.selection_shape[self.time_axis]])
    blob_dim = (int(time_axis_size), 1, freq_axis_size)
    return blob_dim
Sets the blob dimensions, trying to read around 1024 MiB at a time. This assumes a chunk is about 1 MiB.
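A back-of-the-envelope check of the sizing rule, assuming MAX_BLOB_MB is 1024 and one byte per element (both assumptions, not confirmed by the source):

MAX_BLOB_MB = 1024
chunk_dim = (16, 1, 65536)            # ~1 MiB worth of elements
selection_shape = (100000, 1, 65536)  # full band fits in one blob row
freq_axis_size = selection_shape[2]
# Second branch of the method: scale the time axis so the blob is
# roughly MAX_BLOB_MB chunks, capped at the selection's time extent.
time_axis_size = min(chunk_dim[0] * MAX_BLOB_MB * chunk_dim[2] // freq_axis_size,
                     selection_shape[0])
blob_dim = (int(time_axis_size), 1, freq_axis_size)
# -> (16384, 1, 65536): 2**30 elements, i.e. ~1024 MiB per read.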
380,484
async def wait_until_serving(self) -> None: await asyncio.gather( self._receiving_loop_running.wait(), self._internal_loop_running.wait(), loop=self.event_loop )
Await until the ``Endpoint`` is ready to receive events.
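A minimal usage sketch; `endpoint` stands in for an instance of the class defining this coroutine:

async def run(endpoint):
    await endpoint.wait_until_serving()
    # Both internal loops are now running; it is safe to exchange events.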
380,485
def is_valid(self): form = super(AuthenticateForm, self).is_valid() for f, error in self.errors.items(): if f != "__all__": self.fields[f].widget.attrs.update({"class": "error", "placeholder": ", ".join(list(error))}) else: errors = list(error) if "This account is inactive." in errors: message = "Intranet access restricted" else: message = "Invalid password" self.fields["password"].widget.attrs.update({"class": "error", "placeholder": message}) return form
Validates the username and password in the form.
380,486
def delete(method, hmc, uri, uri_parms, logon_required): try: resource = hmc.lookup_by_uri(uri) except KeyError: raise InvalidResourceError(method, uri) resource.manager.remove(resource.oid)
Operation: Delete <resource>.
380,487
def cmd_link_list(self): print("%u links" % len(self.mpstate.mav_master)) for i in range(len(self.mpstate.mav_master)): conn = self.mpstate.mav_master[i] print("%u: %s" % (i, conn.address))
list links
380,488
def plot(self, value=None, pixel=None):
    import ugali.utils.plotting

    # 'nside_pixel' is an assumed config key; the original literal was
    # stripped. 'coords' and 'roi_radius' are recovered from the garbled tail.
    map_roi = np.array(hp.UNSEEN \
                       * np.ones(hp.nside2npix(self.config.params['coords']['nside_pixel'])))

    if value is None:
        map_roi[self.pixels] = 1
        map_roi[self.pixels_annulus] = 0
        map_roi[self.pixels_target] = 2
    elif value is not None and pixel is None:
        map_roi[self.pixels] = value
    elif value is not None and pixel is not None:
        map_roi[pixel] = value
    else:
        logger.error("Can't parse input")

    ugali.utils.plotting.zoomedHealpixMap('Region of Interest',
                                          map_roi,
                                          self.lon, self.lat,
                                          self.config.params['coords']['roi_radius'])
Plot the ROI
380,489
def staff_member(view_func):
    @functools.wraps(view_func, assigned=available_attrs(view_func))
    def dec(request, *args, **kwargs):
        if request.user.is_staff:
            return view_func(request, *args, **kwargs)
        raise PermissionDenied(_("You don't have permission to do this action."))
    return dec
Performs a staff-member check. Similar to Django's `login_required` decorator, except that it raises :exc:`~django.core.exceptions.PermissionDenied` if the user is not a staff member.
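A hypothetical view guarded by the decorator; the view name and signature are illustrative:

@staff_member
def delete_page(request, page_id):
    ...  # only reached when request.user.is_staff is True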
380,490
def _get_available_choices(self, queryset, value):
    item = queryset.filter(pk=value).first()
    if item:
        try:
            pk = getattr(item, self.chained_model_field + "_id")
            filter = {self.chained_model_field: pk}
        except AttributeError:
            try:
                pks = getattr(item, self.chained_model_field).all().values_list('pk', flat=True)
                filter = {self.chained_model_field + "__in": pks}
            except AttributeError:
                try:
                    pks = getattr(item, self.chained_model_field + "_set").all().values_list('pk', flat=True)
                    filter = {self.chained_model_field + "__in": pks}
                except AttributeError:
                    filter = {}
        filtered = list(get_model(self.to_app_name, self.to_model_name).objects.filter(**filter).distinct())
        if self.sort:
            sort_results(filtered)
    else:
        filtered = []
    return filtered
get possible choices for selection
380,491
def update_abbreviations(apps, schema_editor):
    Group = apps.get_model("representatives", "Group")
    # The old -> new abbreviation literals were stripped during extraction
    # and cannot be recovered from context; restore them from the original
    # migration before running this.
    amap = {
        # u'<old abbreviation>': u'<new abbreviation>',
    }
    for old, new in amap.iteritems():
        for g in Group.objects.filter(abbreviation=old):
            g.abbreviation = new
            g.save()
Migrate to new FR committee abbreviations
380,492
def to_end_tag(self, tag_func): @wraps(tag_func) def tag_wrapper(parser, token): class ToEndTagNode(template.Node): def __init__(self): end_name = "end%s" % tag_func.__name__ self.nodelist = parser.parse((end_name,)) parser.delete_first_token() def render(self, context): args = (self.nodelist.render(context), context, token) return tag_func(*args[:tag_func.__code__.co_argcount]) return ToEndTagNode() return self.tag(tag_wrapper)
Creates a tag that parses until it finds the corresponding end tag, eg: for a tag named ``mytag`` it will parse until ``endmytag``. The decorated func's return value is used to render the parsed content and takes three arguments - the parsed content between the start and end tags, the template context and the tag token.
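A hedged example of a tag built with this helper; `register` is assumed to be the template library instance exposing `to_end_tag`:

@register.to_end_tag
def shout(parsed, context, token):
    # `parsed` is the rendered content between {% shout %} and {% endshout %}.
    return parsed.upper()

In a template, {% shout %}hello{% endshout %} would then render as HELLO.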
380,493
def QueryHowDoI(Query, num_answers, full_text, window: sg.Window):
    howdoi_command = HOW_DO_I_COMMAND
    full_text_option = ' -a' if full_text else ''
    t = subprocess.Popen(howdoi_command + ' "' + Query + '" -n ' + str(num_answers) + full_text_option,
                         stdout=subprocess.PIPE)
    (output, err) = t.communicate()
    # The '_OUTPUT_' element key is assumed; the original literal was stripped.
    window.Element('_OUTPUT_').Update('{:^88}'.format(Query.rstrip()), append=True)
    window.Element('_OUTPUT_').Update('_' * 60, append=True)
    window.Element('_OUTPUT_').Update(output.decode("utf-8"), append=True)
    exit_code = t.wait()
Kicks off a subprocess to send the 'Query' to HowDoI. Prints the result, which in this program is routed to a GUI window. :param Query: English-language question to ask the HowDoI web engine :return: nothing
380,494
def main(): arguments = IArguments(__doc__) content = open(arguments.filepath).read() open(arguments.filepath + ".bak", "w").write(content) try: newcontent = transliterate(content) write_newcontent(arguments.filepath, newcontent) except UnicodeEncodeError as ex: console(str(ex), color="red") newcontent = forceascii(content) write_newcontent(arguments.filepath, newcontent)
main
380,495
def _rename_full_name(self, full_name, other_trajectory, used_runs=None, new_run_idx=None):
    split_name = full_name.split('.')
    for idx, name in enumerate(split_name):
        if name in other_trajectory._reversed_wildcards:
            run_indices, wildcards = other_trajectory._reversed_wildcards[name]
            if new_run_idx is None:
                run_idx = None
                for run_jdx in run_indices:
                    if run_jdx in used_runs:
                        run_idx = used_runs[run_jdx]
                        break
                    elif run_jdx == -1:
                        run_idx = -1
                        break
                if run_idx is None:
                    # The original error message was stripped; this is a stand-in.
                    raise RuntimeError('Could not map wildcard `%s` to a used run.' % name)
            else:
                run_idx = new_run_idx
            new_name = self.f_wildcard(wildcards[0], run_idx)
            split_name[idx] = new_name
    full_name = '.'.join(split_name)
    return full_name
Renames a full name based on the wildcards and a particular run
380,496
def get_dialog(self): dialog = self.parent() while not isinstance(dialog, QDialog): dialog = dialog.parent() return dialog
Return FormDialog instance
380,497
def layout_circle(self): n = self.num_vertices() t = np.linspace(0, 2*np.pi, n+1)[:n] return np.column_stack((np.cos(t), np.sin(t)))
Position vertices evenly around a circle.
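The layout is easy to verify standalone: for four vertices the points land on the unit circle at right angles.

import numpy as np

n = 4
t = np.linspace(0, 2 * np.pi, n + 1)[:n]
coords = np.column_stack((np.cos(t), np.sin(t)))
# coords is approximately [[1, 0], [0, 1], [-1, 0], [0, -1]]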
380,498
def validate(raw_schema, target=None, **kwargs): schema = schema_validator(raw_schema, **kwargs) if target is not None: validate_object(target, schema=schema, **kwargs)
Given the python representation of a JSONschema as defined in the swagger spec, validate that the schema complies with the spec. If `target` is provided, that target will be validated against the provided schema.
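An illustrative call, assuming `validate` is imported from this module; the schema is a minimal object schema:

schema = {
    'type': 'object',
    'properties': {'name': {'type': 'string'}},
    'required': ['name'],
}
validate(schema, target={'name': 'widget'})  # raises if target does not comply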
380,499
async def sendPhoto(self, chat_id, photo,
                    caption=None,
                    parse_mode=None,
                    disable_notification=None,
                    reply_to_message_id=None,
                    reply_markup=None):
    p = _strip(locals(), more=['photo'])
    return await self._api_request_with_file('sendPhoto', _rectify(p), 'photo', photo)
See: https://core.telegram.org/bots/api#sendphoto :param photo: - string: ``file_id`` for a photo existing on Telegram servers - string: HTTP URL of a photo from the Internet - file-like object: obtained by ``open(path, 'rb')`` - tuple: (filename, file-like object). If the filename contains non-ASCII characters and you are using Python 2.7, make sure the filename is a unicode string.
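Sketches of the four accepted `photo` forms; `bot`, the chat id, and the file names are placeholders:

async def examples(bot, chat_id):
    await bot.sendPhoto(chat_id, 'AgADBAAD...')                       # existing file_id
    await bot.sendPhoto(chat_id, 'https://example.com/cat.jpg')       # HTTP URL
    await bot.sendPhoto(chat_id, open('cat.jpg', 'rb'))               # file-like object
    await bot.sendPhoto(chat_id, ('cat.jpg', open('cat.jpg', 'rb')))  # (filename, file)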