15,900
def threshold_otsu(image, multiplier=1.0):
    otsu_value = skimage.filters.threshold_otsu(image)
    return image > otsu_value * multiplier
Return image thresholded using Otsu's method.
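A minimal usage sketch, assuming `skimage` is installed and the function above is in scope; the sample image and the 0.9 multiplier are illustrative choices, not part of the original entry:

    import skimage.data
    import skimage.filters

    image = skimage.data.camera()                   # 8-bit grayscale sample image
    mask = threshold_otsu(image)                    # boolean mask at the plain Otsu value
    loose = threshold_otsu(image, multiplier=0.9)   # lower the cutoff by 10%
    print(mask.sum(), loose.sum())                  # the looser mask selects more pixels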
15,901
def has_changed_since_last_deploy(file_path, bucket):
    msg = "Checking if {0} has changed since last deploy.".format(file_path)
    logger.debug(msg)
    with open(file_path) as f:
        data = f.read()
        file_md5 = hashlib.md5(data.encode()).hexdigest()
        logger.debug("file_md5 is {0}".format(file_md5))
    key = bucket.get_key(file_path)
    if key:
        # The derivation of key_md5 was elided in the source; for boto keys,
        # the ETag is the hex MD5 digest (for non-multipart uploads).
        key_md5 = key.etag.strip('"')
        logger.debug("key_md5 is {0}".format(key_md5))
    else:
        logger.debug("File does not exist in bucket")
        return True
    if file_md5 == key_md5:
        logger.debug("File has not changed.")
        return False
    logger.debug("File has changed.")
    return True
Checks if a file has changed since the last time it was deployed.

:param file_path: Path to the file to check. Should be relative from root of bucket.
:param bucket: S3 bucket (boto ``Bucket`` object) to check against.
:returns: True if the file has changed, else False.
15,902
def mmGetMetricSequencesPredictedActiveCellsShared(self):
    self._mmComputeTransitionTraces()
    numSequencesForCell = defaultdict(lambda: 0)
    for predictedActiveCells in (
            self._mmData["predictedActiveCellsForSequence"].values()):
        for cell in predictedActiveCells:
            numSequencesForCell[cell] += 1
    # The metric title string was truncated in the source; reconstructed
    # from the docstring.
    return Metric(self,
                  "# sequences each predicted => active cell appears in",
                  numSequencesForCell.values())
Metric for number of sequences each predicted => active cell appears in.

Note: This metric is flawed when it comes to high-order sequences.

@return (Metric) metric
15,903
def latex(self):
    # Most format-string literals in this method were elided in the source;
    # the stand-ins below are minimal and may differ from the original.
    s = ('{authors}, {title}, {journal}, {volissue}, {pages}, '
         '({date}). {doi}. {scopus_url}.')
    if len(self.authors) > 1:
        authors = ', '.join(str(a.given_name) + ' ' + str(a.surname)
                            for a in self.authors[0:-1])
        authors += (' and ' + str(self.authors[-1].given_name) + ' ' +
                    str(self.authors[-1].surname))
    else:
        a = self.authors[0]
        authors = str(a.given_name) + ' ' + str(a.surname)
    title = self.title
    journal = self.publicationName
    volume = self.volume
    issue = self.issueIdentifier
    if volume and issue:
        volissue = '{0} ({1})'.format(volume, issue)
    elif volume:
        volissue = '{0}'.format(volume)
    else:
        volissue = ''
    date = self.coverDate
    if self.pageRange:
        pages = 'pp. {0}'.format(self.pageRange)
    elif self.startingPage:
        pages = 'p. {0.startingPage}'.format(self)
    elif self.article_number:
        pages = 'Art. No. {0.article_number}'.format(self)
    else:
        pages = ''
    doi = 'doi:{0}'.format(self.doi)
    scopus_url = '{0} (eid: {1})'.format(self.scopus_url, self.eid)
    return s.format(**locals())
Return LaTeX representation of the abstract.
15,904
def get_default_config(self):
    config = super(LibratoHandler, self).get_default_config()
    # The option names were elided in the source; the keys below follow
    # diamond's LibratoHandler conventions and match the value types that
    # survived extraction ('', '', False, 300, 60, []).
    config.update({
        'user': '',
        'apikey': '',
        'apply_metric_prefix': False,
        'queue_max_size': 300,
        'queue_max_interval': 60,
        'include_filters': [],
    })
    return config
Return the default config for the handler
15,905
def QCapsulate(self, widget, name, blocking=False, nude=False):

    class QuickWindow(QtWidgets.QMainWindow):

        class Signals(QtCore.QObject):
            close = QtCore.Signal()
            show = QtCore.Signal()

        def __init__(self, blocking=False, parent=None, nude=False):
            super().__init__(parent)
            self.propagate = True
            self.setStyleSheet(style.main_gui)
            if blocking:
                self.setWindowModality(QtCore.Qt.ApplicationModal)
            if nude:
                self.setWindowFlags(QtCore.Qt.Dialog)
            self.signals = self.Signals()

        def closeEvent(self, e):
            if self.propagate:
                self.signals.close.emit()
            e.accept()

        def showEvent(self, e):
            if self.propagate:
                self.signals.show.emit()
            e.accept()

        def setPropagate(self):
            self.propagate = True

        def unSetPropagate(self):
            self.propagate = False

    win = QuickWindow(blocking=blocking, nude=nude)
    win.setCentralWidget(widget)
    win.setLayout(QtWidgets.QHBoxLayout())
    win.setWindowTitle(name)
    return win
Helper function that encapsulates a QWidget into a QMainWindow
15,906
def rand_article(num_p=(4, 10), num_s=(2, 15), num_w=(5, 40)):
    article = list()
    for _ in range(random.randint(*num_p)):
        p = list()
        for _ in range(random.randint(*num_s)):
            s = list()
            for _ in range(random.randint(*num_w)):
                s.append(
                    rand_str(random.randint(1, 15), string.ascii_lowercase))
            p.append(" ".join(s))
        article.append(". ".join(p))
    return "\n\n".join(article)
Random article text.

Example::

    >>> rand_article()
    ...
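A short usage sketch; `rand_str(length, charset)` is a helper the function assumes, so a minimal stand-in is defined here:

    import random
    import string

    def rand_str(length, charset):
        # Minimal stand-in for the helper assumed by rand_article.
        return "".join(random.choice(charset) for _ in range(length))

    text = rand_article(num_p=(2, 3), num_s=(2, 4), num_w=(3, 6))
    print(text.count("\n\n") + 1)  # number of paragraphs generated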
15,907
def main():
    # The '-h' and '-N' flag literals were elided in the source; they are
    # restored from the OPTIONS list in the docstring.
    if '-h' in sys.argv:
        print(main.__doc__)
        sys.exit()
    if len(sys.argv) <= 1:
        print(main.__doc__)
        print('you must supply a file name')  # message elided; stand-in
        sys.exit()
    FIG = {}
    FIG['lowrie'] = 1
    pmagplotlib.plot_init(FIG['lowrie'], 6, 6)
    norm = 1
    in_file = pmag.get_named_arg("-f", "measurements.txt")
    dir_path = pmag.get_named_arg("-WD", ".")
    in_file = pmag.resolve_file_name(in_file, dir_path)
    data_model = pmag.get_named_arg("-DM", 3)
    data_model = int(float(data_model))
    fmt = pmag.get_named_arg("-fmt", "svg")
    if '-N' in sys.argv:
        norm = 0
    DIMs, Temps = [], []
    # (The code that reads the measurement file into specdata and resolves
    # the dec/inc/moment/temp column names was elided in the source.)
    for dat in specdata:
        DIMs.append([float(dat[dec_col]), float(dat[inc_col]),
                     float(dat[moment_col])])
        Temps.append(float(dat[temp_col]) - 273.)
    carts = pmag.dir2cart(DIMs).transpose()
    if norm == 1:
        nrm = (DIMs[0][2])
        ylab = "M/M_o"
    else:
        nrm = 1.
NAME
    lowrie_magic.py

DESCRIPTION
    plots intensity decay curves for Lowrie experiments

SYNTAX
    lowrie_magic.py -h [command line options]

INPUT
    takes measurements formatted input files

OPTIONS
    -h prints help message and quits
    -f FILE: specify input file, default is magic_measurements.txt
    -N do not normalize by maximum magnetization
    -fmt [svg, pdf, eps, png] specify fmt, default is svg
    -sav saves plots and quits
    -DM [2, 3] MagIC data model number
15,908
def path_join(*args):
    args = (paramiko.py3compat.u(arg) for arg in args)
    return os.path.join(*args)
Wrapper around `os.path.join`. Makes sure to join paths of the same type
(text/unicode, via `paramiko.py3compat.u`).
15,909
def snr_from_loglr(loglr):
    singleval = isinstance(loglr, float)
    if singleval:
        loglr = numpy.array([loglr])
    # Temporarily silence invalid-value warnings: sqrt of a negative
    # loglr yields nan, which is mapped to 0 below.
    numpysettings = numpy.seterr(invalid='ignore')
    snrs = numpy.sqrt(2 * loglr)
    numpy.seterr(**numpysettings)
    snrs[numpy.isnan(snrs)] = 0.
    if singleval:
        snrs = snrs[0]
    return snrs
Returns the SNR computed from the given log likelihood ratio(s).

This is defined as `sqrt(2*loglr)`. If the log likelihood ratio is < 0,
returns 0.

Parameters
----------
loglr : array or float
    The log likelihood ratio(s) to evaluate.

Returns
-------
array or float
    The SNRs computed from the log likelihood ratios.
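A quick check of the definition, with made-up values:

    import numpy

    print(snr_from_loglr(8.0))                       # sqrt(2*8) = 4.0
    print(snr_from_loglr(numpy.array([2.0, -1.0])))  # [2.0, 0.0]; negative -> 0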
15,910
def _infer_sig_len(file_name, fmt, n_sig, dir_name, pb_dir=None):
    if pb_dir is None:
        file_size = os.path.getsize(os.path.join(dir_name, file_name))
    else:
        file_size = download._remote_file_size(file_name=file_name,
                                               pb_dir=pb_dir)
    sig_len = int(file_size / (BYTES_PER_SAMPLE[fmt] * n_sig))
    return sig_len
Infer the length of a signal from a dat file.

Parameters
----------
file_name : str
    Name of the dat file
fmt : str
    WFDB fmt of the dat file
n_sig : int
    Number of signals contained in the dat file

Notes
-----
sig_len * n_sig * bytes_per_sample == file_size
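The relation in the Notes inverts directly; a toy check with hypothetical numbers (a 2-signal record in a 2-bytes-per-sample format, 40000-byte file):

    BYTES_PER_SAMPLE = {'16': 2}   # illustrative entry, not the full WFDB table
    file_size = 40000
    n_sig = 2
    sig_len = file_size // (BYTES_PER_SAMPLE['16'] * n_sig)
    print(sig_len)  # 10000 samples per signal: 10000 * 2 * 2 == 40000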
15,911
def expect_bounded(__funcname=_qualified_name, **named):
    def _make_bounded_check(bounds):
        (lower, upper) = bounds
        if lower is None:
            def should_fail(value):
                return value > upper
            predicate_descr = "less than or equal to " + str(upper)
        elif upper is None:
            def should_fail(value):
                return value < lower
            predicate_descr = "greater than or equal to " + str(lower)
        else:
            def should_fail(value):
                return not (lower <= value <= upper)
            predicate_descr = "inclusively between %s and %s" % bounds

        # The argument placeholder was elided in the source; restored from
        # the doctest output ("for argument 'x'").
        template = (
            "%(funcname)s() expected a value {predicate}"
            " for argument '%(argname)s', but got %(actual)s instead."
        ).format(predicate=predicate_descr)

        return make_check(
            exc_type=ValueError,
            template=template,
            pred=should_fail,
            actual=repr,
            funcname=__funcname,
        )

    return _expect_bounded(_make_bounded_check, __funcname=__funcname, **named)
Preprocessing decorator verifying that inputs fall INCLUSIVELY between bounds.

Bounds should be passed as a pair of ``(min_value, max_value)``. ``None``
may be passed as ``min_value`` or ``max_value`` to signify that the input
is only bounded above or below.

Examples
--------
>>> @expect_bounded(x=(1, 5))
... def foo(x):
...     return x + 1
...
>>> foo(1)
2
>>> foo(5)
6
>>> foo(6)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
Traceback (most recent call last):
...
ValueError: ...foo() expected a value inclusively between 1 and 5 for argument 'x', but got 6 instead.

>>> @expect_bounded(x=(2, None))
... def foo(x):
...     return x
...
>>> foo(100000)
100000
>>> foo(1)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
Traceback (most recent call last):
...
ValueError: ...foo() expected a value greater than or equal to 2 for argument 'x', but got 1 instead.

>>> @expect_bounded(x=(None, 5))
... def foo(x):
...     return x
...
>>> foo(6)  # doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
Traceback (most recent call last):
...
ValueError: ...foo() expected a value less than or equal to 5 for argument 'x', but got 6 instead.
15,912
def _parse_area(self, area_xml):
    # The XML attribute names, tag names and device-type literals were
    # elided in the source; the values below follow pylutron's XML schema
    # and are stand-ins.
    area = Area(self._lutron,
                name=area_xml.get('Name'),
                integration_id=int(area_xml.get('IntegrationID')),
                occupancy_group_id=area_xml.get('OccupancyGroupAssignedToID'))
    for output_xml in area_xml.find('Outputs'):
        output = self._parse_output(output_xml)
        area.add_output(output)
    for device_group in area_xml.find('DeviceGroups'):
        if device_group.tag == 'DeviceGroup':
            devs = device_group.find('Devices')
        elif device_group.tag == 'Device':
            devs = [device_group]
        else:
            _LOGGER.info("Unknown tag in DeviceGroups child %s" % devs)
            devs = []
        for device_xml in devs:
            if device_xml.tag != 'Device':
                continue
            if device_xml.get('DeviceType') in (
                    'SEETOUCH_KEYPAD', 'SEETOUCH_TABLETOP_KEYPAD',
                    'PICO_KEYPAD', 'HYBRID_SEETOUCH_KEYPAD',
                    'MAIN_REPEATER'):
                keypad = self._parse_keypad(device_xml)
                area.add_keypad(keypad)
            elif device_xml.get('DeviceType') == 'MOTION_SENSOR':
                motion_sensor = self._parse_motion_sensor(device_xml)
                area.add_sensor(motion_sensor)
    return area
Parses an Area tag, which is effectively a room, depending on how the Lutron controller programming was done.
15,913
def update_firmware(self, filename, component_type):
    # The log-message and XML literals were elided in the source;
    # the strings below are stand-ins.
    fw_img_processor = firmware_controller.FirmwareImageUploader(filename)
    LOG.debug(self._('Uploading firmware file: %s ...'), filename)
    cookie = fw_img_processor.upload_file_to((self.host, self.port),
                                             self.timeout)
    LOG.debug(self._('Firmware file: %s ... uploaded.'), filename)
    root = self._get_firmware_update_xml_for_file_and_component(
        filename, component_type)
    element = root.find('RIB_INFO')
    etree.SubElement(element, 'TPM_ENABLED', VALUE='Yes')
    extra_headers = {'Cookie': cookie}
    LOG.debug(self._('Flashing firmware file: %s ...'), filename)
    d = self._request_ilo(root, extra_headers=extra_headers)
    common.wait_for_ribcl_firmware_update_to_complete(self)
    self._parse_output(d)
    LOG.info(self._('Flashing firmware file: %s ... done'), filename)
Updates the given firmware on the server for the given component.

:param filename: location of the raw firmware file. Extraction of the
    firmware file (if in compact format) is expected to happen prior to
    this invocation.
:param component_type: Type of component to be applied to.
:raises: InvalidInputError, if the validation of the input fails
:raises: IloError, on an error from iLO
:raises: IloConnectionError, if not able to reach iLO.
:raises: IloCommandNotSupportedError, if the command is not supported
    on the server
15,914
def read_file_snippets(file, snippet_store):
    start_reg = re.compile("(.*%%SNIPPET_START%% )([a-zA-Z0-9]+)")
    end_reg = re.compile("(.*%%SNIPPET_END%% )([a-zA-Z0-9]+)")
    open_snippets = {}
    with open(file, encoding="utf-8") as w:
        lines = w.readlines()
    for line in lines:
        printd("Got Line: {}".format(line))
        # (The start_reg/end_reg matching that opens and closes snippets,
        # and populates snippet_store, was elided in the source.)
        for snippet in open_snippets.values():
            printd("Adding Line to snippet")
            snippet.append(line)
    for opened in open_snippets:
        record_error("Snippet {} left open - ignoring".format(opened))
Parse a file and add all snippets to the snippet_store dictionary
15,915
def write_timestamp(self, t, pack=Struct('>Q').pack):
    # The struct format was elided in the source; '>Q' (big-endian
    # unsigned 64-bit) is inferred from the docstring.
    self._output_buffer.extend(pack(long(timegm(t.timetuple()))))
    return self
Write out a Python datetime.datetime object as a 64-bit integer representing seconds since the Unix UTC epoch.
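A quick illustration of the wire format, assuming the `'>Q'` layout inferred above:

    from calendar import timegm
    from datetime import datetime
    from struct import Struct

    t = datetime(2020, 1, 1)
    packed = Struct('>Q').pack(timegm(t.timetuple()))
    print(len(packed), packed.hex())  # 8 bytes, big-endian seconds since epoch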
15,916
def read_hypergraph(string):
    # The attribute names were elided in the source; 'id' and 'to' follow
    # the python-graph XML markup and are stand-ins.
    hgr = hypergraph()
    dom = parseString(string)
    for each_node in dom.getElementsByTagName("node"):
        hgr.add_node(each_node.getAttribute('id'))
    for each_node in dom.getElementsByTagName("hyperedge"):
        hgr.add_hyperedge(each_node.getAttribute('id'))
    dom = parseString(string)
    for each_node in dom.getElementsByTagName("node"):
        for each_edge in each_node.getElementsByTagName("link"):
            hgr.link(str(each_node.getAttribute('id')),
                     str(each_edge.getAttribute('to')))
    return hgr
Read a graph from a XML document. Nodes and hyperedges specified in the
input will be added to the current graph.

@type  string: string
@param string: Input string in XML format specifying a graph.

@rtype:  hypergraph
@return: Hypergraph
15,917
def _to_list(obj):
    ret = {}
    for attr in __attrs:
        if hasattr(obj, attr):
            ret[attr] = getattr(obj, attr)
    return ret
Convert an snetinfo object to a dict of its attributes
15,918
def determine_inside_container(self):
    tokenum, value = self.current.tokenum, self.current.value
    ending_container = False
    starting_container = False
    if tokenum == OP:
        # The bracket literals were elided in the source: an opening
        # bracket pushes onto self.containers, a closing bracket pops.
        if value in ('(', '[', '{'):
            self.containers.append(value)
            starting_container = True
        elif value in (')', ']', '}'):
            self.containers.pop()
            ending_container = True
    self.just_ended_container = not len(self.containers) and ending_container
    self.just_started_container = len(self.containers) == 1 and starting_container
    self.in_container = (len(self.containers)
                         or self.just_ended_container
                         or self.just_started_container)
Set self.in_container if we're inside a container

* Inside container
* Current token starts a new container
* Current token ends all containers
15,919
def show_bare_metal_state_output_bare_metal_state(self, **kwargs):
    config = ET.Element("config")
    show_bare_metal_state = ET.Element("show_bare_metal_state")
    config = show_bare_metal_state
    output = ET.SubElement(show_bare_metal_state, "output")
    bare_metal_state = ET.SubElement(output, "bare-metal-state")
    # The kwargs key names were elided in the source; restored following
    # the uniform pattern of these auto-generated methods.
    bare_metal_state.text = kwargs.pop('bare_metal_state')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
15,920
def get_all_responses(self, service_name, receive_timeout_in_seconds=None):
    handler = self._get_handler(service_name)
    return handler.get_all_responses(receive_timeout_in_seconds)
Receive all available responses from the service as a generator.

:param service_name: The name of the service from which to receive responses
:type service_name: union[str, unicode]
:param receive_timeout_in_seconds: How long to block without receiving a
    message before raising `MessageReceiveTimeout` (defaults to five
    seconds unless the settings are otherwise).
:type receive_timeout_in_seconds: int

:return: A generator that yields (request ID, job response)
:rtype: generator

:raise: ConnectionError, MessageReceiveError, MessageReceiveTimeout,
    InvalidMessage, StopIteration
15,921
async def create_new_sticker_set(self, user_id: base.Integer, name: base.String,
                                 title: base.String,
                                 png_sticker: typing.Union[base.InputFile, base.String],
                                 emojis: base.String,
                                 contains_masks: typing.Union[base.Boolean, None] = None,
                                 mask_position: typing.Union[types.MaskPosition, None] = None) -> base.Boolean:
    mask_position = prepare_arg(mask_position)
    # The 'png_sticker' literals were elided in the source; restored from
    # the file-upload pattern used by these Bot API wrappers.
    payload = generate_payload(**locals(), exclude=['png_sticker'])
    files = {}
    prepare_file(payload, files, 'png_sticker', png_sticker)
    result = await self.request(api.Methods.CREATE_NEW_STICKER_SET,
                                payload, files)
    return result
Use this method to create new sticker set owned by a user.
The bot will be able to edit the created sticker set.

Source: https://core.telegram.org/bots/api#createnewstickerset

:param user_id: User identifier of created sticker set owner
:type user_id: :obj:`base.Integer`
:param name: Short name of sticker set, to be used in t.me/addstickers/ URLs (e.g., animals)
:type name: :obj:`base.String`
:param title: Sticker set title, 1-64 characters
:type title: :obj:`base.String`
:param png_sticker: Png image with the sticker, must be up to 512 kilobytes
    in size, dimensions must not exceed 512px, and either width or height
    must be exactly 512px.
:type png_sticker: :obj:`typing.Union[base.InputFile, base.String]`
:param emojis: One or more emoji corresponding to the sticker
:type emojis: :obj:`base.String`
:param contains_masks: Pass True, if a set of mask stickers should be created
:type contains_masks: :obj:`typing.Union[base.Boolean, None]`
:param mask_position: A JSON-serialized object for position where the mask should be placed on faces
:type mask_position: :obj:`typing.Union[types.MaskPosition, None]`
:return: Returns True on success
:rtype: :obj:`base.Boolean`
15,922
def visit_repr(self, node, parent):
    newnode = nodes.Repr(node.lineno, node.col_offset, parent)
    newnode.postinit(self.visit(node.value, newnode))
    return newnode
visit a Backquote node by returning a fresh instance of it
15,923
def orderrun_detail(dk_api, kitchen, pd):
    # Many message/format literals and dictionary keys in this function
    # were elided in the source; '...' marks those elisions.
    if DKCloudCommandRunner.SUMMARY in pd:
        display_summary = True
    else:
        display_summary = False
    pd[DKCloudCommandRunner.SUMMARY] = True
    rc = dk_api.orderrun_detail(kitchen, pd)
    s = ''
    if not rc.ok() or not isinstance(rc.get_payload(), list):
        s = '... %s' % rc.get_message()
        rc.set_message(s)
        return rc
    serving_list = rc.get_payload()
    serving = None
    if DKCloudCommandRunner.ORDER_RUN_ID in pd:
        order_run_id = pd[DKCloudCommandRunner.ORDER_RUN_ID]
        for serv in serving_list:
            if serv[DKCloudCommandRunner.ORDER_RUN_ID] == order_run_id:
                serving = serv
                break
    elif DKCloudCommandRunner.ORDER_ID in pd:
        order_id = pd[DKCloudCommandRunner.ORDER_ID]
        for serv in serving_list:
            if serv[DKCloudCommandRunner.ORDER_ID] == order_id:
                serving = serv
                break
    else:
        dex = -1
        latest = None
        for i, serving in enumerate(serving_list):
            if (DKCloudCommandRunner.ORDER_ID in serving
                    and serving[DKCloudCommandRunner.ORDER_ID] > latest):
                latest = serving[DKCloudCommandRunner.ORDER_ID]
                dex = i
        if dex != -1:
            serving = serving_list[dex]
    if serving is None:
        rc.set(rc.DK_FAIL,
               "No OrderRun information. Try using '... %s' to see what "
               "is available." % kitchen)
        return rc
    if serving and display_summary:
        s += '...'
        summary = None
        if DKCloudCommandRunner.SUMMARY in serving:
            summary = serving[DKCloudCommandRunner.SUMMARY]
        s += '... %s' % serving[DKCloudCommandRunner.ORDER_ID]
        orid_from_serving = serving[DKCloudCommandRunner.ORDER_RUN_ID]
        s += '... %s' % orid_from_serving
        s += '... %s' % serving['...']
        s += '... %s' % kitchen
        if summary and '...' in summary:
            s += '... %s' % summary['...']
        else:
            s += '... %s' % '...'
        s += '... %s' % orid_from_serving.split('...')[3]
        if summary and '...' in summary:
            start_time = summary['...']
            if isinstance(start_time, basestring):
                s += '... %s' % summary['...'].split('...')[0]
            else:
                s += '... %s' % '...'
        else:
            s += '... %s' % '...'
        run_time = None
        if summary and '...' in summary:
            run_time = summary['...']
        if isinstance(run_time, basestring):
            s += serving[DKCloudCommandRunner.ORDER_RUN_ID]
    rc.set_message(s)
    return rc
Returns a string.

:param dk_api: -- api object
:param kitchen: string
:param pd: dict
:rtype: DKReturnCode
15,924
def get_txn_outputs(raw_tx_hex, output_addr_list, coin_symbol):
    # The docstring fragment that was spliced into this cell is removed
    # (it duplicates the docstring below). Dictionary-key literals were
    # elided in the source; the keys below follow the pybitcointools
    # deserializer ('outs', 'value', 'script') and '6a' is the OP_RETURN
    # prefix that marks null-data outputs.
    err_msg = '%s not supported by this library' % coin_symbol  # message elided; stand-in
    assert lib_can_deserialize_cs(coin_symbol), err_msg
    assert isinstance(output_addr_list, (list, tuple))
    for output_addr in output_addr_list:
        assert is_valid_address(output_addr), output_addr
    output_addr_set = set(output_addr_list)
    outputs = []
    deserialized_tx = deserialize(str(raw_tx_hex))
    for out in deserialized_tx.get('outs', []):
        output = {'value': out['value']}
        pubkey_addr = script_to_address(
            out['script'],
            vbyte=COIN_SYMBOL_MAPPINGS[coin_symbol]['vbyte_pubkey'])
        script_addr = script_to_address(
            out['script'],
            vbyte=COIN_SYMBOL_MAPPINGS[coin_symbol]['vbyte_script'])
        nulldata = out['script'] if out['script'][0:2] == '6a' else None
        if pubkey_addr in output_addr_set:
            address = pubkey_addr
            output['address'] = address
        elif script_addr in output_addr_set:
            address = script_addr
            output['address'] = address
        elif nulldata:
            output['script'] = nulldata
            output['script_type'] = 'null-data'
        else:
            raise Exception('Script %s does not contain a valid output address: %s' % (
                out['script'], output_addr_set,))
        outputs.append(output)
    return outputs
Used to verify a transaction hex does what's expected of it.

Must supply a list of output addresses so that the library can try to
convert from script to address using both pubkey and script.

Returns a list of the following form:
    [{'value': 12345, 'address': '1abc...'}, ...]

Uses @vbuterin's decoding methods.
15,925
def bootstrapSampleFromData(data, weights=None, seed=0):
    RNG = np.random.RandomState(seed)
    N = data.shape[0]
    if weights is not None:
        cutoffs = np.cumsum(weights)
    else:
        cutoffs = np.linspace(0, 1, N)
    indices = np.searchsorted(cutoffs, RNG.uniform(size=N))
    new_data = deepcopy(data[indices,])
    return new_data
Samples rows from the input array of data, generating a new data array
with an equal number of rows (records). Rows are drawn with equal
probability by default, but probabilities can be specified with weights
(must sum to 1).

Parameters
----------
data : np.array
    An array of data, with each row representing a record.
weights : np.array
    A weighting array with length equal to data.shape[0].
seed : int
    A seed for the random number generator.

Returns
-------
new_data : np.array
    A resampled version of input data.
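A short resampling sketch (numpy imported as np and deepcopy in scope, as the function assumes):

    import numpy as np
    from copy import deepcopy

    data = np.arange(10).reshape(5, 2)       # 5 records, 2 columns
    resampled = bootstrapSampleFromData(data, seed=42)
    print(resampled.shape)                   # (5, 2): same number of records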
15,926
def __error_middleware(self, res, res_json):
    if res.status_code in [400, 401, 402, 403, 404, 405, 406, 409]:
        # The key literals were elided in the source; restored from the
        # UpCloud API error envelope.
        err_dict = res_json.get('error', {})
        raise UpCloudAPIError(error_code=err_dict.get('error_code'),
                              error_message=err_dict.get('error_message'))
    return res_json
Middleware that raises an exception when the HTTP status code is an error code.
15,927
def mean(self, axis=None, keepdims=False):
    return self._stat(axis, name='mean', keepdims=keepdims)
Return the mean of the array over the given axis.

Parameters
----------
axis : tuple or int, optional, default=None
    Axis to compute statistic over, if None will compute over all axes

keepdims : boolean, optional, default=False
    Keep axis remaining after operation with size 1.
15,928
def parse_vote_data(self, vote_data):
    # The key and message literals were elided in the source; parltrack
    # vote dumps reference a dossier via its 'epref' field, so that key is
    # used as a stand-in.
    if 'epref' not in vote_data.keys():
        logger.debug('Vote data without epref: %s', vote_data['title'])
        return
    dossier_pk = self.get_dossier(vote_data['epref'])
    if not dossier_pk:
        logger.debug('Cannot find dossier for %s', vote_data['epref'])
        return
    return self.parse_proposal_data(
        proposal_data=vote_data,
        dossier_pk=dossier_pk
    )
Parse data from parltrack votes db dumps (1 proposal)
15,929
def parse_buffer(buffer, mode="exec", flags=[], version=None, engine=None):
    if version is None:
        version = sys.version_info[0:2]
    if engine is None:
        engine = pythonparser_diagnostic.Engine()
    lexer = pythonparser_lexer.Lexer(buffer, version, engine)
    if mode in ("single", "eval"):
        lexer.interactive = True
    parser = pythonparser_parser.Parser(lexer, version, engine)
    parser.add_flags(flags)
    if mode == "exec":
        return parser.file_input(), lexer.comments
    elif mode == "single":
        return parser.single_input(), lexer.comments
    elif mode == "eval":
        return parser.eval_input(), lexer.comments
Like :meth:`parse`, but accepts a :class:`source.Buffer` instead of
source and filename, and returns comments as well.

:see: :meth:`parse`
:return: (:class:`ast.AST`, list of :class:`source.Comment`)
    Abstract syntax tree and comments
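A hypothetical call, assuming the aliased pythonparser modules used inside the function are importable:

    from pythonparser import source

    buf = source.Buffer("x = 1  # answer\n", "<input>")
    tree, comments = parse_buffer(buf, mode="exec")
    print(type(tree).__name__, len(comments))  # AST root plus one comment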
15,930
def restore(file_name, jail=None, chroot=None, root=None):
    # The command/key literals were elided in the source; the values below
    # follow salt's pkgng module conventions ('pkg backup -r') and are
    # stand-ins.
    return __salt__['cmd.run'](
        _pkg(jail, chroot, root) + ['backup', '-r', file_name],
        output_loglevel='trace',
        python_shell=False
    )
Reads archive created by pkg backup -d and recreates the database.

CLI Example:

.. code-block:: bash

    salt '*' pkg.restore /tmp/pkg

jail
    Restore database to the specified jail. Note that this will run the
    command within the jail, and so the path to the file from which the
    pkg database will be restored is relative to the root of the jail.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.restore /tmp/pkg jail=<jail name or id>

chroot
    Restore database to the specified chroot (ignored if ``jail`` is
    specified). Note that this will run the command within the chroot,
    and so the path to the file from which the pkg database will be
    restored is relative to the root of the chroot.

root
    Restore database to the specified root (ignored if ``jail`` is
    specified). Note that this will run the command within the root, and
    so the path to the file from which the pkg database will be restored
    is relative to the root of the root.

    CLI Example:

    .. code-block:: bash

        salt '*' pkg.restore /tmp/pkg chroot=/path/to/chroot
15,931
def contour(self, win, ngr=20, layers=0, levels=20, layout=True, labels=True,
            decimals=0, color=None, newfig=True, figsize=None, legend=True):
    x1, x2, y1, y2 = win
    if np.isscalar(ngr):
        nx = ny = ngr
    else:
        nx, ny = ngr
    layers = np.atleast_1d(layers)
    xg = np.linspace(x1, x2, nx)
    yg = np.linspace(y1, y2, ny)
    h = self.headgrid(xg, yg, layers)
    if newfig:
        plt.figure(figsize=figsize)
    # Style literals below were elided in the source; standard matplotlib
    # values are used as stand-ins.
    if color is None:
        c = plt.rcParams['axes.prop_cycle'].by_key()['color']
    elif type(color) is str:
        c = len(layers) * [color]
    elif type(color) is list:
        c = color
    if len(c) < len(layers):
        n = int(np.ceil(self.aq.naq / len(c)))  # int() added: list repetition needs an integer
        c = n * c
    cscollectionlist = []
    for i in range(len(layers)):
        cs = plt.contour(xg, yg, h[i], levels, colors=c[i])
        cscollectionlist.append(cs.collections[0])
        if labels:
            fmt = '%1.' + str(decimals) + 'f'
            plt.clabel(cs, fmt=fmt)
    if type(legend) is list:
        plt.legend(cscollectionlist, legend)
    elif legend:
        legendlist = ['layer ' + str(i) for i in layers]
        plt.legend(cscollectionlist, legendlist)
    plt.axis('scaled')
    if layout:
        self.plot(win=[x1, x2, y1, y2], newfig=False)
Contour plot

Parameters
----------
win : list or tuple
    [x1, x2, y1, y2]
ngr : scalar, tuple or list
    if scalar: number of grid points in x and y direction
    if tuple or list: nx, ny, number of grid points in x and y direction
layers : integer, list or array
    layers for which grid is returned
levels : integer or array (default 20)
    levels that are contoured
layout : boolean (default True)
    plot layout of elements
labels : boolean (default True)
    print labels along contours
decimals : integer (default 0)
    number of decimals of labels along contours
color : str or list of strings
    color of contour lines
newfig : boolean (default True)
    create new figure
figsize : tuple of 2 values (default is mpl default)
    size of figure
legend : list or boolean (default True)
    add legend to figure
    if list of strings: use strings as names in legend
15,932
def get_value(file, element):
    try:
        root = ET.parse(file)
        element = root.find(element)
        return element.text
    except AttributeError:
        log.error("Unable to find element matching %s", element)
        return False
Returns the value of the matched xpath element

CLI Example:

.. code-block:: bash

    salt '*' xml.get_value /tmp/test.xml ".//element"
15,933
def prepare_attrib_mapping(self, primitive):
    buffer_info = []
    for name, accessor in primitive.attributes.items():
        info = VBOInfo(*accessor.info())
        info.attributes.append((name, info.components))
        if buffer_info and buffer_info[-1].buffer_view == info.buffer_view:
            if buffer_info[-1].interleaves(info):
                buffer_info[-1].merge(info)
                continue
        buffer_info.append(info)
    return buffer_info
Pre-parse buffer mappings for each VBO to detect interleaved data for a primitive
15,934
def optimize(self, optimizer=None, start=None, messages=False, max_iters=1000,
             ipython_notebook=True, clear_after_finish=False, **kwargs):
    if self.is_fixed or self.size == 0:
        print('nothing to optimize')  # message elided in the source; stand-in
        return
    if not self.update_model():
        print("updates were off, setting updates on again")
        self.update_model(True)
    if start is None:
        start = self.optimizer_array
    if optimizer is None:
        optimizer = self.preferred_optimizer
    if isinstance(optimizer, optimization.Optimizer):
        opt = optimizer
        opt.model = self
    else:
        optimizer = optimization.get_optimizer(optimizer)
        opt = optimizer(max_iters=max_iters, **kwargs)
    with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages,
                             ipython_notebook=ipython_notebook,
                             clear_after_finish=clear_after_finish) as vo:
        opt.run(start, f_fp=self._objective_grads, f=self._objective,
                fp=self._grads)
    self.optimizer_array = opt.x_opt
    self.optimization_runs.append(opt)
    return opt
Optimize the model using self.log_likelihood and
self.log_likelihood_gradient, as well as self.priors.

kwargs are passed to the optimizer. They can be:

:param max_iters: maximum number of function evaluations
:type max_iters: int
:param messages: whether to display messages during optimisation
:type messages: bool
:param optimizer: which optimizer to use (defaults to self.preferred_optimizer)
:type optimizer: string

Valid optimizers are:
  - 'scg': scaled conjugate gradient method, recommended for stability.
    See also GPy.inference.optimization.scg
  - 'fmin_tnc': truncated Newton method (see scipy.optimize.fmin_tnc)
  - 'simplex': the Nelder-Mead simplex method (see scipy.optimize.fmin)
  - 'lbfgsb': the l-bfgs-b method (see scipy.optimize.fmin_l_bfgs_b)
  - 'lbfgs': the bfgs method (see scipy.optimize.fmin_bfgs)
  - 'sgd': stochastic gradient descent (see scipy.optimize.sgd). For experts only!
15,935
def _varian(self, varian):
    if varian == self.bentuk_tidak_baku:
        nama = "Bentuk tidak baku"
    elif varian == self.varian:
        nama = "Varian"
    else:
        return ''
    # The separator literals were elided in the source; ': ' and ', '
    # are stand-ins.
    return nama + ': ' + ', '.join(varian)
Returns the string representation for this entry's variants.
Usable for both "Varian" and "Bentuk tidak baku".

:param varian: List of nonstandard forms or variants
:type varian: list
:returns: String representation of the variants or nonstandard forms
:rtype: str
15,936
def as_encodable(self, index_name):
    # The JSON key literals were elided in the source; the keys below
    # follow the Couchbase FTS request format and are stand-ins.
    if self.facets:
        encoded_facets = {}
        for name, facet in self.facets.items():
            encoded_facets[name] = facet.encodable
        self._json_['facets'] = encoded_facets
    if self._ms:
        sv_val = {
            'level': 'at_plus',
            'vectors': {
                index_name: self._ms._to_fts_encodable()
            }
        }
        self._json_.setdefault('ctl', {})['consistency'] = sv_val
    if self.sort:
        if isinstance(self.sort, Sort):
            self._json_['sort'] = self.sort.as_encodable()
        else:
            self._json_['sort'] = self.sort
    return self._json_
:param index_name: The name of the index for the query
:return: A dict suitable for passing to `json.dumps()`
15,937
def draw_rect(grid, attr, dc, rect):
    dc.SetBrush(wx.Brush(wx.Colour(15, 255, 127), wx.SOLID))
    dc.SetPen(wx.Pen(wx.BLUE, 1, wx.SOLID))
    dc.DrawRectangleRect(rect)
Draws a rect
15,938
def tica(data=None, lag=10, dim=-1, var_cutoff=0.95, kinetic_map=True,
         commute_map=False, weights='empirical', stride=1, remove_mean=True,
         skip=0, reversible=True, ncov_max=float('inf'), chunksize=None,
         **kwargs):
    from pyemma.coordinates.transform.tica import TICA
    from pyemma.coordinates.estimation.koopman import _KoopmanEstimator
    import types
    from pyemma.util.reflection import get_default_args
    # Several string literals were elided in the source; 'empirical',
    # 'koopman', 'chunksize' and float('inf') are restored from the
    # docstring's documented defaults.
    cs = _check_old_chunksize_arg(chunksize,
                                  get_default_args(tica)['chunksize'], **kwargs)
    if isinstance(weights, _string_types):
        if weights == "koopman":
            if data is None:
                raise ValueError("Data must be supplied for reweighting='koopman'")
            if not reversible:
                raise ValueError("Koopman re-weighting is designed for "
                                 "reversible processes, set reversible=True")
            koop = _KoopmanEstimator(lag=lag, stride=stride, skip=skip,
                                     ncov_max=ncov_max)
            koop.estimate(data, chunksize=cs)
            weights = koop.weights
        elif weights == "empirical":
            weights = None
        else:
            raise ValueError("reweighting must be either 'empirical', "
                             "'koopman' or an object with a weights(data) "
                             "method.")
    elif hasattr(weights, 'weights') and type(getattr(weights, 'weights')) == types.MethodType:
        weights = weights
    elif isinstance(weights, (list, tuple)) and all(isinstance(w, _np.ndarray) for w in weights):
        if data is not None and len(data) != len(weights):
            raise ValueError("len of weights({}) must match len of data({})."
                             .format(len(weights), len(data)))
    else:
        raise ValueError("reweighting must be either 'empirical', 'koopman' "
                         "or an object with a weights(data) method.")
    if not remove_mean:
        import warnings
        # The exact deprecation text was elided in the source; stand-in.
        user_msg = 'remove_mean option is deprecated and should not be turned off.'
        warnings.warn(user_msg, category=_PyEMMA_DeprecationWarning)
    res = TICA(lag, dim=dim, var_cutoff=var_cutoff, kinetic_map=kinetic_map,
               commute_map=commute_map, skip=skip, stride=stride,
               weights=weights, reversible=reversible, ncov_max=ncov_max)
    if data is not None:
        res.estimate(data, chunksize=cs)
    else:
        res.chunksize = cs
    return res
r""" Time-lagged independent component analysis (TICA). TICA is a linear transformation method. In contrast to PCA, which finds coordinates of maximal variance, TICA finds coordinates of maximal autocorrelation at the given lag time. Therefore, TICA is useful in order to find the *slow* components in a dataset and thus an excellent choice to transform molecular dynamics data before clustering data for the construction of a Markov model. When the input data is the result of a Markov process (such as thermostatted molecular dynamics), TICA finds in fact an approximation to the eigenfunctions and eigenvalues of the underlying Markov operator [1]_. It estimates a TICA transformation from *data*. When input data is given as an argument, the estimation will be carried out straight away, and the resulting object can be used to obtain eigenvalues, eigenvectors or project input data onto the slowest TICA components. If no data is given, this object is an empty estimator and can be put into a :func:`pipeline` in order to use TICA in the streaming mode. Parameters ---------- data : ndarray (T, d) or list of ndarray (T_i, d) or a reader created by source function array with the data, if available. When given, the TICA transformation is immediately computed and can be used to transform data. lag : int, optional, default = 10 the lag time, in multiples of the input time step dim : int, optional, default -1 the number of dimensions (independent components) to project onto. A call to the :func:`map <pyemma.coordinates.transform.TICA.map>` function reduces the d-dimensional input to only dim dimensions such that the data preserves the maximum possible autocorrelation amongst dim-dimensional linear projections. -1 means all numerically available dimensions will be used unless reduced by var_cutoff. Setting dim to a positive value is exclusive with var_cutoff. var_cutoff : float in the range [0,1], optional, default 0.95 Determines the number of output dimensions by including dimensions until their cumulative kinetic variance exceeds the fraction subspace_variance. var_cutoff=1.0 means all numerically available dimensions (see epsilon) will be used, unless set by dim. Setting var_cutoff smaller than 1.0 is exclusive with dim kinetic_map : bool, optional, default True Eigenvectors will be scaled by eigenvalues. As a result, Euclidean distances in the transformed data approximate kinetic distances [4]_. This is a good choice when the data is further processed by clustering. commute_map : bool, optional, default False Eigenvector_i will be scaled by sqrt(timescale_i / 2). As a result, Euclidean distances in the transformed data will approximate commute distances [5]_. stride : int, optional, default = 1 If set to 1, all input data will be used for estimation. Note that this could cause this calculation to be very slow for large data sets. Since molecular dynamics data is usually correlated at short timescales, it is often sufficient to estimate transformations at a longer stride. Note that the stride option in the get_output() function of the returned object is independent, so you can parametrize at a long stride, and still map all frames through the transformer. weights : optional, default="empirical" Re-weighting strategy to be used in order to compute equilibrium covariances from non-equilibrium data. * "empirical": no re-weighting * "koopman": use re-weighting procedure from [6]_ * weights: An object that allows to compute re-weighting factors. 
It must possess a method weights(X) that accepts a trajectory X (np.ndarray(T, n)) and returns a vector of re-weighting factors (np.ndarray(T,)). remove_mean: bool, optional, default True remove mean during covariance estimation. Should not be turned off. skip : int, default=0 skip the first initial n frames per trajectory. reversible: bool, default=True symmetrize correlation matrices C_0, C_{\tau}. ncov_max : int, default=infinity limit the memory usage of the algorithm from [7]_ to an amount that corresponds to ncov_max additional copies of each correlation matrix chunksize: int, default=None Number of data frames to process at once. Choose a higher value here, to optimize thread usage and gain processing speed. If None is passed, use the default value of the underlying reader/data source. Choose zero to disable chunking at all. Returns ------- tica : a :class:`TICA <pyemma.coordinates.transform.TICA>` transformation object Object for time-lagged independent component (TICA) analysis. it contains TICA eigenvalues and eigenvectors, and the projection of input data to the dominant TICA Notes ----- Given a sequence of multivariate data :math:`X_t`, it computes the mean-free covariance and time-lagged covariance matrix: .. math:: C_0 &= (X_t - \mu)^T \mathrm{diag}(w) (X_t - \mu) \\ C_{\tau} &= (X_t - \mu)^T \mathrm{diag}(w) (X_t + \tau - \mu) where w is a vector of weights for each time step. By default, these weights are all equal to one, but different weights are possible, like the re-weighting to equilibrium described in [6]_. Subsequently, the eigenvalue problem .. math:: C_{\tau} r_i = C_0 \lambda_i r_i, is solved,where :math:`r_i` are the independent components and :math:`\lambda_i` are their respective normalized time-autocorrelations. The eigenvalues are related to the relaxation timescale by .. math:: t_i = -\frac{\tau}{\ln |\lambda_i|}. When used as a dimension reduction method, the input data is projected onto the dominant independent components. TICA was originally introduced for signal processing in [2]_. It was introduced to molecular dynamics and as a method for the construction of Markov models in [1]_ and [3]_. It was shown in [1]_ that when applied to molecular dynamics data, TICA is an approximation to the eigenvalues and eigenvectors of the true underlying dynamics. Examples -------- Invoke TICA transformation with a given lag time and output dimension: >>> import numpy as np >>> from pyemma.coordinates import tica >>> data = np.random.random((100,3)) >>> projected_data = tica(data, lag=2, dim=1).get_output()[0] For a brief explaination why TICA outperforms PCA to extract a good reaction coordinate have a look `here <http://docs.markovmodel.org/lecture_tica.html#Example:-TICA-versus-PCA-in-a-stretched-double-well-potential>`_. See also -------- :class:`TICA <pyemma.coordinates.transform.TICA>` : tica object :func:`pca <pyemma.coordinates.pca>` : for principal component analysis .. autoclass:: pyemma.coordinates.transform.tica.TICA :members: :undoc-members: .. rubric:: Methods .. autoautosummary:: pyemma.coordinates.transform.tica.TICA :methods: .. rubric:: Attributes .. autoautosummary:: pyemma.coordinates.transform.tica.TICA :attributes: References ---------- .. [1] Perez-Hernandez G, F Paul, T Giorgino, G De Fabritiis and F Noe. 2013. Identification of slow molecular order parameters for Markov model construction J. Chem. Phys. 139, 015102. doi:10.1063/1.4811489 .. [2] L. Molgedey and H. G. Schuster. 1994. 
Separation of a mixture of independent signals using time delayed correlations Phys. Rev. Lett. 72, 3634. .. [3] Schwantes C, V S Pande. 2013. Improvements in Markov State Model Construction Reveal Many Non-Native Interactions in the Folding of NTL9 J. Chem. Theory. Comput. 9, 2000-2009. doi:10.1021/ct300878a .. [4] Noe, F. and Clementi, C. 2015. Kinetic distance and kinetic maps from molecular dynamics simulation. J. Chem. Theory. Comput. doi:10.1021/acs.jctc.5b00553 .. [5] Noe, F., Banisch, R., Clementi, C. 2016. Commute maps: separating slowly-mixing molecular configurations for kinetic modeling. J. Chem. Theory. Comput. doi:10.1021/acs.jctc.6b00762 .. [6] Wu, H., Nueske, F., Paul, F., Klus, S., Koltai, P., and Noe, F. 2016. Bias reduced variational approximation of molecular kinetics from short off-equilibrium simulations. J. Chem. Phys. (submitted), https://arxiv.org/abs/1610.06773. .. [7] Chan, T. F., Golub G. H., LeVeque R. J. 1979. Updating formulae and pairwiese algorithms for computing sample variances. Technical Report STAN-CS-79-773, Department of Computer Science, Stanford University.
15,939
def getOverlayTransformAbsolute(self, ulOverlayHandle):
    fn = self.function_table.getOverlayTransformAbsolute
    peTrackingOrigin = ETrackingUniverseOrigin()
    pmatTrackingOriginToOverlayTransform = HmdMatrix34_t()
    result = fn(ulOverlayHandle, byref(peTrackingOrigin),
                byref(pmatTrackingOriginToOverlayTransform))
    return result, peTrackingOrigin, pmatTrackingOriginToOverlayTransform
Gets the transform if it is absolute. Returns an error if the transform is some other type.
15,940
def series_resistors(target, pore_area='pore.area',
                     throat_area='throat.area',
                     pore_conductivity='pore.electrical_conductivity',
                     throat_conductivity='throat.electrical_conductivity',
                     conduit_lengths='throat.conduit_lengths',
                     conduit_shape_factors='throat.poisson_shape_factors'):
    # The default dictionary keys and the transport_type literal were
    # elided in the source; the values below follow OpenPNM naming
    # conventions and are stand-ins.
    return generic_conductance(target=target,
                               transport_type='electrical_conductance',
                               pore_area=pore_area,
                               throat_area=throat_area,
                               pore_diffusivity=pore_conductivity,
                               throat_diffusivity=throat_conductivity,
                               conduit_lengths=conduit_lengths,
                               conduit_shape_factors=conduit_shape_factors)
r""" Calculate the electrical conductance of conduits in network, where a conduit is ( 1/2 pore - full throat - 1/2 pore ). See the notes section. Parameters ---------- target : OpenPNM Object The object which this model is associated with. This controls the length of the calculated array, and also provides access to other necessary properties. pore_thermal_conductivity : string Dictionary key of the pore thermal conductivity values throat_thermal_conductivity : string Dictionary key of the throat thermal conductivity values pore_area : string Dictionary key of the pore area values throat_area : string Dictionary key of the throat area values conduit_shape_factors : string Dictionary key of the conduit DIFFUSION shape factor values Returns ------- g : ndarray Array containing electrical conductance values for conduits in the geometry attached to the given physics object. Notes ----- (1) This function requires that all the necessary phase properties already be calculated. (2) This function calculates the specified property for the *entire* network then extracts the values for the appropriate throats at the end. (3) This function assumes cylindrical throats with constant cross-section area. Corrections for different shapes and variable cross-section area can be imposed by passing the proper flow_shape_factor argument.
15,941
def convert_row(self, keyed_row, schema, fallbacks):
    for key, value in list(keyed_row.items()):
        field = schema.get_field(key)
        if not field:
            del keyed_row[key]
            continue  # nothing to cast for columns absent from the schema
        if key in fallbacks:
            value = _uncast_value(value, field=field)
        else:
            value = field.cast_value(value)
        keyed_row[key] = value
    return keyed_row
Convert row to SQL
15,942
def create_class(self):
    # The language-name and template-key literals were elided in the
    # source; '...' marks those elisions.
    if self.target_language in ['...', '...']:
        n_indents = 1 if self.target_language == '...' else 0
        class_head_temp = self.temp('{}.class'.format(self.prefix),
                                    n_indents=n_indents, skipping=True)
        self.class_head = class_head_temp.format(**self.__dict__)
    return self.temp('...').format(**self.__dict__)
Build the estimator class.

Returns
-------
:return : string
    The built class as string.
15,943
def part(self, target, reason=None):
    if reason:
        target += ' ' + reason
    self.send_line('PART %s' % target)
Leave a channel.
15,944
def capture(self, pattern=None, negate=False, workers=None,
            negate_workers=False, params=None, success=False, error=True,
            stats=False):
    # The fallback pattern and HasField/print literals were elided in the
    # source; '.' (match anything) and the protobuf field names are
    # stand-ins.
    request = clearly_pb2.CaptureRequest(
        tasks_capture=clearly_pb2.PatternFilter(pattern=pattern or '.',
                                                negate=negate),
        workers_capture=clearly_pb2.PatternFilter(pattern=workers or '.',
                                                  negate=negate_workers),
    )
    try:
        for realtime in self._stub.capture_realtime(request):
            if realtime.HasField('task'):
                ClearlyClient._display_task(realtime.task, params, success,
                                            error)
            elif realtime.HasField('worker'):
                ClearlyClient._display_worker(realtime.worker, stats)
            else:
                print('unknown event:', realtime)
                break
    except KeyboardInterrupt:
        pass
Starts capturing selected events in real-time.

You can filter exactly what you want to see, as the Clearly Server
handles all tasks and workers updates being sent to celery. Several
clients can see different sets of events at the same time.

This runs in the foreground, so you can see in real-time exactly what
your clients and celery workers are doing. Press CTRL+C at any time to
stop it.

Args:
    Filter args:
    pattern (Optional[str]): a pattern to filter tasks to capture.
        ex.: '^dispatch|^email' to filter names starting with that
        or 'dispatch.*123456' to filter that exact name and number
        or even '123456' to filter that exact number anywhere.
    negate (bool): if True, finds tasks that do not match criteria.
    workers (Optional[str]): a pattern to filter workers to capture.
        ex.: 'service|priority' to filter names containing that
    negate_workers (bool): if True, finds workers that do not match criteria.

    Display args:
    params (Optional[bool]): if True shows args and kwargs in the first
        and last seen states, if False never shows, and if None follows
        the success and error arguments. default is None
    success (bool): if True shows successful tasks' results.
        default is False
    error (bool): if True shows failed and retried tasks' tracebacks.
        default is True, as you're monitoring to find errors, right?
    stats (bool): if True shows complete workers' stats.
        default is False
15,945
def gcmt_to_simple_array(self, centroid_location=True):
    # The attribute-key literals below ('strike', 'dip', 'rake', 'plunge',
    # 'azimuth', 'eigenvalue') were elided in the source; they are inferred
    # from the column order given in the docstring.
    catalogue = np.zeros([self.get_number_tensors(), 29], dtype=float)
    for iloc, tensor in enumerate(self.gcmts):
        catalogue[iloc, 0] = iloc
        if centroid_location:
            catalogue[iloc, 1] = float(tensor.centroid.date.year)
            catalogue[iloc, 2] = float(tensor.centroid.date.month)
            catalogue[iloc, 3] = float(tensor.centroid.date.day)
            catalogue[iloc, 4] = float(tensor.centroid.time.hour)
            catalogue[iloc, 5] = float(tensor.centroid.time.minute)
            catalogue[iloc, 6] = np.round(
                np.float(tensor.centroid.time.second) +
                np.float(tensor.centroid.time.microsecond) / 1000000., 2)
            catalogue[iloc, 7] = tensor.centroid.longitude
            catalogue[iloc, 8] = tensor.centroid.latitude
            catalogue[iloc, 9] = tensor.centroid.depth
        else:
            catalogue[iloc, 1] = float(tensor.hypocentre.date.year)
            catalogue[iloc, 2] = float(tensor.hypocentre.date.month)
            catalogue[iloc, 3] = float(tensor.hypocentre.date.day)
            catalogue[iloc, 4] = float(tensor.hypocentre.time.hour)
            catalogue[iloc, 5] = float(tensor.hypocentre.time.minute)
            catalogue[iloc, 6] = np.round(
                np.float(tensor.centroid.time.second) +
                np.float(tensor.centroid.time.microsecond) / 1000000., 2)
            catalogue[iloc, 7] = tensor.hypocentre.longitude
            catalogue[iloc, 8] = tensor.hypocentre.latitude
            catalogue[iloc, 9] = tensor.hypocentre.depth
        catalogue[iloc, 10] = tensor.magnitude
        catalogue[iloc, 11] = tensor.moment
        catalogue[iloc, 12] = tensor.f_clvd
        catalogue[iloc, 13] = tensor.e_rel
        catalogue[iloc, 14] = tensor.nodal_planes.nodal_plane_1['strike']
        catalogue[iloc, 15] = tensor.nodal_planes.nodal_plane_1['dip']
        catalogue[iloc, 16] = tensor.nodal_planes.nodal_plane_1['rake']
        catalogue[iloc, 17] = tensor.nodal_planes.nodal_plane_2['strike']
        catalogue[iloc, 18] = tensor.nodal_planes.nodal_plane_2['dip']
        catalogue[iloc, 19] = tensor.nodal_planes.nodal_plane_2['rake']
        catalogue[iloc, 20] = tensor.principal_axes.b_axis['plunge']
        catalogue[iloc, 21] = tensor.principal_axes.b_axis['azimuth']
        catalogue[iloc, 22] = tensor.principal_axes.b_axis['eigenvalue']
        catalogue[iloc, 23] = tensor.principal_axes.p_axis['plunge']
        catalogue[iloc, 24] = tensor.principal_axes.p_axis['azimuth']
        catalogue[iloc, 25] = tensor.principal_axes.p_axis['eigenvalue']
        catalogue[iloc, 26] = tensor.principal_axes.t_axis['plunge']
        catalogue[iloc, 27] = tensor.principal_axes.t_axis['azimuth']
        catalogue[iloc, 28] = tensor.principal_axes.t_axis['eigenvalue']
    return catalogue
Converts the GCMT catalogue to a simple array of [ID, year, month, day, hour, minute, second, long., lat., depth, Mw, strike1, dip1, rake1, strike2, dip2, rake2, b-plunge, b-azimuth, b-eigenvalue, p-plunge, p-azimuth, p-eigenvalue, t-plunge, t-azimuth, t-eigenvalue, moment, f_clvd, erel]
15,946
def get_one_file_in(dirname):
    files = os.listdir(dirname)
    # The error-message literals were elided in the source; stand-ins.
    if len(files) > 1:
        raise Failure('Expected one file in %s, found: %s' %
                      (dirname, ', '.join(sorted(files))))
    elif not files:
        raise Failure('No files found in %s' % dirname)
    return os.path.join(dirname, files[0])
Return the pathname of the one file in a directory.

Raises if the directory has no files or more than one file.
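A quick usage sketch (the `Failure` exception class is assumed to be defined; it is not triggered here):

    import os
    import tempfile

    d = tempfile.mkdtemp()
    open(os.path.join(d, "only.txt"), "w").close()
    print(get_one_file_in(d))   # .../only.txt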
15,947
def generate_covalent_bond_graph(covalent_bonds):
    bond_graph = networkx.Graph()
    for inter in covalent_bonds:
        bond_graph.add_edge(inter.a, inter.b)
    return bond_graph
Generates a graph of the covalent bond network described by the
interactions.

Parameters
----------
covalent_bonds: [CovalentBond]
    List of `CovalentBond`.

Returns
-------
bond_graph: networkx.Graph
    A graph of the covalent bond network.
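A small usage sketch; a namedtuple stands in for the `CovalentBond` objects (anything with `.a` and `.b` works):

    import networkx
    from collections import namedtuple

    CovalentBond = namedtuple("CovalentBond", ["a", "b"])  # minimal stand-in
    bonds = [CovalentBond("N", "CA"), CovalentBond("CA", "C")]
    g = generate_covalent_bond_graph(bonds)
    print(g.number_of_nodes(), g.number_of_edges())  # 3 2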
15,948
def describe(self, *cols):
    if len(cols) == 1 and isinstance(cols[0], list):
        cols = cols[0]
    jdf = self._jdf.describe(self._jseq(cols))
    return DataFrame(jdf, self.sql_ctx)
Computes basic statistics for numeric and string columns.

This includes count, mean, stddev, min, and max. If no columns are given,
this function computes statistics for all numerical or string columns.

.. note:: This function is meant for exploratory data analysis, as we make
    no guarantee about the backward compatibility of the schema of the
    resulting DataFrame.

>>> df.describe(['age']).show()
+-------+------------------+
|summary|               age|
+-------+------------------+
|  count|                 2|
|   mean|               3.5|
| stddev|2.1213203435596424|
|    min|                 2|
|    max|                 5|
+-------+------------------+
>>> df.describe().show()
+-------+------------------+-----+
|summary|               age| name|
+-------+------------------+-----+
|  count|                 2|    2|
|   mean|               3.5| null|
| stddev|2.1213203435596424| null|
|    min|                 2|Alice|
|    max|                 5|  Bob|
+-------+------------------+-----+

Use summary for expanded statistics and control over which statistics to
compute.
15,949
def get_reconciler(config, metrics, rrset_channel, changes_channel, **kw):
    builder = reconciler.GDNSReconcilerBuilder(
        config, metrics, rrset_channel, changes_channel, **kw)
    return builder.build_reconciler()
Get a GDNSReconciler client.

A factory function that validates configuration, creates an auth
and :class:`GDNSClient` instance, and returns a GDNSReconciler
provider.

Args:
    config (dict): Google Cloud Pub/Sub-related configuration.
    metrics (obj): :interface:`IMetricRelay` implementation.
    rrset_channel (asyncio.Queue): Queue from which to consume record
        set messages to validate.
    changes_channel (asyncio.Queue): Queue to publish message to make
        corrections to Cloud DNS.
    kw (dict): Additional keyword arguments to pass to the Reconciler.
Returns:
    A :class:`GDNSReconciler` instance.
15,950
def tensor_info_proto_maps_match(map_a, map_b):
    iter_a = sorted(parse_tensor_info_map(map_a).items())
    iter_b = sorted(parse_tensor_info_map(map_b).items())
    if len(iter_a) != len(iter_b):
        return False  # Mismatch count.
    for info_a, info_b in zip(iter_a, iter_b):
        if info_a[0] != info_b[0]:
            return False  # Mismatch keys.
        if _is_sparse(info_a[1]) != _is_sparse(info_b[1]):
            return False
        if info_a[1].dtype != info_b[1].dtype:
            return False
        if not _shape_match(info_a[1].get_shape(), info_b[1].get_shape()):
            return False
    return True
Whether two signature inputs/outputs match in dtype, shape and sparsity.

Args:
    map_a: A proto map<string,TensorInfo>.
    map_b: A proto map<string,TensorInfo>.

Returns:
    A boolean whether `map_a` and `map_b` tensors have the same dtype,
    shape and sparsity.
15,951
def _get_headers(environ):
    for key, value in environ.items():
        key = str(key)
        if key.startswith("HTTP_") and key not in (
            "HTTP_CONTENT_TYPE",
            "HTTP_CONTENT_LENGTH",
        ):
            yield key[5:].replace("_", "-").title(), value
        elif key in ("CONTENT_TYPE", "CONTENT_LENGTH"):
            yield key.replace("_", "-").title(), value
Returns only proper HTTP headers.
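A quick demonstration with a minimal WSGI-style environ dict (the values are illustrative):

    environ = {
        "HTTP_USER_AGENT": "curl/8.0",
        "HTTP_CONTENT_TYPE": "ignored",   # excluded: the real header comes below
        "CONTENT_TYPE": "application/json",
        "wsgi.url_scheme": "https",       # non-header keys are skipped
    }
    print(dict(_get_headers(environ)))
    # {'User-Agent': 'curl/8.0', 'Content-Type': 'application/json'}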
15,952
def get_cameras_properties(self):
    resource = "cameras"
    resource_event = self.publish_and_get_event(resource)
    if resource_event:
        self._last_refresh = int(time.time())
        # The key literal was elided in the source; 'properties' is
        # inferred from the attribute being assigned.
        self._camera_properties = resource_event.get('properties')
Return camera properties.
15,953
def datalog(self, parameter, run, maxrun=None, det_id=''):
    # The det_id default and the Timer label were elided in the source;
    # '' and the label below are stand-ins.
    "Retrieve datalogs for given parameter, run(s) and detector"
    parameter = parameter.lower()
    if maxrun is None:
        maxrun = run
    with Timer('Database lookup'):
        return self._datalog(parameter, run, maxrun, det_id)
Retrieve datalogs for given parameter, run(s) and detector
15,954
def gain(abf):
    # The measurement-key and plot-style literals were elided in the
    # source; the values below are stand-ins.
    Ys = np.nan_to_num(swhlab.ap.getAvgBySweep(abf, 'freq'))
    Xs = abf.clampValues(abf.dataX[int(abf.protoSeqX[1] + .01)])
    swhlab.plot.new(abf, title="gain function", xlabel="command current (pA)",
                    ylabel="average inst. freq. (Hz)")
    pylab.plot(Xs, Ys, '.', ms=20, alpha=.5, color='b')
    pylab.axhline(0, alpha=.5, lw=2, color='k', ls="--")
    pylab.margins(.1, .1)
Easy way to plot a gain function.
15,955
def host_inventory_get(hostids, **kwargs):
    # The docstring fragment spliced into this cell is removed (it
    # duplicates the docstring below). The middle of the function was
    # elided in the source; it is reconstructed following this module's
    # standard _login/_query pattern, and the key literals are recovered
    # from the run of names that survived extraction
    # ('host.get', 'hostids', 'url', 'auth', 'result', 'inventory').
    conn_args = _login(**kwargs)
    ret = {}
    try:
        if conn_args:
            method = 'host.get'
            params = {"selectInventory": "extend"}
            if hostids:
                params.setdefault('hostids', hostids)
            ret = _query(method, params, conn_args['url'], conn_args['auth'])
            return (ret['result'][0]['inventory']
                    if ret['result'][0]['inventory'] else False)
        else:
            raise KeyError
    except KeyError:
        return ret
Retrieve host inventory according to the given parameters.

See: https://www.zabbix.com/documentation/2.4/manual/api/reference/host/object#host_inventory

.. versionadded:: 2019.2.0

:param hostids: Return only host interfaces used by the given hosts.
:param _connection_user: Optional - zabbix user (can also be set in opts or
    pillar, see module's docstring)
:param _connection_password: Optional - zabbix password (can also be set in
    opts or pillar, see module's docstring)
:param _connection_url: Optional - url of zabbix frontend (can also be set
    in opts, pillar, see module's docstring)

:return: Array with host interfaces details, False if no convenient host
    interfaces found or on failure.

CLI Example:

.. code-block:: bash

    salt '*' zabbix.host_inventory_get 101054
15,956
async def delete(self, _id=None):
    if not _id:
        return {"error": 400, "reason": "Missed required fields"}
    document = await self.collection.find_one({"id": _id})
    if not document:
        return {"error": 404, "reason": "Not found"}
    # The original awaited .deleted_count on the unresolved delete_one()
    # future; await the operation first, then read the count.
    result = await self.collection.delete_one({"id": _id})
    return result.deleted_count
Delete entry from database table. Accepts id.

delete(id) => 1 (if exists)
delete(id) => {"error": 404, "reason": "Not found"} (if does not exist)
delete()   => {"error": 400, "reason": "Missed required fields"}
15,957
def get_raw_mempool(self, id=None, endpoint=None):
    return self._call_endpoint(GET_RAW_MEMPOOL, id=id, endpoint=endpoint)
Returns the tx that are in the memorypool of the endpoint

Args:
    id: (int, optional) id to use for response tracking
    endpoint: (RPCEndpoint, optional) endpoint to specify to use

Returns:
    json object of the result or the error encountered in the RPC call
15,958
def mount(device, mountpoint, options=None, persist=False, filesystem="ext3"):
    # The command literals were elided in the source; 'mount' and '-o'
    # are restored from standard mount(8) usage.
    cmd_args = ['mount']
    if options is not None:
        cmd_args.extend(['-o', options])
    cmd_args.extend([device, mountpoint])
    try:
        subprocess.check_output(cmd_args)
    except subprocess.CalledProcessError as e:
        log('Error mounting {} at {}\n{}'.format(device, mountpoint, e.output))
        return False
    if persist:
        return fstab_add(device, mountpoint, filesystem, options=options)
    return True
Mount a filesystem at a particular mountpoint
15,959
def _watch_file(self, filepath, trigger_event=True):
    is_new = filepath not in self._watched_files
    if trigger_event:
        if is_new:
            self.trigger_created(filepath)
        else:
            self.trigger_modified(filepath)
    try:
        self._watched_files[filepath] = self._get_modified_time(filepath)
    except OSError:
        return  # the file may have been removed before we could stat it
Adds the file's modified time into its internal watchlist.
15,960
def get_or_none(cls, **filter_kwargs):
    try:
        video = cls.objects.get(**filter_kwargs)
    except cls.DoesNotExist:
        video = None
    return video
Returns a video or None.
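A hypothetical call, assuming this classmethod lives on a Django-style `Video` model; the filter field is illustrative:

    video = Video.get_or_none(edx_video_id="demo-123")  # illustrative filter
    if video is None:
        print("no such video")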
15,961
def PrintIndented(self, file, ident, code):
    # The format string was elided in the source; '%s%s' is inferred
    # from the (ident, entry) argument pair. Python 2 print syntax.
    for entry in code:
        print >>file, '%s%s' % (ident, entry)
Takes an array, adds indentation to each entry and prints it.
15,962
def move(self, direction, absolute=False, pad_name=None, refresh=True):
    # The pad-name literals were elided in the source; 'streams' is
    # inferred from the no_streams/redraw_stream_footer references, while
    # the scroll-only pad names are unrecoverable ('...' marks the
    # elision).
    cursor_line = ['streams']
    scroll_only = ['...']
    if not pad_name:
        pad_name = self.current_pad
    pad = self.pads[pad_name]
    if pad_name == 'streams' and self.no_streams:
        return
    (row, col) = pad.getyx()
    new_row = row
    offset = self.offsets[pad_name]
    new_offset = offset
    if pad_name in scroll_only:
        if absolute:
            if direction > 0:
                new_offset = pad.getmaxyx()[0] - self.pad_h + 1
            else:
                new_offset = 0
        else:
            if direction > 0:
                new_offset = min(pad.getmaxyx()[0] - self.pad_h + 1,
                                 offset + self.pad_h)
            elif offset > 0:
                new_offset = max(0, offset - self.pad_h)
    else:
        if absolute and direction >= 0 and direction < pad.getmaxyx()[0]:
            if direction < offset:
                new_offset = direction
            elif direction > offset + self.pad_h - 2:
                new_offset = direction - self.pad_h + 2
            new_row = direction
        else:
            if direction == -1 and row > 0:
                if row == offset:
                    new_offset -= 1
                new_row = row - 1
            elif direction == 1 and row < len(self.filtered_streams) - 1:
                if row == offset + self.pad_h - 2:
                    new_offset += 1
                new_row = row + 1
    if pad_name in cursor_line:
        pad.move(row, 0)
        pad.chgat(curses.A_NORMAL)
    self.offsets[pad_name] = new_offset
    pad.move(new_row, 0)
    if pad_name in cursor_line:
        pad.chgat(curses.A_REVERSE)
    if pad_name == 'streams':
        self.redraw_stream_footer()
    if refresh:
        self.refresh_current_pad()
Scroll the current pad

direction : (int)
    move by one in the given direction: -1 is up, 1 is down. If absolute
    is True, go to position `direction`. Behaviour is affected by
    cursor_line and scroll_only below.
absolute : (bool)
15,963
def token_info(token, refresh=True, refresh_cb=None, session=None):
    session = session or HTTP_SESSION
    params = dict(access_token=token.access_token)
    resp = session.get(TOKEN_INFO_URL, params=params)
    if resp.status_code != 200:
        if refresh:
            token = refresh_token(token, session=session)
            if refresh_cb is not None:
                try:
                    refresh_cb(token)
                except Exception:
                    # The log message was elided in the source; stand-in.
                    LOGGER.exception('token refresh callback failed')
            info = token_info(token, refresh=False, session=session)
            info.update(refreshed=True)
            return info
        raise OAuthTokenRefreshRequiredError()
    info = __coerce_token_info(resp.json())
    info.update(token=token, refreshed=False)
    return nameddict(info)
:param OAuthToken token:
:param bool refresh: whether to attempt to refresh the OAuth token if it
    expired. default is `True`.
:param refresh_cb: If specified, a callable object which is given the new
    token in parameter if it has been refreshed.
:param requests.Session session: Optional `requests` session to use.
:return: token information. see
    https://developers.google.com/identity/protocols/OAuth2UserAgent#tokeninfo-validation

    - `scope`: this field is not a space-delimited set of scopes but a
      real Python `set`.
    - `token`: additional field that provides the `OAuthToken`
    - `refreshed`: boolean that will tell if the token has been refreshed
:rtype: nameddict
15,964
def p_unrelate_statement_2(self, p): p[0] = UnrelateNode(from_variable_name=p[2], to_variable_name=p[4], rel_id=p[6], phrase=p[8])
statement : UNRELATE instance_name FROM instance_name ACROSS rel_id DOT phrase
15,965
def get_config(config_path=CONFIG_PATH):
    parser = SafeConfigParser()
    parser.read(config_path)
    config_dir = os.path.dirname(config_path)

    immutable_key = False
    key_id = None
    blockchain_id = None
    hostname = socket.gethostname()
    wallet = None

    # Section name is assumed; option names are restored from the keys of
    # the returned config dict (the original string literals were stripped).
    sec = 'blockstack-file'
    if parser.has_section(sec):
        if parser.has_option(sec, 'immutable_key'):
            immutable_key = parser.get(sec, 'immutable_key')
            if immutable_key.lower() in ['1', 'yes', 'true']:
                immutable_key = True
            else:
                immutable_key = False
        if parser.has_option(sec, 'key_id'):
            key_id = parser.get(sec, 'key_id')
        if parser.has_option(sec, 'blockchain_id'):
            blockchain_id = parser.get(sec, 'blockchain_id')
        if parser.has_option(sec, 'hostname'):
            hostname = parser.get(sec, 'hostname')
        if parser.has_option(sec, 'wallet'):
            wallet = parser.get(sec, 'wallet')

    config = {
        'immutable_key': immutable_key,
        'key_id': key_id,
        'blockchain_id': blockchain_id,
        'hostname': hostname,
        'wallet': wallet
    }
    return config
Get the config
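For illustration, an INI file matching the options the parser reads; the section name mirrors the one assumed in the reconstructed code above, and all values are examples:

[blockstack-file]
immutable_key = false
key_id = my-signing-key
blockchain_id = alice.id
hostname = laptop
wallet = /path/to/wallet.json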
15,966
def draw(self, **kwargs):
    x = self.n_feature_subsets_
    means = self.cv_scores_.mean(axis=1)
    sigmas = self.cv_scores_.std(axis=1)

    # Plot one standard deviation above and below the mean
    self.ax.fill_between(x, means - sigmas, means + sigmas, alpha=0.25)
    self.ax.plot(x, means, 'o-')

    # Mark the selected number of features
    self.ax.axvline(
        self.n_features_, c='k', ls='--',
        label="n_features = {}\nscore = {:0.3f}".format(
            self.n_features_, self.cv_scores_.mean(axis=1).max()
        )
    )
    return self.ax
Renders the rfecv curve.
15,967
def reset(cwd,
          opts='',
          git_opts='',
          user=None,
          password=None,
          identity=None,
          ignore_retcode=False,
          output_encoding=None):
    # Body reconstructed following salt's git execution-module pattern;
    # the extracted cell had lost everything but the trailing 'stdout' key.
    cwd = _expand_path(cwd, user)
    command = ['git'] + _format_git_opts(git_opts)
    command.append('reset')
    command.extend(_format_opts(opts))
    return _git_run(command,
                    cwd=cwd,
                    user=user,
                    password=password,
                    identity=identity,
                    ignore_retcode=ignore_retcode,
                    output_encoding=output_encoding)['stdout']
Interface to `git-reset(1)`_, returns the stdout from the git command cwd The path to the git checkout opts Any additional options to add to the command line, in a single string .. note:: On the Salt CLI, if the opts are preceded with a dash, it is necessary to precede them with ``opts=`` (as in the CLI examples below) to avoid causing errors with Salt's own argument parsing. git_opts Any additional options to add to git command itself (not the ``reset`` subcommand), in a single string. This is useful for passing ``-c`` to run git with temporary changes to the git configuration. .. versionadded:: 2017.7.0 .. note:: This is only supported in git 1.7.2 and newer. user User under which to run the git command. By default, the command is run by the user under which the minion is running. password Windows only. Required when specifying ``user``. This parameter will be ignored on non-Windows platforms. .. versionadded:: 2016.3.4 identity Path to a private key to use for ssh URLs. Salt will not attempt to use passphrase-protected keys unless invoked from the minion using ``salt-call``, to prevent blocking waiting for user input. Key can also be specified as a SaltStack file server URL, eg. ``salt://location/identity_file``. .. note:: For greater security with passphraseless private keys, see the `sshd(8)`_ manpage for information on securing the keypair from the remote side in the ``authorized_keys`` file. .. _`sshd(8)`: http://www.man7.org/linux/man-pages/man8/sshd.8.html#AUTHORIZED_KEYS_FILE_FORMAT .. versionadded:: 2018.3.5,2019.2.1,Neon ignore_retcode : False If ``True``, do not log an error to the minion log if the git command returns a nonzero exit status. .. versionadded:: 2015.8.0 output_encoding Use this option to specify which encoding to use to decode the output from any git commands which are run. This should not be needed in most cases. .. note:: This should only be needed if the files in the repository were created with filenames using an encoding other than UTF-8 to handle Unicode characters. .. versionadded:: 2018.3.1 .. _`git-reset(1)`: http://git-scm.com/docs/git-reset CLI Examples: .. code-block:: bash # Soft reset to a specific commit ID salt myminion git.reset /path/to/repo ac3ee5c # Hard reset salt myminion git.reset /path/to/repo opts='--hard origin/master'
15,968
def _take_forced_measurement(self): self._bus.write_byte_data(self._i2c_add, 0xF4, self.ctrl_meas_reg) while self._bus.read_byte_data(self._i2c_add, 0xF3) & 0x08: sleep(0.005)
Take a forced measurement. In forced mode, the BME sensor goes back to sleep after each measurement and we need to set it to forced mode once at this point, so it will take the next measurement and then return to sleep again. In normal mode simply does new measurements periodically.
15,969
def _nan_minmax_object(func, fill_value, value, axis=None, **kwargs):
    valid_count = count(value, axis=axis)
    filled_value = fillna(value, fill_value)
    data = getattr(np, func)(filled_value, axis=axis, **kwargs)
    if not hasattr(data, 'dtype'):  # scalar case
        data = dtypes.fill_value(value.dtype) if valid_count == 0 else data
        return np.array(data, dtype=value.dtype)
    return where_method(data, valid_count != 0)
In-house nanmin and nanmax for object arrays
15,970
def BuildChecks(self, request): result = [] if request.HasField("start_time") or request.HasField("end_time"): def FilterTimestamp(file_stat, request=request): return file_stat.HasField("st_mtime") and ( file_stat.st_mtime < request.start_time or file_stat.st_mtime > request.end_time) result.append(FilterTimestamp) if request.HasField("min_file_size") or request.HasField("max_file_size"): def FilterSize(file_stat, request=request): return file_stat.HasField("st_size") and ( file_stat.st_size < request.min_file_size or file_stat.st_size > request.max_file_size) result.append(FilterSize) if request.HasField("perm_mode"): def FilterPerms(file_stat, request=request): return (file_stat.st_mode & request.perm_mask) != request.perm_mode result.append(FilterPerms) if request.HasField("uid"): def FilterUID(file_stat, request=request): return file_stat.st_uid != request.uid result.append(FilterUID) if request.HasField("gid"): def FilterGID(file_stat, request=request): return file_stat.st_gid != request.gid result.append(FilterGID) if request.HasField("path_regex"): regex = request.path_regex def FilterPath(file_stat, regex=regex): return not regex.Search(file_stat.pathspec.Basename()) result.append(FilterPath) if request.HasField("data_regex"): def FilterData(file_stat, **_): return not self.TestFileContent(file_stat) result.append(FilterData) return result
Parses request and returns a list of filter callables. Each callable will be called with the StatEntry and returns True if the entry should be suppressed. Args: request: A FindSpec that describes the search. Returns: a list of callables which return True if the file is to be suppressed.
15,971
def show_script_error(self, parent):
    if self.service.scriptRunner.error != '':
        dlg = Gtk.MessageDialog(type=Gtk.MessageType.INFO, buttons=Gtk.ButtonsType.OK,
                                message_format=self.service.scriptRunner.error)
        self.service.scriptRunner.error = ''
        self.notifier.set_icon(cm.ConfigManager.SETTINGS[cm.NOTIFICATION_ICON])
        self.notifier.errorItem.hide()
        self.notifier.update_visible_status()
    else:
        dlg = Gtk.MessageDialog(type=Gtk.MessageType.INFO, buttons=Gtk.ButtonsType.OK,
                                message_format=_("No error information available"))
    dlg.set_title(_("View script error"))
    dlg.set_transient_for(parent)
    dlg.run()
    dlg.destroy()
Show the last script error (if any)
15,972
def print_tree(self) -> str: if self.ast: return self.ast.print_tree(ast_obj=self.ast) else: return ""
Convert AST object to tree view of BEL AST Returns: printed tree of BEL AST
15,973
def __arguments(self, ttype, tvalue): if ttype == "identifier": test = get_command_instance(tvalue.decode("ascii"), self.__curcommand) self.__curcommand.check_next_arg("test", test) self.__expected = test.get_expected_first() self.__curcommand = test return self.__check_command_completion(testsemicolon=False) if ttype == "left_parenthesis": self.__set_expected("identifier") return True if ttype == "comma": self.__set_expected("identifier") return True if ttype == "right_parenthesis": self.__up() return True if self.__argument(ttype, tvalue): return self.__check_command_completion(testsemicolon=False) return False
Arguments parsing method Entry point for command arguments parsing. The parser must call this method for each parsed command (either a control, action or test). Syntax: *argument [ test / test-list ] :param ttype: current token type :param tvalue: current token value :return: False if an error is encountered, True otherwise
15,974
def exec_event_handler(self, event, transactional=False):
    callbacks = self._options.get('callbacks', {})
    handler = callbacks.get(event)
    if not handler:
        # Error message assumed; the original literal was stripped.
        raise Exception('Handler not found for event "%s".' % event)
    handler.start(transactional=transactional)
Execute the Async set to be run on event.
15,975
def _http_resp_rate_limited(response):
    parsed = parse.urlparse(response.request.url)
    duration = int(response.headers.get('Retry-After', 3))
    # Warning text assumed; the original literal was stripped.
    LOGGER.warning('Rate limited by %s, retrying in %s seconds',
                   parsed.netloc, duration)
    return asyncio.sleep(duration)
Extract the ``Retry-After`` header value if the request was rate limited and return a future to sleep for the specified duration. :param tornado.httpclient.HTTPResponse response: The response :rtype: tornado.concurrent.Future
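A sketch of how the returned future might be awaited from a coroutine; the 429 status check and the retry loop are illustrative, not part of the original module:

async def fetch_with_backoff(client, request):
    # Illustrative retry loop: on HTTP 429, honor Retry-After and try again.
    while True:
        response = await client.fetch(request, raise_error=False)
        if response.code != 429:
            return response
        await _http_resp_rate_limited(response)  # sleeps for Retry-After seconds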
15,976
def exit(self, status=0, message=None):
    self.exited = True
    if message is not None:
        self.mesgs.extend(message.split('\n'))
    raise s_exc.BadSyntax(mesg=message, prog=self.prog, status=status)
Argparse expects exit() to be a terminal function and not return. As such, this function must raise an exception which will be caught by Cmd.hasValidOpts.
15,977
def env_float(name, required=False, default=empty): value = get_env_value(name, required=required, default=default) if value is empty: raise ValueError( "`env_float` requires either a default value to be specified, or for " "the variable to be present in the environment" ) return float(value)
Pulls an environment variable out of the environment and casts it to a
float. If the name is not present in the environment and no default is
specified then a ``ValueError`` will be raised. Similarly, if the
environment value is not castable to a float, a ``ValueError`` will be
raised.

:param name: The name of the environment variable to be pulled
:type name: str
:param required: Whether the environment variable is required. If ``True``
    and the variable is not present, a ``KeyError`` is raised.
:type required: bool
:param default: The value to return if the environment variable is not
    present. (Providing a default alongside setting ``required=True`` will
    raise a ``ValueError``)
:type default: float
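A short usage sketch; the variable names are examples only:

import os

os.environ['TIMEOUT_SECONDS'] = '2.5'
assert env_float('TIMEOUT_SECONDS') == 2.5
assert env_float('MISSING_SETTING', default=1.0) == 1.0
# env_float('MISSING_SETTING') with no default raises ValueError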
15,978
def flatten(self) -> bk.BKTensor: N = self.qubit_nb R = self.rank return bk.reshape(self.tensor, [2**N]*R)
Return tensor with qubit indices flattened
15,979
def cli(env, is_open):
    ticket_mgr = SoftLayer.TicketManager(env.client)

    # Column headers and field names restored from the SoftLayer ticket API;
    # the original string literals were stripped.
    table = formatting.Table([
        'id', 'assigned_user', 'title', 'last_edited', 'status', 'updates', 'priority'
    ])

    tickets = ticket_mgr.list_tickets(open_status=is_open, closed_status=not is_open)
    for ticket in tickets:
        user = formatting.blank()
        if ticket.get('assignedUser'):
            user = "%s %s" % (ticket['assignedUser']['firstName'],
                              ticket['assignedUser']['lastName'])

        table.add_row([
            ticket['id'],
            user,
            click.wrap_text(ticket['title']),
            ticket['lastEditDate'],
            ticket['status']['name'],
            ticket.get('updateCount', 0),
            ticket.get('priority', 0)
        ])

    env.fout(table)
List tickets.
15,980
def decode(self):
    fmt, len_low, len_high, device_id, report_id, sent_timestamp, signature_flags, \
        origin_streamer, streamer_selector = unpack("<BBHLLLBBH", self.raw_report[:20])
    assert fmt == 1
    length = (len_high << 8) | len_low

    self.origin = device_id
    self.report_id = report_id
    self.sent_timestamp = sent_timestamp
    self.origin_streamer = origin_streamer
    self.streamer_selector = streamer_selector
    self.signature_flags = signature_flags

    assert len(self.raw_report) == length
    remaining = self.raw_report[20:]
    assert len(remaining) >= 24
    readings = remaining[:-24]
    footer = remaining[-24:]

    lowest_id, highest_id, signature = unpack("<LL16s", footer)
    signature = bytearray(signature)
    self.lowest_id = lowest_id
    self.highest_id = highest_id
    self.signature = signature

    signed_data = self.raw_report[:-16]
    signer = ChainedAuthProvider()
    if signature_flags == AuthProvider.NoKey:
        self.encrypted = False
    else:
        self.encrypted = True

    try:
        verification = signer.verify_report(device_id, signature_flags, signed_data,
                                            signature, report_id=report_id,
                                            sent_timestamp=sent_timestamp)
        self.verified = verification['verified']
    except NotFoundError:
        self.verified = False
        # The extracted cell had two consecutive except clauses, so part of
        # the body was lost; the fall-through below is an assumption:
        # without the required key, encrypted readings cannot be recovered.
        if self.encrypted:
            return [], []

    assert (len(readings) % 16) == 0
    time_base = self.received_time - datetime.timedelta(seconds=sent_timestamp)
    parsed_readings = []
    for i in range(0, len(readings), 16):
        reading = readings[i:i + 16]
        stream, _, reading_id, timestamp, value = unpack("<HHLLL", reading)
        parsed = IOTileReading(timestamp, stream, value, time_base=time_base,
                               reading_id=reading_id)
        parsed_readings.append(parsed)
    return parsed_readings, []
Decode this report into a list of readings
15,981
def max(self): return int(self._max) if not np.isinf(self._max) else self._max
Returns the maximum value of the domain. :rtype: `float` or `np.inf`
15,982
def set_widgets(self):
    # Dictionary keys and label strings below are restored from InaSAFE's
    # keyword conventions; the original string literals were stripped, so
    # the exact label wording is an assumption.
    clear_layout(self.gridLayoutThreshold)
    layer_purpose = self.parent.step_kw_purpose.selected_purpose()
    layer_subcategory = self.parent.step_kw_subcategory.selected_subcategory()
    classification = self.parent.step_kw_classification.selected_classification()

    if is_raster_layer(self.parent.layer):
        statistics = self.parent.layer.dataProvider().bandStatistics(
            1, QgsRasterBandStats.All, self.parent.layer.extent(), 0)
        text = continuous_raster_question % (
            layer_purpose['name'],
            layer_subcategory['name'],
            classification['name'],
            statistics.minimumValue,
            statistics.maximumValue)
    else:
        field_name = self.parent.step_kw_field.selected_fields()
        field_index = self.parent.layer.fields().lookupField(field_name)
        min_value_layer = self.parent.layer.minimumValue(field_index)
        max_value_layer = self.parent.layer.maximumValue(field_index)
        text = continuous_vector_question % (
            layer_purpose['name'],
            layer_subcategory['name'],
            field_name,
            classification['name'],
            min_value_layer,
            max_value_layer)
    self.lblThreshold.setText(text)

    thresholds = self.parent.get_existing_keyword('thresholds')
    selected_unit = self.parent.step_kw_unit.selected_unit()['key']

    self.classes = OrderedDict()
    classes = classification.get('classes')
    classes = sorted(classes, key=lambda k: k['value'])
    for i, the_class in enumerate(classes):
        class_layout = QHBoxLayout()

        class_label = QLabel(the_class['name'])

        min_label = QLabel(tr('Min'))
        min_value_input = QDoubleSpinBox()
        min_value_input.setMinimum(0)
        min_value_input.setMaximum(999999)
        if thresholds.get(the_class['key']):
            min_value_input.setValue(thresholds[the_class['key']][0])
        else:
            default_min = the_class['numeric_default_min']
            if isinstance(default_min, dict):
                default_min = the_class['numeric_default_min'][selected_unit]
            min_value_input.setValue(default_min)
        min_value_input.setSingleStep(0.1)

        max_label = QLabel(tr('Max'))
        max_value_input = QDoubleSpinBox()
        max_value_input.setMinimum(0)
        max_value_input.setMaximum(999999)
        if thresholds.get(the_class['key']):
            max_value_input.setValue(thresholds[the_class['key']][1])
        else:
            default_max = the_class['numeric_default_max']
            if isinstance(default_max, dict):
                default_max = the_class['numeric_default_max'][selected_unit]
            max_value_input.setValue(default_max)
        max_value_input.setSingleStep(0.1)

        class_layout.addWidget(min_label)
        class_layout.addWidget(min_value_input)
        class_layout.addWidget(max_label)
        class_layout.addWidget(max_value_input)

        self.gridLayoutThreshold.addWidget(class_label, i, 0)
        self.gridLayoutThreshold.addLayout(class_layout, i, 1)

        self.classes[the_class['key']] = [min_value_input, max_value_input]

    self.gridLayoutThreshold.setSpacing(0)

    def min_max_changed(index, the_string):
        # Keep adjacent class boundaries consistent: a class's max must
        # equal the next class's min.
        if the_string == 'max_value':
            current_max_value = list(self.classes.values())[index][1]
            target_min_value = list(self.classes.values())[index + 1][0]
            if current_max_value.value() != target_min_value.value():
                target_min_value.setValue(current_max_value.value())
        elif the_string == 'min_value':
            current_min_value = list(self.classes.values())[index][0]
            target_max_value = list(self.classes.values())[index - 1][1]
            if current_min_value.value() != target_max_value.value():
                target_max_value.setValue(current_min_value.value())

    for k, v in list(self.classes.items()):
        index = list(self.classes.keys()).index(k)
        if index < len(self.classes) - 1:
            v[1].valueChanged.connect(partial(
                min_max_changed, index=index, the_string='max_value'))
        if index > 0:
            v[0].valueChanged.connect(partial(
                min_max_changed, index=index, the_string='min_value'))
Set widgets on the Threshold tab.
15,983
def get_rows(self, sort=False): ret = [] for _, rows in sorted(self._rows.items()) if sort else self._rows.items(): self._rows_int2date(rows) ret.extend(rows) return ret
Returns the rows of this Type2Helper. :param bool sort: If True the rows are sorted by the pseudo key.
15,984
def get_cluster_graph(self, engine="fdp", graph_attr=None, node_attr=None, edge_attr=None):
    from graphviz import Digraph
    g = Digraph("directory", engine=engine)
    g.attr(label=self.top)
    # Default node colors assumed; the original literals were stripped.
    g.node_attr.update(color='lightblue2', style='filled')
    # Fixed: these three updates were applied to an undefined name `fg`.
    if graph_attr is not None:
        g.graph_attr.update(**graph_attr)
    if node_attr is not None:
        g.node_attr.update(**node_attr)
    if edge_attr is not None:
        g.edge_attr.update(**edge_attr)

    def node_kwargs(path):
        return dict(
            shape="record",
            fontsize="8.0",
            label=os.path.basename(path),
        )

    edge_kwargs = dict(arrowType="vee", style="solid", minlen="1")
    cluster_kwargs = dict(rankdir="LR", pagedir="BL", style="rounded", bgcolor="azure2")

    exclude_top_node = False
    for root, dirs, files in os.walk(self.top):
        if exclude_top_node and root == self.top:
            continue
        cluster_name = "cluster_%s" % root
        with g.subgraph(name=cluster_name) as d:
            d.attr(**cluster_kwargs)
            d.attr(rank="source" if (files or dirs) else "sink")
            d.attr(label=os.path.basename(root))
            for f in files:
                filepath = os.path.join(root, f)
                d.node(filepath, **node_kwargs(filepath))
                if os.path.islink(filepath):
                    # Add an edge from the symlink to its target.
                    realp = os.path.realpath(filepath)
                    realp = os.path.relpath(realp, filepath)
                    g.edge(filepath, realp, **edge_kwargs)

            for dirname in dirs:
                dirpath = os.path.join(root, dirname)
                new_cluster_name = "cluster_%s" % dirpath
                d.edge(cluster_name, new_cluster_name, **edge_kwargs)

    return g
Generate directory graph in the DOT language. Directories are shown as clusters .. warning:: This function scans the entire directory tree starting from top so the resulting graph can be really big. Args: engine: Layout command used. ['dot', 'neato', 'twopi', 'circo', 'fdp', 'sfdp', 'patchwork', 'osage'] graph_attr: Mapping of (attribute, value) pairs for the graph. node_attr: Mapping of (attribute, value) pairs set for all nodes. edge_attr: Mapping of (attribute, value) pairs set for all edges. Returns: graphviz.Digraph <https://graphviz.readthedocs.io/en/stable/api.html#digraph>
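A usage sketch, assuming graphviz is installed; `DirectoryTree` is a stand-in name for whatever class hosts this method:

fs = DirectoryTree(top='/path/to/project')  # hypothetical host object
g = fs.get_cluster_graph(engine='fdp', graph_attr={'rankdir': 'LR'})
g.render('project_tree', format='svg', cleanup=True)  # writes project_tree.svg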
15,985
def _define_absl_flag(self, flag_instance, suppress):
    flag_name = flag_instance.name
    short_name = flag_instance.short_name
    argument_names = ['--' + flag_name]
    if short_name:
        argument_names.insert(0, '-' + short_name)
    if suppress:
        helptext = argparse.SUPPRESS
    else:
        # argparse's help string is %-formatted, so escape literal %.
        # This branch was missing from the extracted cell and is restored
        # from absl-py's argparse integration.
        helptext = flag_instance.help.replace('%', '%%')
    if flag_instance.boolean:
        # Only add the `no` form to the long name.
        argument_names.append('--no' + flag_name)
        self.add_argument(
            *argument_names, action=_BooleanFlagAction, help=helptext,
            metavar=flag_instance.name.upper(),
            flag_instance=flag_instance)
    else:
        self.add_argument(
            *argument_names, action=_FlagAction, help=helptext,
            metavar=flag_instance.name.upper(),
            flag_instance=flag_instance)
Defines a flag from the flag_instance.
15,986
def filesys_decode(path):
    if isinstance(path, six.text_type):
        return path
    fs_enc = sys.getfilesystemencoding() or 'utf-8'
    candidates = fs_enc, 'utf-8'
    for enc in candidates:
        try:
            return path.decode(enc)
        except UnicodeDecodeError:
            continue
Ensure that the given path is decoded; returns None when no expected encoding works
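A usage sketch, assuming a UTF-8 filesystem encoding; bytes paths (e.g. from os.listdir(b'.')) are decoded while text paths pass through unchanged:

assert filesys_decode(u'caf\xe9.txt') == u'caf\xe9.txt'          # text passes through
assert filesys_decode(b'caf\xc3\xa9.txt') == u'caf\xe9.txt'      # bytes are decoded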
15,987
def default_output_format(content_type='application/json', apply_globally=False,
                          api=None, cli=False, http=True):
    def decorator(formatter):
        formatter = hug.output_format.content_type(content_type)(formatter)
        if apply_globally:
            if http:
                hug.defaults.output_format = formatter
            if cli:
                hug.defaults.cli_output_format = formatter
        else:
            apply_to_api = hug.API(api) if api else hug.api.from_object(formatter)
            if http:
                apply_to_api.http.output_format = formatter
            if cli:
                apply_to_api.cli.output_format = formatter
        return formatter
    return decorator
A decorator that allows you to override the default output format for an API
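A usage sketch in the style this decorator supports; the pretty-printing JSON body is illustrative, not part of the library:

import json
import hug

@hug.default_output_format(content_type='application/json', apply_globally=False)
def pretty_json(data, **kwargs):
    # Illustrative formatter: indent JSON responses for readability.
    return json.dumps(data, indent=2).encode('utf8')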
15,988
def get_namespace(self, uri): key = (self, uri) if key in self.context.namespaces: return self.context.namespaces[key] else: ns = TemplateNamespace(uri, self.context._copy(), templateuri=uri, calling_uri=self._templateuri) self.context.namespaces[key] = ns return ns
Return a :class:`.Namespace` corresponding to the given ``uri``. If the given ``uri`` is a relative URI (i.e. it does not contain a leading slash ``/``), the ``uri`` is adjusted to be relative to the ``uri`` of the namespace itself. This method is therefore mostly useful off of the built-in ``local`` namespace, described in :ref:`namespace_local`. In most cases, a template wouldn't need this function, and should instead use the ``<%namespace>`` tag to load namespaces. However, since all ``<%namespace>`` tags are evaluated before the body of a template ever runs, this method can be used to locate namespaces using expressions that were generated within the body code of the template, or to conditionally use a particular namespace.
15,989
def fetch_artifact(self, trial_id, prefix):
    local = os.path.join(self.log_dir, trial_id, prefix)
    if self.upload_dir:
        remote = '/'.join([self.upload_dir, trial_id, prefix])
        _remote_to_local_sync(remote, local)
    return local
Verifies that all children of the artifact prefix path are available locally. Fetches them if not. Returns the local path to the given trial's artifacts at the specified prefix, which is always just {log_dir}/{trial_id}/{prefix}
15,990
def _input_as_list(self, data):
    query, database, output = data
    if (not isabs(database)) \
            or (not isabs(query)) \
            or (not isabs(output)):
        raise ApplicationError("Only absolute paths allowed.\n%s" %
                               ', '.join(data))
    self._database = FilePath(database)
    self._query = FilePath(query)
    self._output = ResultPath(output, IsWritten=True)

    # BLAT flag names ('-t', '-q', '-mask', '-qMask', '-repeats', '-out')
    # restored from the blat command-line interface; the original string
    # literals were stripped. Also fixes the original 'Paramters' typo.
    if self.Parameters['-t'].isOn() and self.Parameters['-q'].isOn() and \
            (self.Parameters['-t'].Value, self.Parameters['-q'].Value) not in \
            self._valid_combinations:
        error_message = "Invalid combination of database and query " + \
                        "types ('%s', '%s').\n" % \
                        (self.Parameters['-t'].Value,
                         self.Parameters['-q'].Value)
        error_message += "Must be one of: %s\n" % \
                         repr(self._valid_combinations)
        raise ApplicationError(error_message)

    if self.Parameters['-t'].isOn() and \
            self.Parameters['-t'].Value not in self._database_types:
        error_message = "Invalid database type %s\n" % \
                        self.Parameters['-t'].Value
        error_message += "Allowed values: %s\n" % \
                         ', '.join(self._database_types)
        raise ApplicationError(error_message)

    if self.Parameters['-q'].isOn() and \
            self.Parameters['-q'].Value not in self._query_types:
        error_message = "Invalid query type %s\n" % \
                        self.Parameters['-q'].Value
        error_message += "Allowed values: %s\n" % \
                         ', '.join(self._query_types)
        raise ApplicationError(error_message)

    if self.Parameters['-mask'].isOn() and \
            self.Parameters['-mask'].Value not in self._mask_types:
        error_message = "Invalid mask type %s\n" % \
                        self.Parameters['-mask'].Value
        error_message += "Allowed Values: %s\n" % \
                         ', '.join(self._mask_types)
        raise ApplicationError(error_message)

    if self.Parameters['-qMask'].isOn() and \
            self.Parameters['-qMask'].Value not in self._mask_types:
        error_message = "Invalid qMask type %s\n" % \
                        self.Parameters['-qMask'].Value
        error_message += "Allowed values: %s\n" % \
                         ', '.join(self._mask_types)
        raise ApplicationError(error_message)

    if self.Parameters['-repeats'].isOn() and \
            self.Parameters['-repeats'].Value not in self._mask_types:
        error_message = "Invalid repeat type %s\n" % \
                        self.Parameters['-repeats'].Value
        error_message += "Allowed values: %s\n" % \
                         ', '.join(self._mask_types)
        raise ApplicationError(error_message)

    if self.Parameters['-out'].isOn() and \
            self.Parameters['-out'].Value not in self._out_types:
        error_message = "Invalid output type %s\n" % \
                        self.Parameters['-out'].Value
        error_message += "Allowed values: %s\n" % \
                         ', '.join(self._out_types)
        raise ApplicationError(error_message)

    return ''
Takes the positional arguments as input in a list. The list input here should be [query_file_path, database_file_path, output_file_path]
15,991
def _set_zone(self, v, load=False):
    # String literals restored following the standard pyangbind generated-code
    # pattern; the tailf-common extension values are approximations, since the
    # original literals were stripped.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        t = YANGDynClass(
            v,
            base=YANGListType(
                "zone_name", zone.zone, yang_name="zone", rest_name="zone",
                parent=self, is_container='list', user_ordered=False,
                path_helper=self._path_helper, yang_keys='zone-name',
                extensions={u'tailf-common': {
                    u'info': u'Zone configuration',
                    u'cli-no-key-completion': None,
                    u'cli-suppress-mode': None,
                    u'cli-suppress-list-no': None,
                    u'cli-full-no': None,
                    u'callpoint': u'zone_callpoint'}}),
            is_container='list', yang_name="zone", rest_name="zone",
            parent=self, path_helper=self._path_helper,
            extmethods=self._extmethods, register_paths=True,
            extensions={u'tailf-common': {
                u'info': u'Zone configuration',
                u'cli-no-key-completion': None,
                u'cli-suppress-mode': None,
                u'cli-suppress-list-no': None,
                u'cli-full-no': None,
                u'callpoint': u'zone_callpoint'}},
            namespace='urn:brocade.com:mgmt:brocade-zone',
            defining_module='brocade-zone', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """zone must be of a type compatible with list""",
            'defined-type': "list",
            'generated-type': """YANGDynClass(base=YANGListType("zone_name", zone.zone), is_container='list', yang_name="zone", yang_type='list')""",
        })
    self.__zone = t
    if hasattr(self, '_set'):
        self._set()
Setter method for zone, mapped from YANG variable /zoning/defined_configuration/zone (list) If this variable is read-only (config: false) in the source YANG file, then _set_zone is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_zone() directly.
15,992
def affine_shift_matrix(wrg=(-0.1, 0.1), hrg=(-0.1, 0.1), w=200, h=200): if isinstance(wrg, tuple): tx = np.random.uniform(wrg[0], wrg[1]) * w else: tx = wrg * w if isinstance(hrg, tuple): ty = np.random.uniform(hrg[0], hrg[1]) * h else: ty = hrg * h shift_matrix = np.array([[1, 0, tx], \ [0, 1, ty], \ [0, 0, 1]]) return shift_matrix
Create an affine transform matrix for image shifting. NOTE: In OpenCV, x is width and y is height. Parameters ----------- wrg : float or tuple of floats Range to shift on width axis, -1 ~ 1. - float, a fixed distance. - tuple of 2 floats, randomly sample a value as the distance between these 2 values. hrg : float or tuple of floats Range to shift on height axis, -1 ~ 1. - float, a fixed distance. - tuple of 2 floats, randomly sample a value as the distance between these 2 values. w, h : int The width and height of the image. Returns ------- numpy.array An affine transform matrix.
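A short check of the matrix in homogeneous coordinates; the numbers are illustrative:

import numpy as np

m = affine_shift_matrix(wrg=0.1, hrg=-0.05, w=200, h=100)
point = np.array([50.0, 20.0, 1.0])  # (x, y) in homogeneous form
shifted = m @ point                   # -> [70., 15., 1.]
assert np.allclose(shifted, [50 + 0.1 * 200, 20 - 0.05 * 100, 1.0])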
15,993
def add_node(self, node_id, name, labels):
    # Index name and property keys assumed; the original literals were stripped.
    node = self.graph_db.get_or_create_indexed_node(
        'Node', 'node_id', node_id, {'node_id': node_id, 'name': name})
    try:
        node.add_labels(*labels)
    except NotImplementedError:
        pass
Add the node with name and labels. Args: node_id: Id for the node. name: Name for the node. labels: Label for the node. Raises: NotImplementedError: When adding labels is not supported.
15,994
def create_function_f_i(self):
    # Function and port names assumed from the attribute names; the
    # original string literals were stripped.
    return ca.Function(
        'f_i',
        [self.t, self.x, self.y, self.m, self.p, self.c, self.pre_c,
         self.ng, self.nu],
        [self.f_i],
        ['t', 'x', 'y', 'm', 'p', 'c', 'pre_c', 'ng', 'nu'],
        ['f_i'],
        self.func_opt)
state reinitialization (reset) function
15,995
def UV_B(Bg,gw): UV = [] p = Bwidth(gw) pp = 2**p while p: pp = pp>>1 p = p-1 if Bg&pp: uv = B012(p,gw-1) UV.append(uv) return UV
returns the implications UV based on B Bg = B(g), g∈2^M gw = |M|, M is the set of all attributes
15,996
def currentEvent(self):
    # Reconstructed from the residue "startTime-endTime').first()"; the
    # queryset name and the filter condition (and the implied
    # django.utils.timezone import) are assumptions.
    currentEvent = self.event_set.filter(
        endTime__gte=timezone.now()).order_by('startTime').first()
    if currentEvent is None:
        # No future events: fall back to the last one to end.
        currentEvent = self.event_set.order_by('-endTime').first()
    return currentEvent
Return the first event that hasn't ended yet, or if there are no future events, the last one to end.
15,997
def get_matching_service_template_file(service_name, template_files): if service_name in template_files: return template_files[service_name] return None
Return the template file that goes with the given service name, or return None if there's no match. Subservices return the parent service's file.
15,998
def flags(rule_or_module, variable_name, condition, values=[]):
    assert isinstance(rule_or_module, basestring)
    assert isinstance(variable_name, basestring)
    assert is_iterable_typed(condition, basestring)
    assert is_iterable(values) and all(
        isinstance(v, (basestring, type(None))) for v in values)
    caller = bjam.caller()
    if '.' not in rule_or_module and caller and caller[:-1].startswith("Jamfile"):
        # Unqualified rule name used inside a Jamfile: qualify it.
        rule_or_module = qualify_jam_action(rule_or_module, caller)

    # The extracted cell had fused the two conditionals below into one
    # if/else; this split is restored from b2's toolset module.
    if condition and not replace_grist(condition, ''):
        # Condition contains ungristed values: this is the older syntax
        # where the values were passed in place of the condition.
        values = [condition]
        condition = None

    if condition:
        transformed = []
        for c in condition:
            pl = [property.create_from_string(s, False, True) for s in c.split('/')]
            pl = feature.expand_subfeatures(pl)
            transformed.append(property_set.create(pl))
        condition = transformed

        property.validate_property_sets(condition)

    __add_flag(rule_or_module, variable_name, condition, values)
Specifies the flags (variables) that must be set on targets under certain
conditions, described by arguments.

rule_or_module: If it contains a dot, should be a rule name. The flags will
    be applied when that rule is used to set up build actions.
    If it does not contain a dot, should be a module name. The flags will
    be applied for all rules in that module. If the module for a rule is
    different from the calling module, an error is issued.

variable_name: Variable that should be set on the target.

condition: A condition for when this flag should be applied. Should be a
    set of property sets. If one of those property sets is contained in the
    build properties, the flag will be used. Implied values are not
    allowed: "<toolset>gcc" should be used, not just "gcc". Subfeatures,
    like in "<toolset>gcc-3.2", are allowed. If left empty, the flag will
    always be used. Property sets may use value-less properties ('<a>' vs.
    '<a>value') to match absent properties. This allows separately matching

        <architecture>/<address-model>64
        <architecture>ia64/<address-model>

    where both features are optional. Without this syntax we'd be forced
    to define a "default" value.

values: The value to add to the variable. If <feature> is specified, then
    the value of 'feature' will be added.
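An illustrative call in the style the docstring describes; the toolset, variable, and option names are examples only:

# Pass -O3 to gcc compile actions when <optimization>speed is requested.
flags('gcc.compile', 'OPTIONS', ['<optimization>speed'], ['-O3'])

# Older compatibility syntax from the docstring: values passed in place
# of the condition.
flags('gcc.link', 'RPATH', ['<dll-path>'])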
15,999
def NDLimitExceeded_NDLimit(self, **kwargs):
    config = ET.Element("config")
    NDLimitExceeded = ET.SubElement(config, "NDLimitExceeded",
                                    xmlns="http://brocade.com/ns/brocade-notification-stream")
    NDLimit = ET.SubElement(NDLimitExceeded, "NDLimit")
    NDLimit.text = kwargs.pop('NDLimit')

    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code