Dataset preview columns: row index (int64, ~389k rows), code (string, 26–79.6k chars), docstring (string, 1–46.9k chars).
16,500
import re  # required by check_exclamations_ppm

def check_exclamations_ppm(text):
    err = "leonard.exclamation.30ppm"
    msg = u"More than 30 ppm of exclamations. Keep them under control."
    regex = r"\w!"
    count = len(re.findall(regex, text))
    num_words = len(text.split(" "))
    ppm = (count * 1.0 / num_words) * 1e6
    if ppm > 30 and count > 1:
        loc = re.search(regex, text).start() + 1
        return [(loc, loc + 1, err, msg, ".")]
    else:
        return []
Make sure that the exclamation ppm is under 30.
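A quick usage sketch of the checker above (the printed tuple is illustrative):

text = "Wow! Wow! " + "word " * 10  # two exclamations in ~12 words, far above 30 ppm
print(check_exclamations_ppm(text))
# -> [(3, 4, 'leonard.exclamation.30ppm', u'More than 30 ppm of exclamations. Keep them under control.', '.')]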
16,501
def sign_more(self, bucket, cos_path, expired): return self.app_sign(bucket, cos_path, expired)
Sign multiple times (for uploading files, creating directories, getting file/directory attributes, and listing directories). :param bucket: bucket name :param cos_path: the COS path to operate on, beginning with '/' :param expired: signature expiration time as a UNIX timestamp; e.g., to have the signature expire in 30 seconds, set expired to the current time plus 30 seconds :return: the signature string
16,502
import operator as op  # used below for itemgetter

def recommend(self, client_data, limit, extra_data={}):
    preinstalled_addon_ids = client_data.get("installed_addons", [])
    extended_limit = limit + len(preinstalled_addon_ids)
    ensemble_suggestions = self._ensemble_recommender.recommend(
        client_data, extended_limit, extra_data
    )
    curated_suggestions = self._curated_recommender.recommend(
        client_data, extended_limit, extra_data
    )
    # Interleave results from both sources, skipping duplicate GUIDs,
    # until the limit is reached or either source runs dry.
    merged_results = set()
    while (
        len(merged_results) < limit
        and len(ensemble_suggestions) > 0
        and len(curated_suggestions) > 0
    ):
        r1 = ensemble_suggestions.pop()
        if r1[0] not in [temp[0] for temp in merged_results]:
            merged_results.add(r1)
        if not (
            len(merged_results) < limit
            and len(ensemble_suggestions) > 0
            and len(curated_suggestions) > 0
        ):
            break
        r2 = curated_suggestions.pop()
        if r2[0] not in [temp[0] for temp in merged_results]:
            merged_results.add(r2)
    if len(merged_results) < limit:
        msg = (
            "Defaulting to empty results. Insufficient recommendations found for client: %s"
            % client_data["client_id"]
        )
        self.logger.info(msg)
        return []
    sorted_results = sorted(
        list(merged_results), key=op.itemgetter(1), reverse=True
    )
    log_data = (client_data["client_id"], str([r[0] for r in sorted_results]))
    self.logger.info(
        "Hybrid recommendations client_id: [%s], guids: [%s]" % log_data
    )
    return sorted_results
Hybrid recommendations select half of the recommendations from the ensemble recommender and half from the curated one. Duplicate recommendations are accommodated by rank ordering by weight.
16,503
import struct  # used to unpack the delay time below

def populate(self, priority, address, rtr, data):
    assert isinstance(data, bytes)
    self.needs_high_priority(priority)
    self.needs_no_rtr(rtr)
    self.needs_data(data, 4)
    self.set_attributes(priority, address, rtr)
    tmp = (data[0] >> 1) & 0x03
    print(tmp)
    self.channel = self.byte_to_channel(tmp)
    self.needs_valid_channel(self.channel, 2)
    # The struct format string was stripped in extraction; ">L" (big-endian
    # unsigned 32-bit) is an assumption matching the 4 bytes unpacked here.
    (self.delay_time,) = struct.unpack(">L", bytes([0]) + data[1:])
:return: None
16,504
def _add_video_timing(self, pic):
    # The XPath expression was stripped in extraction; '/p:sld' is an
    # assumption consistent with the docstring below.
    sld = self._spTree.xpath('/p:sld')[0]
    childTnLst = sld.get_or_add_childTnLst()
    childTnLst.add_video(pic.shape_id)
Add a `p:video` element under `p:sld/p:timing`. The element will refer to the specified *pic* element by its shape id, and cause the video play controls to appear for that video.
16,505
def new_evaluation_result(self, has_improved: bool) -> bool:
    if self.lr is None:
        assert self.base_lr is not None
        self.lr = self.base_lr
    if has_improved:
        self.num_not_improved = 0
    else:
        self.num_not_improved += 1
        if self.num_not_improved >= self.reduce_num_not_improved \
                and self.reduce_factor < 1.0 and self.warmed_up:
            old_lr = self.lr
            self.lr *= self.reduce_factor
            logger.info("%d checkpoints since improvement or rate scaling, "
                        "lowering learning rate: %1.2e -> %1.2e",
                        self.num_not_improved, old_lr, self.lr)
            self.num_not_improved = 0
            return True
    return False
Returns true if the parameters should be reset to the ones with the best validation score. :param has_improved: Whether the model improved on held-out validation data. :return: True if parameters should be reset to the ones with best validation score.
16,506
def getFlaskResponse(responseString, httpStatus=200): return flask.Response(responseString, status=httpStatus, mimetype=MIMETYPE)
Returns a Flask response object for the specified data and HTTP status.
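A usage sketch; MIMETYPE is a module-level constant in the original source, so the value below is an assumption:

import flask

MIMETYPE = "application/json"  # assumed module-level constant

resp = getFlaskResponse('{"status": "ok"}', httpStatus=201)
print(resp.status_code)  # 201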
16,507
import os
import sys  # which() consults sys.prefix and the PATH environment variable

def which(program, ignore_own_venv=False):
    if not program:
        return None
    if os.path.isabs(program):
        return program if is_executable(program) else None
    for p in os.environ.get("PATH", "").split(":"):
        fp = os.path.join(p, program)
        if (not ignore_own_venv or not fp.startswith(sys.prefix)) and is_executable(fp):
            return fp
    return None
:param str|None program: Program name to find via env var PATH :param bool ignore_own_venv: If True, do not resolve to executables in current venv :return str|None: Full path to program, if one exists and is executable
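Example calls (is_executable is a helper defined elsewhere in the same module):

print(which("python3"))          # e.g. "/usr/bin/python3", or None if not on PATH
print(which("/bin/ls"))          # absolute paths are returned as-is when executable
print(which("no-such-program"))  # None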
16,508
def trim(self, lower=None, upper=None):
    # The attribute name passed to getattr was stripped; 'value' is an
    # assumption matching the numeric comparison performed by trim().
    if lower is None:
        lower = getattr(self.subpars.eqi2, 'value', None)
    if upper is None:
        upper = getattr(self.subpars.eqb, 'value', None)
    super().trim(lower, upper)
Trim upper values in accordance with :math:`EQI2 \\leq EQI1 \\leq EQB`.

>>> from hydpy.models.lland import *
>>> parameterstep('1d')
>>> eqb.value = 3.0
>>> eqi2.value = 1.0
>>> eqi1(0.0)
>>> eqi1
eqi1(1.0)
>>> eqi1(1.0)
>>> eqi1
eqi1(1.0)
>>> eqi1(2.0)
>>> eqi1
eqi1(2.0)
>>> eqi1(3.0)
>>> eqi1
eqi1(3.0)
>>> eqi1(4.0)
>>> eqi1
eqi1(3.0)
16,509
def _build_biomart_gene_query(self, taxid, cols_to_fetch):
    taxid = str(taxid)
    # '9606' (human) and 'hgnc_id' are assumptions for the stripped
    # literals: HGNC identifiers only exist for human genes.
    if taxid != '9606':
        cols_to_fetch = [x for x in cols_to_fetch if x != 'hgnc_id']
    query_attributes = {
        "virtualSchemaName": "default", "formatter": "TSV", "header": "0",
        "uniqueRows": "1", "count": "0", "datasetConfigVersion": "0.6"}
    qry = etree.Element("Query", query_attributes)
    if taxid in self.localtt:
        object_attributes = {"name": self.localtt[taxid], "interface": "default"}
        dataset = etree.SubElement(qry, "Dataset", object_attributes)
        for col in cols_to_fetch:
            etree.SubElement(dataset, "Attribute", {"name": col})
        # The stripped prefix is assumed to be the XML declaration.
        query = '<?xml version="1.0" encoding="UTF-8"?>' \
            + etree.tostring(qry, encoding="unicode")
    else:
        LOG.warning("not finding taxon %s in the local translation table", taxid)
        query = None
    return query
Build the query to fetch equivalent identifiers via the BioMart RESTful API. Documentation at http://uswest.ensembl.org/info/data/biomart/biomart_restful.html :param taxid: taxon identifier :param cols_to_fetch: array of Ensembl BioMart attributes to include :return: the query string, or None if the taxon is not in the local translation table
16,510
def parse_command_line_parameters():
    # The usage/version strings and the option definitions were stripped
    # during extraction and cannot be recovered; the gaps are left open.
    usage = ...
    version = ...
    parser = OptionParser(usage=usage, version=version)
    parser.add_option(..., action=..., dest=..., default=False, help=...)  # likely 'store_true' given default=False
    parser.add_option(..., action=..., type=..., dest=..., default=None, help=...)
    opts, args = parser.parse_args()
    num_args = 1
    if len(args) != num_args:
        parser.error(...)
    return opts, args
Parses command line arguments
16,511
def fit_left_censoring(
    self,
    durations,
    event_observed=None,
    timeline=None,
    label=None,
    alpha=None,
    ci_labels=None,
    show_progress=False,
    entry=None,
    weights=None,
):
    self.durations = np.asarray(pass_for_numeric_dtypes_or_raise_array(durations))
    check_nans_or_infs(self.durations)
    check_positivity(self.durations)
    self._censoring_type = CensoringType.LEFT
    return self._fit(
        (None, self.durations),
        event_observed=event_observed,
        timeline=timeline,
        label=label,
        alpha=alpha,
        ci_labels=ci_labels,
        show_progress=show_progress,
        entry=entry,
        weights=weights,
    )
Fit the model to a left-censored dataset Parameters ---------- durations: an array, or pd.Series length n, duration subject was observed for event_observed: numpy array or pd.Series, optional length n, True if the the death was observed, False if the event was lost (right-censored). Defaults all True if event_observed==None timeline: list, optional return the estimate at the values in timeline (positively increasing) label: string, optional a string to name the column of the estimate. alpha: float, optional the alpha value in the confidence intervals. Overrides the initializing alpha for this call to fit only. ci_labels: list, optional add custom column names to the generated confidence intervals as a length-2 list: [<lower-bound name>, <upper-bound name>]. Default: <label>_lower_<alpha> show_progress: boolean, optional since this is an iterative fitting algorithm, switching this to True will display some iteration details. entry: an array, or pd.Series, of length n relative time when a subject entered the study. This is useful for left-truncated (not left-censored) observations. If None, all members of the population entered study when they were "born": time zero. weights: an array, or pd.Series, of length n integer weights per observation Returns ------- self self with new properties like ``cumulative_hazard_``, ``survival_function_``
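A usage sketch with lifelines; the data values are invented for illustration:

from lifelines import WeibullFitter
import numpy as np

# detection-limit style data: true values below 3.0 were only recorded as 3.0
durations = np.array([3.0, 3.0, 5.2, 8.1, 12.4])
observed = np.array([0, 0, 1, 1, 1])  # 0 = left-censored observation

wf = WeibullFitter()
wf.fit_left_censoring(durations, event_observed=observed)
print(wf.lambda_, wf.rho_)  # fitted Weibull parameters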
16,512
def _absorb_z_into_w(moment_index: int,
                     op: ops.Operation,
                     state: _OptimizerState) -> None:
    t = cast(float, _try_get_known_z_half_turns(op))
    q = op.qubits[0]
    state.held_w_phases[q] = cast(float, state.held_w_phases[q]) + t / 2
    state.deletions.append((moment_index, op))
Absorbs a Z^t gate into a W(a) flip. [Where W(a) is shorthand for PhasedX(phase_exponent=a).]

Uses the following identity:
    ───W(a)───Z^t───
    ≡ ───W(a)───────────Z^t/2──────────Z^t/2───   (split Z)
    ≡ ───W(a)───W(a)───Z^-t/2───W(a)───Z^t/2───   (flip Z)
    ≡ ───W(a)───W(a)──────────W(a+t/2)─────────   (phase W)
    ≡ ────────────────────────W(a+t/2)─────────   (cancel Ws)
    ≡ ───W(a+t/2)───
16,513
def make_subscriber(self, my_args=None): LOGGER.debug("zeromq.Driver.make_subscriber") if my_args is None: raise exceptions.ArianeConfError() if not self.configuration_OK or self.connection_args is None: raise exceptions.ArianeConfError() subscriber = Subscriber.start(my_args, self.connection_args).proxy() self.subscribers_registry.append(subscriber) return subscriber
Start a new subscriber bound to this driver's connection args and register it. :return: the started subscriber proxy
16,514
def apply_filter(self, strings):
    result = strings
    for filt in self.filters:
        result = filt.apply_filter(result)
        self.log([u"Applying regex: => ", strings, result])
    return result
Apply the text filters to the given list of strings. :param list strings: the list of input strings
16,515
def _get_login_manager(self, app: FlaskUnchained, anonymous_user: AnonymousUser, ) -> LoginManager: login_manager = LoginManager() login_manager.anonymous_user = anonymous_user or AnonymousUser login_manager.localize_callback = _ login_manager.request_loader(self._request_loader) login_manager.user_loader( lambda *a, **kw: self.security_utils_service.user_loader(*a, **kw)) login_manager.login_view = login_manager.login_message = _( ) login_manager.login_message_category = login_manager.needs_refresh_message = _( ) login_manager.needs_refresh_message_category = login_manager.init_app(app) return login_manager
Get an initialized instance of Flask Login's :class:`~flask_login.LoginManager`.
16,516
def set_end_date(self, date):
    if date is None:
        raise NullArgument()
    if self.get_end_date_metadata().is_read_only():
        raise NoAccess()
    if not self.my_osid_object_form._is_valid_date_time(
            date, self.get_end_date_metadata()):
        raise InvalidArgument()
    # The stripped map key is assumed to be 'endDate', matching the setter name.
    self.my_osid_object_form._my_map['endDate'] = date
Sets the end date. arg: date (osid.calendaring.DateTime): the new date raise: InvalidArgument - ``date`` is invalid raise: NoAccess - ``Metadata.isReadOnly()`` is ``true`` raise: NullArgument - ``date`` is ``null`` *compliance: mandatory -- This method must be implemented.*
16,517
def example_exc_handler(tries_remaining, exception, delay):
    # "{0}" restored by inference from the surviving "{1}" and "{2}" fields.
    print >> stderr, "Caught {0}, {1} tries remaining, " \
        "sleeping for {2} seconds".format(exception, tries_remaining, delay)
Example exception handler; prints a warning to stderr. tries_remaining: The number of tries remaining. exception: The exception instance which was raised. delay: The number of seconds to sleep before the next attempt.
16,518
def build_notification_message(template_context, template_configuration=None): if ( template_configuration is not None and template_configuration.html_template and template_configuration.plaintext_template ): plain_msg, html_msg = template_configuration.render_all_templates(template_context) else: plain_msg = render_to_string( , template_context ) html_msg = render_to_string( , template_context ) return plain_msg, html_msg
Create HTML and plaintext message bodies for a notification. We receive a context with data we can use to render, as well as an optional site template configuration - if we don't get a template configuration, we'll use the standard, built-in template. Arguments: template_context (dict): A set of data to render template_configuration: A database-backed object with templates stored that can be used to render a notification.
16,519
def scan_file(self, filename, apikey):
    url = self.base_url + "file/scan"
    # 'apikey' is the VirusTotal v2 request parameter name; 'rb' file mode
    # is an assumption for the stripped literal.
    params = {'apikey': apikey}
    scanfile = {"file": open(filename, 'rb')}
    response = requests.post(url, files=scanfile, params=params)
    rate_limit_clear = self.rate_limit()
    if rate_limit_clear:
        if response.status_code == self.HTTP_OK:
            json_response = response.json()
            return json_response
        elif response.status_code == self.HTTP_RATE_EXCEEDED:
            time.sleep(20)
    else:
        self.logger.error("sent: %s, HTTP: %d", filename, response.status_code)
Sends a file to VirusTotal for assessment.
16,520
def check_docstring(cls): docstring = inspect.getdoc(cls) if not docstring: breadcrumbs = " -> ".join(t.__name__ for t in inspect.getmro(cls)[:-1][::-1]) msg = "docstring required for plugin (%s, defined in %s)" args = (cls.__name__, breadcrumbs, cls.__module__) raise InternalCashewException(msg % args) max_line_length = cls._class_settings.get() if max_line_length: for i, line in enumerate(docstring.splitlines()): if len(line) > max_line_length: msg = "docstring line %s of %s is %s chars too long" args = (i, cls.__name__, len(line) - max_line_length) raise Exception(msg % args) return docstring
Asserts that the class has a docstring, returning it if successful.
16,521
def schunk(string, size): return [string[i:i+size] for i in range(0, len(string), size)]
Splits a string into size-sized chunks; the final chunk may be shorter.
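For example:

print(schunk("abcdefgh", 3))  # ['abc', 'def', 'gh']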
16,522
def __get_favorites(self, favorite_type, start=0, max_items=100): if favorite_type not in (RADIO_SHOWS, RADIO_STATIONS): favorite_type = SONOS_FAVORITES response = self.contentDirectory.Browse([ (, if favorite_type is SONOS_FAVORITES else .format(favorite_type)), (, ), (, ), (, start), (, max_items), (, ) ]) result = {} favorites = [] results_xml = response[] if results_xml != : metadata = XML.fromstring(really_utf8(results_xml)) for item in metadata.findall( if favorite_type == RADIO_SHOWS else ): favorite = {} favorite[] = item.findtext( ) favorite[] = item.findtext( ) if favorite_type == SONOS_FAVORITES: favorite[] = item.findtext( ) favorites.append(favorite) result[] = response[] result[] = len(favorites) result[] = favorites return result
Helper method for `get_favorite_radio_*` methods. Args: favorite_type (str): Specify either `RADIO_STATIONS` or `RADIO_SHOWS`. start (int): Which number to start the retrieval from. Used for paging. max_items (int): The total number of results to return.
16,523
def get_fetch_headers(self, method, headers):
    all_headers = self.headers.copy()
    if headers:
        all_headers.update(headers)
    return Headers(all_headers)
merge class headers with passed in headers :param method: string, (eg, GET or POST), this is passed in so you can customize headers based on the method that you are calling :param headers: dict, all the headers passed into the fetch method :returns: passed in headers merged with global class headers
16,524
def reinterpretBits(self, sigOrVal, toType):
    if isinstance(sigOrVal, Value):
        return reinterpretBits__val(self, sigOrVal, toType)
    elif isinstance(toType, Bits):
        return fitTo_t(sigOrVal, toType)
    elif sigOrVal._dtype.bit_length() == toType.bit_length():
        if isinstance(toType, HStruct):
            # 'raise' here looked like extraction damage; returning the
            # converted value is the assumed intent.
            return reinterpret_bits_to_hstruct(sigOrVal, toType)
        elif isinstance(toType, HUnion):
            raise NotImplementedError()
        elif isinstance(toType, HArray):
            return reinterpret_bits_to_harray(sigOrVal, toType)
    return default_auto_cast_fn(self, sigOrVal, toType)
Cast an object to another type of the same bit size (e.g. bits to struct, union or array).
16,525
def coord_pyramids(coords, zoom_start, zoom_stop):
    for coord in coords:
        for child in coord_pyramid(coord, zoom_start, zoom_stop):
            yield child
Generate the full pyramid for each coord in the list of coords. Note that zoom_stop is exclusive.
16,526
def _filter_properties(obj, property_list):
    if property_list is not None:
        property_list = [p.lower() for p in property_list]
        # list() guards against mutating the dict while iterating (Python 3).
        for pname in list(obj.properties.keys()):
            if pname.lower() not in property_list:
                del obj.properties[pname]
Remove properties from an instance or class that aren't in the property_list parameter. obj (:class:`~pywbem.CIMClass` or :class:`~pywbem.CIMInstance`): The class or instance from which properties are to be filtered. property_list (list of :term:`string`): List of properties which are to be included in the result. If None, remove nothing. If an empty list, remove everything. Else remove properties that are not in property_list. Duplicate names are allowed in the list and ignored.
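A usage sketch with pywbem (string-valued properties keep the example simple):

from pywbem import CIMInstance

inst = CIMInstance("CIM_Foo", properties={"Name": "n1", "Color": "red"})
_filter_properties(inst, ["name"])   # matching is case-insensitive
print(list(inst.properties.keys()))  # ['Name']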
16,527
import warnings
import numpy as np

def simulated_annealing(objective_function, initial_array,
                        initial_temperature=10 ** 4,
                        cooldown_rate=0.7,
                        acceptance_criteria=None,
                        lower_bound=-float('inf'),  # stripped literal: 'inf'
                        max_iterations=10 ** 3):
    X = initial_array
    if acceptance_criteria is not None:
        acceptance_bound = acceptance_criteria(X)
    best_X = X
    iterations = 0
    current_energy = objective_function(X)
    best_energy = current_energy
    temperature = initial_temperature
    while current_energy > lower_bound and iterations <= max_iterations:
        iterations += 1
        candidate = element_from_neighbourhood(X)
        candidate_energy = objective_function(candidate)
        delta = candidate_energy - current_energy
        if (candidate_energy < best_energy and
                (acceptance_criteria is None or
                 acceptance_criteria(candidate) <= acceptance_bound)):
            best_energy = candidate_energy
            best_X = candidate
        if delta < 0 or (temperature > 0 and
                         np.random.random() < np.exp(-delta / temperature)):
            X = candidate
            current_energy = candidate_energy
        temperature *= (cooldown_rate) ** iterations
    if lower_bound > -float('inf') and current_energy != lower_bound:
        warnings.warn(f"Lower bound {lower_bound} not achieved after "
                      f"{max_iterations} iterations")
    return best_X
Implement a simulated annealing algorithm with exponential cooling. Has two stopping conditions: 1. Maximum number of iterations; 2. A known lower bound; if none is passed then this is not used. Note that starting with an initial_temperature of zero corresponds to a hill climbing algorithm.
16,528
def configure(self, options, conf): super(ProgressivePlugin, self).configure(options, conf) if (getattr(options, , 0) > 1 and getattr(options, , False)): print ( ) if options.with_bar: options.with_styling = True
Turn style-forcing on if bar-forcing is on. It'd be messy to position the bar but still have the rest of the terminal capabilities emit ''.
16,529
async def set_mode(self, mode, timeout=OTGW_DEFAULT_TIMEOUT):
    cmd = OTGW_CMD_MODE
    status = {}
    ret = await self._wait_for_cmd(cmd, mode, timeout)
    if ret is None:
        return
    if mode is OTGW_MODE_RESET:
        self._protocol.status = {}
        await self.get_reports()
        await self.get_status()
        return dict(self._protocol.status)
    status[OTGW_MODE] = ret
    self._update_status(status)
    return ret
Set the operating mode to either "Gateway" mode (:mode: = OTGW_MODE_GATEWAY or 1) or "Monitor" mode (:mode: = OTGW_MODE_MONITOR or 0), or use this method to reset the device (:mode: = OTGW_MODE_RESET). Return the newly activated mode, or the full renewed status dict after a reset. This method is a coroutine
16,530
def could_scope_out(self): return not self.waiting_for or \ isinstance(self.waiting_for, callable.EndOfStory) or \ self.is_breaking_a_loop()
could bubble up from current scope :return:
16,531
def toDict(self):
    dRet = super(Parent, self).toDict()
    for k, v in iteritems(self._nodes):
        dRet[k] = v.toDict()
    return dRet
To Dict Returns the Parent as a dictionary in the same format as is used in constructing it Returns: dict
16,532
def remove_unit_rules(grammar, inplace=False):
    if inplace is False:
        grammar = copy(grammar)
    res = find_nonterminals_reachable_by_unit_rules(grammar)
    for rule in grammar.rules.copy():
        if _is_unit(rule):
            grammar.rules.remove(rule)
            continue
        for nonterm in grammar.nonterminals:
            path = res.path_rules(nonterm, rule.fromSymbol)
            if len(path) > 0 and path[0].fromSymbol != path[-1].toSymbol:
                created = _create_rule(path, rule)
                grammar.rules.add(created)
    return grammar
Remove unit rules from the grammar. :param grammar: Grammar where remove the rules. :param inplace: True if transformation should be performed in place. False by default. :return: Grammar without unit rules.
16,533
def semantic_similarity(go_id1, go_id2, godag, branch_dist=None):
    dist = semantic_distance(go_id1, go_id2, godag, branch_dist)
    if dist is not None:
        return 1.0 / float(dist)
Finds the semantic similarity (inverse of the semantic distance) between two GO terms.
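Usage with goatools; assumes a local copy of the GO DAG file downloaded from geneontology.org:

from goatools.obo_parser import GODag
from goatools.semantic import semantic_similarity

godag = GODag("go-basic.obo")
print(semantic_similarity("GO:0048364", "GO:0044707", godag))
# 1 / (semantic distance), or None if the terms are unconnected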
16,534
def model_performance(self, test_data=None, train=False, valid=False, xval=False):
    if test_data is None:
        if not train and not valid and not xval:
            train = True
        if train:
            return self._model_json["output"]["training_metrics"]
        if valid:
            return self._model_json["output"]["validation_metrics"]
        if xval:
            return self._model_json["output"]["cross_validation_metrics"]
    else:
        if not isinstance(test_data, h2o.H2OFrame):
            raise ValueError("`test_data` must be of type H2OFrame. Got: "
                             + str(type(test_data)))
        if (self._model_json["response_column_name"] is not None) and \
                not (self._model_json["response_column_name"] in test_data.names):
            print("WARNING: Model metrics cannot be calculated and metric_json is "
                  "empty due to the absence of the response column in your dataset.")
            return
        res = h2o.api("POST /3/ModelMetrics/models/%s/frames/%s"
                      % (self.model_id, test_data.frame_id))
        raw_metrics = None
        for mm in res["model_metrics"]:
            if mm["frame"] is not None and mm["frame"]["name"] == test_data.frame_id:
                raw_metrics = mm
                break
        return self._metrics_class(raw_metrics, algo=self._model_json["algo"])
Generate model metrics for this model on test_data. :param H2OFrame test_data: Data set for which model metrics shall be computed against. All three of train, valid and xval arguments are ignored if test_data is not None. :param bool train: Report the training metrics for the model. :param bool valid: Report the validation metrics for the model. :param bool xval: Report the cross-validation metrics for the model. If train and valid are True, then it defaults to True. :returns: An object of class H2OModelMetrics.
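A usage sketch; the file name and response column are placeholders:

import h2o
from h2o.estimators import H2OGradientBoostingEstimator

h2o.init()
train, test = h2o.import_file("data.csv").split_frame(ratios=[0.8])
model = H2OGradientBoostingEstimator()
model.train(y="response", training_frame=train)
print(model.model_performance(test_data=test))  # metrics on held-out data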
16,535
def _smixins(self, name): return (self._mixins[name] if name in self._mixins else False)
Inner wrapper to search for mixins by name.
16,536
def get_hostfirmware(self, callb=None):
    if self.host_firmware_version is None:
        mypartial = partial(self.resp_set_hostfirmware)
        if callb:
            mycallb = lambda x, y: (mypartial(y), callb(x, y))
        else:
            mycallb = lambda x, y: mypartial(y)
        response = self.req_with_resp(GetHostFirmware, StateHostFirmware, mycallb)
    return (self.host_firmware_version, self.host_firmware_build_timestamp)
Convenience method to request the device firmware info from the device. This method will check whether the value has already been retrieved from the device; if so, it will simply return it. If not, it will request the information from the device and request that callb be executed when a response is received. The default callback will simply cache the value. :param callb: Callable to be used when the response is received. If not set, self.resp_set_hostfirmware will be used. :type callb: callable :returns: The cached value :rtype: str
16,537
def add_host(kwargs=None, call=None): DOMAIN\\userverybadpassvcenter01.domain.comrootmyhostpassword12:A3:45:B6:CD:7E:F8:90:A1:BC:23:45:D6:78:9E:FA:01:2B:34:CD if call != : raise SaltCloudSystemExit( ) host_name = kwargs.get() if kwargs and in kwargs else None cluster_name = kwargs.get() if kwargs and in kwargs else None datacenter_name = kwargs.get() if kwargs and in kwargs else None host_user = config.get_cloud_config_value( , get_configured_provider(), __opts__, search_global=False ) host_password = config.get_cloud_config_value( , get_configured_provider(), __opts__, search_global=False ) host_ssl_thumbprint = config.get_cloud_config_value( , get_configured_provider(), __opts__, search_global=False ) if not host_user: raise SaltCloudSystemExit( ) if not host_password: raise SaltCloudSystemExit( ) if not host_name: raise SaltCloudSystemExit( ) if (cluster_name and datacenter_name) or not(cluster_name or datacenter_name): raise SaltCloudSystemExit( ) si = _get_si() if cluster_name: cluster_ref = salt.utils.vmware.get_mor_by_property(si, vim.ClusterComputeResource, cluster_name) if not cluster_ref: raise SaltCloudSystemExit( ) if datacenter_name: datacenter_ref = salt.utils.vmware.get_mor_by_property(si, vim.Datacenter, datacenter_name) if not datacenter_ref: raise SaltCloudSystemExit( ) spec = vim.host.ConnectSpec( hostName=host_name, userName=host_user, password=host_password, ) if host_ssl_thumbprint: spec.sslThumbprint = host_ssl_thumbprint else: log.warning() try: log.debug() p1 = subprocess.Popen((, ), stdout=subprocess.PIPE, stderr=subprocess.PIPE) p2 = subprocess.Popen((, , , .format(host_name)), stdin=p1.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE) p3 = subprocess.Popen((, , , , ), stdin=p2.stdout, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out = salt.utils.stringutils.to_str(p3.stdout.read()) ssl_thumbprint = out.split()[-1].strip() log.debug(, ssl_thumbprint) spec.sslThumbprint = ssl_thumbprint except Exception as exc: log.error( , host_name, exc, exc_info_on_loglevel=logging.DEBUG ) return {host_name: } try: if cluster_name: task = cluster_ref.AddHost(spec=spec, asConnected=True) ret = .format(cluster_name) if datacenter_name: task = datacenter_ref.hostFolder.AddStandaloneHost(spec=spec, addConnected=True) ret = .format(datacenter_name) salt.utils.vmware.wait_for_task(task, host_name, , 5, ) except Exception as exc: if isinstance(exc, vim.fault.SSLVerifyFault): log.error(s SSL certificate is not verifiedTry again after setting the esxi_host_ssl_thumbprint to %s in provider configurationError while adding host %s: %sfailed to add host'} return {host_name: ret}
Add a host system to the specified cluster or datacenter in this VMware environment .. note:: To use this function, you need to specify ``esxi_host_user`` and ``esxi_host_password`` under your provider configuration set up at ``/etc/salt/cloud.providers`` or ``/etc/salt/cloud.providers.d/vmware.conf``: .. code-block:: yaml vcenter01: driver: vmware user: 'DOMAIN\\user' password: 'verybadpass' url: 'vcenter01.domain.com' # Required when adding a host system esxi_host_user: 'root' esxi_host_password: 'myhostpassword' # Optional fields that can be specified when adding a host system esxi_host_ssl_thumbprint: '12:A3:45:B6:CD:7E:F8:90:A1:BC:23:45:D6:78:9E:FA:01:2B:34:CD' The SSL thumbprint of the host system can be optionally specified by setting ``esxi_host_ssl_thumbprint`` under your provider configuration. To get the SSL thumbprint of the host system, execute the following command from a remote server: .. code-block:: bash echo -n | openssl s_client -connect <YOUR-HOSTSYSTEM-DNS/IP>:443 2>/dev/null | openssl x509 -noout -fingerprint -sha1 CLI Example: .. code-block:: bash salt-cloud -f add_host my-vmware-config host="myHostSystemName" cluster="myClusterName" salt-cloud -f add_host my-vmware-config host="myHostSystemName" datacenter="myDatacenterName"
16,538
def plotOptMod(verNObg3gray, VERgray): if VERgray is None and verNObg3gray is None: return fg = figure() ax2 = fg.gca() if VERgray is not None: z = VERgray.alt_km Ek = VERgray.energy_ev.values props = {: , : , : 0.5} fgs, axs = fg.subplots(6, 6, sharex=True, sharey=) axs = axs.ravel() fgs.subplots_adjust(hspace=0, wspace=0) fgs.suptitle() fgs.text(0.04, 0.5, , va=, rotation=) fgs.text(0.5, 0.04, , ha=) for i, e in enumerate(Ek): axs[i].semilogx(VERgray.loc[:, e], z) axs[i].set_xlim((1e-3, 1e4)) axs[i].text(0.95, 0.95, .format(e)+, transform=axs[i].transAxes, fontsize=12, va=, ha=, bbox=props) for i in range(33, 36): axs[i].axis() ax2.semilogx(VERgray.sum(axis=1), z, label=, color=) ax = figure().gca() for e in Ek: ax.semilogx(VERgray.loc[:, e], z, marker=, label=.format(e)) ax.set_title() ax.set_xlabel() ax.set_ylabel() ax.legend(loc=, fontsize=8) ax.set_xlim((1e-5, 1e5)) ax.grid(True) if verNObg3gray is not None: ax1 = figure().gca() z = verNObg3gray.alt_km Ek = verNObg3gray.energy_ev.values ax1.semilogx(verNObg3gray, z, marker=, label=, color=) ax2.semilogx(verNObg3gray.sum(axis=1), z, label=, color=) ax = figure().gca() for e in Ek: ax.semilogx(verNObg3gray.loc[:, e], z, marker=, label=.format(e)) ax.set_title() ax.set_xlabel() ax.set_ylabel() ax.legend(loc=, fontsize=8) ax.set_xlim((1e-5, 1e5)) ax.grid(True) ax1.set_title() ax1.set_xlabel() ax1.set_ylabel() ax1.grid(True) ax2.set_xlabel() ax2.set_ylabel() ax2.set_title() ax2.legend(loc=) ax2.grid(True)
called from either readTranscar.py or hist-feasibility/plotsnew.py
16,539
def remover(self, id_interface):
    if not is_valid_int_param(id_interface):
        # The message literal was stripped; this wording is an assumption.
        raise InvalidParameterError(
            u'The identifier of the interface is invalid or was not informed.')
    # URL fragments and HTTP method were stripped; 'interface/<id>/' with
    # DELETE is an assumption consistent with the docstring below.
    url = 'interface/' + str(id_interface) + '/'
    code, xml = self.submit(None, 'DELETE', url)
    return self.response(code, xml)
Remove an interface by its identifier. :param id_interface: Interface identifier. :return: None :raise InterfaceNaoExisteError: Interface doesn't exist. :raise InterfaceError: Interface is linked to another interface. :raise InvalidParameterError: The interface identifier is invalid or none. :raise DataBaseError: Networkapi failed to access the database. :raise XMLError: Networkapi failed to generate the XML response.
16,540
import re
from unidecode import unidecode

def parse(self, text):
    if isinstance(text, bytes):
        text = text.decode("ascii")
    text = re.sub(r"\s+", " ", unidecode(text))
    return self.communicate(text + "\n")
Call the server and return the raw results.
16,541
def _scale(x, min_x_value, max_x_value, output_min, output_max):
    if round(min_x_value - max_x_value, 7) == 0:
        # The message literal was stripped; this wording is an assumption.
        raise ValueError('min_x_value is equal to max_x_value')

    def _scale(x):
        min_x_valuef = tf.to_float(min_x_value)
        max_x_valuef = tf.to_float(max_x_value)
        output_minf = tf.to_float(output_min)
        output_maxf = tf.to_float(output_max)
        return ((((tf.to_float(x) - min_x_valuef) * (output_maxf - output_minf)) /
                 (max_x_valuef - min_x_valuef)) + output_minf)

    return _scale(x)
Scale a column to [output_min, output_max]. Assumes the columns's range is [min_x_value, max_x_value]. If this is not true at training or prediction time, the output value of this scale could be outside the range [output_min, output_max]. Raises: ValueError: if min_x_value = max_x_value, as the column is constant.
16,542
import re
from subprocess import PIPE, Popen

def get_scripts():
    # 'npm run' with no arguments lists the scripts defined in package.json.
    # The stripped literals below are assumptions about that output format.
    proc = Popen(['npm', 'run'], stdout=PIPE)
    should_yeild = False
    for line in proc.stdout.readlines():
        line = line.decode()
        if 'available via `npm run-script`:' in line:
            should_yeild = True
            continue
        if should_yeild and re.match(r'^\s\s\S+', line):
            yield line.strip().split()[0]
Get custom npm scripts.
16,543
def DeserializeUnsignedWithoutType(self, reader):
    self.Version = reader.ReadByte()
    self.DeserializeExclusiveData(reader)
    # The class-path string literals below are assumptions based on
    # neo-python's serializable types.
    self.Attributes = reader.ReadSerializableArray(
        'neo.Core.TX.TransactionAttribute.TransactionAttribute',
        max=self.MAX_TX_ATTRIBUTES)
    self.inputs = reader.ReadSerializableArray('neo.Core.CoinReference.CoinReference')
    self.outputs = reader.ReadSerializableArray('neo.Core.TX.Transaction.TransactionOutput')
Deserialize object without reading transaction type data. Args: reader (neo.IO.BinaryReader):
16,544
def _tp_finder(self, dcycle): last_cycle = int(self.se.cycles[len(self.se.cycles)-1]) cyc_tp = list(range(1,last_cycle + dcycle, dcycle)) all_data = array(self.get(cyc_tp,[,,,,])) c_nf = np.zeros(len(all_data)) o_nf = np.zeros(len(all_data)) for i in range(len(all_data)): c_nf[i] = all_data[i][0] + all_data[i][1] o_nf[i] = all_data[i][2] + all_data[i][3] + all_data[i][4] co_ratio = (old_div(c_nf, o_nf)) * 15.9994 / 12.0107 tp_guess = 200 tp_guess_max = 200 gi = 0 gn = 0 while gi != 1 and gn < 10000: tp_ind = list() i = 0 while i < len(co_ratio)-2: gcompar= old_div(1., (dcycle*tp_guess*100.)) slope1 = old_div((co_ratio[i+1]-co_ratio[i]),(dcycle)) slope2 = old_div((co_ratio[i+2]-co_ratio[i+1]),dcycle) if slope1 > gcompar and slope2 < gcompar and co_ratio[i+1] > co_ratio[i]: tp_ind.append(i+1) i += 3 else: i += 1 if abs(len(tp_ind) - tp_guess) < old_div(tp_guess,2): gi = 1 else: gn += 1 tp_guess /= 2 if len(tp_ind) > tp_guess_max: print() tp_startf = zeros(len(tp_ind)) for i in range(len(tp_startf)): tp_startf[i] = cyc_tp[tp_ind[i]] tp_limits = zeros(len(tp_startf)+1) for i in range(len(tp_startf)): tp_limits[i] = tp_startf[i] tp_limits[len(tp_limits)-1] = int(self.se.cycles[len(self.se.cycles)-1]) tp_pos = list() for i in range(len(tp_startf)): tp_pos.append(int(tp_limits[i] + 0.95 * (tp_limits[i+1] - tp_limits[i]))) co_return = zeros(len(tp_pos)) for i in range(len(tp_pos)): co_return[i] = co_ratio[tp_ind[i]] return tp_pos,co_return
Routine to find thermal pulses in a given star; returns an index vector giving the cycle numbers in which the thermal pulses occur. The routine looks for the C/O ratio jumping up repeatedly, so it is only useful in a TP-AGB star. The returned vector indicates the position of the cycle that is at 95% of the thermal pulse (to make sure it's not in the next one and that most of the processing is done). The script also returns the co_ratio vector - the C/O ratio (number fraction) at the given thermal pulse.
16,545
def getBlock(self, block_identifier, full_transactions=False):
    # The stripped method-name literals follow from the docstring below.
    method = select_method_for_block_identifier(
        block_identifier,
        if_predefined='eth_getBlockByNumber',
        if_hash='eth_getBlockByHash',
        if_number='eth_getBlockByNumber',
    )
    result = self.web3.manager.request_blocking(
        method,
        [block_identifier, full_transactions],
    )
    if result is None:
        raise BlockNotFound(f"Block with id: {block_identifier} not found.")
    return result
`eth_getBlockByHash` `eth_getBlockByNumber`
16,546
def get_summary(self):
    func_summaries = [f.get_summary() for f in self.functions]
    modif_summaries = [f.get_summary() for f in self.modifiers]
    return (self.name,
            [str(x) for x in self.inheritance],
            [str(x) for x in self.variables],
            func_summaries,
            modif_summaries)
Return the function summary Returns: (str, list, list, list, list): (name, inheritance, variables, function summaries, modifier summaries)
16,547
async def reset_webhook(self, check=True) -> bool:
    if check:
        wh = await self.bot.get_webhook_info()
        if not wh.url:
            return False
    return await self.bot.delete_webhook()
Reset webhook :param check: check before deleting :return:
16,548
def MobileDeviceProvisioningProfile(self, data=None, subset=None): return self.factory.get_object( jssobjects.MobileDeviceProvisioningProfile, data, subset)
{dynamic_docstring}
16,549
import logging
import uuid

def convert_uuid(self, in_uuid: str, mode: int = 0):
    # Signature fixed from the garbled original; the error-message prefixes
    # were stripped and are assumptions.
    if not isinstance(in_uuid, str):
        raise TypeError("'in_uuid' expected a str value.")
    if not checker.check_is_uuid(in_uuid):
        raise ValueError("{} is not a correct UUID".format(in_uuid))
    if not isinstance(mode, int):
        raise TypeError("'mode' expects an integer value")
    if "isogeo:metadata" in in_uuid:
        in_uuid = "urn:uuid:{}".format(in_uuid.split(":")[-1])
        logging.debug("Isogeo UUID URN spotted: {}".format(in_uuid))
    if mode == 0:
        return uuid.UUID(in_uuid).hex
    elif mode == 1:
        return uuid.UUID(in_uuid).urn
    elif mode == 2:
        urn = uuid.UUID(in_uuid).urn
        return "urn:isogeo:metadata:uuid:{}".format(urn.split(":")[2])
    else:
        raise ValueError("'mode' must be one of: 0 | 1 | 2")
Convert a metadata UUID to its URI equivalent. And conversely. :param str in_uuid: UUID or URI to convert :param int mode: conversion direction. Options: * 0 to HEX * 1 to URN (RFC4122) * 2 to URN (Isogeo specific style)
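For instance (the hex string and the owning instance name `utils` are invented for illustration):

u = "0269803d50c446b09f5060ef7fe3e22b"
print(utils.convert_uuid(u, mode=1))
# urn:uuid:0269803d-50c4-46b0-9f50-60ef7fe3e22b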
16,550
def url_defaults(self, fn):
    self._defer(lambda bp: bp.url_defaults(fn))
    return fn
Callback function for URL defaults for this bundle. It's called with the endpoint and values and should update the values passed in place.
16,551
def readB1header(filename): return hed
Read beamline B1 (HASYLAB, Hamburg) header data Input ----- filename: string the file name. If ends with ``.gz``, it is fed through a ``gunzip`` filter Output ------ A header dictionary. Examples -------- read header data from 'ORG000123.DAT':: header=readB1header('ORG00123.DAT')
16,552
def extract_entry(self, e, decompress='auto'):
    # The stripped literals follow directly from the function names and the
    # docstring below.
    self.fileobj.seek(e.offset)
    stream = file_iter(self.fileobj)
    stream = takeexactly(stream, e.size)
    if decompress == 'auto':
        stream = auto_decompress_stream(stream)
    elif decompress == 'bz2':
        stream = bz2_decompress_stream(stream)
    elif decompress == 'xz':
        stream = xz_decompress_stream(stream)
    elif decompress is None:
        pass
    else:
        raise ValueError("Unsupported decompression type: {}".format(decompress))
    for block in stream:
        yield block
Yield blocks of data for this entry from this MAR file. Args: e (:obj:`mardor.format.index_entry`): An index_entry object that refers to this file's size and offset inside the MAR file. decompress (str, optional): Controls whether files are decompressed when extracted. Must be one of None, 'auto', 'bz2', or 'xz'. Defaults to 'auto'. Yields: Blocks of data for `e`
16,553
def category(self, category_id, country=None, locale=None):
    # The Route verb and path were stripped; these values are assumptions
    # based on the Spotify Web API "Get a Category" endpoint.
    route = Route('GET', '/browse/categories/{category_id}',
                  category_id=category_id)
    payload = {}
    if country:
        payload['country'] = country
    if locale:
        payload['locale'] = locale
    return self.request(route, params=payload)
Get a single category used to tag items in Spotify. Parameters ---------- category_id : str The Spotify category ID for the category. country : COUNTRY_TP COUNTRY locale : LOCALE_TP LOCALE
16,554
def read_config(self):
    self.ssl = self.get_option("ssl")
    self.tank_type = self.get_option("tank_type")
    # "gatling_ip" and the ' ' join separator are assumptions; the original
    # literals were stripped.
    self.gatling = ' '.join(self.get_option("gatling_ip").split("\n"))
    self.method_prefix = self.get_option("method_prefix")
    self.method_options = self.get_option("method_options")
    self.source_log_prefix = self.get_option("source_log_prefix")
    self.phantom_http_line = self.get_option("phantom_http_line")
    self.phantom_http_field_num = self.get_option("phantom_http_field_num")
    self.phantom_http_field = self.get_option("phantom_http_field")
    self.phantom_http_entity = self.get_option("phantom_http_entity")
    self.address = self.get_option("address")
    do_test_connect = self.get_option("connection_test")
    explicit_port = self.get_option("port", "")
    self.ipv6, self.resolved_ip, self.port, self.address = self.address_wizard.resolve(
        self.address, do_test_connect, explicit_port)
    logger.info(
        "Resolved %s into %s:%s", self.address, self.resolved_ip, self.port)
    self.client_cipher_suites = self.get_option("client_cipher_suites", "")
    self.client_certificate = self.get_option("client_certificate", "")
    self.client_key = self.get_option("client_key", "")
    self.stepper_wrapper.read_config()
reads config
16,555
def arrow_get(string): if in string: string = string.replace(, ) if in string: return arrow.get(string) string = string.rstrip() return arrow.get(string, DATE_FORMATS[len(string)])
this function exists because ICS uses ISO 8601 without dashes or colons, i.e. not ISO 8601 at all.
16,556
def _process_diseasegene(self, limit): if self.test_mode: graph = self.testgraph else: graph = self.graph line_counter = 0 model = Model(graph) myfile = .join((self.rawdir, self.files[][])) for event, elem in ET.iterparse(myfile): if elem.tag == : disorder_num = elem.find().text disorder_id = + str(disorder_num) if self.test_mode and disorder_id not in self.all_test_ids[]: continue disorder_label = elem.find().text model.addClassToGraph(disorder_id, disorder_label) assoc_list = elem.find() expected_genes = assoc_list.get() LOG.info( , expected_genes, disorder_id) processed_genes = 0 for assoc in assoc_list.findall(): processed_genes += 1 gene = assoc.find() disorder_id, expected_genes, processed_genes) if self.test_mode and limit is not None and line_counter > limit: return return
:param limit: :return:
16,557
def filter(self, data, collection, **kwargs):
    if not data or self.filters is None:
        return None, collection
    filters = {}
    for f in self.filters:
        if f.name not in data:
            continue
        ops, collection = f.filter(collection, data, **kwargs)
        filters[f.name] = ops
    return filters, collection
Filter given collection.
16,558
import ast

def is_valid_python(tkn: str) -> bool:
    try:
        root = ast.parse(tkn)
    except SyntaxError:
        return False
    return len(root.body) == 1 and isinstance(root.body[0], ast.Expr) \
        and isinstance(root.body[0].value, ast.Name)
Determine whether tkn is a valid python identifier :param tkn: :return:
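For example:

print(is_valid_python("foo"))      # True  - a single bare name
print(is_valid_python("foo.bar"))  # False - an Attribute node, not a Name
print(is_valid_python("1abc"))     # False - SyntaxError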
16,559
def _convert_connected_app(self):
    if self.services and "connected_app" in self.services:
        return
    connected_app = self.get_connected_app()
    if not connected_app:
        return
    self.logger.warning(
        "Reading Connected App info from deprecated config."
        " Connected App should be changed to a service."
        " If using environment keychain, update the environment variable."
        " Otherwise, it has been handled automatically and you should not"
        " see this message again."
    )
    ca_config = ServiceConfig(
        {
            "callback_url": connected_app.callback_url,
            "client_id": connected_app.client_id,
            "client_secret": connected_app.client_secret,
        }
    )
    self.set_service("connected_app", ca_config)
Convert Connected App to service
16,560
def _handle_upsert(self, parts, unwritten_lobs=()):
    self.description = None
    self._received_last_resultset_part = True
    for part in parts:
        if part.kind == part_kinds.ROWSAFFECTED:
            self.rowcount = part.values[0]
        elif part.kind in (part_kinds.TRANSACTIONFLAGS,
                           part_kinds.STATEMENTCONTEXT,
                           part_kinds.PARAMETERMETADATA):
            pass
        elif part.kind == part_kinds.WRITELOBREPLY:
            for lob_buffer, lob_locator_id in izip(unwritten_lobs, part.locator_ids):
                lob_buffer.locator_id = lob_locator_id
            self._perform_lob_write_requests(unwritten_lobs)
        else:
            raise InterfaceError("Prepared insert statement response, "
                                 "unexpected part kind %d." % part.kind)
    self._executed = True
Handle reply messages from INSERT or UPDATE statements
16,561
def get_form(self, **kwargs):
    if not hasattr(self, "form_class"):
        raise AttributeError(_("You must define a form_class"))
    return self.form_class(**kwargs)
Returns the form for registering or inviting a user
16,562
def _selection_by_callable(self, view, num_slices, non_empty_slices):
    selected = [sl for sl in non_empty_slices
                if self._sampler(self._get_axis(self._image, view, sl))]
    return selected[:num_slices]
Returns all the slices selected by the given callable.
16,563
def transform(self, X):
    iclustup = []
    dims = self.n_components
    if hasattr(self, 'v'):  # attribute name assumed from its use below
        if X.shape[1] == self.v.shape[0]:
            X = X @ self.v
            nclust = self.n_X
            AtS = self.A.T @ self.S
            vnorm = np.sum(self.S * (self.A @ AtS), axis=0)[np.newaxis, :]
            cv = X @ AtS
            cmap = np.maximum(0., cv) ** 2 / vnorm
            iclustup, cmax = upsample(np.sqrt(cmap), dims, nclust, 10)
        else:
            print('data has incorrect shape')  # message literal assumed
    else:
        print('model has not been fit yet')  # message literal assumed
    if iclustup.ndim > 1:
        iclustup = iclustup.T
    else:
        iclustup = iclustup.flatten()
    return iclustup
if already fit, can add new points and see where they fall
16,564
def resize(self, dims):
    width, height = dims[:2]
    self.dims = (width, height)
    self.logger.debug("renderer reconfigured to %dx%d" % (width, height))
    depth = len(self.rgb_order)
    self.surface = np.zeros((height, width, depth), dtype=np.uint8)
Resize our drawing area to encompass a space defined by the given dimensions.
16,565
def database_caller_creator(self, host, port, name=None):
    name = name or 0
    client = redis.StrictRedis(host=host, port=port, db=name)
    pipe = client.pipeline(transaction=False)
    return client, pipe
creates a redis connection object which will be later used to modify the db
16,566
def get_mac_address_table(self): RE_MACTABLE_DEFAULT = r"^" + MAC_REGEX RE_MACTABLE_6500_1 = r"^\*\s+{}\s+{}\s+".format( VLAN_REGEX, MAC_REGEX ) RE_MACTABLE_6500_2 = r"^{}\s+{}\s+".format(VLAN_REGEX, MAC_REGEX) RE_MACTABLE_6500_3 = r"^\s{51}\S+" RE_MACTABLE_6500_4 = r"^R\s+{}\s+.*Router".format( VLAN_REGEX, MAC_REGEX ) RE_MACTABLE_6500_5 = r"^R\s+N/A\s+{}.*Router".format( MAC_REGEX ) RE_MACTABLE_4500_1 = r"^{}\s+{}\s+".format(VLAN_REGEX, MAC_REGEX) RE_MACTABLE_4500_2 = r"^\s{32,34}\S+" RE_MACTABLE_4500_3 = r"^{}\s+{}\s+".format( INT_REGEX, MAC_REGEX ) RE_MACTABLE_2960_1 = r"^All\s+{}".format(MAC_REGEX) RE_MACTABLE_GEN_1 = r"^{}\s+{}\s+".format( VLAN_REGEX, MAC_REGEX ) def process_mac_fields(vlan, mac, mac_type, interface): if mac_type.lower() in ["self", "static", "system"]: static = True if vlan.lower() == "all": vlan = 0 if ( interface.lower() == "cpu" or re.search(r"router", interface.lower()) or re.search(r"switch", interface.lower()) ): interface = "" else: static = False return { "mac": napalm.base.helpers.mac(mac), "interface": self._canonical_int(interface), "vlan": int(vlan), "static": static, "active": True, "moves": -1, "last_move": -1.0, } mac_address_table = [] command = IOS_COMMANDS["show_mac_address"] output = self._send_command(command) output = re.split(r"^----.*", output, flags=re.M)[1:] output = "\n".join(output).strip() output = re.sub(r"^\*", "", output, flags=re.M) fill_down_vlan = fill_down_mac = fill_down_mac_type = "" for line in output.splitlines(): if re.search(RE_MACTABLE_6500_3, line) or re.search( RE_MACTABLE_4500_2, line ): interface = line.strip() if "," in interface: interfaces = interface.split(",") else: interfaces = [interface] for single_interface in interfaces: mac_address_table.append( process_mac_fields( fill_down_vlan, fill_down_mac, fill_down_mac_type, single_interface, ) ) continue line = line.strip() if line == "": continue if re.search(r"^---", line): line = re.sub(r"^---", "0", line, flags=re.M) if re.search(RE_MACTABLE_DEFAULT, line): if len(line.split()) == 4: mac, mac_type, vlan, interface = line.split() mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) else: raise ValueError("Unexpected output from: {}".format(line.split())) elif ( re.search(RE_MACTABLE_6500_1, line) or re.search(RE_MACTABLE_6500_2, line) ) and len(line.split()) >= 6: if len(line.split()) == 7: _, vlan, mac, mac_type, _, _, interface = line.split() elif len(line.split()) == 6: vlan, mac, mac_type, _, _, interface = line.split() if "," in interface: interfaces = interface.split(",") fill_down_vlan = vlan fill_down_mac = mac fill_down_mac_type = mac_type for single_interface in interfaces: mac_address_table.append( process_mac_fields(vlan, mac, mac_type, single_interface) ) else: mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) elif re.search(RE_MACTABLE_4500_1, line) and len(line.split()) == 5: vlan, mac, mac_type, _, interface = line.split() mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) elif re.search(RE_MACTABLE_4500_3, line) and len(line.split()) == 5: interface, mac, mac_type, _, _ = line.split() interface = canonical_interface_name(interface) vlan = "-1" mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) elif re.search(r"^Vlan\s+Mac Address\s+", line): continue elif ( re.search(RE_MACTABLE_2960_1, line) or re.search(RE_MACTABLE_GEN_1, line) ) and len(line.split()) == 4: vlan, mac, mac_type, interface = line.split() if "," in interface: interfaces = 
interface.split(",") fill_down_vlan = vlan fill_down_mac = mac fill_down_mac_type = mac_type for single_interface in interfaces: mac_address_table.append( process_mac_fields(vlan, mac, mac_type, single_interface) ) else: mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) elif re.search(RE_MACTABLE_4500_1, line) and len(line.split()) == 3: vlan, mac, mac_type = line.split() mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface="") ) elif re.search(RE_MACTABLE_4500_3, line) and len(line.split()) == 4: vlan, mac, mac_type, interface = line.split() vlan = "-1" mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) elif re.search(RE_MACTABLE_6500_4, line) and len(line.split()) == 7: line = re.sub(r"^R\s+", "", line) vlan, mac, mac_type, _, _, interface = line.split() mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) continue elif re.search(RE_MACTABLE_6500_5, line): line = re.sub(r"^R\s+", "", line) vlan, mac, mac_type, _, _, interface = line.split() vlan = re.sub(r"N/A", "0", vlan) mac_address_table.append( process_mac_fields(vlan, mac, mac_type, interface) ) continue elif re.search(r"Total Mac Addresses", line): continue elif re.search(r"Multicast Entries", line): continue elif re.search(r"vlan.*mac.*address.*type.*", line): continue elif re.search( r"Displaying entries from active supervisor:\s+\w+\s+\[\d\]:", line ): continue else: raise ValueError("Unexpected output from: {}".format(repr(line))) return mac_address_table
Returns a lists of dictionaries. Each dictionary represents an entry in the MAC Address Table, having the following keys * mac (string) * interface (string) * vlan (int) * active (boolean) * static (boolean) * moves (int) * last_move (float) Format1: Destination Address Address Type VLAN Destination Port ------------------- ------------ ---- -------------------- 6400.f1cf.2cc6 Dynamic 1 Wlan-GigabitEthernet0 Cat 6500: Legend: * - primary entry age - seconds since last seen n/a - not available vlan mac address type learn age ports ------+----------------+--------+-----+----------+-------------------------- * 999 1111.2222.3333 dynamic Yes 0 Port-channel1 999 1111.2222.3333 dynamic Yes 0 Port-channel1 Cat 4948 Unicast Entries vlan mac address type protocols port -------+---------------+--------+---------------------+-------------------- 999 1111.2222.3333 dynamic ip Port-channel1 Cat 2960 Mac Address Table ------------------------------------------- Vlan Mac Address Type Ports ---- ----------- -------- ----- All 1111.2222.3333 STATIC CPU
16,567
def image_member(self):
    uri = "/%s/member" % self.uri_base
    resp, resp_body = self.api.method_get(uri)
    return resp_body
Returns a json-schema document that represents an image member entity. (a container of member entities).
16,568
def _get_route_args(self, namespace, route, tag=False): data_type, _ = unwrap_nullable(route.arg_data_type) if is_struct_type(data_type): arg_list = [] for field in data_type.all_fields: arg_list.append((fmt_var(field.name), fmt_type( field.data_type, tag=tag, has_default=field.has_default))) doc_list = [(fmt_var(f.name), self.process_doc(f.doc, self._docf)) for f in data_type.fields if f.doc] elif is_union_type(data_type): arg_list = [(fmt_var(data_type.name), fmt_type( route.arg_data_type, tag=tag))] doc_list = [(fmt_var(data_type.name), self.process_doc(data_type.doc, self._docf) if data_type.doc else .format( fmt_class(data_type .name)))] else: arg_list = [] doc_list = [] return arg_list, doc_list
Returns a list of name / value string pairs representing the arguments for a particular route.
16,569
def _parse_action(action):
    # The stripped literals are reconstructed from the docstring below; the
    # dict keys ('name', 'args', 'star') are hypothetical placeholders.
    i_open = action.find('(')
    if i_open == -1:
        return {'name': action, 'args': [], 'star': False}
    i_close = action.rfind(')')
    if i_close == -1:
        raise Exception('Unmatched bracket in action: %s' % action)
    action_name = action[:i_open]
    arglist = action[i_open + 1:i_close].strip()
    if not arglist:
        return {'name': action_name, 'args': [], 'star': False}
    if '*' in arglist:
        return {'name': action_name, 'args': [], 'star': True}
    return {'name': action_name, 'args': _parse_arg_list(arglist), 'star': False}
Parses a single action item, for instance one of the following: m; m(); m(True); m(*) The brackets must match.
16,570
def findattr(self, name, resolved=True): name = % name parent = self.top().resolved if parent is None: result, ancestry = self.query(name, node) else: result, ancestry = self.getchild(name, parent) if result is None: return result if resolved: result = result.resolve() return result
Find an attribute type definition. @param name: An attribute name. @type name: basestring @param resolved: A flag indicating that the fully resolved type should be returned. @type resolved: boolean @return: The found schema I{type} @rtype: L{xsd.sxbase.SchemaObject}
16,571
def _preprocess_Y(self, Y, k):
    Y = Y.clone()
    if Y.dim() == 1 or Y.shape[1] == 1:
        Y = pred_to_prob(Y.long(), k=k)
    return Y
Convert Y to prob labels if necessary
16,572
def get_totals_by_payee(self, account, start_date=None, end_date=None):
    # 'payee' and 'amount' field names are assumptions inferred from the
    # docstring; the original literals were stripped.
    qs = Transaction.objects.filter(account=account, parent__isnull=True)
    qs = qs.values('payee').annotate(models.Sum('amount'))
    qs = qs.order_by('payee')
    return qs
Returns transaction totals grouped by Payee.
16,573
def get_response_object(self, service_id, version_number, name):
    content = self._fetch("/service/%s/version/%d/response_object/%s"
                          % (service_id, version_number, name))
    return FastlyResponseObject(self, content)
Gets the specified Response Object.
16,574
def tv_to_rdf(infile_name, outfile_name):
    parser = Parser(Builder(), StandardLogger())
    parser.build()
    with open(infile_name) as infile:
        data = infile.read()
        document, error = parser.parse(data)
        if not error:
            with open(outfile_name, mode='w') as outfile:
                write_document(document, outfile)
            return True
        else:
            # The message literal was stripped; this wording is an assumption.
            print('Errors encountered while parsing tag/value file:')
            messages = []
            document.validate(messages)
            print('\n'.join(messages))
            return False
Convert a SPDX file from tag/value format to RDF format. Return True on success, False otherwise.
16,575
def from_mongo(cls, doc):
    if doc is None:
        return None
    if isinstance(doc, Document):
        return doc
    if cls.__type_store__ and cls.__type_store__ in doc:
        # The stripped second argument to load() is assumed to be the
        # 'document' plugin namespace.
        cls = load(doc[cls.__type_store__], 'document')
    instance = cls(_prepare_defaults=False)
    instance.__data__ = doc
    instance._prepare_defaults()
    return instance
Convert data coming in from the MongoDB wire driver into a Document instance.
16,576
def reload_configuration(self, event):
    if event.target == self.uniquename:
        self.log('Reloading configuration')  # message literal assumed
        self._read_config()
Event triggered configuration reload
16,577
def setValue(self, newText): newText = str(newText) if self.text == newText: return self.text = newText textLines = self.text.splitlines() nLines = len(textLines) surfacesList = [] actualWidth = 0 for line in textLines: lineSurface = self.font.render(line, True, self.textColor) surfacesList.append(lineSurface) thisRect = lineSurface.get_rect() if thisRect.width > actualWidth: actualWidth = thisRect.width heightOfOneLine = self.fontHeight actualHeight = nLines * heightOfOneLine self.rect = pygame.Rect(self.loc[0], self.loc[1], actualWidth, actualHeight) self.textImage = pygame.Surface((actualWidth, actualHeight), flags=SRCALPHA) if self.backgroundColor is not None: self.textImage.fill(self.backgroundColor) thisLineTop = 0 for lineSurface in surfacesList: if self.justified == : self.textImage.blit(lineSurface, (0, thisLineTop)) else: thisSurfaceWidth = lineSurface.get_rect()[2] if self.justified == : theLeft = (actualWidth - thisSurfaceWidth) / 2 elif self.justified == : theLeft = actualWidth - thisSurfaceWidth else: raise Exception( + self.justified + ) self.textImage.blit(lineSurface, (theLeft, thisLineTop)) thisLineTop = thisLineTop + heightOfOneLine if self.useSpecifiedArea: textRect = self.textImage.get_rect() if self.userWidth is None: theWidth = textRect.width else: theWidth = self.userWidth if self.userHeight is None: theHeight = textRect.height else: theHeight = self.userHeight userSizedImage = pygame.Surface((theWidth, theHeight), flags=SRCALPHA) self.rect = pygame.Rect(self.loc[0], self.loc[1], theWidth, theHeight) if self.backgroundColor is not None: userSizedImage.fill(self.backgroundColor) if self.justified == : theLeft = 0 elif self.justified == : theLeft = (theWidth - textRect.width) / 2 else: theLeft = theWidth - textRect.width userSizedImage.blit(self.textImage, (theLeft, 0)) self.textImage = userSizedImage self.textImage = pygame.Surface.convert_alpha(self.textImage)
Sets a text value (string) into the text field.
16,578
def select(self, key=None, val=None, touch=None, log=, out=int): assert out in [int,bool] assert log in [,,] C = [key is None,touch is None] assert np.sum(C)>=1 if np.sum(C)==2: ind = np.ones((self.nRays,),dtype=bool) else: if key is not None: assert type(key) is str and key in self._dchans.keys() ltypes = [str,int,float,np.int64,np.float64] C0 = type(val) in ltypes C1 = type(val) in [list,tuple,np.ndarray] assert C0 or C1 if C0: val = [val] else: assert all([type(vv) in ltypes for vv in val]) ind = np.vstack([self._dchans[key]==ii for ii in val]) if log==: ind = np.any(ind,axis=0) elif log==: ind = np.all(ind,axis=0) else: ind = ~np.any(ind,axis=0) elif touch is not None: lint = [int,np.int64] larr = [list,tuple,np.ndarray] touch = [touch] if not type(touch) is list else touch assert len(touch) in [1,2,3] def _check_touch(tt): cS = type(tt) is str and len(tt.split())==2 c0 = type(tt) in lint c1 = type(tt) in larr and len(tt)>=0 c1 = c1 and all([type(t) in lint for t in tt]) return cS, c0, c1 for ii in range(0,3-len(touch)): touch.append([]) ntouch = len(touch) assert ntouch == 3 for ii in range(0,ntouch): cS, c0, c1 = _check_touch(touch[ii]) if not (cS or c0 or c1): msg = "Provided touch is not valid:\n"%touch msg += " - Provided: %s\n"%str(touch) msg += "Please provide either:\n" msg += " - str in the form \n" msg += " - int (index)\n" msg += " - array of int indices" raise Exception(msg) if cS: lS = self.lStruct_computeInOut k0, k1 = touch[ii].split() ind = [jj for jj in range(0,len(lS)) if lS[jj].Id.Cls==k0 and lS[jj].Id.Name==k1] assert len(ind)==1 touch[ii] = [ind[0]] elif c0: touch[ii] = [touch[ii]] ind = np.zeros((ntouch,self.nRays),dtype=bool) for i in range(0,ntouch): if len(touch[i])==0: ind[i,:] = True else: for n in range(0,len(touch[i])): ind[i,:] = np.logical_or(ind[i,:], self._dgeom[][i,:]==touch[i][n]) ind = np.all(ind,axis=0) if log==: ind[:] = ~ind if out is int: ind = ind.nonzero()[0] return ind
Return the indices of the rays matching selection criteria The criterion can be of two types: - a key found in self.dchans, with a matching value - a touch tuple (indicating which element in self.config is touched by the desired rays) Parameters ---------- key : None / str A key to be found in self.dchans val : int / str / float / list of such The value to be matched If a list of values is provided, the behaviour depends on log log : str A flag indicating which behaviour to use when val is a list - any : Returns indices of rays matching any value in val - all : Returns indices of rays matching all values in val - not : Returns indices of rays matching None of the val touch: None / str / int / tuple Used if key is None Tuple that can be of len()=1, 2 or 3 Tuple indicating you want the rays that are touching some specific elements of self.config: - touch[0] : str / int or list of such str : a 'Cls_Name' string indicating the element int : the index of the element in self.lStruct_computeInOut - touch[1] : int / list of int Indices of the desired segments on the polygon (i.e.: of the cross-section polygon of the above element) - touch[2] : int / list of int Indices, if relevant, of the toroidal / linear unit Only relevant when the element has noccur>1 In this case only log='not' has an effect out : str Flag indicating whether to return: - bool : a (nRays,) boolean array of indices - int : a (N,) array of int indices (N=number of matching rays) Returns ------- ind : np.ndarray The array of matching rays
16,579
def read_detections(fname):
    f = open(fname, 'r')
    detections = []
    for index, line in enumerate(f):
        if index == 0:
            continue  # Skip the header line
        if line.rstrip().split('; ')[0] == 'Template name':
            continue  # Skip any repeated headers
        detection = line.rstrip().split('; ')
        detection[1] = UTCDateTime(detection[1])
        detection[2] = int(float(detection[2]))
        detection[3] = ast.literal_eval(detection[3])
        detection[4] = float(detection[4])
        detection[5] = float(detection[5])
        if len(detection) < 9:
            # Older files lack the threshold_input and typeofdet columns;
            # the placeholder string here is a reconstruction, the original
            # literal was stripped.
            detection.extend(['unset', float('NaN')])
        else:
            detection[7] = float(detection[7])
        detections.append(Detection(
            template_name=detection[0],
            detect_time=detection[1],
            no_chans=detection[2],
            detect_val=detection[4],
            threshold=detection[5],
            threshold_type=detection[6],
            threshold_input=detection[7],
            typeofdet=detection[8],
            chans=detection[3]))
    f.close()
    return detections
Read detections from a file to a list of Detection objects. :type fname: str :param fname: File to read from, must be a file written to by \ Detection.write. :returns: list of :class:`eqcorrscan.core.match_filter.Detection` :rtype: list .. note:: :class:`eqcorrscan.core.match_filter.Detection`'s returned do not contain Detection.event
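A short usage sketch; 'detections.csv' is a hypothetical file previously written by Detection.write, as the docstring requires.

detections = read_detections('detections.csv')
for d in detections:
    print(d.template_name, d.detect_time, d.detect_val)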
16,580
def set_role(username, role):
    try:
        # The CLI command strings below follow the standard NX-OS-style
        # pattern; the original literals were stripped in extraction.
        sendline('config terminal')
        role_line = 'username {0} role {1}'.format(username, role)
        ret = sendline(role_line)
        sendline('end')
        sendline('copy running-config startup-config')
        return '\n'.join([role_line, ret])
    except TerminalException as e:
        log.error(e)
        return 'Failed to set role'
Assign role to username .. code-block:: bash salt '*' onyx.cmd set_role username=daniel role=vdc-admin
16,581
def _sanity_check_block_pairwise_constraints(ir_blocks):
    for first_block, second_block in pairwise(ir_blocks):
        # A Filter block must never immediately follow a MarkLocation block.
        if isinstance(first_block, MarkLocation) and isinstance(second_block, Filter):
            raise AssertionError(
                u'Found Filter immediately after MarkLocation: {}'.format(ir_blocks))
Assert that adjacent blocks obey all invariants.
16,582
def _select_next_server(self): while True: if len(self._server_pool) == 0: self._current_server = None raise ErrNoServers now = time.monotonic() s = self._server_pool.pop(0) if self.options["max_reconnect_attempts"] > 0: if s.reconnects > self.options["max_reconnect_attempts"]: continue self._server_pool.append(s) if s.last_attempt is not None and now < s.last_attempt + self.options["reconnect_time_wait"]: yield from asyncio.sleep(self.options["reconnect_time_wait"], loop=self._loop) try: s.last_attempt = time.monotonic() r, w = yield from asyncio.open_connection( s.uri.hostname, s.uri.port, loop=self._loop, limit=DEFAULT_BUFFER_SIZE) self._current_server = s self._bare_io_reader = self._io_reader = r self._bare_io_writer = self._io_writer = w break except Exception as e: s.last_attempt = time.monotonic() s.reconnects += 1 self._err = e if self._error_cb is not None: yield from self._error_cb(e) continue
Looks up in the server pool for an available server and attempts to connect.
16,583
def getLVstats(self, *args): if not len(args) in (1, 2): raise TypeError("The getLVstats must be called with either " "one or two arguments.") if self._vgTree is None: self._initDMinfo() if len(args) == 1: dmdev = self._mapLVname2dm.get(args[0]) else: dmdev = self._mapLVtuple2dm.get(args) if dmdev is not None: return self.getDevStats(dmdev) else: return None
Returns I/O stats for LV. @param args: Two calling conventions are implemented: - Passing two parameters vg and lv. - Passing only one parameter in 'vg-lv' format. @return: Dict of stats.
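A sketch of the two calling conventions described above. 'dm' is assumed to be an instance of the class exposing getLVstats, and the VG/LV names are examples.

stats = dm.getLVstats('vg00', 'lv_home')   # two-argument form: vg, lv
stats = dm.getLVstats('vg00-lv_home')      # single argument in 'vg-lv' format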
16,584
def add_to_env(self, content):
    if not self.rewrite_config:
        raise DirectoryException("Error! Directory was not initialized w/ rewrite_config.")
    if not self.env_file:
        self.env_path, self.env_file = self.__get_env_handle(self.root_dir)
    self.env_file.write(content + '\n')
add content to the env script.
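A minimal usage sketch; 'directory' is assumed to be an instance created with rewrite_config enabled, otherwise add_to_env raises DirectoryException.

directory.add_to_env('export PATH="$PATH:/opt/tool/bin"')
directory.add_to_env('source activate myenv')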
16,585
def CreateTask(self, session_identifier):
    task = tasks.Task(session_identifier)
    logger.debug('Created task: {0:s}.'.format(task.identifier))

    with self._lock:
        self._tasks_queued[task.identifier] = task
        self._total_number_of_tasks += 1

        self.SampleTaskStatus(task, 'created')

    return task
Creates a task. Args: session_identifier (str): the identifier of the session the task is part of. Returns: Task: task attribute container.
16,586
def coge(args):
    p = OptionParser(coge.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 1:
        sys.exit(not p.print_help())

    cogefile, = args
    fp = must_open(cogefile)
    cogefile = cogefile.replace(".gz", "")
    ksfile = cogefile + ".ks"
    anchorsfile = cogefile + ".anchors"
    fw_ks = must_open(ksfile, "w")
    fw_ac = must_open(anchorsfile, "w")

    tag = "###"  # block separator; the literal was eaten by comment-stripping
    print(tag, file=fw_ks)
    for header, lines in read_block(fp, tag):
        print(tag, file=fw_ac)
        lines = list(lines)
        for line in lines:
            if line[0] == '#':
                continue
            ks, ka, achr, a, astart, astop, bchr, \
                b, bstart, bstop, ev, ss = line.split()
            a = a.split("||")[3]
            b = b.split("||")[3]
            print("\t".join((a, b, ev)), file=fw_ac)
            print(",".join((";".join((a, b)), ks, ka, ks, ka)), file=fw_ks)

    fw_ks.close()
    fw_ac.close()
%prog coge cogefile Convert CoGe file to anchors file.
16,587
def on_step_end(self, **kwargs): "Put the LR back to its value if necessary." if not self.learn.gan_trainer.gen_mode: self.learn.opt.lr /= self.mult_lr
Put the LR back to its value if necessary.
16,588
def compute_xy( self, projection: Union[pyproj.Proj, crs.Projection, None] = None ): if isinstance(projection, crs.Projection): projection = pyproj.Proj(projection.proj4_init) if projection is None: projection = pyproj.Proj( proj="lcc", lat_1=self.data.latitude.min(), lat_2=self.data.latitude.max(), lat_0=self.data.latitude.mean(), lon_0=self.data.longitude.mean(), ) x, y = pyproj.transform( pyproj.Proj(init="EPSG:4326"), projection, self.data.longitude.values, self.data.latitude.values, ) return self.__class__(self.data.assign(x=x, y=y))
Computes x and y columns from latitudes and longitudes. The source projection is WGS84 (EPSG 4326). The default destination projection is a Lambert Conformal Conical projection centered on the data inside the dataframe. For consistency reasons with pandas DataFrame, a new Traffic structure is returned.
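A short sketch of projecting a Traffic object; 'traffic' is an assumed instance of the class above.

import pyproj
projected = traffic.compute_xy()  # default: Lambert Conformal Conic over the data
utm = traffic.compute_xy(pyproj.Proj(proj='utm', zone=31, ellps='WGS84'))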
16,589
def generate_documentation(schema):
    documentation_title = "Configuration documentation"
    documentation = documentation_title + "\n"
    documentation += "=" * len(documentation_title) + "\n"
    for section_name in schema:
        section_created = False
        for option_name in schema[section_name]:
            option = schema[section_name][option_name]

            if not section_created:
                documentation += "\n"
                documentation += section_name + "\n"
                documentation += "-" * len(section_name) + "\n"
                section_created = True

            documentation += "\n"
            documentation += option_name + "\n"
            documentation += "~" * len(option_name) + "\n"

            # The option keys below ('required', 'type', 'description',
            # 'default', 'deprecated') and the underline characters are
            # reconstructions; the original literals were stripped.
            if option.get('required'):
                documentation += "** This option is required! **\n"
            if option.get('type'):
                documentation += ':Type: %s\n' % option.get('type')
            if option.get('description'):
                documentation += option.get('description') + "\n"
            if option.get('default'):
                documentation += ':Default: %s\n' % option.get('default')
            if option.get('deprecated'):
                documentation += "** This option is deprecated! **\n"
    return documentation
Generates reStructuredText documentation from a Confirm file. :param schema: Dictionary representing the Confirm schema. :returns: String representing the reStructuredText documentation.
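An example schema shaped after the option keys used above; the exact key names follow the reconstruction noted in the code.

schema = {
    'database': {
        'host': {
            'required': True,
            'type': 'str',
            'description': 'Hostname of the database server.',
            'default': 'localhost',
        },
    }
}
print(generate_documentation(schema))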
16,590
def add_ospf_area(self, ospf_area, ospf_interface_setting=None, network=None,
                  communication_mode='not_forced', unicast_ref=None):
    communication_mode = communication_mode.upper()
    destinations = [] if not ospf_interface_setting else [ospf_interface_setting]

    if communication_mode == 'UNICAST' and unicast_ref:
        # Unicast requires a reference to the remote host element
        destinations.append(unicast_ref)

    routing_node_gateway = RoutingNodeGateway(
        ospf_area,
        communication_mode=communication_mode,
        destinations=destinations)
    return self._add_gateway_node('ospf', routing_node_gateway, network)
Add OSPF Area to this routing node. Communication mode specifies how the interface will interact with the adjacent OSPF environment. Please see SMC API documentation for more in depth information on each option. If the interface has multiple networks nested below, all networks will receive the OSPF area by default unless the ``network`` parameter is specified. OSPF cannot be applied to IPv6 networks. Example of adding an area to interface routing node:: area = OSPFArea('area0') #obtain area resource #Set on routing interface 0 interface = engine.routing.get(0) interface.add_ospf_area(area) .. note:: If UNICAST is specified, you must also provide a unicast_ref of element type Host to identify the remote host. If no unicast_ref is provided, this is skipped :param OSPFArea ospf_area: OSPF area instance or href :param OSPFInterfaceSetting ospf_interface_setting: used to override the OSPF settings for this interface (optional) :param str network: if network specified, only add OSPF to this network on interface :param str communication_mode: NOT_FORCED|POINT_TO_POINT|PASSIVE|UNICAST :param Element unicast_ref: Element used as unicast gw (required for UNICAST) :raises ModificationAborted: Change must be made at the interface level :raises UpdateElementFailed: failure updating routing :raises ElementNotFound: ospf area not found :return: Status of whether the route table was updated :rtype: bool
16,591
def create_mosaic(tiles, nodata=0): if isinstance(tiles, GeneratorType): tiles = list(tiles) elif not isinstance(tiles, list): raise TypeError("tiles must be either a list or generator") if not all([isinstance(pair, tuple) for pair in tiles]): raise TypeError("tiles items must be tuples") if not all([ all([isinstance(tile, BufferedTile), isinstance(data, np.ndarray)]) for tile, data in tiles ]): raise TypeError("tuples must be pairs of BufferedTile and array") if len(tiles) == 0: raise ValueError("tiles list is empty") logger.debug("create mosaic from %s tile(s)", len(tiles)) if len(tiles) == 1: tile, data = tiles[0] return ReferencedRaster( data=data, affine=tile.affine, bounds=tile.bounds, crs=tile.crs ) pyramid, resolution, dtype = _get_tiles_properties(tiles) shift = _shift_required(tiles) m_left, m_bottom, m_right, m_top = None, None, None, None for tile, data in tiles: num_bands = data.shape[0] if data.ndim > 2 else 1 left, bottom, right, top = tile.bounds if shift: left += pyramid.x_size / 2 right += pyramid.x_size / 2 if right > pyramid.right: right -= pyramid.x_size left -= pyramid.x_size m_left = min([left, m_left]) if m_left is not None else left m_bottom = min([bottom, m_bottom]) if m_bottom is not None else bottom m_right = max([right, m_right]) if m_right is not None else right m_top = max([top, m_top]) if m_top is not None else top height = int(round((m_top - m_bottom) / resolution)) width = int(round((m_right - m_left) / resolution)) mosaic = ma.MaskedArray( data=np.full((num_bands, height, width), dtype=dtype, fill_value=nodata), mask=np.ones((num_bands, height, width)) ) affine = Affine(resolution, 0, m_left, 0, -resolution, m_top) for tile, data in tiles: data = prepare_array(data, nodata=nodata, dtype=dtype) t_left, t_bottom, t_right, t_top = tile.bounds if shift: t_left += pyramid.x_size / 2 t_right += pyramid.x_size / 2 if t_right > pyramid.right: t_right -= pyramid.x_size t_left -= pyramid.x_size minrow, maxrow, mincol, maxcol = bounds_to_ranges( out_bounds=(t_left, t_bottom, t_right, t_top), in_affine=affine, in_shape=(height, width) ) mosaic[:, minrow:maxrow, mincol:maxcol] = data mosaic.mask[:, minrow:maxrow, mincol:maxcol] = data.mask if shift: affine = Affine(resolution, 0, m_left - pyramid.x_size / 2, 0, -resolution, m_top) return ReferencedRaster( data=mosaic, affine=affine, bounds=Bounds(m_left, m_bottom, m_right, m_top), crs=tile.crs )
Create a mosaic from tiles. Tiles must be connected (also possible over Antimeridian), otherwise strange things can happen! Parameters ---------- tiles : iterable an iterable containing tuples of a BufferedTile and an array nodata : integer or float raster nodata value to initialize the mosaic with (default: 0) Returns ------- mosaic : ReferencedRaster
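A rough usage sketch, assuming a mapchete BufferedTilePyramid as the tile source; the pyramid grid, zoom level, and bounds are placeholders.

import numpy as np
from mapchete.tile import BufferedTilePyramid

pyramid = BufferedTilePyramid("geodetic")
tiles = [
    (tile, np.ones((1, tile.height, tile.width), dtype="uint8"))
    for tile in pyramid.tiles_from_bounds((0, 0, 10, 10), zoom=5)
]
mosaic = create_mosaic(tiles, nodata=0)
print(mosaic.bounds, mosaic.data.shape)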
16,592
def get_loc_level(self, key, level=0, drop_level=True):

    def maybe_droplevels(indexer, levels, drop_level):
        if not drop_level:
            return self[indexer]
        orig_index = new_index = self[indexer]
        levels = [self._get_level_number(i) for i in levels]
        for i in sorted(levels, reverse=True):
            try:
                new_index = new_index.droplevel(i)
            except ValueError:
                # no levels left to drop
                return orig_index
        return new_index

    if isinstance(level, (tuple, list)):
        if len(key) != len(level):
            raise AssertionError('Key for location must have same '
                                 'length as number of levels')
        result = None
        for lev, k in zip(level, key):
            loc, new_index = self.get_loc_level(k, level=lev)
            if isinstance(loc, slice):
                mask = np.zeros(len(self), dtype=bool)
                mask[loc] = True
                loc = mask
            result = loc if result is None else result & loc
        return result, maybe_droplevels(result, level, drop_level)

    level = self._get_level_number(level)

    if isinstance(key, list):
        key = tuple(key)

    if isinstance(key, tuple) and level == 0:
        try:
            if key in self.levels[0]:
                indexer = self._get_level_indexer(key, level=level)
                new_index = maybe_droplevels(indexer, [0], drop_level)
                return indexer, new_index
        except TypeError:
            pass

        if not any(isinstance(k, slice) for k in key):
            # partial selection
            def partial_selection(key, indexer=None):
                if indexer is None:
                    indexer = self.get_loc(key)
                ilevels = [i for i in range(len(key))
                           if key[i] != slice(None, None)]
                return indexer, maybe_droplevels(indexer, ilevels, drop_level)

            if len(key) == self.nlevels and self.is_unique:
                return (self._engine.get_loc(key), None)
            else:
                return partial_selection(key)
        else:
            indexer = None
            for i, k in enumerate(key):
                if not isinstance(k, slice):
                    k = self._get_level_indexer(k, level=i)
                    if isinstance(k, slice):
                        # everything selected at this level
                        if k.start == 0 and k.stop == len(self):
                            k = slice(None, None)
                    else:
                        k_index = k

                if isinstance(k, slice):
                    if k == slice(None, None):
                        continue
                    else:
                        raise TypeError(key)

                if indexer is None:
                    indexer = k_index
                else:
                    indexer &= k_index
            if indexer is None:
                indexer = slice(None, None)
            ilevels = [i for i in range(len(key))
                       if key[i] != slice(None, None)]
            return indexer, maybe_droplevels(indexer, ilevels, drop_level)
    else:
        indexer = self._get_level_indexer(key, level=level)
        return indexer, maybe_droplevels(indexer, [level], drop_level)
Get both the location for the requested label(s) and the resulting sliced index. Parameters ---------- key : label or sequence of labels level : int/level name or list thereof, optional drop_level : bool, default True if ``False``, the resulting index will not drop any level. Returns ------- loc : A 2-tuple where the elements are: Element 0: int, slice object or boolean array Element 1: The resulting sliced multiindex/index. If the key contains all levels, this will be ``None``. Examples -------- >>> mi = pd.MultiIndex.from_arrays([list('abb'), list('def')], ... names=['A', 'B']) >>> mi.get_loc_level('b') (slice(1, 3, None), Index(['e', 'f'], dtype='object', name='B')) >>> mi.get_loc_level('e', level='B') (array([False, True, False], dtype=bool), Index(['b'], dtype='object', name='A')) >>> mi.get_loc_level(['b', 'e']) (1, None) See Also --------- MultiIndex.get_loc : Get location for a label or a tuple of labels. MultiIndex.get_locs : Get location for a label/slice/list/mask or a sequence of such.
16,593
def to_dict(self, omit=()): result = dict(self) for key in omit: if key in result: del result[key] return result
Return a (shallow) copy of self cast to a dictionary, optionally omitting some key/value pairs.
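A tiny usage sketch; 'Record' is a hypothetical mapping class that mixes in the to_dict() method above.

record = Record(a=1, b=2, secret='x')
public = record.to_dict(omit=('secret',))
print(public)  # {'a': 1, 'b': 2}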
16,594
def pickle_load(cls, filepath): if os.path.isdir(filepath): for dirpath, dirnames, filenames in os.walk(filepath): fnames = [f for f in filenames if f == cls.PICKLE_FNAME] if fnames: if len(fnames) == 1: filepath = os.path.join(dirpath, fnames[0]) break else: err_msg = "Found multiple databases:\n %s" % str(fnames) raise RuntimeError(err_msg) else: err_msg = "Cannot find %s inside directory %s" % (cls.PICKLE_FNAME, filepath) raise ValueError(err_msg) with open(filepath, "rb") as fh: new = pickle.load(fh) from .flows import Flow flow_workdirs, new.flows = new.flows, [] for flow in map(Flow.pickle_load, flow_workdirs): new.add_flow(flow) return new
Loads the object from a pickle file. Args: filepath: Filename or directory name. It filepath is a directory, we scan the directory tree starting from filepath and we read the first pickle database. Raise RuntimeError if multiple databases are found.
16,595
def get_stream(self, session_id, stream_id):
    endpoint = self.endpoints.get_stream_url(session_id, stream_id)
    response = requests.get(
        endpoint, headers=self.json_headers(), proxies=self.proxies, timeout=self.timeout
    )

    # The error messages below are reconstructions; the original string
    # literals were stripped in extraction.
    if response.status_code == 200:
        return Stream(response.json())
    elif response.status_code == 400:
        raise GetStreamError('Invalid request. The request data may be invalid JSON.')
    elif response.status_code == 403:
        raise AuthError()
    elif response.status_code == 408:
        raise GetStreamError('The request timed out or the stream ID is invalid.')
    else:
        raise RequestError('An unexpected error occurred', response.status_code)
Returns a Stream object that contains information about an OpenTok stream:

-id: The stream ID
-videoType: "camera" or "screen"
-name: The stream name (if one was set when the client published the stream)
-layoutClassList: An array of the layout classes for the stream
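A short usage sketch following the OpenTok server SDK pattern; the API key/secret and IDs are placeholders.

opentok = OpenTok('API_KEY', 'API_SECRET')
stream = opentok.get_stream(session_id, stream_id)
print(stream.videoType, stream.name)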
16,596
def get_service_inspect(self, stack, service):
    # The URL pattern is a reconstruction from the host/stack/service
    # parameters; the original format string was stripped.
    url = '{0}/v3/stacks/{1}/services/{2}'.format(self.host, stack, service)
    return self.__get(url)
Inspect a service

Retrieves the attributes of the service with the given name.

Args:
    - stack: name of the stack the service belongs to
    - service: the service name

Returns:
    A tuple (<result>, <ResponseInfo>):
    - result: the service information on success, or {"error": "<errMsg string>"} on failure
    - ResponseInfo: the response information for the request
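A hypothetical usage sketch; 'client' is assumed to be an instance of the SDK client class exposing get_service_inspect.

result, info = client.get_service_inspect('my-stack', 'web')
if 'error' in result:
    print('lookup failed:', result['error'])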
16,597
def should_we_load(kls): if kls.__name__.endswith("AbstractCheck"): return False if not kls.__name__.endswith("Check"): return False mro = kls.__mro__ for m in mro: if m.__name__ == "AbstractCheck": return True return False
should we load this class as a check?
16,598
def _set_show_zoning_enabled_configuration(self, v, load=False):
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # The extension values, namespace and defining_module below are
        # reconstructed from brocade-zone module naming conventions; the
        # original string literals were stripped in extraction.
        t = YANGDynClass(
            v,
            base=show_zoning_enabled_configuration.show_zoning_enabled_configuration,
            is_leaf=True,
            yang_name="show-zoning-enabled-configuration",
            rest_name="show-zoning-enabled-configuration",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=False,
            extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'zone-action-point'}},
            namespace='urn:brocade.com:mgmt:brocade-zone',
            defining_module='brocade-zone',
            yang_type='rpc',
            is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': 'show_zoning_enabled_configuration must be of a type compatible with rpc',
            'defined-type': 'rpc',
            'generated-type': 'YANGDynClass(...)',  # full generated-type repr elided
        })

    self.__show_zoning_enabled_configuration = t
    if hasattr(self, '_set'):
        self._set()
Setter method for show_zoning_enabled_configuration, mapped from YANG variable /brocade_zone_rpc/show_zoning_enabled_configuration (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_zoning_enabled_configuration is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_zoning_enabled_configuration() directly. YANG Description: This will display the Zoning Enabled-Configuration database.
16,599
def score(self, X, y=None, sample_weight=None):
    Xt, yt, swt = self._transform(X, y, sample_weight)
    self.N_test = len(yt)
    score_params = {}
    if swt is not None:
        score_params['sample_weight'] = swt
    if self.scorer is None:
        return self._final_estimator.score(Xt, yt, **score_params)
    return self.scorer(self._final_estimator, Xt, yt, **score_params)
Apply transforms, and score with the final estimator Parameters ---------- X : iterable Data to predict on. Must fulfill input requirements of first step of the pipeline. y : iterable, default=None Targets used for scoring. Must fulfill label requirements for all steps of the pipeline. sample_weight : array-like, default=None If not None, this argument is passed as ``sample_weight`` keyword argument to the ``score`` method of the final estimator. Returns ------- score : float
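A brief sketch, assuming 'pipe' is a fitted instance of this pipeline class; X_test, y_test and weights are placeholders.

accuracy = pipe.score(X_test, y_test)
weighted = pipe.score(X_test, y_test, sample_weight=weights)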