Dataset columns: "Unnamed: 0" (int64, values 0 to 389k), "code" (string, lengths 26 to 79.6k), "docstring" (string, lengths 1 to 46.9k)
11,000
def _check_and_send(self):
    if self.transport._stop_event.ready() or not self.transport.greenlet:
        self.log.error("Can't retry", reason="Transport not running")
        return
    # (most of the body was lost in extraction; the surviving log messages,
    # in order, were: "Retrying message", "Partner not reachable. Skipping.",
    # "Stopping message send retry", "Raiden queue is gone",
    # "Message was removed from queue" and "Send"; the composed batch is
    # finally sent as '\n'.join(message_texts))
Check and send all pending/queued messages that are not waiting on a retry timeout. After composing the to-be-sent message, also purge the message queue of messages that are no longer present in the respective SendMessageEvent queue.
11,001
def labels(self, *labelvalues, **labelkwargs):
    if not self._labelnames:
        raise ValueError('No label names were set when constructing %s' % self)
    if self._labelvalues:
        raise ValueError('%s already has labels set (%s); can not chain calls to .labels()' % (
            self,
            dict(zip(self._labelnames, self._labelvalues))
        ))
    if labelvalues and labelkwargs:
        raise ValueError("Can't pass both *args and **kwargs")
    if labelkwargs:
        if sorted(labelkwargs) != sorted(self._labelnames):
            raise ValueError('Incorrect label names')
        labelvalues = tuple(unicode(labelkwargs[l]) for l in self._labelnames)
    else:
        if len(labelvalues) != len(self._labelnames):
            raise ValueError('Incorrect label count')
        labelvalues = tuple(unicode(l) for l in labelvalues)
    with self._lock:
        if labelvalues not in self._metrics:
            self._metrics[labelvalues] = self.__class__(
                self._name,
                documentation=self._documentation,
                labelnames=self._labelnames,
                unit=self._unit,
                labelvalues=labelvalues,
                **self._kwargs
            )
        return self._metrics[labelvalues]
Return the child for the given labelset. All metrics can have labels, allowing grouping of related time series. Taking a counter as an example: from prometheus_client import Counter c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint']) c.labels('get', '/').inc() c.labels('post', '/submit').inc() Labels can also be provided as keyword arguments: from prometheus_client import Counter c = Counter('my_requests_total', 'HTTP Failures', ['method', 'endpoint']) c.labels(method='get', endpoint='/').inc() c.labels(method='post', endpoint='/submit').inc() See the best practices on [naming](http://prometheus.io/docs/practices/naming/) and [labels](http://prometheus.io/docs/practices/instrumentation/#use-labels).
11,002
def get_self_uri(self, content_type): "return the first self uri with the content_type" try: return [self_uri for self_uri in self.self_uri_list if self_uri.content_type == content_type][0] except IndexError: return None
return the first self uri with the content_type
11,003
def infer_delimiter(filename, comment_char="#", n_lines=100):
    # NOTE: the defaults for comment_char and n_lines were lost in extraction
    # and are reconstructed here.
    lines = []
    with open(filename, "r") as f:
        for line in f:
            if line.startswith(comment_char):
                continue
            if len(lines) < n_lines:
                lines.append(line)
            else:
                break
    if len(lines) < n_lines:
        raise ValueError(
            "Not enough lines in %s to infer delimiter" % filename)
    candidate_delimiters = ["\t", ",", r"\s+"]
    for candidate_delimiter in candidate_delimiters:
        counts = [len(re.split(candidate_delimiter, line))
                  for line in lines]
        first_line_count = counts[0]
        if all(c == first_line_count for c in counts) and first_line_count > 1:
            return candidate_delimiter
    raise ValueError("Could not determine delimiter for %s" % filename)
Given a file which contains data separated by one of the following: commas, tabs, or spaces. Return the most likely separator by sniffing the first few lines of the file's contents.
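A quick usage sketch for the row above (hypothetical file; `n_lines` is the reconstructed parameter name, and `infer_delimiter` plus `re` come from the entry itself):

    import re  # required by infer_delimiter above

    # Write a small tab-separated sample, then sniff its delimiter.
    with open("sample.tsv", "w") as fh:
        for row in [("a", "b", "c"), ("1", "2", "3"), ("4", "5", "6")]:
            fh.write("\t".join(row) + "\n")

    print(infer_delimiter("sample.tsv", n_lines=3))  # -> '\t'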
11,004
def child_task(self): from MAVProxy.modules.lib import mp_util import wx_processguard from wx_loader import wx from wxsettings_ui import SettingsDlg mp_util.child_close_fds() app = wx.App(False) dlg = SettingsDlg(self.settings) dlg.parent_pipe = self.parent_pipe dlg.ShowModal() dlg.Destroy()
child process - this holds all the GUI elements
11,005
def indirect(self, interface): if interface == IWebViewer: return _AnonymousWebViewer(self.store) return super(AnonymousSite, self).indirect(interface)
Indirect the implementation of L{IWebViewer} to L{_AnonymousWebViewer}.
11,006
def p_func_args(self, p): p[0] = p[1] + (p[3],) p.set_lineno(0, p.lineno(1))
func_args : func_args COMMA expression
11,007
def admin_view_reverse_fk_links(modeladmin: ModelAdmin,
                                obj,
                                reverse_fk_set_field: str,
                                missing: str = "(None)",
                                use_str: bool = True,
                                separator: str = "<br>",
                                view_type: str = "change",
                                current_app: str = None) -> str:
    if not hasattr(obj, reverse_fk_set_field):
        return missing
    linked_objs = getattr(obj, reverse_fk_set_field).all()
    if not linked_objs:
        return missing
    first = linked_objs[0]
    app_name = first._meta.app_label.lower()
    model_name = first._meta.object_name.lower()
    viewname = "admin:{}_{}_{}".format(app_name, model_name, view_type)
    if current_app is None:
        current_app = modeladmin.admin_site.name
    links = []
    for linked_obj in linked_objs:
        url = reverse(viewname, args=[linked_obj.pk], current_app=current_app)
        if use_str:
            label = escape(str(linked_obj))
        else:
            label = "{} {}".format(escape(linked_obj._meta.object_name),
                                   linked_obj.pk)
        # the link template literal was stripped in the source; a plain
        # anchor tag is the obvious reconstruction
        links.append('<a href="{}">{}</a>'.format(url, label))
    return separator.join(links)
Get multiple Django admin site URLs for multiple objects linked to our object of interest (where the other objects have foreign keys to our object).
11,008
def bugreport(dest_file="default.log"):
    adb_full_cmd = [v.ADB_COMMAND_PREFIX, v.ADB_COMMAND_BUGREPORT]
    try:
        dest_file_handler = open(dest_file, "w")
    except IOError:
        # bail out: continuing without a file handle would crash below
        print("IOError: Failed to create a log file")
        return (0, "IOError: Failed to create a log file")
    if _isDeviceAvailable():
        result = _exec_command_to_file(adb_full_cmd, dest_file_handler)
        return (result, "Success: Bug report saved to: " + dest_file)
    else:
        return (0, "Device Not Found")
Prints dumpsys, dumpstate, and logcat data to the screen, for the purposes of bug reporting :return: result of _exec_command() execution
11,009
def pass_session_attributes(self): for key, value in six.iteritems(self.request.session.attributes): self.response.sessionAttributes[key] = value
Copies session attributes from the request to the response.
11,010
def mimeData(self, items): func = self.dataCollector() if func: return func(self, items) record_items = [] for item in self.selectedItems(): if isinstance(item, XOrbRecordItem): record_items.append(item) data = QMimeData() self.dataStoreRecords(data, record_items) return data
Returns the mime data for dragging for this instance. :param items | [<QTreeWidgetItem>, ..]
11,011
def dlogprior(self, param): assert param in self.freeparams, "Invalid param: {0}".format(param) return self._dlogprior[param]
Value of derivative of prior depends on value of `prior`.
11,012
def get_authinfo(request): if (("files_iv" not in request.session) or ("files_text" not in request.session) or ("files_key" not in request.COOKIES)): return False iv = base64.b64decode(request.session["files_iv"]) text = base64.b64decode(request.session["files_text"]) key = base64.b64decode(request.COOKIES["files_key"]) obj = AES.new(key, AES.MODE_CFB, iv) password = obj.decrypt(text) username = request.session["filecenter_username"] if "filecenter_username" in request.session else request.user.username return {"username": username, "password": password}
Get authentication info from the encrypted message.
11,013
def upload_logs(self, release_singleton=True):
    if release_singleton:
        self.release_singleton()

    def _upload():
        for log in self.get_logs():
            new_name = self._uniquename(log)
            self._upload(log, new_name)
            self.delete_log(log)

    # NOTE: the config key and its values were stripped in the source; they
    # are reconstructed from the docstring ("set ... log_upload_interface to
    # gui/cli/or background").
    if self.pcfg['log_upload_interface'] == 'gui':
        raise NotImplementedError
        # threading.Thread(target=_upload); gui_uploader(...)  # unreachable in source
    elif self.pcfg['log_upload_interface'] == 'cli':
        raise NotImplementedError
    elif self.pcfg['log_upload_interface'] == 'background':
        _upload()
Uploads logs to a server using the method and GUI specified in self.pcfg. Singleton mode can be disabled so a new instance can be restarted while uploading goes on, typically in the case of uploading after a crash or sys exit. Set self.pcfg 'log_upload_interface' to 'gui', 'cli', or 'background'.
11,014
def txt(self, diff, f):
    # NOTE: string literals were stripped in the source; the package, template
    # and key names below are plausible reconstructions.
    env = Environment(
        loader=PackageLoader('clan', 'templates'),
        trim_blocks=True,
        lstrip_blocks=True
    )
    template = env.get_template('diff.txt')

    def format_row(label, values):
        change = format_comma(values['change'])
        percent_change = '{:.1%}'.format(values['percent_change']) \
            if values['percent_change'] is not None else '-'
        point_change = '{:.1f}'.format(values['point_change'] * 100) \
            if values['point_change'] is not None else '-'
        if values['change'] > 0:
            change = '+%s' % change
        if values['percent_change'] is not None and values['percent_change'] > 0:
            percent_change = '+%s' % percent_change
        if values['point_change'] is not None and values['point_change'] > 0:
            point_change = '+%s' % point_change
        return '{:>15} {:>8} {:>8}    {}\n'.format(
            change, percent_change, point_change, label)

    context = {
        'diff': diff,
        'field_definitions': self.field_definitions,
        'global_arguments': GLOBAL_ARGUMENTS,
        'format_comma': format_comma,
        'format_duration': format_duration,
        'format_percent': format_percent,
        'format_row': format_row
    }

    f.write(template.render(**context).encode())
Generate a text report for a diff.
11,015
def check_token(func): @wraps(func) def wrapper(*args, **kwargs): response = func(*args, **kwargs) if response.status_code == 401: raise InvalidToken() else: return response return wrapper
Check whether the access token is valid.
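A minimal usage sketch for the decorator above; the API function and `session` object are hypothetical, and any callable returning an object with a `status_code` attribute would work:

    @check_token
    def get_profile(session, url):
        # a requests-style call; a 401 response is surfaced as InvalidToken
        return session.get(url)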
11,016
def checkInfo(email=None, username=None, api_key=None):
    if api_key is None:
        allKeys = config_api_keys.returnListOfAPIKeys()
        try:
            api_key = allKeys["pipl_com"]
        except:
            api_key = "samplekey"
    results = {}
    results["person"] = []
    results["records"] = []
    if username is not None:
        request = SearchAPIRequest(username=username, api_key=api_key)
        person, records = launchRequest(request)
        results["person"].append(person)
        results["records"].append(records)
    if email is not None:
        request = SearchAPIRequest(email=email, api_key=api_key)
        person, records = launchRequest(request)
        results["person"].append(person)
        results["records"].append(records)
    return results
Method that checks if the given email or username is stored in the pipl.com website. :param email: queries to be launched. :param username: username to be searched. :param api_key: api_key to be used in pipl.com. If not provided, the API key will be searched in the config_api_keys.py file. :return: Python structure for the Json received. It has the following structure:
11,017
def values(self): return [ _ColumnPairwiseSignificance( self._slice, col_idx, self._axis, self._weighted, self._alpha, self._only_larger, self._hs_dims, ) for col_idx in range(self._slice.get_shape(hs_dims=self._hs_dims)[1]) ]
list of _ColumnPairwiseSignificance tests. Result has as many elements as there are columns in the slice. Each significance test contains `p_vals` and `t_stats` significance tests.
11,018
def token_view(token):
    # NOTE: form-field and session-key literals were stripped in the source
    # and are reconstructed here.
    if request.method == "POST" and 'delete' in request.form:
        db.session.delete(token)
        db.session.commit()
        return redirect(url_for('.index'))
    show_token = session.pop('show_token', False)
    form = TokenForm(request.form, name=token.client.name, scopes=token.scopes)
    form.scopes.choices = current_oauth2server.scope_choices()
    if form.validate_on_submit():
        token.client.name = form.data['name']
        token.scopes = form.data['scopes']
        db.session.commit()
    if len(current_oauth2server.scope_choices()) == 0:
        del(form.scopes)
    return render_template(
        "invenio_oauth2server/settings/token_view.html",
        token=token,
        form=form,
        show_token=show_token,
    )
Show token details.
11,019
def community_post_comments(self, post_id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/help_center/post_comments#list-comments"
    api_path = "/api/v2/community/posts/{post_id}/comments.json"
    api_path = api_path.format(post_id=post_id)
    return self.call(api_path, **kwargs)
https://developer.zendesk.com/rest_api/docs/help_center/post_comments#list-comments
11,020
def set_contrast(self, contrast): self._contrast = contrast self.x_spread = 2 * (1.0 - contrast) self.y_spread = 2.0 - 2 * (1.0 - contrast) self._build_cdict()
Adjusts the image contrast. Contrast refers to the rate of change of color with color level. At low contrast, color changes gradually over many intensity levels, while at high contrast it can change rapidly within a few levels. Args: contrast: float A number between 0 and 1. Note that upon initialization the colormap has a default contrast value of 0.5. Returns: void
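Worked arithmetic for the two spreads, following the formulas in the code above:

    # contrast = 0.5 (the default):  x_spread = 2 * (1 - 0.5) = 1.0
    #                                y_spread = 2 - 2 * (1 - 0.5) = 1.0
    # contrast = 0.9 (high):         x_spread = 0.2, y_spread = 1.8
    # i.e. higher contrast narrows the input range and widens the output range.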
11,021
def dispense(self):
    self.sendcommand(Vendapin.DISPENSE)
    time.sleep(1)
    response = self.receivepacket()
    # the exact message literals were stripped in the source
    print('Vendapin response: ' + str(response))
    if not self.was_packet_accepted(response):
        raise Exception('Card dispense failed: ' + str(response))
    return self.parsedata(response)[0]
dispense a card if ready, otherwise throw an Exception
11,022
def normalizer(text, exclusion=OPERATIONS_EXCLUSION, lower=True,
               separate_char='-', **kwargs):
    # NOTE: several regex and string literals were stripped in the source;
    # the patterns and the separate_char default below are reconstructions.
    clean_str = re.sub(r'[^\w\s{}]'.format("".join(exclusion)),
                       separate_char, text.strip()) or ''
    clean_lowerbar = ''
    clean_str_without_accents = strip_accents(clean_str)
    if '_' not in exclusion:
        clean_lowerbar = re.sub(r'[\s_]+', separate_char,
                                clean_str_without_accents.strip())
    limit_guion = re.sub(r'-+', separate_char, clean_lowerbar.strip())
    if limit_guion and separate_char and separate_char in limit_guion[0]:
        limit_guion = limit_guion[1:]
    if limit_guion and separate_char and separate_char in limit_guion[-1]:
        limit_guion = limit_guion[:-1]
    if lower:
        limit_guion = limit_guion.lower()
    return limit_guion
Clean a text string of symbols, keeping only alphanumeric chars.
11,023
def unpack_rawr_zip_payload(table_sources, payload):
    # the zip reader and the inner function header were lost in extraction;
    # this is the straightforward reconstruction implied by the trailing
    # "return get_table"
    zfh = zipfile.ZipFile(BytesIO(payload), 'r')

    def get_table(table_name):
        data = zfh.open(table_name, 'r').read()
        unpacker = Unpacker(file_like=BytesIO(data))
        source = table_sources[table_name]
        return Table(source, unpacker)

    return get_table
unpack a zipfile and turn it into a callable "tables" object.
11,024
def init(envVarName, enableColorOutput=False): global _initialized if _initialized: return global _ENV_VAR_NAME _ENV_VAR_NAME = envVarName if enableColorOutput: _preformatLevels(envVarName + "_NO_COLOR") else: _preformatLevels(None) if envVarName in os.environ: setDebug(os.environ[envVarName]) addLimitedLogHandler(stderrHandler) _initialized = True
Initialize the logging system and parse the environment variable of the given name. Needs to be called before starting the actual application.
11,025
def create_app(app_id, app_name, source_id, region, app_data):
    # NOTE: the SQL placeholders, strftime format and message literals were
    # stripped in the source and are reconstructed here (Python 2 code).
    try:
        create_at = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        conn = get_conn()
        c = conn.cursor()
        c.execute("SELECT count(*) FROM app WHERE name='{0}'".format(app_name))
        old_app = c.fetchone()
        if old_app[0] > 0:
            print 'app %s already exists, deleting it first' % app_name
            c.execute("DELETE FROM container WHERE app_id='{0}'".format(app_id))
            c.execute("DELETE FROM app WHERE name='{0}'".format(app_name))
            conn.commit()
        c.execute("INSERT INTO app (id,name,source_id,region,state,create_at,change_at,app_data)"
                  " VALUES ('{0}','{1}','{2}','{3}','{4}','{5}','{6}','{7}')"
                  .format(app_id, app_name, source_id, region,
                          constant.STATE_APP_RUNNING, create_at, create_at,
                          app_data))
        conn.commit()
        conn.close()
        print 'app %s created' % app_id
    except Exception, e:
        raise RuntimeError('create app %s failed: %s' % (app_id, e))
insert app record when stack run as a app
11,026
def add_semantic_data(self, path_as_list, value, key): assert isinstance(key, string_types) target_dict = self.get_semantic_data(path_as_list) target_dict[key] = value return path_as_list + [key]
Adds a semantic data entry. :param list path_as_list: The path in the vividict to enter the value :param value: The value of the new entry. :param key: The key of the new entry. :return:
11,027
def evolve_genomes(rng, pop, params, recorder=None): import warnings with warnings.catch_warnings(): warnings.simplefilter("ignore") params.validate() from ._fwdpy11 import MutationRegions from ._fwdpy11 import evolve_without_tree_sequences from ._fwdpy11 import dispatch_create_GeneticMap pneutral = params.mutrate_n/(params.mutrate_n+params.mutrate_s) mm = MutationRegions.create(pneutral, params.nregions, params.sregions) rm = dispatch_create_GeneticMap(params.recrate, params.recregions) if recorder is None: from ._fwdpy11 import RecordNothing recorder = RecordNothing() evolve_without_tree_sequences(rng, pop, params.demography, params.mutrate_n, params.mutrate_s, params.recrate, mm, rm, params.gvalue, recorder, params.pself, params.prune_selected)
Evolve a population without tree sequence recordings. In other words, complete genomes must be simulated and tracked. :param rng: random number generator :type rng: :class:`fwdpy11.GSLrng` :param pop: A population :type pop: :class:`fwdpy11.DiploidPopulation` :param params: simulation parameters :type params: :class:`fwdpy11.ModelParams` :param recorder: (None) A temporal sampler/data recorder. :type recorder: callable .. note:: If recorder is None, then :class:`fwdpy11.RecordNothing` will be used.
11,028
def Parse(self): for data in self.Query(self.EVENTS_QUERY): (timestamp, agent_bundle_identifier, agent_name, url, sender, sender_address, type_number, title, referrer, referrer_alias) = data yield [ timestamp, "OSX_QUARANTINE", url, referrer, title, agent_name, agent_bundle_identifier, sender, sender_address, type_number, referrer_alias ]
Iterator returning dict for each entry in history.
11,029
def tops(opts):
    if 'master_tops' not in opts:
        return {}
    whitelist = list(opts['master_tops'].keys())
    ret = LazyLoader(
        _module_dirs(opts, 'tops', 'top'),
        opts,
        tag='top',
        whitelist=whitelist,
    )
    return FilterDictWrapper(ret, '.top')
Returns the tops modules
11,030
def _init_os_api(self):
    if not self.nova_client:
        log.debug("Initializing OpenStack API clients:"
                  " OS_AUTH_URL='%s'"
                  " OS_USERNAME='%s'"
                  " OS_USER_DOMAIN_NAME='%s'"
                  " OS_PROJECT_NAME='%s'"
                  " OS_PROJECT_DOMAIN_NAME='%s'"
                  " OS_REGION_NAME='%s'"
                  " OS_CACERT='%s'"
                  "",
                  self._os_auth_url, self._os_username,
                  self._os_user_domain_name, self._os_tenant_name,
                  self._os_project_domain_name, self._os_region_name,
                  self._os_cacert)
        sess = self.__init_keystone_session()
        log.debug("Creating OpenStack Compute API (Nova) v%s client ...",
                  self._compute_api_version)
        self.nova_client = nova_client.Client(
            self._compute_api_version, session=sess,
            region_name=self._os_region_name,
            cacert=self._os_cacert)
        log.debug("Creating OpenStack Network API (Neutron) client ...")
        # the Neutron client call was truncated in the source; these
        # arguments mirror the other clients and are an assumption
        self.neutron_client = neutron_client.Client(
            session=sess, region_name=self._os_region_name,
            ca_cert=self._os_cacert)
        log.debug("Creating OpenStack Image API (Glance) v%s client ...",
                  self._image_api_version)
        self.glance_client = glance_client.Client(
            self._image_api_version, session=sess,
            region_name=self._os_region_name)
        log.debug("Creating OpenStack Volume API (Cinder) v%s client ...",
                  self._volume_api_version)
        self.cinder_client = cinder_client.Client(
            self._volume_api_version, session=sess,
            region_name=self._os_region_name,
            cacert=self._os_cacert)
Initialise client objects for talking to OpenStack API. This is in a separate function so to be called by ``__init__`` and ``__setstate__``.
11,031
def ae_partial_waves(self): ae_partial_waves = OrderedDict() for mesh, values, attrib in self._parse_all_radfuncs("ae_partial_wave"): state = attrib["state"] ae_partial_waves[state] = RadialFunction(mesh, values) return ae_partial_waves
Dictionary with the AE partial waves indexed by state.
11,032
def rotate(obj, axis, angle, origin=None): R = _rodrigues_to_dcm(axis, angle) try: return obj.transform(PivotRotation(R, origin)) except AttributeError: raise NotImplementedError
Rotation around unit vector following the right hand rule Parameters: obj : obj to be rotated (e.g. neurite, neuron). Must implement a transform method. axis : unit vector for the axis of rotation angle : rotation angle in rads Returns: A copy of the object with the applied rotation.
11,033
def next(self):
    curr_page = self.currentPage()
    if not curr_page:
        return
    elif not curr_page.validatePage():
        return
    pageId = curr_page.nextId()
    try:
        next_page = self._pages[pageId]
    except KeyError:
        return
    self._currentId = pageId
    self._navigation.append(pageId)
    y = curr_page.y()
    next_page.move(self.width(), y)
    # animate the current page out and the next page in
    # (the 'pos' property name literal was stripped in the source)
    anim_in = QtCore.QPropertyAnimation(self)
    anim_in.setTargetObject(curr_page)
    anim_in.setPropertyName('pos')
    anim_in.setStartValue(curr_page.pos())
    anim_in.setEndValue(QtCore.QPoint(-curr_page.width(), y))
    anim_in.setDuration(self.animationSpeed())
    anim_in.setEasingCurve(QtCore.QEasingCurve.Linear)
    anim_out = QtCore.QPropertyAnimation(self)
    anim_out.setTargetObject(next_page)
    anim_out.setPropertyName('pos')
    anim_out.setStartValue(next_page.pos())
    anim_out.setEndValue(curr_page.pos())
    anim_out.setDuration(self.animationSpeed())
    anim_out.setEasingCurve(QtCore.QEasingCurve.Linear)
    anim_grp = QtCore.QParallelAnimationGroup(self)
    anim_grp.addAnimation(anim_in)
    anim_grp.addAnimation(anim_out)
    anim_grp.finished.connect(curr_page.hide)
    anim_grp.finished.connect(anim_grp.deleteLater)
    next_page.show()
    self._buttons[self.WizardButton.BackButton].setVisible(True)
    self._buttons[self.WizardButton.NextButton].setVisible(self.canGoForward())
    self._buttons[self.WizardButton.RetryButton].setVisible(self.canRetry())
    self._buttons[self.WizardButton.CommitButton].setVisible(next_page.isCommitPage())
    self._buttons[self.WizardButton.FinishButton].setVisible(next_page.isFinalPage())
    self.adjustSize()
    self.currentIdChanged.emit(pageId)
    next_page.initializePage()
    anim_grp.start()
Goes to the next page for this wizard.
11,034
def proxy_for(widget):
    proxy_type = widget_proxies.get(widget.__class__)
    if proxy_type is None:
        # the message literal was stripped in the source
        raise KeyError('There is no proxy type registered for %r' % widget)
    return proxy_type(widget)
Create a proxy for a Widget :param widget: A gtk.Widget to proxy This will raise a KeyError if there is no proxy type registered for the widget type.
11,035
def set_aesthetic(palette="yellowbrick", font="sans-serif", font_scale=1, color_codes=True, rc=None): _set_context(font_scale) set_style(rc={"font.family": font}) set_palette(palette, color_codes=color_codes) if rc is not None: mpl.rcParams.update(rc)
Set aesthetic parameters in one step. Each set of parameters can be set directly or temporarily, see the referenced functions below for more information. Parameters ---------- palette : string or sequence Color palette, see :func:`color_palette` font : string Font family, see matplotlib font manager. font_scale : float, optional Separate scaling factor to independently scale the size of the font elements. color_codes : bool If ``True`` and ``palette`` is a yellowbrick palette, remap the shorthand color codes (e.g. "b", "g", "r", etc.) to the colors from this palette. rc : dict or None Dictionary of rc parameter mappings to override the above.
11,036
def run(self, fnames=None): if fnames is None: fnames = self.get_selected_filenames() for fname in fnames: self.sig_run.emit(fname)
Run Python scripts
11,037
def _fill_missing_values(df, range_values, fill_value=0, fill_method=None): idx_colnames = df.index.names idx_colranges = [range_values[x] for x in idx_colnames] fullindex = pd.Index([p for p in product(*idx_colranges)], name=tuple(idx_colnames)) fulldf = df.reindex(index=fullindex, fill_value=fill_value, method=fill_method) fulldf.index.names = idx_colnames return fulldf, idx_colranges
Will get the names of the index columns of df, obtain their ranges from range_values dict and return a reindexed version of df with the given range values. :param df: pandas DataFrame :param range_values: dict or array-like Must contain for each index column of df an entry with all the values within the range of the column. :param fill_value: scalar or 'nearest', default 0 Value to use for missing values. Defaults to 0, but can be any "compatible" value, e.g., NaN. The 'nearest' mode will fill the missing value with the nearest value in the column. :param fill_method: {'backfill', 'bfill', 'pad', 'ffill', None}, default None Method to use for filling holes in reindexed DataFrame 'pad' / 'ffill': propagate last valid observation forward to next valid 'backfill' / 'bfill': use NEXT valid observation to fill gap :return: pandas Dataframe and used column ranges reindexed DataFrame and dict with index column ranges
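The core reindex pattern the helper above wraps, as a standalone sketch (toy data, no dependency on the helper itself):

    import pandas as pd
    from itertools import product

    df = pd.DataFrame(
        {"value": [1.0, 2.0]},
        index=pd.MultiIndex.from_tuples([(0, "a"), (1, "b")],
                                        names=["day", "group"]),
    )
    # build the full cartesian index from the per-column ranges ...
    full_index = pd.MultiIndex.from_tuples(
        list(product([0, 1], ["a", "b"])), names=["day", "group"])
    # ... and reindex, filling the two missing rows with 0
    print(df.reindex(index=full_index, fill_value=0))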
11,038
def delete_topic_rule(ruleName, region=None, key=None, keyid=None, profile=None):
    try:
        conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
        conn.delete_topic_rule(ruleName=ruleName)
        return {'deleted': True}
    except ClientError as e:
        return {'deleted': False, 'error': __utils__['boto3.get_error'](e)}
Given a rule name, delete it. Returns {deleted: true} if the rule was deleted and returns {deleted: false} if the rule was not deleted. CLI Example: .. code-block:: bash salt myminion boto_iot.delete_rule myrule
11,039
def reindex(self, request):
    r = redis.StrictRedis.from_url(
        request.registry.settings["celery.scheduler_url"])
    try:
        with SearchLock(r, timeout=30 * 60, blocking_timeout=30):
            p = urllib.parse.urlparse(
                request.registry.settings["elasticsearch.url"])
            client = elasticsearch.Elasticsearch(
                [urllib.parse.urlunparse(p[:2] + ("",) * 4)],
                verify_certs=True,
                ca_certs=certifi.where(),
                timeout=30,
                retry_on_timeout=True,
                serializer=serializer.serializer,
            )
            number_of_replicas = request.registry.get(
                "elasticsearch.replicas", 0)
            refresh_interval = request.registry.get(
                "elasticsearch.interval", "1s")
            index_base = request.registry["elasticsearch.index"]
            random_token = binascii.hexlify(os.urandom(5)).decode("ascii")
            new_index_name = "{}-{}".format(index_base, random_token)
            doc_types = request.registry.get("search.doc_types", set())
            shards = request.registry.get("elasticsearch.shards", 1)
            new_index = get_index(
                new_index_name,
                doc_types,
                using=client,
                shards=shards,
                replicas=0,
                interval="-1",
            )
            new_index.create(wait_for_active_shards=shards)
            try:
                # the timeout literal was stripped in the source;
                # '600s' is the reconstruction
                request.db.execute("SET statement_timeout = '600s'")
                for _ in parallel_bulk(
                        client, _project_docs(request.db),
                        index=new_index_name):
                    pass
            except:
                new_index.delete()
                raise
            finally:
                request.db.rollback()
                request.db.close()
            client.indices.put_settings(
                index=new_index_name,
                body={
                    "index": {
                        "number_of_replicas": number_of_replicas,
                        "refresh_interval": refresh_interval,
                    }
                },
            )
            if client.indices.exists_alias(name=index_base):
                to_delete = set()
                actions = []
                for name in client.indices.get_alias(name=index_base):
                    to_delete.add(name)
                    actions.append(
                        {"remove": {"index": name, "alias": index_base}})
                actions.append(
                    {"add": {"index": new_index_name, "alias": index_base}})
                client.indices.update_aliases({"actions": actions})
                client.indices.delete(",".join(to_delete))
            else:
                client.indices.put_alias(name=index_base,
                                         index=new_index_name)
    except redis.exceptions.LockError as exc:
        raise self.retry(countdown=60, exc=exc)
Recreate the Search Index.
11,040
def parametrized_class(decorator):
    def decorator_builder(*args, **kwargs):
        def meta_decorator(cls):
            return decorator(cls, *args, **kwargs)
        return meta_decorator
    return decorator_builder
Decorator used to make simple class decorator with arguments. Doesn't really do anything, just here to have a central implementation of the simple class decorator.
11,041
def nodes(self, tree):
    # NOTE: the frequency literals were stripped in the source; the values
    # ('per_session', 'per_subject', 'per_visit', 'per_study') are
    # reconstructed from the branch bodies.
    if self.frequency == 'per_session':
        nodes = []
        for subject in tree.subjects:
            for sess in subject.sessions:
                nodes.append(sess)
    elif self.frequency == 'per_subject':
        nodes = tree.subjects
    elif self.frequency == 'per_visit':
        nodes = tree.visits
    elif self.frequency == 'per_study':
        nodes = [tree]
    else:
        assert False, "Unrecognised frequency '{}'".format(self.frequency)
    return nodes
Returns the relevant nodes for the spec's frequency
11,042
def env(self, key, value=None, unset=False, asap=False):
    # NOTE: the uWSGI option-name literals were stripped in the source;
    # 'unenv' and the 'asap-' prefix are reconstructions.
    if unset:
        self._set('unenv', key, multi=True)
    else:
        if value is None:
            value = os.environ.get(key)
        self._set('%senv' % ('asap-' if asap else ''),
                  '%s=%s' % (key, value), multi=True)
    return self
Processes (sets/unsets) an environment variable. If value is not given in `set` mode, it will be taken from the current env. :param str|unicode key: :param value: :param bool unset: Whether to unset this variable. :param bool asap: If True, the env variable will be set as soon as possible.
11,043
def getLinkProperties(self, wanInterfaceId=1, timeout=1): namespace = Wan.getServiceType("getLinkProperties") + str(wanInterfaceId) uri = self.getControlURL(namespace) results = self.execute(uri, namespace, "GetCommonLinkProperties", timeout=timeout) return WanLinkProperties(results)
Execute GetCommonLinkProperties action to get WAN link properties. :param int wanInterfaceId: the id of the WAN device :param float timeout: the timeout to wait for the action to be executed :return: WAN link properties :rtype: WanLinkProperties
11,044
def _instance_callable(obj):
    if not isinstance(obj, ClassTypes):
        # already an instance
        return getattr(obj, '__call__', None) is not None
    if six.PY3:
        # a class: look for __call__ anywhere in the MRO
        for base in (obj,) + obj.__mro__:
            if base.__dict__.get('__call__') is not None:
                return True
    else:
        klass = obj
        if klass.__dict__.get('__call__') is not None:
            return True
        for base in klass.__bases__:
            if _instance_callable(base):
                return True
    return False
Given an object, return True if the object is callable. For classes, return True if instances would be callable.
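A small check of the behaviour described above (assumes the entry's function with its ClassTypes/six dependencies is available):

    class WithCall(object):
        def __call__(self):
            return "called"

    class Plain(object):
        pass

    print(_instance_callable(WithCall))    # True: instances would be callable
    print(_instance_callable(Plain))       # False
    print(_instance_callable(WithCall()))  # True: the instance itself is callable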
11,045
def register_as_guest(self):
    response = self.api.register(auth_body=None, kind='guest')
    return self._post_registration(response)
Register a guest account on this HS. Note: HS must have guest registration enabled. Returns: str: Access Token Raises: MatrixRequestError
11,046
def insert_record(self, table: str, fields: Sequence[str], values: Sequence[Any], update_on_duplicate_key: bool = False) -> int: self.ensure_db_open() if len(fields) != len(values): raise AssertionError("Field/value mismatch") if update_on_duplicate_key: sql = get_sql_insert_or_update(table, fields, self.get_delims()) else: sql = get_sql_insert(table, fields, self.get_delims()) sql = self.localize_sql(sql) log.debug("About to insert_record with SQL template: " + sql) try: cursor = self.db.cursor() debug_sql(sql, values) cursor.execute(sql, values) new_pk = get_pk_of_last_insert(cursor) log.debug("Record inserted.") return new_pk except: log.exception("insert_record: Failed to insert record.") raise
Inserts a record into database, table "table", using the list of fieldnames and the list of values. Returns the new PK (or None).
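A self-contained sqlite3 sketch of the same insert-and-return-PK pattern; `get_sql_insert` and `get_pk_of_last_insert` are the library's own helpers and are not reproduced here:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE person (id INTEGER PRIMARY KEY, name TEXT, age INTEGER)")

    fields, values = ["name", "age"], ["Alice", 30]
    # parameterized insert, analogous to the generated SQL template above
    sql = "INSERT INTO person ({}) VALUES ({})".format(
        ", ".join(fields), ", ".join("?" for _ in fields))
    cur = conn.execute(sql, values)
    print(cur.lastrowid)  # the new PK -> 1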
11,047
def _get_inherited_field_types(class_to_field_type_overrides, schema_graph): inherited_field_type_overrides = dict() for superclass_name, field_type_overrides in class_to_field_type_overrides.items(): for subclass_name in schema_graph.get_subclass_set(superclass_name): inherited_field_type_overrides.setdefault(subclass_name, dict()) inherited_field_type_overrides[subclass_name].update(field_type_overrides) return inherited_field_type_overrides
Return a dictionary describing the field type overrides in subclasses.
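A toy demonstration of the override propagation above; the schema-graph class here is a hypothetical stand-in exposing only get_subclass_set:

    class ToySchemaGraph(object):
        def __init__(self, subclasses):
            self._subclasses = subclasses

        def get_subclass_set(self, name):
            # in the real schema graph this set includes the class itself
            return self._subclasses.get(name, set())

    graph = ToySchemaGraph({"Animal": {"Animal", "Dog", "Cat"}})
    print(_get_inherited_field_types({"Animal": {"name": "String"}}, graph))
    # -> {'Animal': {'name': 'String'}, 'Dog': {'name': 'String'},
    #     'Cat': {'name': 'String'}}  (key order may vary)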
11,048
def place_objects(self): pos_arr, quat_arr = self.initializer.sample() for k, obj_name in enumerate(self.objects): self.objects[obj_name].set("pos", array_to_string(pos_arr[k])) self.objects[obj_name].set("quat", array_to_string(quat_arr[k]))
Places objects randomly until no collisions or max iterations hit.
11,049
def visit_List(self, node): if node.elts: return list(set(sum([self.visit(elt) for elt in node.elts], []))) else: return [frozenset()]
List construction depends on each element's type dependency.
11,050
def show_batch_runner(self): from safe.gui.tools.batch.batch_dialog import BatchDialog dialog = BatchDialog( parent=self.iface.mainWindow(), iface=self.iface, dock=self.dock_widget) dialog.exec_()
Show the batch runner dialog.
11,051
def check_no_signature(self, function, docstring):
    if docstring:
        first_line = ast.literal_eval(docstring).strip().split('\n')[0]
        if function.name + '(' in first_line.replace(' ', ''):
            return violations.D402()
D402: First line should not be function's or method's "signature". The one-line docstring should NOT be a "signature" reiterating the function/method parameters (which can be obtained by introspection).
11,052
def init_progress_bar(self):
    # NOTE: `disable` is not defined in the extracted source; reading it from
    # the instance is an assumption.
    disable = getattr(self, 'disable', False)
    if disable:
        total = None
    else:
        self.iterable = list(self.iterable)
        total = len(self.iterable)
    return tqdm(total=total, disable=disable, leave=False,
                desc=self.description)
Initialize and return a progress bar.
11,053
def spin(self): try: spin = self._spin if spin is not None: return spin except AttributeError: pass if self.vartype is Vartype.SPIN: self._spin = spin = self else: self._counterpart = self._spin = spin = self.change_vartype(Vartype.SPIN, inplace=False) spin._binary = self return spin
:class:`.BinaryQuadraticModel`: An instance of the Ising model subclass of the :class:`.BinaryQuadraticModel` superclass, corresponding to a binary quadratic model with spins as its variables. Enables access to biases for the spin-valued binary quadratic model regardless of the :class:`vartype` set when the model was created. If the model was created with the :attr:`.binary` vartype, the Ising model subclass is instantiated upon the first use of the :attr:`.spin` property and used in any subsequent reads. Examples: This example creates a QUBO model and uses the :attr:`.spin` property to instantiate the corresponding Ising model. >>> import dimod ... >>> bqm_qubo = dimod.BinaryQuadraticModel({0: -1, 1: -1}, {(0, 1): 2}, 0.0, dimod.BINARY) >>> bqm_spin = bqm_qubo.spin >>> bqm_spin # doctest: +SKIP BinaryQuadraticModel({0: 0.0, 1: 0.0}, {(0, 1): 0.5}, -0.5, Vartype.SPIN) >>> bqm_spin.spin is bqm_spin True Note: Methods like :meth:`.add_variable`, :meth:`.add_variables_from`, :meth:`.add_interaction`, etc. should only be used on the base model.
11,054
def to_file(file_):
    from sevenbridges.models.file import File
    # the error-message literals were stripped in the source
    if not file_:
        raise SbgError('File is required!')
    elif isinstance(file_, File):
        return file_.id
    elif isinstance(file_, six.string_types):
        return file_
    else:
        raise SbgError('Invalid file parameter!')
Serializes file to id string :param file_: object to serialize :return: string id
11,055
def get_ilo_firmware_version_as_major_minor(self):
    data = self.get_host_health_data()
    firmware_details = self._get_firmware_embedded_health(data)
    if firmware_details:
        # the dictionary key was stripped in the source; 'iLO' is an
        # assumption based on the surrounding proliantutils code
        ilo_version_str = firmware_details.get('iLO', None)
        return common.get_major_minor(ilo_version_str)
Gets the ilo firmware version for server capabilities Parse the get_host_health_data() to retreive the firmware details. :param data: the output returned by get_host_health_data() :returns: String with the format "<major>.<minor>" or None.
11,056
def point_in_triangle(p, v1, v2, v3): def _test(p1, p2, p3): return (p1[0] - p3[0]) * (p2[1] - p3[1]) - (p2[0] - p3[0]) * (p1[1] - p3[1]) b1 = _test(p, v1, v2) < 0.0 b2 = _test(p, v2, v3) < 0.0 b3 = _test(p, v3, v1) < 0.0 return (b1 == b2) and (b2 == b3)
Checks whether a point is within the given triangle The function checks whether the given point p is within the triangle defined by the three corner points v1, v2 and v3. This is done by checking whether the point is on all three half-planes defined by the three edges of the triangle. :param p: The point to be checked (tuple with x and y coordinate) :param v1: First vertex of the triangle (tuple with x and y coordinate) :param v2: Second vertex of the triangle (tuple with x and y coordinate) :param v3: Third vertex of the triangle (tuple with x and y coordinate) :return: True if the point is within the triangle, False if not
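A quick check of the sign test above on the unit right triangle; the helper `_test` computes twice the signed area of the triangle (p1, p2, p3), so all three signs agree exactly when the point is inside:

    print(point_in_triangle((0.25, 0.25), (0.0, 0.0), (1.0, 0.0), (0.0, 1.0)))  # True
    print(point_in_triangle((1.0, 1.0), (0.0, 0.0), (1.0, 0.0), (0.0, 1.0)))    # False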
11,057
def query(path, method='GET', data=None, params=None, header_dict=None, decode=True):
    # NOTE: the config keys, URL template and header literals were stripped
    # in the source and are reconstructed here.
    certificate_path = config.get_cloud_config_value(
        'certificate_path',
        get_configured_provider(), __opts__, search_global=False
    )
    subscription_id = salt.utils.stringutils.to_str(
        config.get_cloud_config_value(
            'subscription_id',
            get_configured_provider(), __opts__, search_global=False
        )
    )
    management_host = config.get_cloud_config_value(
        'management_host',
        get_configured_provider(),
        __opts__,
        search_global=False,
        default='management.core.windows.net'
    )
    backend = config.get_cloud_config_value(
        'backend',
        get_configured_provider(), __opts__, search_global=False
    )
    url = 'https://{management_host}/{subscription_id}/{path}'.format(
        management_host=management_host,
        subscription_id=subscription_id,
        path=path,
    )
    if header_dict is None:
        header_dict = {}
    header_dict['x-ms-version'] = '2014-06-01'
    result = salt.utils.http.query(
        url,
        method=method,
        params=params,
        data=data,
        header_dict=header_dict,
        port=443,
        text=True,
        cert=certificate_path,
        backend=backend,
        decode=decode,
        decode_type='xml',
    )
    if 'dict' in result:
        return result['dict']
    return
Perform a query directly against the Azure REST API
11,058
def get_index2data(model_description):
    # NOTE: the CSV delimiter/quote and key literals were stripped in the
    # source; they are reconstructed from the docstring.
    index2latex = {}
    translation_csv = os.path.join(get_project_root(),
                                   model_description["data-source"],
                                   "index2formula_id.csv")
    with open(translation_csv) as csvfile:
        csvreader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
        for row in csvreader:
            database_id = int(row['formula_id'])
            online_data = get_online_symbol_data(database_id)
            latex = online_data['formula_in_latex']
            unicode_code_point = online_data['unicode_dec']
            font = online_data['font']
            font_style = online_data['font_style']
            index2latex[int(row['index'])] = [database_id, latex,
                                              unicode_code_point, font,
                                              font_style]
    return index2latex
Get a dictionary that maps indices to a list of (1) the id in the hwrt symbol database (2) the latex command (3) the unicode code point (4) a font family and (5) a font style. Parameters ---------- model_description : string A model description file that points to a feature folder where an ``index2formula_id.csv`` has to be. Returns ------- dictionary that maps indices to lists of data Notes ----- This command need a database connection.
11,059
def ancestors(self): ancestors = set([]) self._depth_ascend(self, ancestors) try: ancestors.remove(self) except KeyError: pass return list(ancestors)
Returns a list of the ancestors of this node.
11,060
def queue_emission(self, msg):
    if not msg:
        return
    for _emitter in self._emit:
        if not hasattr(_emitter, 'emit'):
            continue

        def emit(emitter=_emitter):
            self.log.debug("emit to {}".format(emitter.name))
            emitter.emit(msg)

        self.log.debug("queue emission to {} ({})".format(
            _emitter.name, self._emit_queue.qsize()))
        self._emit_queue.put(emit)
queue an emission of a message for all output plugins
11,061
async def save(proxies, filename):
    with open(filename, 'w') as f:
        while True:
            proxy = await proxies.get()
            if proxy is None:
                break
            proto = 'https' if 'HTTPS' in proxy.types else 'http'
            row = '%s://%s:%d\n' % (proto, proxy.host, proxy.port)
            f.write(row)
Save proxies to a file.
11,062
def service(name, action):
    if action == 'open':
        subprocess.check_output(['ufw', 'allow', str(name)],
                                universal_newlines=True)
    elif action == 'close':
        subprocess.check_output(['ufw', 'delete', 'allow', str(name)],
                                universal_newlines=True)
    else:
        raise UFWError(("'{}' not supported, use 'open' "
                        "or 'close'").format(action))
Open/close access to a service :param name: could be a service name defined in `/etc/services` or a port number. :param action: `open` or `close`
11,063
def open_files(by_pid=False):
    # NOTE: the /proc path literals were stripped in the source and are
    # reconstructed here.
    pids = {}
    procfs = os.listdir('/proc/')
    for pfile in procfs:
        try:
            pids[int(pfile)] = []
        except ValueError:
            # not a PID directory
            pass

    files = {}
    for pid in pids:
        ppath = '/proc/{0}'.format(pid)
        try:
            tids = os.listdir('{0}/task'.format(ppath))
        except OSError:
            continue

        # collect the file descriptors for the process and all of its tasks
        fd_ = []
        for fpath in os.listdir('{0}/fd'.format(ppath)):
            fd_.append('{0}/fd/{1}'.format(ppath, fpath))
        for tid in tids:
            try:
                fd_.append(
                    os.path.realpath('{0}/task/{1}/exe'.format(ppath, tid))
                )
            except OSError:
                continue
            for tpath in os.listdir('{0}/task/{1}/fd'.format(ppath, tid)):
                fd_.append('{0}/task/{1}/fd/{2}'.format(ppath, tid, tpath))
        fd_ = sorted(set(fd_))

        # resolve each descriptor to a real path and index it both ways
        for fdpath in fd_:
            try:
                name = os.path.realpath(fdpath)
                os.stat(name)
            except OSError:
                continue
            if name not in files:
                files[name] = [pid]
            else:
                files[name].append(pid)
                files[name] = sorted(set(files[name]))
            pids[pid].append(name)
            pids[pid] = sorted(set(pids[pid]))

    if by_pid:
        return pids
    return files
Return a list of all physical open files on the system. CLI Examples: .. code-block:: bash salt '*' file.open_files salt '*' file.open_files by_pid=True
11,064
def normalize(alias):
    # NOTE: the regex literals were stripped in the source; stripping
    # punctuation is the assumed intent.
    alias = re.sub(r'[^\w\s]', r'', alias)
    words = alias.lower().split()
    words = filter(lambda w: w not in IGNORED_WORDS, words)
    return ' '.join(words)
Normalizes an alias by removing adverbs defined in IGNORED_WORDS
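Since the regex literals above are reconstructions, here is the word-filtering core of the entry as an independent, runnable sketch (the IGNORED_WORDS contents are hypothetical):

    IGNORED_WORDS = {"the", "a", "of"}  # hypothetical contents

    def normalize_sketch(alias):
        words = alias.lower().split()
        return " ".join(w for w in words if w not in IGNORED_WORDS)

    print(normalize_sketch("The Lord of the Rings"))  # -> 'lord rings'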
11,065
def ref_file(
        ticker: str, fld: str, has_date=False, cache=False, ext='parq',
        **kwargs
) -> str:
    # NOTE: path and literal fragments were stripped in the source; they are
    # reconstructed from the doctest examples in the docstring below.
    data_path = os.environ.get(assist.BBG_ROOT, '').replace('\\', '/')
    if (not data_path) or (not cache): return ''

    proper_ticker = ticker.replace('/', '_')
    cache_days = kwargs.pop('cache_days', 10)
    root = f'{data_path}/{ticker.split()[-1]}/{proper_ticker}/{fld}'

    if len(kwargs) > 0: info = utils.to_str(kwargs)[1:-1].replace('|', ', ')
    else: info = 'ovrd=None'

    # check date info
    if has_date:
        cur_dt = utils.cur_time()
        missing = f'{root}/asof={cur_dt}, {info}.{ext}'
        to_find = re.compile(rf'{root}/asof=(.*), {info}\.')
        cur_files = list(filter(to_find.match, sorted(
            files.all_files(path_name=root, keyword=info, ext=ext)
        )))
        if len(cur_files) > 0:
            upd_dt = to_find.match(cur_files[-1]).group(1)
            diff = pd.Timestamp('today') - pd.Timestamp(upd_dt)
            if diff >= pd.Timedelta(days=cache_days): return missing
            return sorted(cur_files)[-1]
        else:
            return missing
    else:
        return f'{root}/{info}.{ext}'
Data file location for Bloomberg reference data Args: ticker: ticker name fld: field has_date: whether add current date to data file cache: if has_date is True, whether to load file from latest cached ext: file extension **kwargs: other overrides passed to ref function Returns: file location Examples: >>> import shutil >>> >>> os.environ['BBG_ROOT'] = '' >>> ref_file('BLT LN Equity', fld='Crncy') == '' True >>> os.environ['BBG_ROOT'] = '/data/bbg' >>> ref_file('BLT LN Equity', fld='Crncy', cache=True) '/data/bbg/Equity/BLT LN Equity/Crncy/ovrd=None.parq' >>> ref_file('BLT LN Equity', fld='Crncy') '' >>> cur_dt = utils.cur_time(tz=utils.DEFAULT_TZ) >>> ref_file( ... 'BLT LN Equity', fld='DVD_Hist_All', has_date=True, cache=True, ... ).replace(cur_dt, '[cur_date]') '/data/bbg/Equity/BLT LN Equity/DVD_Hist_All/asof=[cur_date], ovrd=None.parq' >>> ref_file( ... 'BLT LN Equity', fld='DVD_Hist_All', has_date=True, ... cache=True, DVD_Start_Dt='20180101', ... ).replace(cur_dt, '[cur_date]')[:-5] '/data/bbg/Equity/BLT LN Equity/DVD_Hist_All/asof=[cur_date], DVD_Start_Dt=20180101' >>> sample = 'asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl' >>> root_path = 'xbbg/tests/data' >>> sub_path = f'{root_path}/Equity/AAPL US Equity/DVD_Hist_All' >>> os.environ['BBG_ROOT'] = root_path >>> for tmp_file in files.all_files(sub_path): os.remove(tmp_file) >>> files.create_folder(sub_path) >>> sample in shutil.copy(f'{root_path}/{sample}', sub_path) True >>> new_file = ref_file( ... 'AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', ... has_date=True, cache=True, ext='pkl' ... ) >>> new_file.split('/')[-1] == f'asof={cur_dt}, DVD_Start_Dt=20180101.pkl' True >>> old_file = 'asof=2018-11-02, DVD_Start_Dt=20180101, DVD_End_Dt=20180501.pkl' >>> old_full = '/'.join(new_file.split('/')[:-1] + [old_file]) >>> updated_file = old_full.replace('2018-11-02', cur_dt) >>> updated_file in shutil.copy(old_full, updated_file) True >>> exist_file = ref_file( ... 'AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', ... has_date=True, cache=True, ext='pkl' ... ) >>> exist_file == updated_file False >>> exist_file = ref_file( ... 'AAPL US Equity', 'DVD_Hist_All', DVD_Start_Dt='20180101', ... DVD_End_Dt='20180501', has_date=True, cache=True, ext='pkl' ... ) >>> exist_file == updated_file True
11,066
def _ExtractPathSpecsFromFile(self, file_entry):
    produced_main_path_spec = False
    for data_stream in file_entry.data_streams:
        # make a copy so the file entry's path specification is not modified
        # directly (the per-stream construction was lost in extraction and is
        # reconstructed here)
        path_spec = copy.deepcopy(file_entry.path_spec)
        if data_stream.name:
            setattr(path_spec, 'data_stream', data_stream.name)
        yield path_spec

        if not data_stream.name:
            produced_main_path_spec = True

    if not produced_main_path_spec:
        yield file_entry.path_spec
Extracts path specification from a file. Args: file_entry (dfvfs.FileEntry): file entry that refers to the file. Yields: dfvfs.PathSpec: path specification of a file entry found in the file.
11,067
def remove_role(role): def processor(action, argument): ActionRoles.query_by_action(action, argument=argument).filter( ActionRoles.role_id == role.id ).delete(synchronize_session=False) return processor
Remove an action for a role.
11,068
def set_time_zone(self, item): i3s_time = item["full_text"].encode("UTF-8", "replace") try: i3s_time = i3s_time.decode() except: pass parts = i3s_time.split() i3s_datetime = " ".join(parts[:2]) if len(parts) < 3: return True else: i3s_time_tz = parts[2] date = datetime.strptime(i3s_datetime, TIME_FORMAT) utcnow = datetime.utcnow() delta = datetime( date.year, date.month, date.day, date.hour, date.minute ) - datetime(utcnow.year, utcnow.month, utcnow.day, utcnow.hour, utcnow.minute) try: self.tz = Tz(i3s_time_tz, delta) except ValueError: return False return True
Work out the time zone and create a shim tzinfo. We return True if all is good or False if there was an issue and we need to re check the time zone. see issue #1375
11,069
def find_mof(self, classname):
    classname = classname.lower()
    for search in self.parser.search_paths:
        for root, dummy_dirs, files in os.walk(search):
            for file_ in files:
                if file_.endswith('.mof') and \
                        file_[:-4].lower() == classname:
                    return os.path.join(root, file_)
    return None
Find the MOF file that defines a particular CIM class, in the search path of the MOF compiler. The MOF file is found based on its file name: It is assumed that the base part of the file name is the CIM class name. Example: The class "CIM_ComputerSystem" is expected to be in a file "CIM_ComputerSystem.mof". Parameters: classname (:term:`string`): The name of the CIM class to look up. Returns: :term:`string`: Path name of the MOF file defining the CIM class, if it was found. `None`, if it was not found.
11,070
def to_native(self, obj, name, value): if self.mapping: for original, new in self.mapping.items(): value = value.replace(original, new) return load(value, self.namespace)
Transform the MongoDB value into a Marrow Mongo value.
11,071
def score_samples(self, X): check_is_fitted(self, "mean_") Xr = X - self.mean_ n_features = X.shape[1] precision = self.get_precision() log_like = -0.5 * (Xr * (da.dot(Xr, precision))).sum(axis=1) log_like -= 0.5 * (n_features * da.log(2.0 * np.pi) - fast_logdet(precision)) return log_like
Return the log-likelihood of each sample. See. "Pattern Recognition and Machine Learning" by C. Bishop, 12.2.1 p. 574 or http://www.miketipping.com/papers/met-mppca.pdf Parameters ---------- X : array, shape(n_samples, n_features) The data. Returns ------- ll : array, shape (n_samples,) Log-likelihood of each sample under the current model
11,072
def flush_redis_unsafe(redis_client=None): if redis_client is None: ray.worker.global_worker.check_connected() redis_client = ray.worker.global_worker.redis_client keys = redis_client.keys("LOGFILE:*") if len(keys) > 0: num_deleted = redis_client.delete(*keys) else: num_deleted = 0 print("Deleted {} log files from Redis.".format(num_deleted)) keys = redis_client.keys("event_log:*") if len(keys) > 0: num_deleted = redis_client.delete(*keys) else: num_deleted = 0 print("Deleted {} event logs from Redis.".format(num_deleted))
This removes some non-critical state from the primary Redis shard. This removes the log files as well as the event log from Redis. This can be used to try to address out-of-memory errors caused by the accumulation of metadata in Redis. However, it will only partially address the issue as much of the data is in the task table (and object table), which are not flushed. Args: redis_client: optional, if not provided then ray.init() must have been called.
11,073
def get_pk(self, field_val): field_name = self.schema.pk.name return self.is_field(field_name, field_val).get_one()
convenience method for running is_pk(_id).get_one() since this is so common
11,074
def create_object_id(collection, vault, name, version):
    collection = _validate_string_argument(collection, 'collection')
    vault = _validate_string_argument(vault, 'vault')
    name = _validate_string_argument(name, 'name')
    version = _validate_string_argument(version, 'version', True)
    _parse_uri_argument(vault)
    return KeyVaultIdentifier(collection=collection, vault=vault, name=name,
                              version=version)
:param collection: The resource collection type. :type collection: str :param vault: The vault URI. :type vault: str :param name: The resource name. :type name: str :param version: The resource version. :type version: str :rtype: KeyVaultId
11,075
def parse_options(): version = "%%prog {version}".format(version=__version__) parser = OptionParser(version=version) parser.add_option( "-u", "--username", action="store", dest="username", type="string", default="", metavar="RECIPIENT", help="user" ) parser.add_option( "-C", "--calendar", metavar="CALENDAR", action="store", type="string", dest="calendar", default="", help="google calendar ID" ) parser.add_option( "-t", "--timezone", metavar="TIMEZONE", action="store", type="string", dest="timezone", default="", help="user timezone" ) parser.add_option( "-m", "--message", metavar="MESSAGE", action="store", type="string", dest="message", default="", help="message text" ) parser.add_option( "-c", "--config", metavar="CONFIG", action="store", type="string", dest="config", help="path to config file", default="/etc/nagios/notification_google_calendar.ini") parser.add_option( "-q", "--quiet", metavar="QUIET", action="store_true", default=False, dest="quiet", help="be quiet" ) parser.add_option( "-g", "--get-google-credentials", metavar="GET-GOOGLE-CREDENTIALS", action="store_true", default=False, dest="get_google_credentials", help="get google API credentials for user" ) options = parser.parse_args(sys.argv)[0] mandatories = ["username", ] if not options.get_google_credentials: mandatories.append("calendar") mandatories.append("message") mandatories.append("timezone") if not all(options.__dict__[mandatory] for mandatory in mandatories): parser.error("Required command line option missing\n") return options
Command-line options and arguments parsing.
11,076
def _load_hooks_settings(self): log.debug("executing _load_hooks_settings") hook_show_widget = self.get_widget("hook_show") hook_show_setting = self.settings.hooks.get_string("show") if hook_show_widget is not None: if hook_show_setting is not None: hook_show_widget.set_text(hook_show_setting)
load hooks settings
11,077
def poll(self):
    if self.group_id is None or self.config['api_version'] < (0, 8, 2):
        return

    self._invoke_completed_offset_commit_callbacks()
    self.ensure_coordinator_ready()

    if self.config['api_version'] >= (0, 9) and self._subscription.partitions_auto_assigned():
        if self.need_rejoin():
            # due to a race condition between the initial metadata fetch and
            # the initial rebalance, we need to ensure the metadata is fresh
            # before joining when subscribed by pattern
            if self._subscription.subscribed_pattern:
                metadata_update = self._client.cluster.request_update()
                self._client.poll(future=metadata_update)
            self.ensure_active_group()
        self.poll_heartbeat()

    self._maybe_auto_commit_offsets_async()
Poll for coordinator events. Only applicable if group_id is set, and broker version supports GroupCoordinators. This ensures that the coordinator is known, and if using automatic partition assignment, ensures that the consumer has joined the group. This also handles periodic offset commits if they are enabled.
11,078
def set_terms(self,*terms, **kw_terms): for t in terms: self.add_term(t) for k,v in kw_terms.items(): try: value, props = v except (ValueError, TypeError) as e: value, props = v,{} self.new_term(k,value,**props)
Create or set top level terms in the section. After Python 3.6.0, the term entries should maintain the same order as the argument list. The term arguments can have any of these forms: * For positional arguments, a Term object * For kw arguments: - 'TermName=TermValue' - 'TermName=(TermValue, PropertyDict)' Positional arguments are processed before keyword arguments, and are passed into .add_term(). :param terms: Term arguments :return:
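The keyword branch above relies on a try/except unpack to accept either a bare value or a (value, props) pair; isolated, the trick looks like this:

    def _split_kw_term(v):
        try:
            value, props = v          # works for ('2019', {'x': 1})
        except (ValueError, TypeError):
            value, props = v, {}      # bare value: no properties
        return value, props

    print(_split_kw_term("2019"))              # -> ('2019', {})
    print(_split_kw_term(("2019", {"x": 1})))  # -> ('2019', {'x': 1})

    # Caveat visible in the original too: a 2-character string unpacks into
    # two characters instead of falling through to the bare-value branch.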
11,079
def getSearchUrl(self, album, artist): params = collections.OrderedDict() params["search-alias"] = "popular" params["field-artist"] = artist params["field-title"] = album params["sort"] = "relevancerank" return __class__.assembleUrl(self.base_url, params)
See CoverSource.getSearchUrl.
11,080
def get_certificates(
        self, vault_base_url, maxresults=None, include_pending=None,
        custom_headers=None, raw=False, **operation_config):
    # NOTE: the URL, query-parameter and header literals were stripped in the
    # source; they are reconstructed from the standard msrest-generated code.
    def internal_paging(next_link=None, raw=False):
        if not next_link:
            url = self.get_certificates.metadata['url']
            path_format_arguments = {
                'vaultBaseUrl': self._serialize.url(
                    "vault_base_url", vault_base_url, 'str', skip_quote=True)
            }
            url = self._client.format_url(url, **path_format_arguments)
            query_parameters = {}
            if maxresults is not None:
                query_parameters['maxresults'] = self._serialize.query(
                    "maxresults", maxresults, 'int', maximum=25, minimum=1)
            if include_pending is not None:
                query_parameters['includePending'] = self._serialize.query(
                    "include_pending", include_pending, 'bool')
            query_parameters['api-version'] = self._serialize.query(
                "self.api_version", self.api_version, 'str')
        else:
            url = next_link
            query_parameters = {}

        header_parameters = {}
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if self.config.generate_client_request_id:
            header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
        if custom_headers:
            header_parameters.update(custom_headers)
        if self.config.accept_language is not None:
            header_parameters['accept-language'] = self._serialize.header(
                "self.config.accept_language", self.config.accept_language, 'str')

        request = self._client.get(url, query_parameters)
        response = self._client.send(
            request, header_parameters, stream=False, **operation_config)
        if response.status_code not in [200]:
            raise models.KeyVaultErrorException(self._deserialize, response)
        return response

    deserialized = models.CertificateItemPaged(
        internal_paging, self._deserialize.dependencies)
    if raw:
        header_dict = {}
        client_raw_response = models.CertificateItemPaged(
            internal_paging, self._deserialize.dependencies, header_dict)
        return client_raw_response
    return deserialized
List certificates in a specified key vault. The GetCertificates operation returns the set of certificates resources in the specified key vault. This operation requires the certificates/list permission. :param vault_base_url: The vault name, for example https://myvault.vault.azure.net. :type vault_base_url: str :param maxresults: Maximum number of results to return in a page. If not specified the service will return up to 25 results. :type maxresults: int :param include_pending: Specifies whether to include certificates which are not completely provisioned. :type include_pending: bool :param dict custom_headers: headers that will be added to the request :param bool raw: returns the direct response alongside the deserialized response :param operation_config: :ref:`Operation configuration overrides<msrest:optionsforoperations>`. :return: An iterator like instance of CertificateItem :rtype: ~azure.keyvault.v7_0.models.CertificateItemPaged[~azure.keyvault.v7_0.models.CertificateItem] :raises: :class:`KeyVaultErrorException<azure.keyvault.v7_0.models.KeyVaultErrorException>`
11,081
def p_opt_order(self, p):
    if len(p) > 1:
        # the order literals were stripped in the source; 'C'/'F' are the
        # standard array orders
        if p[3] not in 'CF':
            raise PythranSyntaxError("Invalid Pythran spec. "
                                     "Unknown order '{}'".format(p[3]))
        p[0] = p[3]
    else:
        p[0] = None
opt_order : | ORDER LPAREN IDENTIFIER RPAREN
11,082
def plot(self, x=None, y=None, z=None, what="count(*)", vwhat=None,
         reduce=["colormap"], f=None,
         normalize="normalize", normalize_axis="what",
         vmin=None, vmax=None,
         shape=256, vshape=32, limits=None, grid=None, colormap="afmhot",
         figsize=None, xlabel=None, ylabel=None, aspect="auto",
         tight_layout=True, interpolation="nearest", show=False,
         colorbar=True, colorbar_label=None,
         selection=None, selection_labels=None, title=None,
         background_color="white", pre_blend=False, background_alpha=1.,
         visual=dict(x="x", y="y", layer="z", fade="selection",
                     row="subspace", column="what"),
         smooth_pre=None, smooth_post=None,
         wrap=True, wrap_columns=4,
         return_extra=False, hardcopy=None):
    # NOTE: large spans of this function were lost in extraction; the gaps
    # are marked below, and names such as labels, move, visual_axes, fs,
    # xlimits, total_grid, grid_of_grids and facet_index are defined inside
    # the lost spans.
    import pylab
    import matplotlib
    n = _parse_n(normalize)
    if type(shape) == int:
        shape = (shape,) * 2
    binby = []
    x = _ensure_strings_from_expressions(x)
    y = _ensure_strings_from_expressions(y)
    for expression in [y, x]:
        if expression is not None:
            binby = [expression] + binby
    fig = pylab.gcf()
    if figsize is not None:
        fig.set_size_inches(*figsize)
    import re

    what_units = None
    whats = _ensure_list(what)
    selections = _ensure_list(selection)
    selections = _ensure_strings_from_expressions(selections)

    if y is None:
        waslist, [x, ] = vaex.utils.listify(x)
    else:
        waslist, [x, y] = vaex.utils.listify(x, y)
        x = list(zip(x, y))
        limits = [limits]
    vwhats = _expand_limits(vwhat, len(x))

    # ... (lost span: parsing of the `what` argument; on failure it raised)
    #     raise ValueError("Could not understand argument %r, expected "
    #                      "something in form: 'count(*)', 'mean(x)'" % what)

    if grid is None:
        what_labels = []
        # ... (lost span: building grid_of_grids; surviving fragment:)
        #     if i == 0:
        #         what_label = str(whats[j])
        #         if what_units:
        #             what_label += " (%s)" % what_units
        #         if fs[j]:
        #             what_label = fs[j] + " " + what_label
        #         what_labels.append(what_label)
        #     grid_of_grids[-1].append(grid)
        self.executor.execute()
        for i, (binby, limits) in enumerate(zip(x, xlimits)):
            for j, what in enumerate(whats):
                grid = grid_of_grids[i][j].get()
                total_grid[i, j, :, :] = grid[:, None, ...]
        labels["what"] = what_labels
    else:
        dims_left = 6 - len(grid.shape)
        total_grid = np.broadcast_to(grid, (1,) * dims_left + grid.shape)

    def _selection_name(name):
        if name in [None, False]:
            return "selection: all"
        elif name in ["default", True]:
            return "selection: default"
        else:
            return "selection: %s" % name

    if selection_labels is None:
        labels["selection"] = list([_selection_name(k) for k in selections])
    else:
        labels["selection"] = selection_labels

    axes = [None] * len(move)
    for key, value in move.items():
        axes[value] = key
    visual_grid = np.transpose(total_grid, axes)
    logger.debug("grid shape: %r", total_grid.shape)
    logger.debug("visual: %r", visual.items())
    logger.debug("move: %r", move)
    logger.debug("visual grid shape: %r", visual_grid.shape)

    xexpressions = []
    yexpressions = []
    for i, (binby, limits) in enumerate(zip(x, xlimits)):
        xexpressions.append(binby[0])
        yexpressions.append(binby[1])
    if xlabel is None:
        xlabels = []
        ylabels = []
        for i, (binby, limits) in enumerate(zip(x, xlimits)):
            if z is not None:
                xlabels.append(self.label(binby[1]))
                ylabels.append(self.label(binby[2]))
            else:
                xlabels.append(self.label(binby[0]))
                ylabels.append(self.label(binby[1]))
    else:
        Nl = visual_grid.shape[visual_axes['row']]
        xlabels = _expand(xlabel, Nl)
        ylabels = _expand(ylabel, Nl)
    labels["x"] = xlabels
    labels["y"] = ylabels

    axes = []
    background_color = np.array(
        matplotlib.colors.colorConverter.to_rgb(background_color))

    import math
    facet_columns = None
    facets = (visual_grid.shape[visual_axes["row"]]
              * visual_grid.shape[visual_axes["column"]])
    if visual_grid.shape[visual_axes["column"]] == 1 and wrap:
        facet_columns = min(wrap_columns,
                            visual_grid.shape[visual_axes["row"]])
        wrapped = True
    elif visual_grid.shape[visual_axes["row"]] == 1 and wrap:
        facet_columns = min(wrap_columns,
                            visual_grid.shape[visual_axes["column"]])
        wrapped = True
    else:
        wrapped = False
        facet_columns = visual_grid.shape[visual_axes["column"]]
    facet_rows = int(math.ceil(facets / facet_columns))
    logger.debug("facet_rows: %r", facet_rows)
    logger.debug("facet_columns: %r", facet_columns)

    grid = visual_grid * 1.
    fgrid = visual_grid * 1.
    ngrid = visual_grid * 1.
    vmins = _expand(vmin,
                    visual_grid.shape[visual_axes[visual[normalize_axis]]],
                    type=list)
    vmaxs = _expand(vmax,
                    visual_grid.shape[visual_axes[visual[normalize_axis]]],
                    type=list)
    if smooth_pre:
        grid = vaex.grids.gf(grid, smooth_pre)
    if 1:
        axis = visual_axes[visual[normalize_axis]]
        for i in range(visual_grid.shape[axis]):
            item = [slice(None, None, None), ] * len(visual_grid.shape)
            item[axis] = i
            item = tuple(item)
            f = _parse_f(fs[i])
            with np.errstate(divide='ignore', invalid='ignore'):
                # ... (lost span: per-panel normalization, blending and
                # imshow; it ends by advancing the facet counter)
                facet_index += 1
    if title:
        fig.suptitle(title, fontsize="x-large")
    if tight_layout:
        if title:
            pylab.tight_layout(rect=[0, 0.03, 1, 0.95])
        else:
            pylab.tight_layout()
    if hardcopy:
        pylab.savefig(hardcopy)
    if show:
        pylab.show()
    if return_extra:
        return im, grid, fgrid, ngrid, rgrid
    else:
        return im
Viz data in a 2d histogram/heatmap.

Declarative plotting of statistical plots using matplotlib, supports subplots, selections, layers.

Instead of passing x and y, pass a list as x argument for multiple panels. Give what a list of options to have multiple panels. When both are present, the panels will be organized in a column/row order.

This method creates a 6 dimensional 'grid', where each dimension can map to a visual dimension. The grid dimensions are:

* x: shape determined by shape, content by x argument or the first dimension of each space
* y: ,,
* z: related to the z argument
* selection: shape equals length of selection argument
* what: shape equals length of what argument
* space: shape equals length of x argument if multiple values are given

By default its shape is (1, 1, 1, 1, shape, shape) (where x is the last dimension)

The visual dimensions are

* x: x coordinate on a plot / image (default maps to grid's x)
* y: y ,, (default maps to grid's y)
* layer: each image in this dimension is blended together to one image (default maps to z)
* fade: each image is shown faded after the next image (default maps to selection)
* row: rows of subplots (default maps to space)
* columns: columns of subplots (default maps to what)

All these mappings can be changed by the visual argument, some examples:

>>> df.plot('x', 'y', what=['mean(x)', 'correlation(vx, vy)'])

Will plot each 'what' as a column.

>>> df.plot('x', 'y', selection=['FeH < -3', '(FeH >= -3) & (FeH < -2)'], visual=dict(column='selection'))

Will plot each selection as a column, instead of faded on top of each other.

:param x: Expression to bin in the x direction (by default maps to x), or list of pairs, like [['x', 'y'], ['x', 'z']]; if multiple pairs are given, this dimension maps to rows by default
:param y: y (by default maps to y)
:param z: Expression to bin in the z direction, followed by a :start,end,shape signature, like 'FeH:-3,1:5' will produce 5 layers between -3 and 1 (by default maps to layer)
:param what: What to plot, count(*) will show a N-d histogram, mean('x') the mean of the x column, sum('x') the sum, std('x') the standard deviation, correlation('vx', 'vy') the correlation coefficient. Can also be a list of values, like ['count(*)', 'std(vx)'] (by default maps to column)
:param reduce:
:param f: transform values by: 'identity' does nothing, 'log' or 'log10' will show the log of the value
:param normalize: normalization function, currently only 'normalize' is supported
:param normalize_axis: which axes to normalize on, None means normalize by the global maximum.
:param vmin: instead of automatic normalization (using normalize and normalization_axis), scale the data between vmin and vmax to [0, 1]
:param vmax: see vmin
:param shape: shape/size of the n-D histogram grid
:param limits: list of [[xmin, xmax], [ymin, ymax]], or a description such as 'minmax', '99%'
:param grid: if the binning is done before by yourself, you can pass it
:param colormap: matplotlib colormap to use
:param figsize: (x, y) tuple passed to pylab.figure for setting the figure size
:param xlabel:
:param ylabel:
:param aspect:
:param tight_layout: call pylab.tight_layout or not
:param colorbar: plot a colorbar or not
:param interpolation: interpolation for imshow, possible options are: 'nearest', 'bilinear', 'bicubic', see matplotlib for more
:param return_extra:
:return:
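A minimal usage sketch, assuming a vaex version whose DataFrame.plot matches the signature above; vaex.example() ships a small demo dataset with x, y, vx, vy (and more) columns:

import vaex

df = vaex.example()  # built-in demo dataset
# single heatmap of log-scaled counts
df.plot('x', 'y', f='log', shape=128, show=True)
# two panels side by side: mean of vx, and the vx-vy correlation
df.plot('x', 'y', what=['mean(vx)', 'correlation(vx, vy)'], show=True)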
11,083
def oauth2_token_setter(remote, resp, token_type='', extra_data=None):
    return token_setter(
        remote,
        resp['access_token'],
        secret='',
        token_type=token_type,
        extra_data=extra_data,
    )
Set an OAuth2 token. The refresh_token can be used to obtain a new access_token after the old one is expired. It is saved in the database for long term use. A refresh_token will be present only if `access_type=offline` is included in the authorization code request. :param remote: The remote application. :param resp: The response. :param token_type: The token type. (Default: ``''``) :param extra_data: Extra information. (Default: ``None``) :returns: A :class:`invenio_oauthclient.models.RemoteToken` instance.
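A hedged sketch of the setter in use after the OAuth2 dance completes; the `remote` object and every response value are made up, and only oauth2_token_setter itself comes from the source above:

# `resp` mimics a token-endpoint response; a refresh_token is present only
# when the authorization request included access_type=offline.
resp = {
    'access_token': 'ya29.a0Af...',   # made-up value
    'refresh_token': '1//0gx...',     # made-up value
    'expires_in': 3600,
    'token_type': 'Bearer',
}
token = oauth2_token_setter(remote, resp, extra_data={'user_id': 42})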
11,084
def save(self, filename):
    projex.text.xmlindent(self.xmlElement())
    try:
        f = open(filename, 'w')
    except IOError:
        logger.exception('Failed to open %s for writing.' % filename)  # message reconstructed; the original string was lost
        return False

    f.write(self.toString())
    f.close()
    return True
Saves the xml data to the given filename.

:param filename | <str>
11,085
def addFASTACommandLineOptions(parser):
    # NOTE: the option names and help strings below were lost in extraction;
    # they are plausible reconstructions, not the verbatim originals.
    parser.add_argument(
        '--fastaFile', type=open, default=sys.stdin, metavar='FILENAME',
        help=('The name of the FASTA input file. Standard input will be read '
              'if no file name is given.'))

    parser.add_argument(
        '--readClass', default='DNARead', choices=readClassNameToClass,
        metavar='CLASSNAME',
        help=('If specified, give the type of the reads in the input. '
              'Possible choices: %s.' % ', '.join(readClassNameToClass)))

    group = parser.add_mutually_exclusive_group()

    group.add_argument(
        '--fasta', default=False, action='store_true',
        help=('If specified, input will be treated as FASTA. This is the '
              'default.'))

    group.add_argument(
        '--fastq', default=False, action='store_true',
        help='If specified, input will be treated as FASTQ.')

    group.add_argument(
        '--fasta-ss', dest='fasta_ss', default=False, action='store_true',
        help=('If specified, input will be treated as FASTA with secondary '
              'structure (each sequence followed by its structure).'))
Add standard command-line options to an argparse parser. @param parser: An C{argparse.ArgumentParser} instance.
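A small usage sketch; note the exact option names depend on the reconstruction flagged in the code above:

import argparse

parser = argparse.ArgumentParser(description='Read FASTA/FASTQ input.')
addFASTACommandLineOptions(parser)
args = parser.parse_args(['--fastq'])   # treat standard input as FASTQ
print(args.fastq)                       # True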
11,086
def feather_links(self, factor=0.01, include_self=False): def feather_node(node): node_weight_sum = sum(l.weight for l in node.link_list) for original_link in node.link_list[:]: neighbor_node = original_link.target neighbor_weight = original_link.weight feather_weight = neighbor_weight / node_weight_sum neighbor_node_weight_sum = sum(l.weight for l in neighbor_node.link_list) for neighbor_link in neighbor_node.link_list: if (not include_self) and (neighbor_link.target == node): continue relative_link_weight = (neighbor_link.weight / neighbor_node_weight_sum) feathered_link_weight = round((relative_link_weight * feather_weight * factor), 2) node.add_link(neighbor_link.target, feathered_link_weight) for n in self.node_list: feather_node(n)
Feather the links of connected nodes. Go through every node in the network and make it inherit the links of the other nodes it is connected to. Because the link weight sum for any given node can be very different within a graph, the weights of inherited links are made proportional to the sum weight of the parent nodes. Args: factor (float): multiplier of neighbor links include_self (bool): whether nodes can inherit links pointing to themselves Returns: None Example: >>> from blur.markov.node import Node >>> node_1 = Node('One') >>> node_2 = Node('Two') >>> node_1.add_link(node_2, 1) >>> node_2.add_link(node_1, 1) >>> graph = Graph([node_1, node_2]) >>> for link in graph.node_list[0].link_list: ... print('{} {}'.format(link.target.value, link.weight)) Two 1 >>> graph.feather_links(include_self=True) >>> for link in graph.node_list[0].link_list: ... print('{} {}'.format(link.target.value, link.weight)) Two 1 One 0.01
11,087
def result_report_class_wise(self):
    results = self.results_class_wise_metrics()

    # NOTE: the header/field strings and dict keys below were lost in
    # extraction; they are plausible reconstructions, not verbatim originals.
    output = self.ui.section_header('Class-wise metrics', indent=2) + '\n'

    output += self.ui.row(
        'Scene label', 'Nref', 'Nsys', 'Accuracy',
        widths=[20, 12, 12, 12],
        separators=[True, False, True, False],
        indent=4
    ) + '\n'
    output += self.ui.row('-', '-', '-', '-') + '\n'

    for scene_label in self.scene_label_list:
        output += self.ui.row(
            scene_label,
            results[scene_label]['count']['Nref'],
            results[scene_label]['count']['Nsys'],
            results[scene_label]['accuracy']['accuracy'] * 100,
            types=['str', 'int', 'int', 'float1_percentage']
        ) + '\n'

    return output
Report class-wise results Returns ------- str result report in string format
11,088
def read_until(self, expected_commands, timeout):
    msg = timeouts.loop_until_timeout_or_valid(
        timeout, lambda: self.read_message(timeout),
        lambda m: m.command in expected_commands, 0)
    if msg.command not in expected_commands:
        raise usb_exceptions.AdbTimeoutError(
            'Timed out waiting for commands: %s', expected_commands)  # message reconstructed; the original string was lost
    return msg
Read AdbMessages from this transport until we get an expected command. The ADB protocol specifies that before a successful CNXN handshake, any other packets must be ignored, so this method provides the ability to ignore unwanted commands. It's primarily used during the initial connection to the device. See Read() for more details, including more exceptions that may be raised. Args: expected_commands: Iterable of expected command responses, like ('CNXN', 'AUTH'). timeout: timeouts.PolledTimeout object to use for timeout. Returns: The ADB message received that matched one of expected_commands. Raises: AdbProtocolError: If timeout expires between reads, this can happen if we are getting spammed with unexpected commands.
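A hedged usage sketch; it assumes the timeouts module referenced above is openhtf.util.timeouts (with its PolledTimeout.from_millis helper), and `transport` is a hypothetical object exposing read_until:

from openhtf.util import timeouts   # assumption: the timeouts module used above

t = timeouts.PolledTimeout.from_millis(5000)
msg = transport.read_until(('CNXN', 'AUTH'), t)   # ignore traffic until a handshake reply
print(msg.command)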
11,089
def ValidOptions(cls): valid_options = [] for obj_name in dir(cls): obj = getattr(cls, obj_name) if inspect.isclass(obj) and issubclass(obj, cls.OptionBase): valid_options.append(obj_name) return valid_options
Returns a list of valid option names.
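A self-contained sketch of the introspection pattern with a hypothetical option hierarchy; note that OptionBase is a subclass of itself, so it appears in the result too:

import inspect

class Flags:                           # hypothetical example hierarchy
    class OptionBase:
        pass

    class DryRun(OptionBase):
        pass

    class Verbose(OptionBase):
        pass

    @classmethod
    def ValidOptions(cls):
        valid_options = []
        for obj_name in dir(cls):
            obj = getattr(cls, obj_name)
            if inspect.isclass(obj) and issubclass(obj, cls.OptionBase):
                valid_options.append(obj_name)
        return valid_options

print(Flags.ValidOptions())            # ['DryRun', 'OptionBase', 'Verbose']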
11,090
def adapter_add_nio_binding(self, adapter_number, port_number, nio):
    try:
        adapter = self._adapters[adapter_number]
    except IndexError:
        raise IOUError('Adapter {adapter_number} does not exist on IOU "{name}"'.format(
            name=self._name, adapter_number=adapter_number))  # message reconstructed; the original string was lost

    if not adapter.port_exists(port_number):
        raise IOUError("Port {port_number} does not exist in adapter {adapter}".format(
            adapter=adapter, port_number=port_number))

    adapter.add_nio(port_number, nio)
    log.info('IOU "{name}" [{id}]: {nio} added to {adapter_number}/{port_number}'.format(
        name=self._name, id=self._id, nio=nio,
        adapter_number=adapter_number, port_number=port_number))  # message reconstructed

    if self.ubridge:
        bridge_name = "IOL-BRIDGE-{}".format(self.application_id + 512)
        yield from self._ubridge_send("iol_bridge add_nio_udp {name} {iol_id} {bay} {unit} {lport} {rhost} {rport}".format(
            name=bridge_name, iol_id=self.application_id, bay=adapter_number, unit=port_number,
            lport=nio.lport, rhost=nio.rhost, rport=nio.rport))
        yield from self._ubridge_apply_filters(adapter_number, port_number, nio.filters)
Adds an adapter NIO binding.

:param adapter_number: adapter number
:param port_number: port number
:param nio: NIO instance to add to the adapter/port
11,091
def install_nginx(instance, dbhost, dbname, port, hostname=None):
    _check_root()

    log("Installing nginx configuration")

    if hostname is None:
        try:
            configuration = _get_system_configuration(dbhost, dbname)
            hostname = configuration.hostname
        except Exception as e:
            log('Could not determine hostname from system configuration:',
                e, type(e), exc=True, lvl=error)  # message reconstructed; the original string was lost
            log('Using "localhost" as fallback hostname', lvl=warn)  # message reconstructed
            hostname = 'localhost'

    # NOTE: the dict keys, paths and messages below were lost in extraction;
    # they are plausible reconstructions, not verbatim originals.
    definitions = {
        'instance': instance,
        'server_public_name': hostname,
        'ssl_certificate': cert_file,
        'ssl_key': key_file,
        'host_url': 'http://127.0.0.1:%i/' % port
    }

    if distribution == 'DEBIAN':
        configuration_file = '/etc/nginx/sites-available/%s.conf' % instance
        configuration_link = '/etc/nginx/sites-enabled/%s.conf' % instance
    elif distribution == 'ARCH':
        configuration_file = '/etc/nginx/nginx.conf'
        configuration_link = None
    else:
        log('Unsupported distribution, cannot install nginx configuration',
            lvl=error)
        return

    log('Writing nginx configuration')
    write_template_file(os.path.join('templates', nginx_configuration),
                        configuration_file,
                        definitions)

    if configuration_link is not None:
        log('Enabling nginx site')
        if not os.path.exists(configuration_link):
            os.symlink(configuration_file, configuration_link)

    log('Restarting nginx service')
    Popen([
        'systemctl',
        'restart',
        'nginx.service'
    ])

    log("Done: Install nginx configuration")
Install nginx configuration
11,092
def unserializers(self, value):
    raise foundations.exceptions.ProgrammingError(
        "{0} | '{1}' attribute is read only!".format(self.__class__.__name__, "unserializers"))
Setter for **self.__unserializers** attribute. :param value: Attribute value. :type value: dict
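The same read-only-setter pattern in a standard-library-only sketch; the Manager class is hypothetical, and AttributeError stands in for foundations' ProgrammingError:

class Manager:
    def __init__(self):
        self.__unserializers = {}

    @property
    def unserializers(self):
        return self.__unserializers

    @unserializers.setter
    def unserializers(self, value):
        raise AttributeError(
            "{0} | 'unserializers' attribute is read only!".format(
                self.__class__.__name__))

m = Manager()
print(m.unserializers)    # {} -- reads go through the property
m.unserializers = {}      # raises AttributeError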
11,093
def validate_call(kwargs, returns, is_method=False):
    def decorator(func):
        @wraps(func)
        def inner(*passed_args, **passed_kwargs):
            # Methods may receive self/cls as their only positional argument
            max_allowed_passed_args_len = 0
            if is_method and type(func) in (types.FunctionType, classmethod):
                max_allowed_passed_args_len = 1
            if len(passed_args) > max_allowed_passed_args_len:
                raise PositionalError()
            validate(kwargs, passed_kwargs, 'keyword arguments')  # label reconstructed; the original string was lost
            return_value = func(*passed_args, **passed_kwargs)
            validate(returns, return_value, 'return value')  # label reconstructed
            return return_value
        inner.__wrapped__ = func
        inner.__validated__ = True
        return inner
    return decorator
Decorator which runs validation on a callable's arguments and its return value. Pass a schema for the kwargs and for the return value. Positional arguments are not supported.
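A hedged usage sketch assuming a conformity-style schema API; the Dictionary/Integer field classes and their signatures may differ in the real library:

from conformity.fields import Dictionary, Integer

@validate_call(
    kwargs=Dictionary({'a': Integer(), 'b': Integer()}, optional_keys=[]),
    returns=Integer(),
)
def add(a, b):
    return a + b

add(a=1, b=2)    # validates, returns 3
add(1, 2)        # raises PositionalError -- positional arguments are rejected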
11,094
def plot(darray, row=None, col=None, col_wrap=None, ax=None, hue=None,
         rtol=0.01, subplot_kws=None, **kwargs):
    darray = darray.squeeze()
    plot_dims = set(darray.dims)
    plot_dims.discard(row)
    plot_dims.discard(col)
    plot_dims.discard(hue)

    ndims = len(plot_dims)

    error_msg = ('Only 1d and 2d plots are supported for facets in xarray. '
                 'See the package `Seaborn` for more options.')  # message reconstructed; the original string was lost

    if ndims in [1, 2]:
        if row or col:
            kwargs['row'] = row
            kwargs['col'] = col
            kwargs['col_wrap'] = col_wrap
            kwargs['subplot_kws'] = subplot_kws
        if ndims == 1:
            plotfunc = line
            kwargs['hue'] = hue
        elif ndims == 2:
            if hue:
                plotfunc = line
                kwargs['hue'] = hue
            else:
                plotfunc = pcolormesh
    else:
        if row or col or hue:
            raise ValueError(error_msg)
        plotfunc = hist

    kwargs['ax'] = ax

    return plotfunc(darray, **kwargs)
Default plot of DataArray using matplotlib.pyplot. Calls xarray plotting function based on the dimensions of darray.squeeze() =============== =========================== Dimensions Plotting function --------------- --------------------------- 1 :py:func:`xarray.plot.line` 2 :py:func:`xarray.plot.pcolormesh` Anything else :py:func:`xarray.plot.hist` =============== =========================== Parameters ---------- darray : DataArray row : string, optional If passed, make row faceted plots on this dimension name col : string, optional If passed, make column faceted plots on this dimension name hue : string, optional If passed, make faceted line plots with hue on this dimension name col_wrap : integer, optional Use together with ``col`` to wrap faceted plots ax : matplotlib axes, optional If None, uses the current axis. Not applicable when using facets. rtol : number, optional Relative tolerance used to determine if the indexes are uniformly spaced. Usually a small positive number. subplot_kws : dict, optional Dictionary of keyword arguments for matplotlib subplots. Only applies to FacetGrid plotting. **kwargs : optional Additional keyword arguments to matplotlib
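A short, runnable example of the dispatch, using only public xarray API:

import numpy as np
import xarray as xr

da = xr.DataArray(np.random.randn(4, 5), dims=('x', 'y'),
                  coords={'x': range(4), 'y': range(5)})
da.plot()              # 2 dims -> pcolormesh
da.isel(y=0).plot()    # 1 dim  -> line
xr.DataArray(np.random.randn(3, 4, 5)).plot()   # anything else -> hist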
11,095
def _commonprefix(files): out = os.path.commonprefix(files) out = out.rstrip("_R") out = out.rstrip("_I") out = out.rstrip("_") return out
Retrieve a common prefix for files without extra _R1 _I1 extensions. Allows alternative naming schemes (R1/R2/R3) (R1/R2/I1).
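Worth noting: str.rstrip removes a trailing *character set*, not a literal suffix, which is exactly what lets the helper trim the bare R or I left over from the common prefix:

print(_commonprefix(['sample_R1.fastq', 'sample_R2.fastq']))   # prefix 'sample_R' -> 'sample'
print(_commonprefix(['sample_I1.fastq', 'sample_I2.fastq']))   # prefix 'sample_I' -> 'sample'
print(_commonprefix(['run_R1_001.fq', 'run_R3_001.fq']))       # prefix 'run_R'    -> 'run'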
11,096
def extern_store_utf8(self, context_handle, utf8_ptr, utf8_len): c = self._ffi.from_handle(context_handle) return c.to_value(self._ffi.string(utf8_ptr, utf8_len).decode())
Given a context and UTF8 bytes, return a new Handle to represent the content.
11,097
def splitext_files_only(filepath):
    "Custom version of splitext that doesn't perform splitext on directories"
    return (
        (filepath, '') if os.path.isdir(filepath)
        else os.path.splitext(filepath)
    )
Custom version of splitext that doesn't perform splitext on directories
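Behavior of the reconstructed helper: directories pass through whole, files split as usual:

import os

os.makedirs('/tmp/archive.tar', exist_ok=True)        # a directory named like a file
print(splitext_files_only('/tmp/archive.tar'))        # ('/tmp/archive.tar', '')
print(splitext_files_only('/tmp/report.tar.gz'))      # ('/tmp/report.tar', '.gz')
print(os.path.splitext('/tmp/archive.tar'))           # ('/tmp/archive', '.tar') -- plain splitext would split it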
11,098
def get_proficiencies_by_search(self, proficiency_query, proficiency_search):
    # permission name lost in extraction; 'search' is a plausible reconstruction
    if not self._can('search'):
        raise PermissionDenied()
    return self._provider_session.get_proficiencies_by_search(proficiency_query, proficiency_search)
Pass through to provider ProficiencySearchSession.get_proficiencies_by_search
11,099
async def acquire(self, command=None, args=()): if self.closed: raise PoolClosedError("Pool is closed") async with self._cond: if self.closed: raise PoolClosedError("Pool is closed") while True: await self._fill_free(override_min=True) if self.freesize: conn = self._pool.popleft() assert not conn.closed, conn assert conn not in self._used, (conn, self._used) self._used.add(conn) return conn else: await self._cond.wait()
Acquires a connection from the free pool.

Creates a new connection if needed.
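A hedged sketch of typical pool usage; create_pool, execute and release are assumptions about the surrounding API (only acquire()'s contract comes from the source above):

import asyncio

async def main():
    pool = await create_pool(('localhost', 6379), minsize=1, maxsize=10)   # hypothetical factory
    conn = await pool.acquire()            # waits if no free connection is available
    try:
        await conn.execute('SET', 'key', 'value')   # hypothetical connection API
    finally:
        pool.release(conn)                 # hand the connection back to the free pool

asyncio.get_event_loop().run_until_complete(main())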