27,000
def remove(self, point, node=None):
    if not self:
        return
    if self.should_remove(point, node):
        return self._remove(point)
    if self.left and self.left.should_remove(point, node):
        self.left = self.left._remove(point)
    elif self.right and self.right.should_remove(point, node):
        self.right = self.right._remove(point)
    if point[self.axis] <= self.data[self.axis]:
        if self.left:
            self.left = self.left.remove(point, node)
    if point[self.axis] >= self.data[self.axis]:
        if self.right:
            self.right = self.right.remove(point, node)
    return self
Removes the node with the given point from the tree. Returns the new root node of the (sub)tree. If there are multiple points matching "point", only one is removed. The optional "node" parameter is used for checking identity once the removal candidate is decided.
27,001
def _limit_call_handler(self):
    with self.limit_lock:
        if self.limit_per_min <= 0:
            return
        now = time.time()
        self.limits = [l for l in self.limits if l > now]
        self.limits.append(now + 60)
        if len(self.limits) >= self.limit_per_min:
            time.sleep(self.limits[0] - now)
Ensure we don't exceed the N-requests-per-minute limit by leveraging a thread lock
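The method keeps a sliding window of expiry timestamps; a standalone sketch of the same idea (hypothetical, simplified, without the lock):

import time

window, per_min = [], 2  # allow at most 2 calls per rolling minute

def throttle():
    now = time.time()
    window[:] = [t for t in window if t > now]  # drop expired slots
    window.append(now + 60)                     # this call's slot expires in 60 s
    if len(window) > per_min:
        time.sleep(window[0] - now)             # block until the oldest slot frees up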
27,002
def output_sizes(self):
    return tuple([l() if callable(l) else l for l in self._output_sizes])
Returns a tuple of all output sizes of all the layers.
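Entries in `_output_sizes` may be plain integers or zero-argument callables for sizes that are only known lazily; a minimal illustration of the same expression:

sizes = [4, lambda: 8, 16]
tuple(l() if callable(l) else l for l in sizes)  # -> (4, 8, 16)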
27,003
def resend_invitations(self):
    if not self.id:
        raise TypeError(u"This event hasn't been created yet.")
    self.service.send(body)
    return self
Resends invites for an event. ::

    event = service.calendar().get_event(id='KEY HERE')
    event.resend_invitations()

Anybody who has not declined this meeting will get a new invite.
27,004
def _find_blob_start(self):
    self._setup_chans()
    blob_time_start = self.t_start
    blob_freq_start = self.chan_start_idx
    blob_start = blob_time_start * self.n_channels_in_file + blob_freq_start
    return blob_start
Find first blob from selection.
27,005
def _is_cif(string):
    if ((string[0:5] == u"data_" and u"_entry.id" in string)
            or (string[0:5] == b"data_" and b"_entry.id" in string)):
        return string
    return False
Test if input string is in CIF format.

:param string: Input string.
:type string: :py:class:`str` or :py:class:`bytes`
:return: Input string if in CIF format or False otherwise.
:rtype: :py:class:`str` or :py:obj:`False`
27,006
def watchlist(self, tubes):
    tubes = set(tubes)
    for tube in tubes - self._watchlist:
        self.watch(tube)
    for tube in self._watchlist - tubes:
        self.ignore(tube)
Set the watchlist to the given tubes

:param tubes: A list of tubes to watch

Automatically un-watches any tubes that are not on the target list
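The implementation relies on set differences to compute the minimal set of watch/ignore calls; the same pattern in isolation:

current = {'default', 'emails'}
target = {'emails', 'jobs'}
to_watch = target - current    # {'jobs'}: newly requested tubes
to_ignore = current - target   # {'default'}: tubes to drop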
27,007
from collections import deque

def lasts(iterable, items=1, default=None):
    last_items = deque(iterable, maxlen=items)
    for _ in range(items - len(last_items)):
        yield default
    for y in last_items:
        yield y
Lazily return the last `items` items from this iterable, yielding `default` for any shortfall.
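A quick usage sketch, assuming the `lasts` generator above is in scope:

list(lasts([1, 2, 3, 4], items=2))    # [3, 4]
list(lasts([1], items=3, default=0))  # [0, 0, 1] -- padded with the default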
27,008
def median2D(const, bin1, label1, bin2, label2, data_label, returnData=False):
    if isinstance(const, pysat.Instrument):
        const = [const]
    elif not isinstance(const, pysat.Constellation):
        raise ValueError("Parameter must be an Instrument or a Constellation.")
    binx = np.linspace(bin1[0], bin1[1], bin1[2] + 1)
    biny = np.linspace(bin2[0], bin2[1], bin2[2] + 1)
    numx = len(binx) - 1
    numy = len(biny) - 1
    numz = len(data_label)
    yarr = np.arange(numy)
    xarr = np.arange(numx)
    zarr = np.arange(numz)
    ans = [[[collections.deque() for i in xarr] for j in yarr] for k in zarr]
    for inst in const:
        for inst in inst:
            if len(inst.data) != 0:
                xind = np.digitize(inst.data[label1], binx) - 1
                for xi in xarr:
                    xindex, = np.where(xind == xi)
                    if len(xindex) > 0:
                        yData = inst.data.iloc[xindex]
                        yind = np.digitize(yData[label2], biny) - 1
                        for yj in yarr:
                            yindex, = np.where(yind == yj)
                            if len(yindex) > 0:
                                for zk in zarr:
                                    ans[zk][yj][xi].extend(
                                        yData.ix[yindex, data_label[zk]].tolist())
    return _calc_2d_median(ans, data_label, binx, biny, xarr, yarr, zarr,
                           numx, numy, numz, returnData)
Return a 2D median of data_label over a season and label1, label2.

Parameters
----------
const: Constellation or Instrument
bin#: [min, max, number of bins]
label#: string
    identifies data product for bin#
data_label: list-like
    contains strings identifying data product(s) to be averaged

Returns
-------
median : dictionary
    2D median accessed by data_label as a function of label1 and label2
    over the season delineated by bounds of passed instrument objects.
    Also includes 'count' and 'avg_abs_dev' as well as the values of
    the bin edges in 'bin_x' and 'bin_y'.
27,009
def diag_post_enable(self, **kwargs):
    config = ET.Element("config")
    diag = ET.SubElement(config, "diag",
                         xmlns="urn:brocade.com:mgmt:brocade-diagnostics")
    post = ET.SubElement(diag, "post")
    enable = ET.SubElement(post, "enable")
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
Auto Generated Code
27,010
def close(self):
    try:
        self._close()
        if hasattr(self.stream_lock, 'closed') and \
                not self.stream_lock.closed:
            self.stream_lock.close()
    finally:
        self.stream_lock = None
        if Handler:
            Handler.close(self)
Close log stream and stream_lock.
27,011
def post_relationship(self, session, json_data, api_type, obj_id, rel_key):
    model = self._fetch_model(api_type)
    resource = self._fetch_resource(session, api_type, obj_id, Permissions.EDIT)
    if rel_key not in resource.__jsonapi_map_to_py__.keys():
        raise RelationshipNotFoundError(resource, resource, rel_key)
    py_key = resource.__jsonapi_map_to_py__[rel_key]
    relationship = self._get_relationship(resource, py_key, Permissions.CREATE)
    if relationship.direction == MANYTOONE:
        raise ValidationError()
    if not isinstance(json_data['data'], list):
        raise ValidationError()
    remote_side = relationship.back_populates
    try:
        for item in json_data['data']:
            setter = get_rel_desc(resource, relationship.key,
                                  RelationshipActions.APPEND)
            if not isinstance(json_data['data'], list):
                raise BadRequestError(
                    '{} must be an array'.format(relationship.key))
            for item in json_data['data']:
                if {'type', 'id'} != set(item.keys()):
                    raise BadRequestError(
                        '{} must have type and id keys'.format(relationship.key))
                to_relate = self._fetch_resource(
                    session, item['type'], item['id'], Permissions.EDIT)
                rem = to_relate.__mapper__.relationships[remote_side]
                if rem.direction == MANYTOONE:
                    check_permission(to_relate, remote_side, Permissions.EDIT)
                else:
                    check_permission(to_relate, remote_side, Permissions.CREATE)
                setter(resource, to_relate)
        session.add(resource)
        session.commit()
    except KeyError:
        raise ValidationError()
    return self.get_relationship(
        session, {}, model.__jsonapi_type__, resource.id, rel_key)
Append to a relationship.

:param session: SQLAlchemy session
:param json_data: Request JSON Data
:param api_type: Type of the resource
:param obj_id: ID of the resource
:param rel_key: Key of the relationship to fetch
27,012
def min_scalar_prod(x, y):
    x = sorted(x)
    y = sorted(y)
    return sum(x[i] * y[-i - 1] for i in range(len(x)))
Permute vector to minimize scalar product

:param x:
:param y: x, y are vectors of same size
:returns: min sum x[i] * y[sigma[i]] over all permutations sigma
:complexity: O(n log n)
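A worked check, assuming the function above: by the rearrangement inequality, pairing the largest entries of one vector with the smallest of the other minimizes the sum.

x, y = [1, 3, 5], [2, 4, 6]
# sorted x pairs with reversed sorted y: 1*6 + 3*4 + 5*2 = 28
min_scalar_prod(x, y)  # 28, versus 44 for the identity pairing 1*2 + 3*4 + 5*6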
27,013
def appendRecord(self, record):
    assert self._file is not None
    assert self._mode == self._FILE_WRITE_MODE
    assert isinstance(record, (list, tuple)), \
        "unexpected record type: " + repr(type(record))
    assert len(record) == self._fieldCount, \
        "len(record): %s, fieldCount: %s" % (len(record), self._fieldCount)
    if self._recordCount == 0:
        names, types, specials = zip(*self.getFields())
        for line in names, types, specials:
            self._writer.writerow(line)
    self._updateSequenceInfo(record)
    line = [self._adapters[i](f) for i, f in enumerate(record)]
    self._writer.writerow(line)
    self._recordCount += 1
Saves the record in the underlying csv file.

:param record: a list of Python objects that will be string-ified
27,014
def get_grade_mdata(): return { : { : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : False, : False, : False, : False, : [None], : , : None, : None, : None, : [], }, : { : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : False, : False, : False, : False, : [], : , : [], }, : { : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : False, : False, : False, : False, : [None], : , : None, : None, : None, : [], }, : { : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : { : , : str(DEFAULT_LANGUAGE_TYPE), : str(DEFAULT_SCRIPT_TYPE), : str(DEFAULT_FORMAT_TYPE), }, : False, : False, : False, : False, : [None], : , : None, : None, : None, : [], }, }
Return default mdata map for Grade
27,015
async def kickban(self, channel, target, reason=None, range=0):
    await self.ban(channel, target, range)
    await self.kick(channel, target, reason)
Kick and ban user from channel.
27,016
def update_object(self, url, container, container_object, object_headers,
                  container_headers):
    headers, container_uri = self._return_base_data(
        url=url,
        container=container,
        container_object=container_object,
        container_headers=container_headers,
        object_headers=object_headers,
    )
    return self._header_poster(uri=container_uri, headers=headers)
Update an existing object in a swift container. This method will place new
headers on an existing object or container.

:param url:
:param container:
:param container_object:
27,017
def __doDownloadPage(self, *args, **kwargs):
    logger.debug("download page: %r, %r", args, kwargs)
    return self.__clientDefer(downloadPage(*args, **kwargs))
Works like client.downloadPage(), but handles incoming headers
27,018
def encode(string):
    result = ".".join([str(ord(s)) for s in string])
    return "%s." % (len(string)) + result
Encode the given string as an OID.

>>> import snmp_passpersist as snmp
>>> snmp.PassPersist.encode("hello")
'5.104.101.108.108.111'
27,019
def _make_ipmi_payload(self, netfn, command, bridge_request=None, data=()):
    bridge_msg = []
    self.expectedcmd = command
    self.expectednetfn = netfn + 1
    seqincrement = 7
    while (not self.servermode
           and (netfn, command, self.seqlun) in self.tabooseq
           and self.tabooseq[(netfn, command, self.seqlun)]
           and seqincrement):
        self.tabooseq[(self.expectednetfn, command, self.seqlun)] -= 1
        self.seqlun += 4
        self.seqlun &= 0xff
        seqincrement -= 1
    if bridge_request:
        addr = bridge_request.get('addr', 0x0)
        channel = bridge_request.get('channel', 0x0)
        bridge_msg = self._make_bridge_request_msg(channel, netfn, command)
        rqaddr = constants.IPMI_BMC_ADDRESS
        rsaddr = addr
    else:
        rqaddr = self.rqaddr
        rsaddr = constants.IPMI_BMC_ADDRESS
    if self.servermode:
        rsaddr = self.clientaddr
    header = bytearray((rsaddr, netfn << 2))
    reqbody = bytearray((rqaddr, self.seqlun, command)) + data
    headsum = bytearray((_checksum(*header),))
    bodysum = bytearray((_checksum(*reqbody),))
    payload = header + headsum + reqbody + bodysum
    if bridge_request:
        payload = bridge_msg + payload
        tail_csum = _checksum(*payload[3:])
        payload.append(tail_csum)
    if not self.servermode:
        self._add_request_entry((self.expectednetfn, self.seqlun, command))
    return payload
This function generates the core IPMI payload that would be applicable for any channel (including KCS)
27,020
def sync(self, hooks=True, async_hooks=True):
    active_repos = {}
    github_repos = {repo.id: repo for repo in self.api.repositories()
                    if repo.permissions['admin']}
    for gh_repo_id, gh_repo in github_repos.items():
        active_repos[gh_repo_id] = {
            'id': gh_repo_id,
            'full_name': gh_repo.full_name,
            'description': gh_repo.description,
        }
    if hooks:
        self._sync_hooks(list(active_repos.keys()), asynchronous=async_hooks)
    db_repos = Repository.query.filter(
        Repository.user_id == self.user_id,
        Repository.github_id.in_(github_repos.keys())
    )
    for repo in db_repos:
        gh_repo = github_repos.get(repo.github_id)
        if gh_repo and repo.name != gh_repo.full_name:
            repo.name = gh_repo.full_name
            db.session.add(repo)
    Repository.query.filter(
        Repository.user_id == self.user_id,
        ~Repository.github_id.in_(github_repos.keys())
    ).update(dict(user_id=None, hook=None), synchronize_session=False)
    self.account.extra_data.update(dict(
        repos=active_repos,
        last_sync=iso_utcnow(),
    ))
    self.account.extra_data.changed()
    db.session.add(self.account)
Synchronize user repositories.

:param bool hooks: True for syncing hooks.
:param bool async_hooks: True for sending of an asynchronous task to sync hooks.

.. note::
    Syncing happens from GitHub's direction only. This means that we consider
    the information on GitHub as valid, and we overwrite our own state based
    on this information.
27,021
def install_board(board_id, board_options, hwpack='arduino',
                  replace_existing=False):
    doaction = 0
    if board_id in boards(hwpack).keys():
        log.debug('board already exists: %s', board_id)
        if replace_existing:
            log.debug('removing board: %s', board_id)
            remove_board(board_id)
            doaction = 1
    else:
        doaction = 1
    if doaction:
        lines = bunch2properties(board_id, board_options)
        boards_txt().write_lines([''] + lines, append=1)
install board in boards.txt.

:param board_id: string identifier
:param board_options: dict like
:param replace_existing: bool
:rtype: None
27,022
def _gettables(self):
    groups = self._h5file.list_nodes("/")
    if len(groups) == 0:
        return []
    return [gr.PyMCsamples for gr in groups if gr._v_name[:5] == 'chain']
Return a list of hdf5 tables named PyMCsamples.
27,023
def remove_jobs(self, mask):
    jobnames = self.table[mask]['jobname']
    jobkeys = self.table[mask]['jobkey']
    self.table[mask]['status'] = JobStatus.removed
    for jobname, jobkey in zip(jobnames, jobkeys):
        fullkey = JobDetails.make_fullkey(jobname, jobkey)
        self._cache.pop(fullkey).status = JobStatus.removed
    self.write_table_file()
Mark all jobs that match a mask as 'removed'
27,024
def _should_send(self, rebuild, success, auto_canceled, manual_canceled):
    should_send = False
    should_send_mapping = {
        self.MANUAL_SUCCESS: not rebuild and success,
        self.MANUAL_FAIL: not rebuild and not success,
        self.MANUAL_CANCELED: not rebuild and manual_canceled,
        self.AUTO_SUCCESS: rebuild and success,
        self.AUTO_FAIL: rebuild and not success,
        self.AUTO_CANCELED: rebuild and auto_canceled,
    }
    for state in self.send_on:
        should_send |= should_send_mapping[state]
    return should_send
Return True if any state in `self.send_on` meets given conditions, thus meaning that a notification mail should be sent.
27,025
def add_securitygroup_rule(self, group_id, remote_ip=None, remote_group=None,
                           direction=None, ethertype=None, port_max=None,
                           port_min=None, protocol=None):
    rule = {'direction': direction}
    if ethertype is not None:
        rule['ethertype'] = ethertype
    if port_max is not None:
        rule['portRangeMax'] = port_max
    if port_min is not None:
        rule['portRangeMin'] = port_min
    if protocol is not None:
        rule['protocol'] = protocol
    if remote_ip is not None:
        rule['remoteIp'] = remote_ip
    if remote_group is not None:
        rule['remoteGroupId'] = remote_group
    return self.add_securitygroup_rules(group_id, [rule])
Add a rule to a security group

:param int group_id: The ID of the security group to add this rule to
:param str remote_ip: The remote IP or CIDR to enforce the rule on
:param int remote_group: The remote security group ID to enforce the rule on
:param str direction: The direction to enforce (egress or ingress)
:param str ethertype: The ethertype to enforce (IPv4 or IPv6)
:param int port_max: The upper port bound to enforce (icmp code if the protocol is icmp)
:param int port_min: The lower port bound to enforce (icmp type if the protocol is icmp)
:param str protocol: The protocol to enforce (icmp, udp, tcp)
27,026
def connection_made(self, transport: asyncio.BaseTransport) -> None:
    logger.debug("%s - event = connection_made(%s)", self.side, transport)
    transport.set_write_buffer_limits(self.write_limit)
    super().connection_made(transport)
Configure write buffer limits. The high-water limit is defined by ``self.write_limit``. The low-water limit currently defaults to ``self.write_limit // 4`` in :meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should be all right for reasonable use cases of this library. This is the earliest point where we can get hold of the transport, which means it's the best point for configuring it.
27,027
def createEditor(self, parent, column, operator, value):
    editor = super(EnumPlugin, self).createEditor(parent, column,
                                                  operator, value)
    editor.setEnum(column.enum())
    if operator in ('is in', 'is not in'):
        editor.setCheckable(True)
    editor.setCurrentValue(value)
    return editor
Creates a new editor for the system.
27,028
def fetch(self):
    api = self.doapi_manager
    return api._action(api.request(self.url)["action"])
Fetch & return a new `Action` object representing the action's current state

:rtype: Action
:raises DOAPIError: if the API endpoint replies with an error
27,029
def _identifier_filtered_iterator(graph):
    for data in graph:
        for pair in _get_node_names(data):
            yield pair
        for member in data.get(MEMBERS, []):
            for pair in _get_node_names(member):
                yield pair
    for ((_, _, data), side) in itt.product(graph.edges(data=True),
                                            (SUBJECT, OBJECT)):
        side_data = data.get(side)
        if side_data is None:
            continue
        modifier = side_data.get(MODIFIER)
        effect = side_data.get(EFFECT)
        if modifier == ACTIVITY and effect is not None \
                and NAMESPACE in effect and NAME in effect:
            yield effect[NAMESPACE], effect[NAME]
        elif modifier == TRANSLOCATION and effect is not None:
            from_loc = effect.get(FROM_LOC)
            if NAMESPACE in from_loc and NAME in from_loc:
                yield from_loc[NAMESPACE], from_loc[NAME]
            to_loc = effect.get(TO_LOC)
            if NAMESPACE in to_loc and NAME in to_loc:
                yield to_loc[NAMESPACE], to_loc[NAME]
        location = side_data.get(LOCATION)
        if location is not None and NAMESPACE in location and NAME in location:
            yield location[NAMESPACE], location[NAME]
Iterate over the (namespace, name) pairs appearing in the graph's nodes and edges.
27,030
def check(self, state, when):
    ok = self.enabled and (when == self.when or self.when == BP_BOTH)
    if not ok:
        return ok
    l.debug("... after enabled and when: %s", ok)
    for a in [_ for _ in self.kwargs if not _.endswith("_unique")]:
        current_expr = getattr(state.inspect, a)
        needed = self.kwargs.get(a, None)
        l.debug("... checking condition %s", a)
        if current_expr is None and needed is None:
            l.debug("...... both None, True")
            c_ok = True
        elif current_expr is not None and needed is not None:
            if state.solver.solution(current_expr, needed):
                l.debug("...... is_solution!")
                c_ok = True
            else:
                l.debug("...... not solution...")
                c_ok = False
            if c_ok and self.kwargs.get(a + "_unique", True):
                l.debug("...... checking uniqueness")
                if not state.solver.unique(current_expr):
                    l.debug("...... not unique")
                    c_ok = False
        else:
            l.debug("...... one None, False")
            c_ok = False
        ok = ok and c_ok
        if not ok:
            return ok
        l.debug("... after condition %s: %s", a, ok)
    ok = ok and (self.condition is None or self.condition(state))
    l.debug("... after condition func: %s", ok)
    return ok
Checks state `state` to see if the breakpoint should fire.

:param state: The state.
:param when: Whether the check is happening before or after the event.
:return: A boolean representing whether the checkpoint should fire.
27,031
def calibrate(self, dataset_id, pre_launch_coeffs=False, calib_coeffs=None): tic = datetime.now() if calib_coeffs is None: calib_coeffs = {} units = {: , : , : , : } if dataset_id.name in ("3a", "3b") and self._is3b is None: is3b = np.expand_dims( np.bitwise_and( np.right_shift(self._data[], 0), 1) == 1, 1) self._is3b = np.repeat(is3b, self._data[][0].shape[0], axis=1) try: vis_idx = [, , ].index(dataset_id.name) ir_idx = None except ValueError: vis_idx = None ir_idx = [, , ].index(dataset_id.name) if vis_idx is not None: coeffs = calib_coeffs.get( + dataset_id.name) ds = create_xarray( _vis_calibrate(self._data, vis_idx, dataset_id.calibration, pre_launch_coeffs, coeffs, mask=(dataset_id.name == and self._is3b))) else: ds = create_xarray( _ir_calibrate(self._header, self._data, ir_idx, dataset_id.calibration, mask=(dataset_id.name == and np.logical_not(self._is3b)))) if dataset_id.name == and np.all(np.isnan(ds)): raise ValueError("Empty dataset for channel 3A") if dataset_id.name == and np.all(np.isnan(ds)): raise ValueError("Empty dataset for channel 3B") ds.attrs[] = units[dataset_id.calibration] ds.attrs.update(dataset_id._asdict()) logger.debug("Calibration time %s", str(datetime.now() - tic)) return ds
Calibrate the data
27,032
def check(self, F):
    assert F.ndim == 1, "checker only supports 1D"
    f = self.xfac * F
    fabs = np.abs(f)
    iQ1, iQ3 = np.searchsorted(fabs.cumsum(),
                               np.array([0.25, 0.75]) * fabs.sum())
    assert 0 != iQ1 != iQ3 != self.Nin, "checker giving up"
    fabs_l = fabs[:iQ1].mean()
    fabs_m = fabs[iQ1:iQ3].mean()
    fabs_r = fabs[iQ3:].mean()
    if fabs_l > fabs_m:
        warnings.warn("left wing seems heavy: {:.2g} vs {:.2g}, "
                      "change tilt and mind convergence".format(fabs_l, fabs_m),
                      RuntimeWarning)
    if fabs_m < fabs_r:
        warnings.warn("right wing seems heavy: {:.2g} vs {:.2g}, "
                      "change tilt and mind convergence".format(fabs_m, fabs_r),
                      RuntimeWarning)
    if fabs[0] > fabs[1]:
        warnings.warn("left tail may blow up: {:.2g} vs {:.2g}, "
                      "change tilt or avoid extrapolation".format(f[0], f[1]),
                      RuntimeWarning)
    if fabs[-2] < fabs[-1]:
        warnings.warn("right tail may blow up: {:.2g} vs {:.2g}, "
                      "change tilt or avoid extrapolation".format(f[-2], f[-1]),
                      RuntimeWarning)
    if f[0] * f[1] <= 0:
        warnings.warn("left tail looks wiggly: {:.2g} vs {:.2g}, "
                      "avoid extrapolation".format(f[0], f[1]),
                      RuntimeWarning)
    if f[-2] * f[-1] <= 0:
        warnings.warn("right tail looks wiggly: {:.2g} vs {:.2g}, "
                      "avoid extrapolation".format(f[-2], f[-1]),
                      RuntimeWarning)
Rough sanity checks on the input function.
27,033
def get_builds(self, project, definitions=None, queues=None, build_number=None,
               min_time=None, max_time=None, requested_for=None,
               reason_filter=None, status_filter=None, result_filter=None,
               tag_filters=None, properties=None, top=None,
               continuation_token=None, max_builds_per_definition=None,
               deleted_filter=None, query_order=None, branch_name=None,
               build_ids=None, repository_id=None, repository_type=None):
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    query_parameters = {}
    if definitions is not None:
        definitions = ",".join(map(str, definitions))
        query_parameters['definitions'] = self._serialize.query('definitions', definitions, 'str')
    if queues is not None:
        queues = ",".join(map(str, queues))
        query_parameters['queues'] = self._serialize.query('queues', queues, 'str')
    if build_number is not None:
        query_parameters['buildNumber'] = self._serialize.query('build_number', build_number, 'str')
    if min_time is not None:
        query_parameters['minTime'] = self._serialize.query('min_time', min_time, 'iso-8601')
    if max_time is not None:
        query_parameters['maxTime'] = self._serialize.query('max_time', max_time, 'iso-8601')
    if requested_for is not None:
        query_parameters['requestedFor'] = self._serialize.query('requested_for', requested_for, 'str')
    if reason_filter is not None:
        query_parameters['reasonFilter'] = self._serialize.query('reason_filter', reason_filter, 'str')
    if status_filter is not None:
        query_parameters['statusFilter'] = self._serialize.query('status_filter', status_filter, 'str')
    if result_filter is not None:
        query_parameters['resultFilter'] = self._serialize.query('result_filter', result_filter, 'str')
    if tag_filters is not None:
        tag_filters = ",".join(tag_filters)
        query_parameters['tagFilters'] = self._serialize.query('tag_filters', tag_filters, 'str')
    if properties is not None:
        properties = ",".join(properties)
        query_parameters['properties'] = self._serialize.query('properties', properties, 'str')
    if top is not None:
        query_parameters['$top'] = self._serialize.query('top', top, 'int')
    if continuation_token is not None:
        query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'str')
    if max_builds_per_definition is not None:
        query_parameters['maxBuildsPerDefinition'] = self._serialize.query('max_builds_per_definition', max_builds_per_definition, 'int')
    if deleted_filter is not None:
        query_parameters['deletedFilter'] = self._serialize.query('deleted_filter', deleted_filter, 'str')
    if query_order is not None:
        query_parameters['queryOrder'] = self._serialize.query('query_order', query_order, 'str')
    if branch_name is not None:
        query_parameters['branchName'] = self._serialize.query('branch_name', branch_name, 'str')
    if build_ids is not None:
        build_ids = ",".join(map(str, build_ids))
        query_parameters['buildIds'] = self._serialize.query('build_ids', build_ids, 'str')
    if repository_id is not None:
        query_parameters['repositoryId'] = self._serialize.query('repository_id', repository_id, 'str')
    if repository_type is not None:
        query_parameters['repositoryType'] = self._serialize.query('repository_type', repository_type, 'str')
    response = self._send(http_method='GET',
                          location_id=,
                          version=,
                          route_values=route_values,
                          query_parameters=query_parameters)
    return self._deserialize('[Build]', self._unwrap_collection(response))
GetBuilds. Gets a list of builds.

:param str project: Project ID or project name
:param [int] definitions: A comma-delimited list of definition IDs. If specified, filters to builds for these definitions.
:param [int] queues: A comma-delimited list of queue IDs. If specified, filters to builds that ran against these queues.
:param str build_number: If specified, filters to builds that match this build number. Append * to do a prefix search.
:param datetime min_time: If specified, filters to builds that finished/started/queued after this date based on the queryOrder specified.
:param datetime max_time: If specified, filters to builds that finished/started/queued before this date based on the queryOrder specified.
:param str requested_for: If specified, filters to builds requested for the specified user.
:param str reason_filter: If specified, filters to builds that match this reason.
:param str status_filter: If specified, filters to builds that match this status.
:param str result_filter: If specified, filters to builds that match this result.
:param [str] tag_filters: A comma-delimited list of tags. If specified, filters to builds that have the specified tags.
:param [str] properties: A comma-delimited list of properties to retrieve.
:param int top: The maximum number of builds to return.
:param str continuation_token: A continuation token, returned by a previous call to this method, that can be used to return the next set of builds.
:param int max_builds_per_definition: The maximum number of builds to return per definition.
:param str deleted_filter: Indicates whether to exclude, include, or only return deleted builds.
:param str query_order: The order in which builds should be returned.
:param str branch_name: If specified, filters to builds that built branches that built this branch.
:param [int] build_ids: A comma-delimited list that specifies the IDs of builds to retrieve.
:param str repository_id: If specified, filters to builds that built from this repository.
:param str repository_type: If specified, filters to builds that built from repositories of this type.
:rtype: [Build]
27,034
def _cursor(self, *args, **kwargs):
    transaction = self._transaction
    if not transaction:
        self._ping_check(2)
    try:
        if self._maxusage:
            if self._usage >= self._maxusage:
                raise self._failure
        cursor = self._con.cursor(*args, **kwargs)
    except self._failures as error:
        try:
            con = self._create()
        except Exception:
            pass
        else:
            try:
                cursor = con.cursor(*args, **kwargs)
            except Exception:
                pass
            else:
                self._close()
                self._store(con)
                if transaction:
                    raise error
                return cursor
            try:
                con.close()
            except Exception:
                pass
        if transaction:
            self._transaction = False
        raise error
    return cursor
A "tough" version of the method cursor().
27,035
def add_tenant_user_role(request, project=None, user=None, role=None,
                         group=None, domain=None):
    manager = keystoneclient(request, admin=True).roles
    if VERSIONS.active < 3:
        manager.add_user_role(user, role, project)
    else:
        manager.grant(role, user=user, project=project, group=group,
                      domain=domain)
Adds a role for a user on a tenant.
27,036
def import_module(mod_str):
    _module = __import__(mod_str)
    _mod_parts = mod_str.split('.')
    for _mod_part in _mod_parts[1:]:
        _module = getattr(_module, _mod_part)
    return _module
Inspired by a post on Stack Overflow.

:param mod_str: import path string like 'netshowlib.linux.provider_discovery'
:return: module matching the import statement
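A quick demonstration of the attribute walk, assuming the function above (the os.path result holds on POSIX):

import_module('json') is __import__('json')  # True: single-part paths need no walk
import_module('os.path').join('a', 'b')      # 'a/b': __import__ returns os, getattr walks to os.path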
27,037
def _debug_linter_status(linter, filename, show_lint_files):
    if show_lint_files:
        print("{linter}: {filename}".format(linter=linter, filename=filename))
Indicate that we are running this linter if required.
27,038
def add_params(param_list_left, param_list_right):
    res = []
    for x, y in zip(param_list_left, param_list_right):
        res.append(x + y)
    return res
Add two lists of parameters one by one

:param param_list_left: list of numpy arrays
:param param_list_right: list of numpy arrays
:return: list of numpy arrays
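A minimal demonstration, assuming the function above:

import numpy as np

a = [np.array([1.0, 2.0]), np.array([3.0])]
b = [np.array([10.0, 20.0]), np.array([30.0])]
add_params(a, b)  # [array([11., 22.]), array([33.])]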
27,039
def add_to_bashrc(self, line, match_regexp=None, note=None,
                  loglevel=logging.DEBUG):
    shutit = self.shutit
    shutit.handle_note(note)
    if not shutit_util.check_regexp(match_regexp):
        shutit.fail('Illegal regexp passed to add_to_bashrc: ' + match_regexp)
    if self.whoami() == 'root':
        shutit.add_line_to_file(line, '/root/.bashrc',
                                match_regexp=match_regexp, loglevel=loglevel)
    else:
        shutit.add_line_to_file(line, '${HOME}/.bashrc',
                                match_regexp=match_regexp, loglevel=loglevel)
        shutit.add_line_to_file(line, '/etc/bash.bashrc',
                                match_regexp=match_regexp, loglevel=loglevel)
    return True
Takes care of adding a line to everyone's bashrc (/etc/bash.bashrc).

@param line: Line to add.
@param match_regexp: See add_line_to_file()
@param note: See send()

@return: See add_line_to_file()
27,040
def configure_logger(self): logger_name = self.logger = logging.getLogger(logger_name) format_ = BROME_CONFIG[][] if BROME_CONFIG[][]: sh = logging.StreamHandler() stream_formatter = logging.Formatter(format_) sh.setFormatter(stream_formatter) self.logger.addHandler(sh) if BROME_CONFIG[][] and \ self.runner_dir: self.log_file_path = os.path.join( self.runner_dir, % logger_name ) self.relative_log_file_path = os.path.join( self.relative_runner_dir, % logger_name ) fh = logging.FileHandler( self.log_file_path ) file_formatter = logging.Formatter(format_) fh.setFormatter(file_formatter) self.logger.addHandler(fh) self.logger.setLevel( getattr( logging, BROME_CONFIG[][] ) )
Configure the test batch runner logger
27,041
def or_has(self, relation, operator='>=', count=1):
    return self.has(relation, operator, count, 'or')
Add a relationship count condition to the query with an "or".

:param relation: The relation to count
:type relation: str

:param operator: The operator
:type operator: str

:param count: The count
:type count: int

:rtype: Builder
27,042
def to_dot(self, name='BDD'):
    parts = ['graph', name, '{']
    for node in self.dfs_postorder():
        if node is BDDNODEZERO:
            parts += ['n' + str(id(node)), '[label=0,shape=box];']
        elif node is BDDNODEONE:
            parts += ['n' + str(id(node)), '[label=1,shape=box];']
        else:
            v = _VARS[node.root]
            parts.append('n' + str(id(node)))
            parts.append('[label="{}",shape=circle];'.format(v))
    for node in self.dfs_postorder():
        if node is not BDDNODEZERO and node is not BDDNODEONE:
            parts += ['n' + str(id(node)), '--',
                      'n' + str(id(node.lo)), '[label=0,style=dashed];']
            parts += ['n' + str(id(node)), '--',
                      'n' + str(id(node.hi)), '[label=1];']
    parts.append('}')
    return " ".join(parts)
Convert to DOT language representation. See the `DOT language reference <http://www.graphviz.org/content/dot-language>`_ for details.
27,043
def create(*args, **kwargs):
    impl = kwargs.pop('implementation', None)
    if impl is None:
        impl = Configuration.get('nupic.opf.sdrClassifierImplementation')
    if impl == 'py':
        return SDRClassifier(*args, **kwargs)
    elif impl == 'cpp':
        return FastSDRClassifier(*args, **kwargs)
    elif impl == 'diff':
        return SDRClassifierDiff(*args, **kwargs)
    else:
        raise ValueError('Invalid classifier implementation (%r). Value must '
                         'be "py", "cpp" or "diff".' % impl)
Create a SDR classifier factory. The implementation of the SDR Classifier can be specified with the "implementation" keyword argument. The SDRClassifierFactory uses the implementation as specified in `Default NuPIC Configuration <default-config.html>`_.
27,044
def draw(self, **kwargs):
    self.ax.set_aspect("equal")
    mask = np.zeros_like(self.ranks_, dtype=bool)
    mask[np.triu_indices_from(mask)] = True
    data = np.ma.masked_where(mask, self.ranks_)
    mesh = self.ax.pcolormesh(data, cmap=self.colormap, vmin=-1, vmax=1)
    self.ax.set(xlim=(0, data.shape[1]), ylim=(0, data.shape[0]))
    cb = self.ax.figure.colorbar(mesh, None, self.ax)
    cb.outline.set_linewidth(0)
    self.ax.invert_yaxis()
    self.ax.set_xticks(np.arange(len(self.ranks_)) + 0.5)
    self.ax.set_yticks(np.arange(len(self.ranks_)) + 0.5)
    if self.show_feature_names_:
        self.ax.set_xticklabels(self.features_, rotation=90)
        self.ax.set_yticklabels(self.features_)
    else:
        self.ax.set_xticklabels([])
        self.ax.set_yticklabels([])
Draws the heatmap of the ranking matrix of variables.
27,045
def parse_timestamp(x):
    dt = dateutil.parser.parse(x)
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=pytz.utc)
    return dt
Parse an ISO 8601-formatted timestamp, assuming UTC when no timezone is given.
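A behavior sketch, assuming the function above and the python-dateutil and pytz packages:

parse_timestamp('2016-06-24T09:13:34').tzinfo        # UTC: naive input is tagged as UTC
parse_timestamp('2016-06-24T09:13:34+02:00').tzinfo  # tzoffset(None, 7200): explicit offset preserved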
27,046
def create_global_secondary_index(table_name, global_index, region=None,
                                  key=None, keyid=None, profile=None):
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    table = Table(table_name, connection=conn)
    return table.create_global_secondary_index(global_index)
Creates a single global secondary index on a DynamoDB table.

CLI Example:

.. code-block:: bash

    salt myminion boto_dynamodb.create_global_secondary_index table_name /
    index_name
27,047
def reassign_label(cls, destination_cluster, label):
    conn = Qubole.agent(version=Cluster.api_version)
    data = {
        "destination_cluster": destination_cluster,
        "label": label,
    }
    return conn.put(cls.rest_entity_path + "/reassign-label", data)
Reassign a label from one cluster to another.

Args:
    `destination_cluster`: id/label of the cluster to move the label to
    `label`: label to be moved from the source cluster
27,048
def import_wikipage(self, slug, content, **attrs):
    return WikiPages(self.requester).import_(self.id, slug, content, **attrs)
Import a Wiki page and return a :class:`WikiPage` object.

:param slug: slug of the :class:`WikiPage`
:param content: content of the :class:`WikiPage`
:param attrs: optional attributes for the :class:`WikiPage`
27,049
def upload(self, docs_base, release):
    return getattr(self, 'upload_' + self.target)(docs_base, release)
Upload docs in ``docs_base`` to the target of this uploader.
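The body is a one-line dynamic dispatch: the handler is looked up by name from the configured target. A self-contained sketch of the pattern (the 'upload_' prefix is a guess for the elided literal; class and method names here are hypothetical):

class Uploader:
    target = 'local'

    def upload(self, docs_base, release):
        # route to upload_<target>
        return getattr(self, 'upload_' + self.target)(docs_base, release)

    def upload_local(self, docs_base, release):
        return 'copied {} for {}'.format(docs_base, release)

Uploader().upload('docs/', '1.0')  # 'copied docs/ for 1.0'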
27,050
def ws_db996(self, value=None):
    if value is not None:
        try:
            value = float(value)
        except ValueError:
            raise ValueError(
                'value {} need to be of type float '
                'for field `ws_db996`'.format(value))
    self._ws_db996 = value
Corresponds to IDD Field `ws_db996`

Mean wind speed coincident with 99.6% dry-bulb temperature

Args:
    value (float): value for IDD Field `ws_db996`
        Unit: m/s
        if `value` is None it will not be checked against the
        specification and is assumed to be a missing value

Raises:
    ValueError: if `value` is not a valid value
27,051
def random(self, namespace=0): query = self.LIST.substitute( WIKI=self.uri, ENDPOINT=self.endpoint, LIST=) query += "&rnlimit=1&rnnamespace=%d" % namespace emoji = [ u, u, u, u, u, u, u, u, ] action = if namespace: action = % namespace self.set_status(action, random.choice(emoji)) return query
Returns query string for random page
27,052
def _compare_variables_function_generator(method_string, aggregation_func):
    def comparison_function(self, other):
        if self is other:
            return method_string in ('__eq__', '__le__', '__ge__')
        method = getattr(self.value, method_string)
        try:
            if hasattr(type(other), '__hydpy__get_value__'):
                other = other.__hydpy__get_value__()
            result = method(other)
            if result is NotImplemented:
                return result
            return aggregation_func(result)
        except BaseException:
            objecttools.augment_excmessage(
                f'While trying to compare variable '
                f'{objecttools.devicephrase(self)} with object '
                f'`{other}` of type `{objecttools.classname(other)}`')
    return comparison_function
Return a function usable as a comparison method for class |Variable|.

Pass the specific method (e.g. `__eq__`) and the corresponding operator
(e.g. `==`) as strings. Also pass either |numpy.all| or |numpy.any| for
aggregating multiple boolean values.
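A simplified, self-contained sketch of the closure pattern this factory builds (the names here are hypothetical, not hydpy's):

import numpy

def _make_cmp(method_string, aggregation_func):
    def comparison_function(self, other):
        return aggregation_func(getattr(self.value, method_string)(other))
    return comparison_function

class Var:
    def __init__(self, value):
        self.value = numpy.asarray(value)
    __eq__ = _make_cmp('__eq__', numpy.all)

Var([1, 2]) == [1, 2]  # True: numpy.all aggregates the elementwise comparison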
27,053
def atype_view_asset(self):
    if not self.cur_atype:
        return
    i = self.atype_asset_treev.currentIndex()
    item = i.internalPointer()
    if item:
        asset = item.internal_data()
        if isinstance(asset, djadapter.models.Asset):
            self.view_asset(asset)
View the selected asset of the current assettype

:returns: None
:rtype: None
:raises: None
27,054
def add_peddy_information(config_data): ped_info = {} ped_check = {} sex_check = {} relations = [] if config_data.get(): file_handle = open(config_data[], ) for ind_info in parse_peddy_ped(file_handle): ped_info[ind_info[]] = ind_info if config_data.get(): file_handle = open(config_data[], ) for pair_info in parse_peddy_ped_check(file_handle): ped_check[(pair_info[], pair_info[])] = pair_info if config_data.get(): file_handle = open(config_data[], ) for ind_info in parse_peddy_sex_check(file_handle): sex_check[ind_info[]] = ind_info if not ped_info: return analysis_inds = {} for ind in config_data[]: ind_id = ind[] analysis_inds[ind_id] = ind for ind_id in analysis_inds: ind = analysis_inds[ind_id] if ind_id in ped_info: ind[] = ped_info[ind_id].get(, ) if ind_id in sex_check: if sex_check[ind_id][]: ind[] = False else: ind[] = True for parent in [, ]: if ind[parent] != : for pair in ped_check: if (ind_id in pair and ind[parent] in pair): if ped_check[pair][]: analysis_inds[ind[parent]][] = False else: if not in analysis_inds[ind[parent]]: analysis_inds[ind[parent]][] = True
Add information from peddy outfiles to the individuals
27,055
def as_list(callable):
    @wraps(callable)
    def wrapper(value_iter):
        return [callable(value) for value in value_iter]
    return wrapper
Convert a scalar validator into a list validator
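Usage sketch, assuming the decorator above (it needs `from functools import wraps` in scope):

@as_list
def to_int(value):
    return int(value)

to_int(['1', '2', '3'])  # [1, 2, 3]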
27,056
def createContactItem(self, person, notes):
    if notes:
        return Notes(store=person.store, person=person, notes=notes)
Create a new L{Notes} associated with the given person based on the
given string.

@type person: L{Person}
@param person: The person with whom to associate the new L{Notes}.

@type notes: C{unicode}
@param notes: The value to use for the I{notes} attribute of the newly
    created L{Notes}. If C{''}, no L{Notes} will be created.

@rtype: L{Notes} or C{NoneType}
27,057
def mass1_from_tau0_tau3(tau0, tau3, f_lower):
    mtotal = mtotal_from_tau0_tau3(tau0, tau3, f_lower)
    eta = eta_from_tau0_tau3(tau0, tau3, f_lower)
    return mass1_from_mtotal_eta(mtotal, eta)
Returns the primary mass from the given :math:`\tau_0, \tau_3`.
27,058
def download(self, files=None, destination=None, overwrite=False,
             callback=None):
    if files is None:
        files = self.files
    elif not isinstance(files, list):
        files = [files]
    if destination is None:
        destination = os.path.expanduser('~')
    for f in files:
        if not isinstance(f, dict):
            raise FMBaseError()
        self._download(f, destination, overwrite, callback)
Download file or files.

:param files: file or files to download
:param destination: destination path (defaults to users home directory)
:param overwrite: replace existing files?
:param callback: callback function that will receive total file size and written bytes as arguments
:type files: ``list`` of ``dict`` with file data from filemail
:type destination: ``str`` or ``unicode``
:type overwrite: ``bool``
:type callback: ``func``
27,059
def _gradient(self, diff, d, coords):
    denom = np.copy(d)
    denom[denom == 0] = 1e-5
    with np.errstate(divide='ignore', invalid='ignore'):
        K = -2 * diff / denom
    K[np.isnan(K)] = 0
    g = np.empty_like(coords)
    for n in range(self.n):
        for i in range(self.m):
            g[i, n] = ((coords[i, n] - coords[:, n]) * K[i, :]).sum()
    return g
Compute the gradient.

Args:
    diff (`array-like`): [`m`, `m`] matrix. `D` - `d`
    d (`array-like`): [`m`, `m`] matrix.
    coords (`array-like`): [`m`, `n`] matrix.

Returns:
    `np.array`: Gradient, shape [`m`, `n`].
27,060
def get_site_packages(venv):
    bin_path = _verify_virtualenv(venv)
    ret = __salt__['cmd.exec_code_all'](
        bin_path,
        'from distutils import sysconfig; print(sysconfig.get_python_lib())'
    )
    if ret['retcode'] != 0:
        raise CommandExecutionError('{stdout}\n{stderr}'.format(**ret))
    return ret['stdout']
Return the path to the site-packages directory of a virtualenv

venv
    Path to the virtualenv.

CLI Example:

.. code-block:: bash

    salt '*' virtualenv.get_site_packages /path/to/my/venv
27,061
def append(self, new: Statement) -> None:
    new = HistoryItem(new)
    list.append(self, new)
    new.idx = len(self)
Append a HistoryItem to end of the History list

:param new: command line to convert to HistoryItem and add to the end of the History list
27,062
def _compute_centers(self, X, sparse, rs):
    centers = self._get_user_components('centers')
    if centers is None:
        n_features = X.shape[1]
        if sparse:
            fxr = range(n_features)
            cols = [X.getcol(i) for i in fxr]
            min_dtype = X.dtype.type(1.0e10)
            sp_min = lambda col: np.minimum(min_dtype, np.min(col.data))
            min_Xs = np.array(list(map(sp_min, cols)))
            max_dtype = X.dtype.type(-1.0e10)
            sp_max = lambda col: np.maximum(max_dtype, np.max(col.data))
            max_Xs = np.array(list(map(sp_max, cols)))
        else:
            min_Xs = X.min(axis=0)
            max_Xs = X.max(axis=0)
        spans = max_Xs - min_Xs
        ctrs_size = (self.n_hidden, n_features)
        centers = min_Xs + spans * rs.uniform(0.0, 1.0, ctrs_size)
    self.components_['centers'] = centers
Generate RBF centers
27,063
def command_packet(cmd):
    return message('command',
                   Container(string_length=len(cmd),
                             string=bytes(cmd, ENCODING)),
                   len(cmd) + 2)
Build a command message.
27,064
def results(self, **query_params):
    query_params['segmentation'] = query_params.get('segmentation', 'none')
    return self.get("results", **query_params).body
Returns a streaming handle to this job's search results. To get a nice,
Pythonic iterator, pass the handle to :class:`splunklib.results.ResultsReader`,
as in::

    import splunklib.client as client
    import splunklib.results as results
    from time import sleep
    service = client.connect(...)
    job = service.jobs.create("search * | head 5")
    while not job.is_done():
        sleep(.2)
    rr = results.ResultsReader(job.results())
    for result in rr:
        if isinstance(result, results.Message):
            # Diagnostic messages may be returned in the results
            print '%s: %s' % (result.type, result.message)
        elif isinstance(result, dict):
            # Normal events are returned as dicts
            print result
    assert rr.is_preview == False

Results are not available until the job has finished. If called on an
unfinished job, the result is an empty event set.

This method makes a single roundtrip to the server, plus at most two
additional round trips if the ``autologin`` field of :func:`connect` is
set to ``True``.

:param query_params: Additional parameters (optional). For a list of valid
    parameters, see `GET search/jobs/{search_id}/results
    <http://docs.splunk.com/Documentation/Splunk/latest/RESTAPI/RESTsearch#GET_search.2Fjobs.2F.7Bsearch_id.7D.2Fresults>`_.
:type query_params: ``dict``

:return: The ``InputStream`` IO handle to this job's results.
27,065
def reindex(self, comments=True, change_history=True, worklogs=True):
    params = {}
    if not comments:
        params['indexComments'] = comments
    if not change_history:
        params['indexChangeHistory'] = change_history
    if not worklogs:
        params['indexWorklogs'] = worklogs
    return self.post('rest/api/2/reindex', params=params)
Reindex the Jira instance

Kicks off a reindex. Need Admin permissions to perform this reindex.

:param comments: Indicates that comments should also be reindexed. Not relevant
    for foreground reindex, where comments are always reindexed.
:param change_history: Indicates that changeHistory should also be reindexed.
    Not relevant for foreground reindex, where changeHistory is always reindexed.
:param worklogs: Indicates that worklogs should also be reindexed. Not relevant
    for foreground reindex, where worklogs are always reindexed.
:return:
27,066
def approvewitness(ctx, witnesses, account):
    pprint(ctx.peerplays.approvewitness(witnesses, account=account))
Approve witness(es)
27,067
def parse_content(self, content):
    self.data = get_active_lines(content, comment_char="COMMAND>")[0]
    parts = self.data.split()
    if not len(parts) == 6:
        msg = "Expected six date parts. Got [%s]"
        raise DateParseException(msg % self.data)
    try:
        self.timezone = parts[4]
        no_tz = ' '.join(parts[:4]) + ' ' + parts[-1]
        self.datetime = datetime.strptime(no_tz, '%a %b %d %H:%M:%S %Y')
    except:
        six.reraise(DateParseException, DateParseException(self.data),
                    sys.exc_info()[2])
Parses the output of the ``date`` and ``date --utc`` command.

Sample: Fri Jun 24 09:13:34 CST 2016
Sample: Fri Jun 24 09:13:34 UTC 2016

Attributes
----------
datetime: datetime.datetime
    A native datetime.datetime of the parsed date string
timezone: str
    The string portion of the date string containing the timezone

Raises:
    DateParseException: Raised if any exception occurs parsing the content.
27,068
def coffee(input, output, **kw):
    subprocess.call([current_app.config.get('COFFEE_BIN'),
                     '-c', '-o', output, input])
Process CoffeeScript files
27,069
def wrap_http_for_jwt_access(credentials, http):
    orig_request_method = http.request
    wrap_http_for_auth(credentials, http)
    authenticated_request_method = http.request

    def new_request(uri, method='GET', body=None, headers=None,
                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                    connection_type=None):
        if 'aud' in credentials._kwargs:
            if (credentials.access_token is None or
                    credentials.access_token_expired):
                credentials.refresh(None)
            return request(authenticated_request_method, uri, method, body,
                           headers, redirections, connection_type)
        else:
            return request(orig_request_method, uri, method, body,
                           clean_headers(headers), redirections,
                           connection_type)

    http.request = new_request
    http.request.credentials = credentials
Prepares an HTTP object's request method for JWT access.

Wraps HTTP requests with logic to catch auth failures (typically identified
via a 401 status code). In the event of failure, tries to refresh the token
used and then retry the original request.

Args:
    credentials: _JWTAccessCredentials, the credentials used to identify
                 a service account that uses JWT access tokens.
    http: httplib2.Http, an http object to be used to make auth requests.
27,070
def PrimaryHDU(model): cards = model._mission.HDUCards(model.meta, hdu=0) if not in [c[0] for c in cards]: cards.append((, model.mag, )) cards.append((, )) cards.append((, )) cards.append((, )) cards.append((, model.mission, )) cards.append((, EVEREST_MAJOR_MINOR, )) cards.append((, EVEREST_VERSION, )) cards.append((, strftime(), )) header = pyfits.Header(cards=cards) hdu = pyfits.PrimaryHDU(header=header) return hdu
Construct the primary HDU file containing basic header info.
27,071
def resolve_method(state, method_name, class_name, params=(), ret_type=None,
                   include_superclasses=True, init_class=True,
                   raise_exception_if_not_found=False):
    base_class = state.javavm_classloader.get_class(class_name)
    if include_superclasses:
        class_hierarchy = state.javavm_classloader.get_class_hierarchy(base_class)
    else:
        class_hierarchy = [base_class]
    for class_descriptor in class_hierarchy:
        java_binary = state.project.loader.main_object
        soot_method = java_binary.get_soot_method(method_name,
                                                  class_descriptor.name,
                                                  params, none_if_missing=True)
        if soot_method is not None:
            if init_class:
                state.javavm_classloader.init_class(class_descriptor)
            return SootMethodDescriptor.from_soot_method(soot_method)
    if raise_exception_if_not_found:
        raise SootMethodNotLoadedException()
    else:
        return SootMethodDescriptor(class_name, method_name, params,
                                    ret_type=ret_type)
Resolves the method based on the given characteristics (name, class and
params). The method may be defined in one of the superclasses of the given
class (TODO: support interfaces).

:rtype: archinfo.arch_soot.SootMethodDescriptor
27,072
def write_brackets(docgraph, output_file, layer='mmax'):
    bracketed_str = gen_bracketed_output(docgraph, layer=layer)
    assert isinstance(output_file, (str, file))
    if isinstance(output_file, str):
        path_to_file = os.path.dirname(output_file)
        if not os.path.isdir(path_to_file):
            create_dir(path_to_file)
        with codecs.open(output_file, 'w', 'utf-8') as outfile:
            outfile.write(bracketed_str)
    else:
        output_file.write(bracketed_str)
converts a document graph into a plain text file with brackets.

Parameters
----------
layer : str or None
    The layer from which the pointing chains/relations (i.e. coreference
    relations) should be extracted. If no layer is selected, all pointing
    relations will be considered. (This might lead to errors, e.g. when the
    document contains Tiger syntax trees with secondary edges.)
27,073
def request(endpoint, verb='GET', session_options=None, **options):
    req = functools.partial(_request, endpoint, verb, session_options,
                            **options)
    return _run_in_fresh_loop(req)
Performs a synchronous request.

Uses a dedicated event loop and aiohttp.ClientSession object.

Options:
- endpoint: the endpoint to call
- verb: the HTTP verb to use (defaults: GET)
- session_options: a dict containing options to initialize the session (defaults: None)
- options: extra options for the request (defaults: None)

Returns a dict object with the following keys:
- content: the content of the response
- status: the status
- headers: a dict with all the response headers
27,074
async def put(self, cid):
    if settings.SIGNATURE_VERIFICATION:
        super().verify()
    try:
        body = json.loads(self.request.body)
    except:
        self.set_status(400)
        self.write({"error": 400,
                    "reason": "Unexpected data format. JSON required"})
        raise tornado.web.Finish
    public_key = body.get("public_key", None)
    if isinstance(body["message"], str):
        message = json.loads(body["message"])
    elif isinstance(body["message"], dict):
        message = body["message"]
    descr = message.get("description")
    coinid = message.get("coinid")
    if not coinid in settings.bridges.keys():
        self.set_status(400)
        self.write({"error": 400, "reason": "Unknown coin id"})
        raise tornado.web.Finish
    if not all([public_key, descr, coinid]):
        self.set_status(400)
        self.write({"error": 400, "reason": "Missed required fields"})
        raise tornado.web.Finish
    owneraddr = self.account.validator[coinid](public_key)
    response = await self.account.blockchain.ownerbycid(cid=cid)
    if isinstance(response, dict):
        if "error" in response.keys():
            error_code = response["error"]
            self.set_status(error_code)
            self.write({"error": error_code, "reason": response["error"]})
            raise tornado.web.Finish
    if response != owneraddr:
        self.set_status(403)
        self.write({"error": 403, "reason": "Owner does not match."})
        raise tornado.web.Finish
    fee = await billing.update_description_fee(owneraddr=owneraddr, cid=cid,
                                               description=descr)
    if coinid in settings.bridges.keys():
        self.account.blockchain.setendpoint(settings.bridges[coinid])
    else:
        self.set_status(400)
        self.write({"error": 400, "reason": "Invalid coinid"})
        raise tornado.web.Finish
    request = await self.account.blockchain.setdescrforcid(
        cid=cid, descr=descr, owneraddr=owneraddr)
    if "error" in request.keys():
        self.set_status(request["error"])
        self.write(request)
        raise tornado.web.Finish
    self.write({"cid": cid, "description": descr, "coinid": coinid,
                "owneraddr": owneraddr})
Update description for content

Accepts:
    Query string args:
        - "cid" - int
    Request body parameters:
        - message (signed dict):
            - "description" - str
            - "coinid" - str

Returns:
    dict with following fields:
        - "confirmed": None
        - "txid" - str
        - "description" - str
        - "content" - str
        - "read_access" - int
        - "write_access" - int
        - "cid" - int
        - "seller_pubkey" - str
        - "seller_access_string": None or str

Verified: True
27,075
def reset_sequence(self, topic):
    if topic in self.queues:
        self.queues[topic].reset()
Reset the expected sequence number for a topic If the topic is unknown, this does nothing. This behaviour is useful when you have wildcard topics that only create queues once they receive the first message matching the topic. Args: topic (string): The topic to reset the packet queue on
27,076
def set_Y(self, Y):
    assert isinstance(Y, (np.ndarray, ObsAr))
    state = self.update_model()
    self.update_model(False)
    if self.normalizer is not None:
        self.normalizer.scale_by(Y)
        self.Y_normalized = ObsAr(self.normalizer.normalize(Y))
        self.Y = Y
    else:
        self.Y = ObsAr(Y) if isinstance(Y, np.ndarray) else Y
        self.Y_normalized = self.Y
    self.update_model(state)
Set the output data of the model :param Y: output observations :type Y: np.ndarray or ObsArray
27,077
def self_inventory(self): if self.api_key is None: return {} if self._self_inventory: return self._self_inventory resp, self_inventory = self.get( % self.api_key) real_self_inventory = dict() for host in self_inventory: real_self_inventory[host[0]] = self.full_inventory[host[0]] self._self_inventory = real_self_inventory return self._self_inventory
Inventory output will only contain the server name and the session ID when a key is provided. Provide the same format as with the full inventory instead for consistency.
27,078
def applyslicer(array, slicer, pmask, cval=0):
    l = len(slicer)
    patch = numpy.zeros(list(pmask.shape[:l]) + list(array.shape[l:]),
                        array.dtype)
    if not 0 == cval:
        patch.fill(cval)
    sliced = array[slicer]
    patch[pmask] = sliced.reshape([numpy.prod(sliced.shape[:l])] +
                                  list(sliced.shape[l:]))
    return patch
Apply a slicer returned by the iterator to a new array of the same
dimensionality as the one used to initialize the iterator.

Notes
-----
If ``array`` has more dimensions than ``slicer`` and ``pmask``, the first
ones are sliced.

Parameters
----------
array : array_like
    A n-dimensional array.
slicer : list
    List of `slice()` instances as returned by `next()`.
pmask : narray
    The array mask as returned by `next()`.
cval : number
    Value to fill undefined positions.

Experiments
-----------
>>> import numpy
>>> from medpy.iterators import CentredPatchIterator
>>> arr = numpy.arange(0, 25).reshape((5,5))
>>> for patch, pmask, _, slicer in CentredPatchIterator(arr, 3):
...     new_patch = CentredPatchIterator.applyslicer(arr, slicer, pmask)
...     print numpy.all(new_patch == patch)
True
...
27,079
def get_data_frame_transform(self, transform_id=None, params=None):
    return self.transport.perform_request(
        "GET",
        _make_path("_data_frame", "transforms", transform_id),
        params=params,
    )
`<https://www.elastic.co/guide/en/elasticsearch/reference/current/get-data-frame-transform.html>`_

:arg transform_id: The id or comma delimited list of id expressions of
    the transforms to get, '_all' or '*' implies get all transforms
:arg from_: skips a number of transform configs, defaults to 0
:arg size: specifies a max number of transforms to get, defaults to 100
27,080
async def reload_modules(self, pathlist):
    loadedModules = []
    failures = []
    for path in pathlist:
        p, module = findModule(path, False)
        if (module is not None and hasattr(module, '_instance')
                and module._instance.state != ModuleLoadStateChanged.UNLOADED):
            loadedModules.append(module)
    ums = [ModuleLoadStateChanged.createMatcher(m, ModuleLoadStateChanged.UNLOADED)
           for m in loadedModules]
    for m in loadedModules:
        self.subroutine(self.unloadmodule(m, True), False)
    await self.wait_for_all(*ums)
    grouped = {}
    for path in pathlist:
        dotpos = path.rfind('.')
        if dotpos == -1:
            raise ModuleLoadException()
        package = path[:dotpos]
        classname = path[dotpos + 1:]
        mlist = grouped.setdefault(package, [])
        p, module = findModule(path, False)
        mlist.append((classname, module))
    for package, mlist in grouped.items():
        try:
            p = sys.modules[package]
            removeCache(p)
            p = reload(p)
        except KeyError:
            try:
                p = __import__(package, fromlist=[m[0] for m in mlist])
            except Exception:
                self._logger.warning('Failed to import package %r', package,
                                     exc_info=True)
                failures.append('import: ' + package)
                continue
        except Exception:
            self._logger.warning('Failed to reload package %r', package,
                                 exc_info=True)
            failures.append('reload: ' + package)
            continue
        for cn, module in mlist:
            try:
                module2 = getattr(p, cn)
            except AttributeError:
                self._logger.warning('Cannot find module %r in package %r',
                                     package, cn)
                failures.append('find: ' + package + '.' + cn)
                continue
            if module is not None and module is not module2:
                try:
                    lpos = loadedModules.index(module)
                    loaded = True
                except Exception:
                    loaded = False
                for d in module.depends:
                    d.referencedBy.remove(module)
                    if loaded and hasattr(d, '_instance'):
                        try:
                            d._instance.dependedBy.remove(module)
                            d._instance.dependedBy.add(module2)
                        except ValueError:
                            pass
                if hasattr(module, 'referencedBy'):
                    for d in module.referencedBy:
                        pos = d.depends.index(module)
                        d.depends[pos] = module2
                        if not hasattr(module2, 'referencedBy'):
                            module2.referencedBy = []
                        module2.referencedBy.append(d)
                if loaded:
                    loadedModules[lpos] = module2
    for m in loadedModules:
        self.subroutine(self.loadmodule(m))
    if failures:
        raise ModuleLoadException('Reload failed: ' + ', '.join(failures))
Reload modules with a full path in the pathlist
27,081
def get_category_by_id(cls, category_id, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._get_category_by_id_with_http_info(category_id, **kwargs)
    else:
        (data) = cls._get_category_by_id_with_http_info(category_id, **kwargs)
        return data
Find Category

Return single instance of Category by its ID.

This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True

>>> thread = api.get_category_by_id(category_id, async=True)
>>> result = thread.get()

:param async bool
:param str category_id: ID of category to return (required)
:return: Category
         If the method is called asynchronously, returns the request thread.
27,082
def _set_value(self, target, value, bitarray):
    rng = target.find('range')
    rng_min = float(rng.find('min').text)
    rng_max = float(rng.find('max').text)
    scl = target.find('scale')
    scl_min = float(scl.find('min').text)
    scl_max = float(scl.find('max').text)
    raw_value = ((value - scl_min) * (rng_max - rng_min)
                 / (scl_max - scl_min) + rng_min)
    return self._set_raw(target, int(raw_value), bitarray)
Set the given numeric value into the target field of the bitarray
27,083
def stop(self):
    if self._process is None:
        return
    if self._shared:
        BackendManager.SHARE_COUNT -= 1
        if BackendManager.SHARE_COUNT:
            return
    comm()
    for s in self._sockets:
        s._callback = None
        s.close()
    self._sockets[:] = []
    self._process._prevent_logs = True
    while self._process.state() != self._process.NotRunning:
        self._process.waitForFinished(1)
        if sys.platform == 'win32':
            self._process.kill()
        else:
            self._process.terminate()
    self._process._prevent_logs = False
    self._heartbeat_timer.stop()
    comm()
Stops the backend process.
27,084
async def acquire_async(self):
    r = self.acquire(blocking=False)
    while not r:
        await asyncio.sleep(.01)
        r = self.acquire(blocking=False)
Acquire the :attr:`lock` asynchronously
27,085
def now(format_string):
    from datetime import datetime
    from django.utils.dateformat import DateFormat
    return DateFormat(datetime.now()).format(format_string)
Displays the date, formatted according to the given string.

Uses the same format as PHP's ``date()`` function; see http://php.net/date
for all the possible values.

Sample usage::

    It is {% now "jS F Y H:i" %}
27,086
def build(ctx, builder="html", options=""):
    sourcedir = ctx.config.sphinx.sourcedir
    destdir = Path(ctx.config.sphinx.destdir or "build")/builder
    destdir = destdir.abspath()
    with cd(sourcedir):
        destdir_relative = Path(".").relpathto(destdir)
        command = "sphinx-build {opts} -b {builder} {sourcedir} {destdir}" \
            .format(builder=builder, sourcedir=".",
                    destdir=destdir_relative, opts=options)
        ctx.run(command)
Build docs with sphinx-build
27,087
def get_provider_token(self, provider_secret):
    # Endpoint and payload keys restored from the linked API documentation
    return self._post(
        'service/get_provider_token',
        data={
            'corpid': self._client.corp_id,
            'provider_secret': provider_secret,
        }
    )
Fetch the service provider access token.
https://work.weixin.qq.com/api/doc#90001/90143/91200
:param provider_secret: The service provider's secret, visible in the provider admin console
:return: The returned JSON data
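Hypothetical usage, assuming this method lives on a `service` sub-API of an authenticated enterprise WeChat client; the response key follows the linked API documentation:

    token_data = client.service.get_provider_token('PROVIDER_SECRET')
    provider_access_token = token_data['provider_access_token']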
27,088
async def list_vms(self, preset_name):
    # NOTE: the exact f-string filter pattern was stripped from the source;
    # filtering on the bare preset name is an assumption.
    response = await self.nova.servers.list(name=f'{preset_name}')
    result = []
    for server in response['servers']:
        result.append(self._map_vm_structure(server))
    return result
List VMs by preset name :arg preset_name: string
27,089
def new(self, vd, ino, orig_len, csum):
    if self._initialized:
        # Error message reconstructed; original literal stripped
        raise pycdlibexception.PyCdlibInternalError('This Boot Info Table is already initialized')
    self.vd = vd
    self.orig_len = orig_len
    self.csum = csum
    self.inode = ino
    self._initialized = True
A method to create a new boot info table.

Parameters:
 vd - The volume descriptor to associate with this boot info table.
 ino - The Inode associated with this Boot Info Table.
 orig_len - The original length of the file before the boot info table was patched into it.
 csum - The checksum for the boot file, starting at the byte after the boot info table.
Returns:
 Nothing.
27,090
def relabel(self, catalogue):
    for work, label in catalogue.items():
        self._matches.loc[
            self._matches[constants.WORK_FIELDNAME] == work,
            constants.LABEL_FIELDNAME] = label
Relabels results rows according to `catalogue`.

A row whose work is labelled in the catalogue will have its label set
to the label in the catalogue. Rows whose works are not labelled in
the catalogue will be unchanged.

:param catalogue: mapping of work names to labels
:type catalogue: `Catalogue`
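A short usage sketch; the work names and labels are placeholders:

    results.relabel({'T0001': 'in-corpus', 'T0002': 'out-of-corpus'})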
27,091
def patch_lines(x):
    # Append the first point of each following group so that consecutive
    # groups join up when plotted in sequence
    for idx in range(len(x)-1):
        x[idx] = np.vstack([x[idx], x[idx+1][0,:]])
    return x
Connect consecutive line groups for plotting by appending the first point of the next group to each group
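A worked example, assuming `patch_lines` as defined above:

    import numpy as np
    a = np.array([[0., 0.], [1., 1.]])
    b = np.array([[2., 2.], [3., 3.]])
    out = patch_lines([a, b])
    # out[0] == [[0, 0], [1, 1], [2, 2]]: the first point of `b` was appended,
    # so plotting the groups in sequence leaves no gap; out[1] is unchanged.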
27,092
def delete_dashboard(self, team_context, dashboard_id):
    project = None
    team = None
    if team_context is not None:
        if team_context.project_id:
            project = team_context.project_id
        else:
            project = team_context.project
        if team_context.team_id:
            team = team_context.team_id
        else:
            team = team_context.team
    route_values = {}
    if project is not None:
        route_values['project'] = self._serialize.url('project', project, 'str')
    if team is not None:
        route_values['team'] = self._serialize.url('team', team, 'str')
    if dashboard_id is not None:
        route_values['dashboardId'] = self._serialize.url('dashboard_id', dashboard_id, 'str')
    # The REST location GUID and exact preview version were stripped from
    # the source and are left as placeholders.
    self._send(http_method='DELETE',
               location_id='...',
               version='...',
               route_values=route_values)
DeleteDashboard.
[Preview API] Delete a dashboard given its ID. This also deletes the widgets associated with this dashboard.
:param :class:`<TeamContext> <azure.devops.v5_0.dashboard.models.TeamContext>` team_context: The team context for the operation
:param str dashboard_id: ID of the dashboard to delete.
27,093
def divsin_fc(fdata):
    # Divide by sin(theta) in the Fourier domain via the backward
    # recurrence g[k-1] = 2j * f[k] + g[k+1]
    nrows = fdata.shape[0]
    ncols = fdata.shape[1]
    L = int(nrows / 2)
    L2 = L - 2
    g = np.zeros([nrows, ncols], dtype=np.complex128)
    g[L2, :] = 2 * 1j * fdata[L - 1, :]
    for k in range(L2, -L2, -1):  # xrange in the Python 2 original
        g[k - 1, :] = 2 * 1j * fdata[k, :] + g[k + 1, :]
    fdata[:, :] = g
Apply divide by sine in the Fourier domain.
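The recurrence implemented above follows from sin(theta) = (e^{i theta} - e^{-i theta}) / (2i): if g = f / sin(theta) with Fourier coefficients g_k, then matching coefficients of f = g * sin(theta) gives f_k = (g_{k-1} - g_{k+1}) / (2i), which rearranges to the update g_{k-1} = 2i f_k + g_{k+1} (a sketch of the derivation under the standard Fourier convention).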
27,094
def get_asset_by_name(self, publisher_name, extension_name, version, asset_type,
                      account_token=None, accept_default=None,
                      account_token_header=None, **kwargs):
    # Route/query keys restored from the parameter names; the REST location
    # GUID and exact preview version were stripped and are left as placeholders.
    route_values = {}
    if publisher_name is not None:
        route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str')
    if extension_name is not None:
        route_values['extensionName'] = self._serialize.url('extension_name', extension_name, 'str')
    if version is not None:
        route_values['version'] = self._serialize.url('version', version, 'str')
    if asset_type is not None:
        route_values['assetType'] = self._serialize.url('asset_type', asset_type, 'str')
    query_parameters = {}
    if account_token is not None:
        query_parameters['accountToken'] = self._serialize.query('account_token', account_token, 'str')
    if accept_default is not None:
        query_parameters['acceptDefault'] = self._serialize.query('accept_default', accept_default, 'bool')
    response = self._send(http_method='GET',
                          location_id='...',
                          version='...',
                          route_values=route_values,
                          query_parameters=query_parameters,
                          accept_media_type='application/octet-stream')
    if "callback" in kwargs:
        callback = kwargs["callback"]
    else:
        callback = None
    return self._client.stream_download(response, callback=callback)
GetAssetByName.
[Preview API]
:param str publisher_name:
:param str extension_name:
:param str version:
:param str asset_type:
:param str account_token:
:param bool accept_default:
:param String account_token_header: Header to pass the account token
:rtype: object
27,095
def _certifi_where_for_ssl_version():
    if not ssl:
        return
    if ssl.OPENSSL_VERSION_INFO < (1, 0, 2):
        # Warning text partially reconstructed around the surviving fragment
        warnings.warn(
            "Old OpenSSL detected (< 1.0.2), which can't use stronger root certificates.")
        return certifi.old_where()
    return certifi.where()
Gets the right location for certifi certificates for the current SSL version.

Older versions of SSL don't support the stronger set of root certificates.
27,096
def update_ethernet_settings(self, configuration, force=False, timeout=-1):
    uri = "{}/ethernetSettings".format(self.data["uri"])
    return self._helper.update(configuration, uri=uri, force=force, timeout=timeout)
Updates the Ethernet interconnect settings for the logical interconnect.

Args:
    configuration: Ethernet interconnect settings.
    force: If set to true, the operation completes despite any problems with network connectivity
        or errors on the resource itself. The default is false.
    timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort
        the operation in OneView; it just stops waiting for its completion.

Returns:
    dict: Logical Interconnect.
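Hypothetical usage; the settings field name is an assumption, not taken from the OneView documentation:

    configuration = {'igmpIdleTimeoutInterval': 200}  # field name is an assumption
    updated = logical_interconnect.update_ethernet_settings(configuration, force=True)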
27,097
def is_installed(self, pkgname):
    return any(d for d in self.get_distributions() if d.project_name == pkgname)
Given a package name, returns whether it is installed in the environment

:param str pkgname: The name of a package
:return: Whether the supplied package is installed in the environment
:rtype: bool
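A short usage sketch, assuming `env` is an instance of the surrounding environment class and the package name is a placeholder:

    if env.is_installed('requests'):
        print('requests is available in this environment')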
27,098
def search_article(self, keyword, page=1,
                   timesn=WechatSogouConst.search_article_time.anytime,
                   article_type=WechatSogouConst.search_article_type.all,
                   ft=None, et=None,
                   unlock_callback=None, identify_image_callback=None,
                   decode_url=True):
    url = WechatSogouRequest.gen_search_article_url(keyword, page, timesn, article_type, ft, et)
    session = requests.session()
    resp = self.__get_by_unlock(url,
                                WechatSogouRequest.gen_search_article_url(keyword),
                                unlock_platform=self.__unlock_sogou,
                                unlock_callback=unlock_callback,
                                identify_image_callback=identify_image_callback,
                                session=session)
    article_list = WechatSogouStructuring.get_article_by_search(resp.text)
    for i in article_list:
        if decode_url:
            # Key names restored from the documented return structure
            i['article']['url'] = self.__format_url(i['article']['url'], url, resp.text,
                                                    unlock_callback=unlock_callback,
                                                    identify_image_callback=identify_image_callback,
                                                    session=session)
            i['gzh']['profile_url'] = self.__format_url(i['gzh']['profile_url'], url, resp.text,
                                                        unlock_callback=unlock_callback,
                                                        identify_image_callback=identify_image_callback,
                                                        session=session)
        yield i
Search articles.

When a captcha page appears, the caller can supply either:
1. unlock_callback, a function that handles the whole flow from the captcha page to its resolution
2. or just identify_image_callback, a function that takes the captcha image bytes and returns the captcha text, with the wechatsogou package handling the rest

Note: only one of unlock_callback and identify_image_callback is needed; if both are given, identify_image_callback is ignored.

Parameters
----------
keyword : str or unicode
    search text
page : int, optional
    page number, the default is 1
timesn : WechatSogouConst.search_article_time
    time filter: anytime (no limit) / day / week / month / year / specific (custom range),
    the default is anytime
article_type : WechatSogouConst.search_article_type
    content type: image (has images) / video (has video) / rich (images and video) / all (anything)
ft, et : datetime.date or None
    when timesn is specific, ft is the start date, e.g. 2017-07-01
    when timesn is specific, et is the end date, e.g. 2017-07-15
unlock_callback : callable
    handles the captcha page; see unlock_callback_example
identify_image_callback : callable
    takes the captcha image bytes and returns the text; see identify_image_callback_example
decode_url : bool
    whether to decode the url

Returns
-------
list[dict]
    {
        'article': {
            'title': '',     # article title
            'url': '',       # article link
            'imgs': '',      # list of article images
            'abstract': '',  # article abstract
            'time': ''       # article push time
        },
        'gzh': {
            'profile_url': '',  # link to the official account's 10 most recent posts
            'headimage': '',    # avatar
            'wechat_name': '',  # name
            'isv': '',          # whether verified
        }
    }

Raises
------
WechatSogouRequestsException
    requests error
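A short usage sketch; WechatSogouAPI is the package's public entry point, and the keyword is a placeholder:

    import wechatsogou
    ws_api = wechatsogou.WechatSogouAPI()
    for item in ws_api.search_article('python'):
        print(item['article']['title'], item['article']['url'])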
27,099
async def on_raw_433(self, message):
    if not self.registered:
        # Nickname is taken: try the next configured nickname, or append
        # underscores (restored literal, assumed) once the list is exhausted
        self._registration_attempts += 1
        if self._attempt_nicknames:
            await self.set_nickname(self._attempt_nicknames.pop(0))
        else:
            await self.set_nickname(
                self._nicknames[0] + '_' * (self._registration_attempts - len(self._nicknames)))
Nickname in use.