Columns: Unnamed: 0 — int64 (values 0 to 389k) · code — string (lengths 26 to 79.6k) · docstring — string (lengths 1 to 46.9k)
7,400
def load_kwargs(*args, **kwargs):
    # String keys below ('geometry', 'graph', ...) were stripped from the dump
    # and are restored from the trimesh source; the handler-selection block at
    # the end is likewise reconstructed so that `handler` is defined.
    def handle_scene():
        scene = Scene()
        scene.geometry.update({k: load_kwargs(v)
                               for k, v in kwargs['geometry'].items()})
        for k in kwargs['graph']:
            if isinstance(k, dict):
                scene.graph.update(**k)
            elif util.is_sequence(k) and len(k) == 3:
                scene.graph.update(k[1], k[0], **k[2])
        if 'base_frame' in kwargs:
            scene.graph.base_frame = kwargs['base_frame']
        if 'metadata' in kwargs:
            scene.metadata.update(kwargs['metadata'])
        return scene

    def handle_trimesh_kwargs():
        if (isinstance(kwargs['vertices'], dict) or
                isinstance(kwargs['faces'], dict)):
            return Trimesh(**misc.load_dict(kwargs))
        elif kwargs['faces'] is None:
            # vertices without faces: return a PointCloud
            return PointCloud(**kwargs)
        else:
            return Trimesh(**kwargs)

    def handle_trimesh_export():
        data, file_type = kwargs['data'], kwargs['file_type']
        if not isinstance(data, dict):
            data = util.wrap_as_stream(data)
        k = mesh_loaders[file_type](data, file_type=file_type)
        return Trimesh(**k)

    # pick the handler whose required keys are all present in kwargs
    handlers = {handle_scene: ('graph', 'geometry'),
                handle_trimesh_kwargs: ('vertices', 'faces'),
                handle_trimesh_export: ('file_type', 'data')}
    handler = None
    for func, required in handlers.items():
        if all(i in kwargs for i in required):
            handler = func
            break
    if handler is None:
        raise ValueError('unable to determine type!')
    return handler()
Load geometry from a properly formatted dict or kwargs
7,401
def Runs(self): with self._accumulators_mutex: items = list(six.iteritems(self._accumulators)) return {run_name: accumulator.Tags() for run_name, accumulator in items}
Return all the run names in the `EventMultiplexer`. Returns: ``` {runName: { scalarValues: [tagA, tagB, tagC], graph: true, meta_graph: true}} ```
7,402
def emit(self, event, *args, **kwargs): for func in self._registered_events[event].values(): func(*args, **kwargs)
Send out an event and call its associated functions :param event: Name of the event to trigger
7,403
def _generate_normals(polygons): if isinstance(polygons, np.ndarray): n = polygons.shape[-2] i1, i2, i3 = 0, n//3, 2*n//3 v1 = polygons[..., i1, :] - polygons[..., i2, :] v2 = polygons[..., i2, :] - polygons[..., i3, :] else: v1 = np.empty((len(polygons), 3)) v2 = np.empty((len(polygons), 3)) for poly_i, ps in enumerate(polygons): n = len(ps) i1, i2, i3 = 0, n//3, 2*n//3 v1[poly_i, :] = ps[i1, :] - ps[i2, :] v2[poly_i, :] = ps[i2, :] - ps[i3, :] return np.cross(v1, v2)
Takes a list of polygons and returns an array of their normals. Normals point towards the viewer for a face with its vertices in counterclockwise order, following the right hand rule. Uses three points equally spaced around the polygon. This normal of course might not make sense for polygons with more than three points not lying in a plane, but it's a plausible and fast approximation. Args: polygons (list): list of (M_i, 3) array_like, or (..., M, 3) array_like A sequence of polygons to compute normals for, which can have varying numbers of vertices. If the polygons all have the same number of vertices and an array is passed, then the operation will be vectorized. Returns: normals: (..., 3) array_like A normal vector estimated for each polygon.
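A quick way to sanity-check the three-point estimate described above: for a unit triangle in the XY plane with counterclockwise vertices, the normal should point along +Z. A minimal standalone sketch mirroring the vectorized branch:

```python
import numpy as np

# One triangle, counterclockwise in the XY plane: shape (1, 3, 3).
polygons = np.array([[[0.0, 0.0, 0.0],
                      [1.0, 0.0, 0.0],
                      [0.0, 1.0, 0.0]]])

n = polygons.shape[-2]              # vertices per polygon
i1, i2, i3 = 0, n // 3, 2 * n // 3  # three roughly equally spaced points
v1 = polygons[..., i1, :] - polygons[..., i2, :]
v2 = polygons[..., i2, :] - polygons[..., i3, :]
print(np.cross(v1, v2))             # [[0. 0. 1.]] — points at the viewer
```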
7,404
def _as_rescale(self, get, targetbitdepth):
    # Metadata keys ('bitdepth', 'transparent') and the array typecode are
    # restored from the PyPNG source; the dump had stripped the literals.
    width, height, pixels, meta = get()
    maxval = 2**meta['bitdepth'] - 1
    targetmaxval = 2**targetbitdepth - 1
    factor = float(targetmaxval) / float(maxval)
    meta['bitdepth'] = targetbitdepth

    def iterscale(rows):
        for row in rows:
            yield array('BH'[targetbitdepth > 8],
                        [int(round(x * factor)) for x in row])

    if maxval == targetmaxval:
        return width, height, pixels, meta
    else:
        if 'transparent' in meta:
            transparent = meta['transparent']
            if isinstance(transparent, tuple):
                transparent = tuple(list(iterscale((transparent,)))[0])
            else:
                transparent = tuple(list(iterscale(((transparent,),)))[0])[0]
            meta['transparent'] = transparent
        return width, height, iterscale(pixels), meta
Helper used by :meth:`asRGB8` and :meth:`asRGBA8`.
7,405
def _file_model_from_db(self, record, content, format):
    # Record/model keys restored from the pgcontents source; the dump had
    # stripped the string literals.
    path = to_api_path(record['parent_name'] + record['name'])
    model = base_model(path)
    model['type'] = 'file'
    model['last_modified'] = model['created'] = record['created_at']
    if content:
        bcontent = record['content']
        model['content'], model['format'], model['mimetype'] = from_b64(
            path,
            bcontent,
            format,
        )
    return model
Build a file model from database record.
7,406
def fetch_routing_info(self, address):
    metadata = {}
    records = []

    def fail(md):
        if md.get("code") == "Neo.ClientError.Procedure.ProcedureNotFound":
            raise RoutingProtocolError("Server {!r} does not support routing".format(address))
        else:
            raise RoutingProtocolError("Routing support broken on server {!r}".format(address))

    try:
        with self.acquire_direct(address) as cx:
            _, _, server_version = (cx.server.agent or "").partition("/")
            # Log messages below are reconstructed; the originals were truncated
            # in the dump.
            if server_version and Version.parse(server_version) >= Version((3, 2)):
                log_debug("[routing] getRoutingTable context=%r", self.routing_context)
                cx.run("CALL dbms.cluster.routing.getRoutingTable({context})",
                       {"context": self.routing_context},
                       on_success=metadata.update, on_failure=fail)
            else:
                log_debug("[routing] getServers")
                cx.run("CALL dbms.cluster.routing.getServers", {},
                       on_success=metadata.update, on_failure=fail)
            cx.pull_all(on_success=metadata.update, on_records=records.extend)
            cx.sync()
            routing_info = [dict(zip(metadata.get("fields", ()), values))
                            for values in records]
            log_debug("[routing] info=%r", routing_info)
        return routing_info
    except RoutingProtocolError as error:
        raise ServiceUnavailable(*error.args)
    except ServiceUnavailable:
        self.deactivate(address)
        return None
Fetch raw routing info from a given router address. :param address: router address :return: list of routing records or None if no connection could be established :raise ServiceUnavailable: if the server does not support routing or if routing support is broken
7,407
def show_pattern(syncpr_output_dynamic, image_height, image_width): number_pictures = len(syncpr_output_dynamic); iteration_math_step = 1.0; if (number_pictures > 50): iteration_math_step = number_pictures / 50.0; number_pictures = 50; number_cols = int(numpy.ceil(number_pictures ** 0.5)); number_rows = int(numpy.ceil(number_pictures / number_cols)); real_index = 0, 0; double_indexer = True; if ( (number_cols == 1) or (number_rows == 1) ): real_index = 0; double_indexer = False; (_, axarr) = plt.subplots(number_rows, number_cols); if (number_pictures > 1): plt.setp([ax for ax in axarr], visible = False); iteration_display = 0.0; for iteration in range(len(syncpr_output_dynamic)): if (iteration >= iteration_display): iteration_display += iteration_math_step; ax_handle = axarr; if (number_pictures > 1): ax_handle = axarr[real_index]; syncpr_visualizer.__show_pattern(ax_handle, syncpr_output_dynamic, image_height, image_width, iteration); if (double_indexer is True): real_index = real_index[0], real_index[1] + 1; if (real_index[1] >= number_cols): real_index = real_index[0] + 1, 0; else: real_index += 1; plt.show();
! @brief Displays evolution of phase oscillators as set of patterns where the last one means final result of recognition. @param[in] syncpr_output_dynamic (syncpr_dynamic): Output dynamic of a syncpr network. @param[in] image_height (uint): Height of the pattern (image_height * image_width should be equal to number of oscillators). @param[in] image_width (uint): Width of the pattern.
7,408
def status(request):
    token = request.GET.get("token", "")
    if not token or token != settings.STATUS_TOKEN:
        raise Http404()

    info = {}
    # Setting names, info keys, and log messages are reconstructed; the dump
    # had stripped the string literals. Adjust to the project's actual
    # HEALTH_CHECK entries.
    check_mapping = {
        'REDIS': (get_redis_info, 'redis'),
        'ELASTIC_SEARCH': (get_elasticsearch_info, 'elasticsearch'),
        'POSTGRES': (get_pg_info, 'postgresql'),
        'CELERY': (get_celery_info, 'celery'),
        'CERTIFICATE': (get_certificate_info, 'certificate'),
    }
    for setting, (check_fn, key) in check_mapping.items():
        if setting in settings.HEALTH_CHECK:
            log.debug("getting %s status", key)
            info[key] = check_fn()
            log.debug("%s status retrieved", key)

    code = HTTP_OK
    status_all = UP
    for key in info:
        if info[key]["status"] == DOWN:
            code = SERVICE_UNAVAILABLE
            status_all = DOWN
            break
    info["status_all"] = status_all

    resp = JsonResponse(info)
    resp.status_code = code
    return resp
Status
7,409
def shapeless_placeholder(x, axis, name): shp = x.get_shape().as_list() if not isinstance(axis, list): axis = [axis] for a in axis: if shp[a] is None: raise ValueError("Axis {} of shape {} is already unknown!".format(a, shp)) shp[a] = None x = tf.placeholder_with_default(x, shape=shp, name=name) return x
Make the static shape of a tensor less specific. If you want to feed to a tensor, the shape of the feed value must match the tensor's static shape. This function creates a placeholder which defaults to x if not fed, but has a less specific static shape than x. See also `tensorflow#5680 <https://github.com/tensorflow/tensorflow/issues/5680>`_. Args: x: a tensor axis(int or list of ints): these axes of ``x.get_shape()`` will become None in the output. name(str): name of the output tensor Returns: a tensor equal to x, but shape information is partially cleared.
7,410
def p_select_from_where_statement_1(self, p): p[0] = SelectFromWhereNode(cardinality=p[2], variable_name=p[3], key_letter=p[7], where_clause=p[9])
statement : SELECT ANY variable_name FROM INSTANCES OF identifier WHERE expression | SELECT MANY variable_name FROM INSTANCES OF identifier WHERE expression
7,411
def to_ds9(self, coordsys='fk5', fmt='.6f', radunit='deg'):
    # The default argument values, the shape template strings, and most of the
    # per-shape loop were stripped from the dump; this body is a best-effort
    # sketch of the regions serializer, not the verbatim implementation.
    valid_symbols_reverse = {y: x for x, y in valid_symbols_ds9.items()}
    ds9_strings = {
        'circle': '{0}circle({1},{2},{3})',
        'circleannulus': '{0}annulus({1},{2},{3},{4})',
        'ellipse': '{0}ellipse({1},{2},{3},{4},{5})',
        'rectangle': '{0}box({1},{2},{3},{4},{5})',
        'polygon': '{0}polygon({1})',
        'point': '{0}point({1},{2})',
        'line': '{0}line({1},{2},{3},{4})',
        'text': '{0}text({1},{2})',
    }
    output = '# Region file format: DS9 astropy/regions\n'
    # arcsec radii are conventionally marked with a trailing double quote
    radunitstr = '"' if radunit == 'arcsec' else ''
    output += '{}\n'.format(coordsys)
    for shape in self:
        include = '-' if shape.include in (False, '-') else ''
        meta_str = ' '.join('{0}={1}'.format(key, val)
                            for key, val in shape.meta.items())
        coord = [('{:' + fmt + '}').format(getattr(c, 'value', c))
                 for c in shape.coord]
        line = ds9_strings[shape.region_type].format(include, *coord)
        if meta_str.strip():
            output += '{0} # {1}\n'.format(line, meta_str)
        else:
            output += '{0}\n'.format(line)
    return output
Converts a list of ``regions.Shape`` objects to ds9 region strings. Parameters ---------- coordsys : str This overrides the coordinate system frame for all regions. fmt : str A python string format defining the output precision. Default is .6f, which is accurate to 0.0036 arcseconds. radunit : str This denotes the unit of the radius. Returns ------- region_string : str ds9 region string Examples -------- TODO
7,412
def htmlDocContentDumpFormatOutput(self, cur, encoding, format): if cur is None: cur__o = None else: cur__o = cur._o libxml2mod.htmlDocContentDumpFormatOutput(self._o, cur__o, encoding, format)
Dump an HTML document.
7,413
def registry_hostname(registry):
    if registry.startswith('http://') or registry.startswith('https://'):
        return urlparse(registry).netloc
    else:
        return registry
Strip a reference to a registry to just the hostname:port
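For reference, `urlparse` only fills in `netloc` when a scheme is present, which is why the function checks for the scheme prefix first; a bare `registry.example.com:5000` would parse with an empty netloc. A quick standalone check (using Python 3's `urllib`; the original module may import `urlparse` differently):

```python
from urllib.parse import urlparse

print(urlparse("https://registry.example.com:5000/v2/").netloc)  # registry.example.com:5000
print(urlparse("registry.example.com:5000").netloc)              # '' — no scheme, netloc is empty
```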
7,414
def insert_successor(self, successor): self.feature += successor.feature; self.successors.append(successor); successor.parent = self;
! @brief Insert successor to the node. @param[in] successor (cfnode): Successor for adding.
7,415
def start(self, version=None, **kwargs): if not version: version = self.mostRecentVersion pysc2Version = lib.Version( version.version, version.baseVersion, version.dataHash, version.fixedHash) return sc_process.StarcraftProcess( self, exec_path=self.exec_path(version.baseVersion), version=pysc2Version, **kwargs)
Launch the game process.
7,416
def get_message(self, dummy0, sock_info, use_cmd=False): ns = self.namespace() ctx = sock_info.compression_context if use_cmd: spec = self.as_command(sock_info)[0] if sock_info.op_msg_enabled: request_id, msg, size, _ = _op_msg( 0, spec, self.db, ReadPreference.PRIMARY, False, False, self.codec_options, ctx=sock_info.compression_context) return request_id, msg, size ns = _UJOIN % (self.db, "$cmd") return query(0, ns, 0, -1, spec, None, self.codec_options, ctx=ctx) return get_more(ns, self.ntoreturn, self.cursor_id, ctx)
Get a getmore message.
7,417
def read_header(file):
    file_type = _file_type(file)
    if file_type in ["txt", "plain", "bat"]:
        file_temp = open(file, "r")
        header = file_temp.readlines()[1]
        file_temp.close()
        # OpenSignals text headers are a dict literal after the leading '# '
        # marker; the split literal below is reconstructed.
        header = ast.literal_eval(header.split("# ")[-1])
        macs = header.keys()
        col_nbr = 0
        for mac in macs:
            del header[mac]["special"]
            del header[mac]["position"]
            del header[mac]["mode"]
            column_labels = {}
            for chn_nbr, chn in enumerate(header[mac]["channels"]):
                chn_label = header[mac]["label"][chn_nbr]
                column_labels[chn] = col_nbr + numpy.where(
                    numpy.array(header[mac]["column"]) == chn_label)[0][0]
            header[mac]["column labels"] = column_labels
            col_nbr += len(header[mac]["column"])
            del header[mac]["column"]
            del header[mac]["label"]
    elif file_type in ["h5", "x-hdf", "a"]:
        file_temp = h5py.File(file)
        macs = file_temp.keys()
        header = {}
        for mac in macs:
            header[mac] = dict(file_temp.get(mac).attrs.items())
            header[mac]["sensor"] = []
            for key in ["duration", "mode", "keywords", "nsamples",
                        "forcePlatform values", "macaddress"]:
                if key in header[mac].keys():
                    del header[mac][key]
            column_labels = {}
            for chn in header[mac]["channels"]:
                chn_label = "channel_" + str(chn)
                column_labels[chn] = chn_label
                header[mac]["sensor"].append(dict(file_temp.get(mac).get("raw").get(
                    "channel_" + str(chn)).attrs.items())["sensor"])
            header[mac]["column labels"] = column_labels
        file_temp.close()
    else:
        raise RuntimeError("The type of the input file does not correspond to the predefined "
                           "formats of OpenSignals")
    return header
----- Brief ----- Universal function for reading the header of .txt, .h5 and .edf files generated by OpenSignals. ----------- Description ----------- Each file generated by the OpenSignals software (available at https://www.biosignalsplux.com/en/software) owns a set of metadata that allows the proper identification and characterization of each acquisition, by the identification of the mac address of the devices, date of acquisition, duration, number of samples, type of the devices and firmware version. This function allows to easily access all of this information using only one line of code and outputs a dictionary to easily identify each field of the header of file. ---------- Parameters ---------- file : file path File path. Returns ------- out : dict Header data read from the input file as dictionary with keys: [mac address]: The key is a string with the mac address of the device; sensor: Sensor(s) used in the acquisition; device name: String with the mac address identifying the device used in the acquisition process; sync interval: Time interval (in seconds) at which a digital signal is sent by a “pacemaker” thread to a single device (used when the sync mode in on OpenSignals for synchronized data acquisition using multiple devices); time: Time of the acquisition; comments: Comments inserted in the OpenSignals software after the acquisition; device connection: Used connection to the device while using it; channels: Used channels; keywords: Keywords inserted in the OpenSignals software after the acquisition; digital IO: Digital channels available in each device (0 is the Input and 1 is the Output); firmware version: Firmware version of the device; device: Type of device used during the acquisition; sampling rate: Sampling rate set prior to the acquisition; resolution: Resolution set prior to the acquisition; date: Date of the acquisition; column labels: Labels of each set of data (e.g. channel 1).
7,418
def org_find_members(object_id, input_params={}, always_retry=True, **kwargs):
    return DXHTTPRequest('/%s/findMembers' % object_id, input_params,
                         always_retry=always_retry, **kwargs)
Invokes the /org-xxxx/findMembers API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Organizations#API-method%3A-%2Forg-xxxx%2FfindMembers
7,419
def patch_namespaced_replica_set_scale(self, name, namespace, body, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.patch_namespaced_replica_set_scale_with_http_info(name, namespace, body, **kwargs)
    else:
        (data) = self.patch_namespaced_replica_set_scale_with_http_info(name, namespace, body, **kwargs)
        return data
partially update scale of the specified ReplicaSet This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_replica_set_scale(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the Scale (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :return: V1Scale If the method is called asynchronously, returns the request thread.
7,420
def addRelationships(
    self,
    data: list,
    LIMIT: int = 20,
    _print: bool = True,
    crawl: bool = False,
) -> list:
    # The endpoint path, payload keys, and action label were stripped from the
    # dump; the literals below are hypothetical reconstructions, not verified
    # API names.
    url_base = self.base_url + '/api/1/term/add-relationship'
    relationships = []
    for relationship in data:
        relationship.update({
            'term_1': relationship['term1_id'],
            'term_2': relationship['term2_id'],
            'relationship_tid': relationship['relationship_tid'],
        })
        relationships.append((url_base, relationship))
    return self.post(
        relationships,
        LIMIT=LIMIT,
        action='Adding Relationships',
        _print=_print,
        crawl=crawl,
    )
data = [{ "term1_id", "term2_id", "relationship_tid", "term1_version", "term2_version", "relationship_term_version",}]
7,421
def __sort_stats(self, sortedby=None): return sort_stats(self.stats, sortedby, reverse=glances_processes.sort_reverse)
Return the stats (dict) sorted by (sortedby).
7,422
def get_firmware(self): firmware_uri = "{}/firmware".format(self.data["uri"]) return self._helper.do_get(firmware_uri)
Gets baseline firmware information for a SAS Logical Interconnect. Returns: dict: SAS Logical Interconnect Firmware.
7,423
def signature(cls):
    snake_scope = cls.options_scope.replace('-', '_')
    partial_construct_optionable = functools.partial(_construct_optionable, cls)
    # The __name__ template is reconstructed from the pants source.
    partial_construct_optionable.__name__ = 'construct_scope_{}'.format(snake_scope)
    return dict(
        output_type=cls.optionable_cls,
        input_selectors=tuple(),
        func=partial_construct_optionable,
        input_gets=(Get.create_statically_for_rule_graph(ScopedOptions, Scope),),
        dependency_optionables=(cls.optionable_cls,),
    )
Returns kwargs to construct a `TaskRule` that will construct the target Optionable. TODO: This indirection avoids a cycle between this module and the `rules` module.
7,424
def hash_contents(contents): assert isinstance(contents, GroupNode) result = hashlib.sha256() def _hash_int(value): result.update(struct.pack(">L", value)) def _hash_str(string): assert isinstance(string, string_types) _hash_int(len(string)) result.update(string.encode()) def _hash_object(obj): _hash_str(obj.json_type) if isinstance(obj, (TableNode, FileNode)): hashes = obj.hashes _hash_int(len(hashes)) for hval in hashes: _hash_str(hval) elif isinstance(obj, GroupNode): children = obj.children _hash_int(len(children)) for key, child in sorted(iteritems(children)): _hash_str(key) _hash_object(child) else: assert False, "Unexpected object: %r" % obj if obj.metadata_hash is not None: _hash_str(obj.metadata_hash) _hash_object(contents) return result.hexdigest()
Creates a hash of key names and hashes in a package dictionary. "contents" must be a GroupNode.
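The length-prefixing via `_hash_int` before each string is what makes the hash above unambiguous: without it, `('ab', 'c')` and `('a', 'bc')` would feed identical bytes to the digest. A minimal standalone illustration of the same framing trick:

```python
import hashlib
import struct

def framed_digest(strings):
    """Hash a sequence of strings unambiguously by length-prefixing each one."""
    h = hashlib.sha256()
    for s in strings:
        h.update(struct.pack(">L", len(s)))  # 4-byte big-endian length frame
        h.update(s.encode())
    return h.hexdigest()

# Without framing these two would collide; with it they differ.
assert framed_digest(["ab", "c"]) != framed_digest(["a", "bc"])
```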
7,425
def _dequeue_function(self):
    # Warning-message literals were stripped from the dump and are
    # reconstructed below; the control flow follows the flattened original.
    from UcsBase import WriteUcsWarning, _GenericMO, WriteObject, UcsUtils
    while len(self._wbs):
        lowestTimeout = None
        for wb in self._wbs:
            pollSec = wb.params["pollSec"]
            managedObject = wb.params["managedObject"]
            timeoutSec = wb.params["timeoutSec"]
            transientValue = wb.params["transientValue"]
            successValue = wb.params["successValue"]
            failureValue = wb.params["failureValue"]
            prop = wb.params["prop"]
            startTime = wb.params["startTime"]
            gmo = None
            pmo = None
            mce = None
            if (pollSec != None and managedObject != None):
                crDn = self.ConfigResolveDn(managedObject.getattr("Dn"),
                                            inHierarchical=YesOrNo.FALSE,
                                            dumpXml=YesOrNo.FALSE)
                if (crDn.errorCode != 0):
                    WriteUcsWarning('[Error]: ConfigResolveDn [Code]: ' + crDn.errorCode +
                                    ' [Description]: ' + crDn.errorDescr)
                    continue
                for eachMo in crDn.OutConfig.GetChild():
                    pmo = eachMo
                if pmo == None:
                    WriteUcsWarning('Mo ' + managedObject.getattr("Dn") + ' not found.')
                    continue
                gmo = _GenericMO(mo=pmo, option=WriteXmlOption.All)
            else:
                ts = datetime.datetime.now() - startTime
                timeoutMs = 0
                if (timeoutSec != None):
                    if (ts.seconds >= timeoutSec):
                        self._remove_watch_block(wb)
                        continue
                    timeoutMs = (timeoutSec - ts.seconds)
                if (lowestTimeout == None):
                    lowestTimeout = timeoutMs
                else:
                    if (lowestTimeout > timeoutMs):
                        lowestTimeout = timeoutMs
                if (timeoutMs > 0):
                    mce = wb.Dequeue(timeoutMs)
                else:
                    mce = wb.Dequeue(2147483647)
                if mce == None:
                    continue
            if (managedObject == None):
                if wb.cb != None:
                    wb.cb(mce)
                continue
            if mce != None:
                gmo = _GenericMO(mo=mce.mo, option=WriteXmlOption.All)
            attributes = []
            if mce == None:
                attributes = gmo.properties.keys()
            else:
                attributes = mce.changeList
            if prop.lower() in (attr.lower() for attr in attributes):
                if (len(successValue) > 0 and
                        gmo.GetAttribute(UcsUtils.WordU(prop)) in successValue):
                    if mce != None:
                        if wb.cb != None:
                            wb.cb(mce)
                    else:
                        if wb.cb != None:
                            wb.cb(UcsMoChangeEvent(eventId=0, mo=pmo, changeList=prop))
                    if wb != None:
                        self._remove_watch_block(wb)
                        wb = None
                        break
                    continue
                if (len(failureValue) > 0 and
                        gmo.GetAttribute(UcsUtils.WordU(prop)) in failureValue):
                    WriteUcsWarning('Encountered failure value ' +
                                    gmo.GetAttribute(UcsUtils.WordU(prop)) +
                                    ' for property ' + prop + '.')
                    if mce != None:
                        if wb.cb != None:
                            wb.cb(mce)
                    else:
                        if wb.cb != None:
                            wb.cb(UcsMoChangeEvent(eventId=0, mo=pmo, changeList=prop))
                    if wb != None:
                        self._remove_watch_block(wb)
                        wb = None
                        break
                    continue
                if ((len(transientValue) > 0) and
                        (not gmo.GetAttribute(UcsUtils.WordU(prop)) in transientValue)):
                    WriteUcsWarning('Encountered unknown value ' +
                                    gmo.GetAttribute(UcsUtils.WordU(prop)) +
                                    ' for property ' + prop + '.')
                    if mce != None:
                        if wb.cb != None:
                            wb.cb(mce)
                    else:
                        if wb.cb != None:
                            wb.cb(UcsMoChangeEvent(eventId=0, mo=pmo, changeList=prop))
                    if wb != None:
                        self._remove_watch_block(wb)
                        wb = None
                        break
                    continue
            if (pollSec != None):
                pollMs = pollSec
                if (timeoutSec != None):
                    pts = datetime.datetime.now() - startTime
                    if (pts.seconds >= timeoutSec):
                        break
                    timeoutMs = (timeoutSec - pts.seconds)
                    if (timeoutMs < pollSec):
                        pollMs = pts.seconds
                if (lowestTimeout == None):
                    lowestTimeout = pollMs
                else:
                    if (lowestTimeout > pollMs):
                        lowestTimeout = pollMs
        if len(self._wbs):
            self._dequeue_wait(lowestTimeout)
    return
Internal method to dequeue events.
7,426
def find_elements_by_xpath(self, xpath): return self.find_elements(by=By.XPATH, value=xpath)
Finds multiple elements by xpath. :Args: - xpath - The xpath locator of the elements to be found. :Returns: - list of WebElement - a list with elements if any was found. An empty list if not :Usage: :: elements = driver.find_elements_by_xpath("//div[contains(@class, 'foo')]")
7,427
async def create_webhook(self, *, name, avatar=None, reason=None): if avatar is not None: avatar = utils._bytes_to_base64_data(avatar) data = await self._state.http.create_webhook(self.id, name=str(name), avatar=avatar, reason=reason) return Webhook.from_state(data, state=self._state)
|coro| Creates a webhook for this channel. Requires :attr:`~.Permissions.manage_webhooks` permissions. .. versionchanged:: 1.1.0 Added the ``reason`` keyword-only parameter. Parameters ------------- name: :class:`str` The webhook's name. avatar: Optional[:class:`bytes`] A :term:`py:bytes-like object` representing the webhook's default avatar. This operates similarly to :meth:`~ClientUser.edit`. reason: Optional[:class:`str`] The reason for creating this webhook. Shows up in the audit logs. Raises ------- HTTPException Creating the webhook failed. Forbidden You do not have permissions to create a webhook. Returns -------- :class:`Webhook` The created webhook.
7,428
def classify_by_name(names):
    # Type labels restored from the PLIP source ('RNA', 'DNA', 'POLYMER',
    # 'SMALLMOLECULE', 'ION'); the dump had stripped the string literals.
    if len(names) > 3:  # composite ligand with many HETIDs: polymer
        if len(set(config.RNA).intersection(set(names))) != 0:
            ligtype = 'RNA'
        elif len(set(config.DNA).intersection(set(names))) != 0:
            ligtype = 'DNA'
        else:
            ligtype = "POLYMER"
    else:
        ligtype = 'SMALLMOLECULE'
    for name in names:
        if name in config.METAL_IONS:
            if len(names) == 1:
                ligtype = 'ION'
            else:
                if "ION" not in ligtype:
                    ligtype += '+ION'
    return ligtype
Classify a (composite) ligand by the HETID(s)
7,429
def unbroadcast(a, b): spa = sps.issparse(a) spb = sps.issparse(b) if spa and spb: return (a,b) elif spa or spb: def fix(sp,nm): nm = np.asarray(nm) dnm = len(nm.shape) nnm = np.prod(nm.shape) if dnm == 0: return (sp, np.reshape(nm, (1, 1))) elif dnm == 1: return (sp, np.reshape(nm, (nnm, 1))) elif dnm == 2: return (sp, nm) else: return unbroadcast(sp.toarray(), nm) return fix(a, b) if spa else tuple(reversed(fix(b, a))) a = np.asarray(a) b = np.asarray(b) da = len(a.shape) db = len(b.shape) if da > db: return (a, np.reshape(b, b.shape + tuple(np.ones(da-db, dtype=np.int)))) elif da < db: return (np.reshape(a, a.shape + tuple(np.ones(db-da, dtype=np.int))), b) else: return (a, b)
unbroadcast(a, b) yields a tuple (aa, bb) that is equivalent to (a, b) except that aa and bb have been reshaped such that arithmetic numpy operations such as aa * bb will result in row-wise operation instead of column-wise broadcasting.
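Concretely: multiplying an (n, 3) matrix by a length-n vector normally broadcasts column-wise and fails on the shape mismatch; after the trailing axes are padded the product scales rows instead. A standalone sketch of the dense branch:

```python
import numpy as np

a = np.arange(12.0).reshape(4, 3)   # (4, 3)
b = np.array([1.0, 2.0, 3.0, 4.0])  # (4,) — `a * b` would raise ValueError

# The dense branch pads the lower-dimensional operand with trailing axes:
bb = np.reshape(b, b.shape + (1,) * (a.ndim - b.ndim))  # (4, 1)
print((a * bb).shape)  # (4, 3): each row of a scaled by the matching entry of b
```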
7,430
def get_replication_command_history(self, schedule_id, limit=20, offset=0, view=None):
    params = {
        'limit': limit,
        'offset': offset,
    }
    if view:
        params['view'] = view
    return self._get("replications/%s/history" % schedule_id, ApiReplicationCommand,
                     True, params=params, api_version=4)
Retrieve a list of commands for a replication schedule. @param schedule_id: The id of the replication schedule. @param limit: Maximum number of commands to retrieve. @param offset: Index of first command to retrieve. @param view: View to materialize. Valid values are 'full', 'summary', 'export', 'export_redacted'. @return: List of commands executed for a replication schedule. @since: API v4
7,431
def _get_index(n_items, item_size, n): index = np.arange(n_items) index = np.repeat(index, item_size) index = index.astype(np.float64) assert index.shape == (n,) return index
Prepare an index attribute for GPU uploading.
7,432
def list_views(app, appbuilder): _appbuilder = import_application(app, appbuilder) echo_header("List of registered views") for view in _appbuilder.baseviews: click.echo( "View:{0} | Route:{1} | Perms:{2}".format( view.__class__.__name__, view.route_base, view.base_permissions ) )
List all registered views
7,433
def last_modified(self) -> Optional[datetime.datetime]: httpdate = self._headers.get(hdrs.LAST_MODIFIED) if httpdate is not None: timetuple = parsedate(httpdate) if timetuple is not None: return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc) return None
The value of Last-Modified HTTP header, or None. This header is represented as a `datetime` object.
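The same parsing chain works standalone: `parsedate` turns an RFC 1123 date into a struct-time tuple, whose first six fields feed `datetime` directly. For example:

```python
import datetime
from email.utils import parsedate

timetuple = parsedate("Wed, 21 Oct 2015 07:28:00 GMT")
dt = datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
print(dt)  # 2015-10-21 07:28:00+00:00
```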
7,434
def get_version_naive(cls, name, ignore=''):
    # Dict keys ('match', 'version', ...) and the 'V' prefix handling are
    # hypothetical reconstructions; the dump had stripped all string literals.
    match = cls._get_regex_search(name,
                                  cls.REGEX_VERSION.format(SEP=cls.REGEX_SEPARATORS),
                                  ignore=ignore)
    if match is not None:
        if len(match) > 1:
            for m in match:
                m.update({'version': int(m['match'].upper().replace('V', ''))})
            compound_version = '.'.join([str(m['version']) for m in match])
            compound_version = float(compound_version) if compound_version.count('.') == 1 else compound_version
            return {'matches': match,
                    'compound_version': compound_version,
                    'version': match[0]['version'],
                    'match': match[0]['match']}
        elif len(match) == 1:
            match = match[0]
            match.update({'version': int(match['match'].upper().replace('V', ''))})
            return match
    return None
Checks a string for a possible version of an object (no prefix, no suffix) without filtering the date out. Assumes only up to 4-digit padding. :param name: str, string that represents a possible name of an object :return: (float, int, list(str), None), gets the version number, then the string matches
7,435
def get_ref_indices(self):
    ixn_obj = self
    ref_indices = []
    while ixn_obj != ixn_obj.root:
        # object references are '/'-separated paths; the split character
        # was stripped from the dump
        ref_indices.append(ixn_obj.ref.split('/')[-1])
        ixn_obj = ixn_obj.parent
    return ref_indices[::-1]
:return: list of all indices in object reference.
7,436
def jboss_standalone_main_config_files(broker):
    ps = broker[DefaultSpecs.ps_auxww].content
    results = []
    search = re.compile(r"\-Djboss\.server\.base\.dir=(\S+)").search
    for p in ps:
        if '-Djboss.server.base.dir' in p:
            match = search(p)
            if match and match.group(1)[0] == "/":
                main_config_path = match.group(1)
                main_config_file = "standalone.xml"
                if " -c " in p:
                    main_config_file = p.split(" -c ")[1].split()[0]
                elif "--server-config" in p:
                    main_config_file = p.split("--server-config=")[1].split()[0]
                results.append(main_config_path + "/" + main_config_file)
    return list(set(results))
Command: JBoss standalone main config files
7,437
def construct_xblock_from_class(self, cls, scope_ids, field_data=None, *args, **kwargs): return self.mixologist.mix(cls)( runtime=self, field_data=field_data, scope_ids=scope_ids, *args, **kwargs )
Construct a new xblock of type cls, mixing in the mixins defined for this application.
7,438
def run(self):
    self.statsAndLogging.start()
    if self.config.metrics:
        self.toilMetrics = ToilMetrics(provisioner=self.provisioner)
    try:
        self.serviceManager.start()
        try:
            if self.clusterScaler is not None:
                self.clusterScaler.start()
            try:
                self.innerLoop()
            finally:
                if self.clusterScaler is not None:
                    # Log messages reconstructed; the originals were stripped.
                    logger.debug('Waiting for the cluster scaler to shut down.')
                    startTime = time.time()
                    self.clusterScaler.shutdown()
                    logger.debug('Scaler shut down in %s seconds.', time.time() - startTime)
        finally:
            self.serviceManager.shutdown()
    finally:
        self.statsAndLogging.shutdown()
        if self.toilMetrics:
            self.toilMetrics.shutdown()

    self.toilState.totalFailedJobs = [j for j in self.toilState.totalFailedJobs
                                      if self.jobStore.exists(j.jobStoreID)]

    try:
        self.create_status_sentinel_file(self.toilState.totalFailedJobs)
    except IOError as e:
        logger.debug('Could not create status sentinel file: {}'.format(e))

    logger.info("Finished toil run %s" %
                ("successfully." if not self.toilState.totalFailedJobs
                 else ("with %s failed jobs." % len(self.toilState.totalFailedJobs))))
    if len(self.toilState.totalFailedJobs):
        logger.info("Failed jobs at end of the run: %s",
                    ' '.join(str(job) for job in self.toilState.totalFailedJobs))
    if len(self.toilState.totalFailedJobs) > 0:
        raise FailedJobsException(self.config.jobStore,
                                  self.toilState.totalFailedJobs, self.jobStore)

    return self.jobStore.getRootJobReturnValue()
This runs the leader process to issue and manage jobs. :raises: toil.leader.FailedJobsException if at the end of the function there remain failed jobs. :return: The return value of the root job's run function. :rtype: Any
7,439
def _find_class_construction_fn(cls):
    for base in type.mro(cls):
        if '__init__' in base.__dict__:
            return base.__init__
        if '__new__' in base.__dict__:
            return base.__new__
Find the first __init__ or __new__ method in the given class's MRO.
7,440
def getdata(self): if self.bounding_box: return self.image.crop(self.bounding_box).getdata()
A sequence of pixel data relating to the changes that occurred since the last time :py:func:`redraw_required` was last called. :returns: A sequence of pixels or ``None``. :rtype: iterable
7,441
def get_file_size(file_object): position = file_object.tell() file_object.seek(0, 2) file_size = file_object.tell() file_object.seek(position, 0) return file_size
Returns the size, in bytes, of a file. Expects an object that supports seek and tell methods. Args: file_object (file_object) - The object that represents the file Returns: (int): size of the file, in bytes
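The seek/tell dance works on anything file-like, so it can be checked without touching disk:

```python
import io

def get_file_size(file_object):
    position = file_object.tell()
    file_object.seek(0, 2)           # seek to end
    file_size = file_object.tell()
    file_object.seek(position, 0)    # restore the original position
    return file_size

f = io.BytesIO(b"hello world")
f.seek(3)
print(get_file_size(f))  # 11
print(f.tell())          # 3 — the caller's position is preserved
```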
7,442
def _make_regex(self): return re.compile("|".join(map(re.escape, self.keys())))
Build a re object based on keys in the current dictionary
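This pattern is the core of a dict-driven multi-replace: escape every key, join with `|`, and resolve each match through the dictionary in a single pass. A standalone sketch:

```python
import re

replacements = {"cat": "feline", "dog": "canine"}
pattern = re.compile("|".join(map(re.escape, replacements.keys())))
result = pattern.sub(lambda m: replacements[m.group(0)], "cat chases dog")
print(result)  # feline chases canine
```

Note that with overlapping keys the alternation matches in key order, so sorting keys longest-first is a common refinement.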
7,443
def load_word_file(filename):
    words_file = resource_filename(__name__, "words/%s" % filename)
    handle = open(words_file, 'r')
    words = handle.readlines()
    handle.close()
    return words
Loads a words file as a list of lines
7,444
def cache(self, f):
    if self._memory is None:
        logger.debug("Joblib is not installed: skipping cacheing.")
        return f
    assert f
    # joblib should not hash `self` when caching a bound method; the
    # 'self' literal was stripped from the dump
    if 'self' in inspect.getargspec(f).args:
        ignore = ['self']
    else:
        ignore = None
    disk_cached = self._memory.cache(f, ignore=ignore)
    return disk_cached
Cache a function using the context's cache directory.
7,445
def cosi_posterior(vsini_dist, veq_dist, vgrid=None, npts=100, vgrid_pts=1000):
    if vgrid is None:
        vgrid = np.linspace(min(veq_dist.ppf(0.001), vsini_dist.ppf(0.001)),
                            max(veq_dist.ppf(0.999), vsini_dist.ppf(0.999)),
                            vgrid_pts)
    # Debug-message format string reconstructed; the original was stripped.
    logging.debug('velocity grid: {} points from {:.2f} to {:.2f}'.format(
        vgrid_pts, vgrid[0], vgrid[-1]))
    cs = np.linspace(0, 1, npts)
    Ls = cs * 0
    for i, c in enumerate(cs):
        Ls[i] = like_cosi(c, vsini_dist, veq_dist, vgrid=vgrid)
    if np.isnan(Ls[-1]):
        Ls[-1] = Ls[-2]
    Ls /= np.trapz(Ls, cs)
    return cs, Ls
returns posterior of cosI given dists for vsini and veq (incorporates unc. in vsini)
7,446
def find_recipes(folders, pattern=None, base=None):
    # The loop over `folders` and the manifest initialisation were lost in the
    # dump; this body is a plausible reconstruction around the surviving
    # find_folder_recipes call.
    if not isinstance(folders, list):
        folders = [folders]
    manifest = dict()
    for base_folder in folders:
        custom_pattern = None
        if isinstance(base_folder, dict):
            # allow {folder: pattern} entries to override the global pattern
            custom_pattern = list(base_folder.values())[0]
            base_folder = list(base_folder.keys())[0]
        manifest = find_folder_recipes(base_folder=base_folder,
                                       pattern=custom_pattern or pattern,
                                       manifest=manifest,
                                       base=base)
    return manifest
find recipes will use a list of base folders, files, or patterns over a subset of content to find recipe files (indicated by starting with "Singularity") Parameters ========== base: if defined, consider folders recursively below this level.
7,447
def validate(self):
    # Configuration key names and exception messages are hypothetical
    # reconstructions; the dump had stripped all string literals.
    if not self.conf.get('auth_token'):
        raise PacketManagerException('Missing auth_token in configuration.')
    if not self.conf.get('projects'):
        raise PacketManagerException('Missing projects in configuration.')
    projects = self.conf.get('projects')
    if not projects.keys():
        raise PacketManagerException('At least one project must be configured.')
    failure = False
    for project, identifier in projects.items():
        if not identifier:
            failure = True
            logging.error('Project %s has no identifier configured.', project)
    if failure:
        raise PacketManagerException('Invalid project configuration.')
Perform some basic configuration validation.
7,448
def __intermediate_addresses(self, interface):
    # UCI key names restored from the netjsonconfig OpenWrt backend; the
    # dump had stripped the string literals.
    address_list = self.get_copy(interface, 'addresses')
    if not address_list:
        return [{'proto': 'none'}]
    result = []
    static = {}
    dhcp = []
    for address in address_list:
        family = address.get('family')
        # dhcp
        if address['proto'] == 'dhcp':
            address['proto'] = 'dhcp' if family == 'ipv4' else 'dhcpv6'
            dhcp.append(self.__intermediate_address(address))
            continue
        if 'gateway' in address:
            uci_key = 'gateway' if family == 'ipv4' else 'ip6gw'
            interface[uci_key] = address['gateway']
        # static
        address_key = 'ipaddr' if family == 'ipv4' else 'ip6addr'
        static.setdefault(address_key, [])
        static[address_key].append('{address}/{mask}'.format(**address))
        static.update(self.__intermediate_address(address))
    if static:
        # do not use CIDR notation when only one ipv4 address is present
        if len(static.get('ipaddr', [])) == 1:
            network = ip_interface(six.text_type(static['ipaddr'][0]))
            static['ipaddr'] = str(network.ip)
            static['netmask'] = str(network.netmask)
        if len(static.get('ip6addr', [])) == 1:
            static['ip6addr'] = static['ip6addr'][0]
        result.append(static)
    if dhcp:
        result += dhcp
    return result
converts NetJSON address to UCI intermediate data structure
7,449
def set_duty_cycle(self, pin, dutycycle):
    # Error messages reconstructed; the originals were stripped.
    if dutycycle < 0.0 or dutycycle > 100.0:
        raise ValueError('Invalid duty cycle value, must be between 0.0 and 100.0 (inclusive).')
    if pin not in self.pwm:
        raise ValueError('Pin {0} is not configured as a PWM output.'.format(pin))
    self.pwm[pin].ChangeDutyCycle(dutycycle)
Set percent duty cycle of PWM output on specified pin. Duty cycle must be a value 0.0 to 100.0 (inclusive).
7,450
def add_dicts(d1, d2): if d1 is None: return d2 if d2 is None: return d1 keys = set(d1) keys.update(set(d2)) ret = {} for key in keys: v1 = d1.get(key) v2 = d2.get(key) if v1 is None: ret[key] = v2 elif v2 is None: ret[key] = v1 else: ret[key] = v1 + v2 return ret
Merge two dicts of addable values
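Usage of the `add_dicts` function above is symmetric and None-tolerant on both sides:

```python
print(add_dicts({"a": 1, "b": 2}, {"b": 3, "c": 4}))  # {'a': 1, 'b': 5, 'c': 4}
print(add_dicts(None, {"x": 1}))                      # {'x': 1}
```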
7,451
def intersects_any(self, ray_origins, ray_directions): first = self.intersects_first(ray_origins=ray_origins, ray_directions=ray_directions) hit = first != -1 return hit
Check if a list of rays hits the surface. Parameters ---------- ray_origins: (n,3) float, origins of rays ray_directions: (n,3) float, direction (vector) of rays Returns ---------- hit: (n,) bool, did each ray hit the surface
7,452
def _simplify_feature_value(self, name, value):
    # Feature names and split characters are reconstructed from the IRC
    # ISUPPORT conventions (PREFIX, CHANMODES, TARGMAX, CHANLIMIT); the dump
    # had stripped the string literals.
    if name == 'prefix':
        # PREFIX looks like '(ov)@+'
        channel_modes, channel_chars = value.split(')')
        channel_modes = channel_modes[1:]
        value = OrderedDict(list(zip(channel_modes, channel_chars))[::-1])
        return value
    elif name == 'chanmodes':
        value = value.split(',')
        return value
    elif name == 'targmax':
        max_available = {}
        for sort in value.split(','):
            command, limit = sort.split(':')
            command = command.casefold()
            max_available[command] = limit_to_number(limit)
        return max_available
    elif name == 'chanlimit':
        limit_available = {}
        for sort in value.split(','):
            chan_types, limit = sort.split(':')
            for prefix in chan_types:
                limit_available[prefix] = limit_to_number(limit)
        return limit_available
    elif name in _limits:
        value = limit_to_number(value)
        return value
    else:
        return value
Return simplified and more pythonic feature values.
7,453
def perl_cmd(): perl = which(os.path.join(get_bcbio_bin(), "perl")) if perl: return perl else: return which("perl")
Retrieve path to locally installed conda Perl or first in PATH.
7,454
def _check_backends(self):
    backends = self.backends_params.keys()
    for b in self.roles.get_backends():
        if b not in backends:
            raise MissingBackend(b, 'roles')
    for b in self.attributes.get_backends():
        if b not in backends:
            raise MissingBackend(b, 'attributes')
Check that every backend in roles and attributes is declared in main configuration
7,455
def hybrid_threaded_worker(selector, workers): result_queue = Queue() job_sink = {k: w.sink() for k, w in workers.items()} @push def dispatch_job(): default_sink = result_queue.sink() while True: msg = yield if msg is EndOfQueue: for k in workers.keys(): try: job_sink[k].send(EndOfQueue) except StopIteration: pass return if msg is FlushQueue: for k in workers.keys(): try: job_sink[k].send(FlushQueue) except StopIteration: pass return worker = selector(msg.node) if worker: job_sink[worker].send(msg) else: default_sink.send(run_job(*msg)) for key, worker in workers.items(): t = threading.Thread( target=patch, args=(worker.source, result_queue.sink)) t.daemon = True t.start() return Connection(result_queue.source, dispatch_job)
Runs a set of workers, each in a separate thread. :param selector: A function that takes a hints-tuple and returns a key indexing a worker in the `workers` dictionary. :param workers: A dictionary of workers. :returns: A connection for the scheduler. :rtype: Connection The hybrid worker dispatches jobs to the different workers based on the information contained in the hints. If no hints were given, the job is run in the main thread. Dispatching is done in the main thread. Retrieving results is done in a separate thread for each worker. In this design it is assumed that dispatching a job takes little time, while waiting for one to return a result may take a long time.
7,456
def to_zhuyin(s, delimiter=' ', all_readings=False, container='[]'):
    numbered_pinyin = to_pinyin(s, delimiter, all_readings, container, False)
    zhuyin = pinyin_to_zhuyin(numbered_pinyin)
    return zhuyin
Convert a string's Chinese characters to Zhuyin readings. *s* is a string containing Chinese characters. *delimiter* is the character used to indicate word boundaries in *s*. This is used to differentiate between words and characters so that a more accurate reading can be returned. *all_readings* is a boolean value indicating whether or not to return all possible readings in the case of words/characters that have multiple readings. *container* is a two character string that is used to enclose words/characters if *all_readings* is ``True``. The default ``'[]'`` is used like this: ``'[READING1/READING2]'``. Characters not recognized as Chinese are left untouched.
7,457
def get_default_config(self):
    config = super(VMSFSCollector, self).get_default_config()
    # Config key/value reconstructed; Diamond collectors conventionally set
    # their collection path here.
    config.update({
        'path': 'vmsfs',
    })
    return config
Returns the default collector settings
7,458
def make_slash_number(self):
    # Partition-scheme literals are reconstructed from the docstring; the
    # exact option strings in the original may differ.
    if self.partitioning == '1st-2nd' and self.codon_positions == '1st-2nd':
        return '\\2'
    elif self.partitioning in ['by codon position', '1st-2nd, 3rd'] and \
            self.codon_positions in ['ALL', None]:
        return '\\3'
    else:
        return ''
Charset lines have \2 or \3 depending on type of partitioning and codon positions requested for our dataset. :return:
7,459
def enable_hostgroup_passive_svc_checks(self, hostgroup): for host_id in hostgroup.get_hosts(): if host_id in self.daemon.hosts: for service_id in self.daemon.hosts[host_id].services: if service_id in self.daemon.services: self.enable_passive_svc_checks(self.daemon.services[service_id])
Enable service passive checks for a hostgroup Format of the line that triggers function call:: ENABLE_HOSTGROUP_PASSIVE_SVC_CHECKS;<hostgroup_name> :param hostgroup: hostgroup to enable :type hostgroup: alignak.objects.hostgroup.Hostgroup :return: None
7,460
def _get_audio_duration_seconds(self, audio_abs_path):
    # The original shell pipeline was stripped from the dump; the ffmpeg-based
    # command below is a hypothetical stand-in that yields an HH:MM:SS value.
    HHMMSS_duration = subprocess.check_output(
        "ffmpeg -i {} 2>&1 | grep {} | cut -d ' ' -f 4 | sed s/,//".format(
            audio_abs_path, "Duration"),
        shell=True, universal_newlines=True).rstrip()
    total_seconds = sum(
        [float(x) * 60 ** (2 - i)
         for i, x in enumerate(HHMMSS_duration.split(":"))])
    return total_seconds
Parameters ---------- audio_abs_path : str Returns ------- total_seconds : int
7,461
def toString(value, mode):
    string = angle.toString(value)
    sign = string[0]
    separator = CHAR[mode][sign]
    # angle strings are ':'-separated; the replaced character was stripped
    string = string.replace(':', separator, 1)
    return string[1:]
Converts angle float to string. Mode refers to LAT/LON.
7,462
def flick(self, x, y, speed): self._driver.flick(self, x, y, speed)
Deprecated use touch('drag', { fromX, fromY, toX, toY, duration(s) }) instead. Flick on the touch screen using finger motion events. This flick command starts at a particular screen location. Support: iOS Args: x(float): The x offset in pixels to flick by. y(float): The y offset in pixels to flick by. speed(float): The speed in pixels per seconds. Returns: WebElement object.
7,463
def batch_update_conversations(self, event, conversation_ids): path = {} data = {} params = {} data["conversation_ids"] = conversation_ids self._validate_enum(event, ["mark_as_read", "mark_as_unread", "star", "unstar", "archive", "destroy"]) data["event"] = event self.logger.debug("PUT /api/v1/conversations with query params: {params} and form data: {data}".format(params=params, data=data, **path)) return self.generic_request("PUT", "/api/v1/conversations".format(**path), data=data, params=params, single_item=True)
Batch update conversations. Perform a change on a set of conversations. Operates asynchronously; use the {api:ProgressController#show progress endpoint} to query the status of an operation.
7,464
def hypergraph(raw_events, entity_types=None, opts={}, drop_na=True, drop_edge_attrs=False, verbose=True, direct=False): from . import hyper return hyper.Hypergraph().hypergraph(PyGraphistry, raw_events, entity_types, opts, drop_na, drop_edge_attrs, verbose, direct)
Transform a dataframe into a hypergraph. :param Dataframe raw_events: Dataframe to transform :param List entity_types: Optional list of columns (strings) to turn into nodes, None signifies all :param Dict opts: See below :param bool drop_edge_attrs: Whether to include each row's attributes on its edges, defaults to False (include) :param bool verbose: Whether to print size information :param bool direct: Omit hypernode and instead strongly connect nodes in an event Create a graph out of the dataframe, and return the graph components as dataframes, and the renderable result Plotter. It reveals relationships between the rows and between column values. This transform is useful for lists of events, samples, relationships, and other structured high-dimensional data. The transform creates a node for every row, and turns a row's column entries into node attributes. If direct=False (default), every unique value within a column is also turned into a node. Edges are added to connect a row's nodes to each of its column nodes, or if direct=True, to one another. Nodes are given the attribute 'type' corresponding to the originating column name, or in the case of a row, 'EventID'. Consider a list of events. Each row represents a distinct event, and each column some metadata about an event. If multiple events have common metadata, they will be transitively connected through those metadata values. The layout algorithm will try to cluster the events together. Conversely, if an event has unique metadata, the unique metadata will turn into nodes that only have connections to the event node, and the clustering algorithm will cause them to form a ring around the event node. Best practice is to set EVENTID to a row's unique ID, SKIP to all non-categorical columns (or entity_types to all categorical columns), and CATEGORY to group columns with the same kinds of values. The optional ``opts={...}`` configuration options are: * 'EVENTID': Column name to inspect for a row ID. By default, uses the row index. * 'CATEGORIES': Dictionary mapping a category name to inhabiting columns. E.g., {'IP': ['srcAddress', 'dstAddress']}. If the same IP appears in both columns, this makes the transform generate one node for it, instead of one for each column. * 'DELIM': When creating node IDs, defines the separator used between the column name and node value * 'SKIP': List of column names to not turn into nodes. For example, dates and numbers are often skipped. * 'EDGES': For direct=True, instead of making all edges, pick column pairs. E.g., {'a': ['b', 'd'], 'd': ['d']} creates edges between columns a->b and a->d, and self-edges d->d. :returns: {'entities': DF, 'events': DF, 'edges': DF, 'nodes': DF, 'graph': Plotter} :rtype: Dictionary **Example** :: import graphistry h = graphistry.hypergraph(my_df) g = h['graph'].plot()
7,465
def load(target, source_module=None): module, klass, function = _get_module(target) if not module and source_module: module = source_module if not module: raise MissingModule( "No module name supplied or source_module provided.") actual_module = sys.modules[module] if not klass: return getattr(actual_module, function) class_object = getattr(actual_module, klass) if function: return getattr(class_object, function) return class_object
Get the actual implementation of the target.
7,466
def _get_document_data(f, image_handler=None):
    if image_handler is None:
        def image_handler(image_id, relationship_dict):
            return relationship_dict.get(image_id)

    document_xml = None
    numbering_xml = None
    relationship_xml = None
    styles_xml = None
    parser = etree.XMLParser(strip_cdata=False)
    path, _ = os.path.split(f.filename)
    media = {}
    image_sizes = {}
    # Archive member names ('word/document.xml', etc.) restored from the docx
    # package layout; the dump had stripped the string literals.
    for item in f.infolist():
        if item.filename == 'word/document.xml':
            xml = f.read(item.filename)
            document_xml = etree.fromstring(xml, parser)
        elif item.filename == 'word/numbering.xml':
            xml = f.read(item.filename)
            numbering_xml = etree.fromstring(xml, parser)
        elif item.filename == 'word/styles.xml':
            xml = f.read(item.filename)
            styles_xml = etree.fromstring(xml, parser)
        elif item.filename == 'word/_rels/document.xml.rels':
            xml = f.read(item.filename)
            try:
                relationship_xml = etree.fromstring(xml, parser)
            except XMLSyntaxError:
                relationship_xml = etree.fromstring('<xml></xml>', parser)
        if item.filename.startswith('word/media/'):
            # strip the leading 'word/' when storing extracted media files
            media[item.filename[len('word/'):]] = f.extract(
                item.filename,
                path,
            )
    f.close()
    numbering_dict = get_numbering_info(numbering_xml)
    image_sizes = get_image_sizes(document_xml)
    relationship_dict = get_relationship_info(
        relationship_xml,
        media,
        image_sizes
    )
    styles_dict = get_style_dict(styles_xml)
    font_sizes_dict = defaultdict(int)
    if DETECT_FONT_SIZE:
        font_sizes_dict = get_font_sizes_dict(document_xml, styles_dict)
    meta_data = MetaData(
        numbering_dict=numbering_dict,
        relationship_dict=relationship_dict,
        styles_dict=styles_dict,
        font_sizes_dict=font_sizes_dict,
        image_handler=image_handler,
        image_sizes=image_sizes,
    )
    return document_xml, meta_data
``f`` is a ``ZipFile`` that is open Extract out the document data, numbering data and the relationship data.
7,467
def volume_mesh(mesh, count): points = (np.random.random((count, 3)) * mesh.extents) + mesh.bounds[0] contained = mesh.contains(points) samples = points[contained][:count] return samples
Use rejection sampling to produce points randomly distributed in the volume of a mesh. Parameters ---------- mesh: Trimesh object count: int, number of samples desired Returns ---------- samples: (n,3) float, points in the volume of the mesh. where: n <= count
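The rejection-sampling idea is independent of trimesh: draw uniform points in the bounding box and keep the ones the containment test accepts. A self-contained sketch using a unit sphere as a stand-in for `mesh.contains`:

```python
import numpy as np

count = 1000
extents = np.array([2.0, 2.0, 2.0])       # bounding box of the unit sphere
lower = np.array([-1.0, -1.0, -1.0])

points = np.random.random((count, 3)) * extents + lower
contained = np.linalg.norm(points, axis=1) <= 1.0  # stand-in for mesh.contains
samples = points[contained][:count]
# Roughly volume_sphere / volume_box ≈ 52% of draws survive rejection,
# so samples may hold fewer than `count` points, matching n <= count above.
print(len(samples), "of", count)
```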
7,468
def B(self, value):
    # Assertion messages and cache keys below are hypothetical; the dump had
    # stripped the string literals.
    assert value.shape[0] == self._K, 'dimension mismatch'
    assert value.shape[1] == 1, 'dimension mismatch'
    self._B = value
    self.clear_cache('B', 'predict')
set phenotype
7,469
def get_storage(self): if self.storage: return self.storage self.storage = self.reconnect_redis() return self.storage
Get the storage instance. :return Redis: Redis instance
7,470
def default_headers(self): _headers = { "User-Agent": "Pyzotero/%s" % __version__, "Zotero-API-Version": "%s" % __api_version__, } if self.api_key: _headers["Authorization"] = "Bearer %s" % self.api_key return _headers
It's always OK to include these headers
7,471
def find_input(self, stream): for i, input_x in enumerate(self.inputs): if input_x[0].matches(stream): return i
Find the input that responds to this stream. Args: stream (DataStream): The stream to find Returns: (index, None): The index if found or None
7,472
def run(
    self,
    cluster_config,
    rg_parser,
    partition_measurer,
    cluster_balancer,
    args,
):
    self.cluster_config = cluster_config
    self.args = args
    with ZK(self.cluster_config) as self.zk:
        # Log format string reconstructed; the original was stripped.
        self.log.debug(
            'starting %s for cluster %s with zookeeper %s',
            self.__class__.__name__,
            self.cluster_config.name,
            self.cluster_config.zookeeper,
        )
        brokers = self.zk.get_brokers()
        assignment = self.zk.get_cluster_assignment()
        pm = partition_measurer(
            self.cluster_config,
            brokers,
            assignment,
            args,
        )
        ct = ClusterTopology(
            assignment,
            brokers,
            pm,
            rg_parser.get_replication_group,
        )
        if len(ct.partitions) == 0:
            self.log.info("The cluster is empty. No actions to perform.")
            return
        if self.is_reassignment_pending():
            self.log.error('Previous reassignment pending. Exiting.')
            sys.exit(1)
        self.run_command(ct, cluster_balancer(ct, args))
Initialize cluster_config, args, and zk then call run_command.
7,473
def open(self):
    if self._table_exists():
        self.mode = "open"
        self._get_table_info()
        self.types = dict([(f[0], self.conv_func[f[1].upper()])
                           for f in self.fields
                           if f[1].upper() in self.conv_func])
        return self
    else:
        raise IOError("Table %s doesn't exist" % self.name)
Open an existing database
7,474
def add(name, device):
    # Command string reconstructed from the salt raid module; the dump had
    # stripped it (and left a stray '*' from the CLI example).
    cmd = 'mdadm --manage {0} --add {1}'.format(name, device)
    if __salt__['cmd.retcode'](cmd) == 0:
        return True
    return False
Add new device to RAID array. CLI Example: .. code-block:: bash salt '*' raid.add /dev/md0 /dev/sda1
7,475
def load_yaml_file(filename):
    # Error-message literals reconstructed; the originals were stripped.
    try:
        with open(filename, 'r') as f:
            return yaml.safe_load(f)
    except IOError as e:
        raise ParserError('Cannot open file ' + filename + ': ' + e.message)
    except ValueError as e:
        raise ParserError('Invalid YAML in file {0}: {1}'.format(filename, e.message))
Load a YAML file from disk, throw a ParserError on failure.
7,476
def add_activity_form(self, activity_pattern, is_active): if is_active: if activity_pattern not in self.active_forms: self.active_forms.append(activity_pattern) else: if activity_pattern not in self.inactive_forms: self.inactive_forms.append(activity_pattern)
Adds the pattern as an active or inactive form to an Agent. Parameters ---------- activity_pattern : dict A dictionary of site names and their states. is_active : bool Is True if the given pattern corresponds to an active state.
7,477
def _kip(self, cycle_end, mix_thresh, xaxis, sparse):
    # Axis labels, data-column names ('mini', 'mass', 'dcoeff') and plot
    # colours were stripped from the dump and are reconstructed here.
    original_cyclelist = self.se.cycles
    cyclelist = original_cyclelist[0:cycle_end:sparse]
    xx = self.se.ages[:cycle_end:sparse]
    totalmass = []
    m_ini = float(self.se.get('mini'))

    fig = pl.figure(1)
    ax = pl.subplot(1, 1, 1)
    fsize = 12

    def getlims(d_coeff, massco):
        # find mass coordinates where d_coeff crosses mix_thresh
        plotlims = []
        if massco[0] > massco[-1]:
            for j in range(-1, -len(d_coeff) - 1, -1):
                if j == -1:
                    if d_coeff[j] >= mix_thresh:
                        plotlims.append(massco[j])
                    else:
                        pass
                elif (d_coeff[j] - mix_thresh) * (d_coeff[j + 1] - mix_thresh) < 0:
                    plotlims.append(massco[j])
                if j == -len(d_coeff):
                    if d_coeff[j] >= mix_thresh:
                        plotlims.append(massco[j])
            return plotlims
        else:
            for j in range(len(d_coeff)):
                if j == 0:
                    if d_coeff[j] >= mix_thresh:
                        plotlims.append(massco[j])
                    else:
                        pass
                elif (d_coeff[j] - mix_thresh) * (d_coeff[j - 1] - mix_thresh) < 0:
                    plotlims.append(massco[j])
                if j == len(d_coeff) - 1:
                    if d_coeff[j] >= mix_thresh:
                        plotlims.append(massco[j])
            return plotlims

    if xaxis == 'age':
        ax.set_xlabel('Age [yr]', fontsize=fsize)
    elif xaxis == 'cycle':
        xx = cyclelist
        ax.set_xlabel('Cycle', fontsize=fsize)
    elif xaxis == 'log_age':
        for i in range(len(xx)):
            xx[i] = np.log10(xx[i])
        ax.set_xlabel('log(age) [yr]', fontsize=fsize)
    elif xaxis == 'log_time_left':
        for i in range(len(xx)):
            xx[i] = np.log10(max(xx) - xx[i])
        xx[-2] = xx[-3] - abs(xx[-4] - xx[-3])
        xx[-1] = xx[-2] - abs(xx[-3] - xx[-2])
        ax.set_xlabel('log(time left) [yr]', fontsize=fsize)

    flag = False
    if self.se.get(cyclelist[1], 'mass')[0] > self.se.get(cyclelist[1], 'mass')[-1]:
        flag = True

    for i in range(len(cyclelist)):
        if flag == True:
            totalmass.append(self.se.get(cyclelist[i], 'mass')[0])
        else:
            totalmass.append(self.se.get(cyclelist[i], 'mass')[-1])
        percent = int(i * 100 / len(cyclelist))
        sys.stdout.flush()
        sys.stdout.write("\rcreating color map " + "...%d%%" % percent)
        d_coeff = self.se.get(cyclelist[i], 'dcoeff')
        massco = self.se.get(cyclelist[i], 'mass')
        plotlims = getlims(d_coeff, massco)
        for k in range(0, len(plotlims), 2):
            ax.axvline(xx[i], ymin=old_div(plotlims[k], m_ini),
                       ymax=old_div(plotlims[k + 1], m_ini),
                       color='b', linewidth=0.5)

    ax.plot(xx, totalmass, color='black', linewidth=1)
    if xaxis == 'cycle':
        ax.axis([xx[0], xx[-1], 0., m_ini])
    else:
        ax.axis([min(xx), max(xx), 0., m_ini])
    ax.set_ylabel('Mass [M_sun]', fontsize=fsize)
    pl.show()
*** Should be used with care, therefore has been flagged as a private routine *** This function uses a threshold diffusion coefficient, above which the the shell is considered to be convective, to plot a Kippenhahn diagram. Parameters ---------- cycle_end : integer The final cycle number. mix_thresh : float The threshold diffusion coefficient. xaxis : string Choose one of 'age', 'cycle', 'log_age' or 'log_time_left'. sparse : integer Sparsity factor when plotting from cyclelist. Examples -------- >>> pt=mp.se('/ngpod1/swj/see/mppnp_out/scratch_data/M25.0Z1e-02','.h5') >>> pt.kip(10000,'log_time_left',100)
7,478
def pointAt(self, **axis_values): scene_point = self.renderer().pointAt(self.axes(), axis_values) chart_point = self.uiChartVIEW.mapFromScene(scene_point) return self.uiChartVIEW.mapToParent(chart_point)
Returns the point on the chart where the inputed values are located. :return <QPointF>
7,479
def delete_cluster_role_binding(self, name, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.delete_cluster_role_binding_with_http_info(name, **kwargs)
    else:
        (data) = self.delete_cluster_role_binding_with_http_info(name, **kwargs)
        return data
delete_cluster_role_binding # noqa: E501 delete a ClusterRoleBinding # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_cluster_role_binding(name, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the ClusterRoleBinding (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately. :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both. :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground. :param V1DeleteOptions body: :return: V1Status If the method is called asynchronously, returns the request thread.
7,480
def _delocalize_logging_command(self, logging_path, user_project):
    # Get the logging prefix (everything up to '.log')
    logging_prefix = os.path.splitext(logging_path.uri)[0]

    # Shell-command literals reconstructed; the dedent bodies below are a
    # sketch, since the dump had stripped them.
    if logging_path.file_provider == job_model.P_LOCAL:
        mkdir_cmd = 'mkdir -p "%s"\n' % os.path.dirname(logging_prefix)
        cp_cmd = 'cp'
    elif logging_path.file_provider == job_model.P_GCS:
        mkdir_cmd = ''
        if user_project:
            cp_cmd = 'gsutil -u {} -mq cp'.format(user_project)
        else:
            cp_cmd = 'gsutil -mq cp'
    else:
        assert False

    copy_logs_cmd = textwrap.dedent("""\
      local cp_cmd="{cp_cmd}"
      local prefix="{prefix}"
    """).format(cp_cmd=cp_cmd, prefix=logging_prefix)

    body = textwrap.dedent("""\
      {mkdir_cmd}
      {copy_logs_cmd}
    """).format(mkdir_cmd=mkdir_cmd, copy_logs_cmd=copy_logs_cmd)

    return body
Returns a command to delocalize logs. Args: logging_path: location of log files. user_project: name of the project to be billed for the request. Returns: eg. 'gs://bucket/path/myfile' or 'gs://bucket/script-foobar-12'
7,481
def points_from_xywh(box):
    x, y, w, h = box['x'], box['y'], box['w'], box['h']
    return "%i,%i %i,%i %i,%i %i,%i" % (
        x, y,
        x + w, y,
        x + w, y + h,
        x, y + h
    )
Constructs a polygon representation from a rectangle described as a dict with keys x, y, w, h.
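For example, a 10×5 box at (2, 3) yields the four corners in order (top-left, top-right, bottom-right, bottom-left) using the `points_from_xywh` function above:

```python
box = {'x': 2, 'y': 3, 'w': 10, 'h': 5}
print(points_from_xywh(box))  # 2,3 12,3 12,8 2,8
```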
7,482
def get_certs(context=_DEFAULT_CONTEXT, store=_DEFAULT_STORE):
    # PowerShell snippets, blacklist entries, and dict keys restored from the
    # salt win_pki source; the dump had stripped the string literals.
    ret = dict()
    cmd = list()
    blacklist_keys = ['DnsNameList']
    store_path = r'Cert:\{0}\{1}'.format(context, store)
    _validate_cert_path(name=store_path)
    cmd.append(r"Get-ChildItem -Path '{0}' | Select-Object".format(store_path))
    cmd.append(' DnsNameList, SerialNumber, Subject, Thumbprint, Version')
    items = _cmd_run(cmd=six.text_type().join(cmd), as_json=True)
    for item in items:
        cert_info = dict()
        for key in item:
            if key not in blacklist_keys:
                cert_info[key.lower()] = item[key]
        names = item.get('DnsNameList', None)
        if isinstance(names, list):
            cert_info['dnsnames'] = [name.get('Unicode') for name in names]
        else:
            cert_info['dnsnames'] = []
        ret[item['Thumbprint']] = cert_info
    return ret
Get the available certificates in the given store. :param str context: The name of the certificate store location context. :param str store: The name of the certificate store. :return: A dictionary of the certificate thumbprints and properties. :rtype: dict CLI Example: .. code-block:: bash salt '*' win_pki.get_certs
7,483
def get_choices(cls, condition=None, order_by=None, query=None, value_field=None, text_field=None): result = [] if query is None: query = cls.filter(condition).order_by(order_by) for row in query: if not value_field: value = row._key else: value = getattr(row, value_field) if not text_field: text = unicode(row) else: text = getattr(row, text_field) result.append((value, text)) return result
Get a [(value, text), ...] choices list from the model.

:param condition: optional filter condition passed to cls.filter() when no query is given
:param order_by: optional ordering applied to the generated query
:param query: an existing query to iterate; built from condition/order_by if None
:param value_field: field name used for the value; default is the primary key
:param text_field: field name used for the text; default is unicode(obj)
:return: list of (value, text) tuples
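A usage sketch with a hypothetical `User` model from the same ORM; the condition syntax here is an assumption:

    # populate an HTML <select> with (id, username) pairs
    choices = User.get_choices(
        condition=(User.c.active == True),   # hypothetical filter expression
        value_field='id',
        text_field='username',
    )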
7,484
def json_decode(s: str) -> Any: try: return json.JSONDecoder(object_hook=json_class_decoder_hook).decode(s) except json.JSONDecodeError: log.warning("Failed to decode JSON (returning None): {!r}", s) return None
Decodes an object from JSON using our custom decoder.
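A usage sketch; malformed input is logged and swallowed rather than raised:

    json_decode('{"a": 1}')      # -> {'a': 1}
    json_decode('{not json')     # logs a warning, returns None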
7,485
def get_ref(self, cat, refname):
    if cat not in self.defs:
        raise errors.GramFuzzError("referenced definition category ({!r}) not defined".format(cat))

    if refname == "*":
        # list() so rand.choice also works on Python 3 dict views
        refname = rand.choice(list(self.defs[cat].keys()))

    if refname not in self.defs[cat]:
        raise errors.GramFuzzError("referenced definition ({!r}) not defined".format(refname))

    return rand.choice(self.defs[cat][refname])
Return one of the rules in the category ``cat`` with the name ``refname``. If multiple rule definitions exist for the definition name ``refname``, use :any:`gramfuzz.rand` to choose a rule at random.

:param str cat: The category to look for the rule in.
:param str refname: The name of the rule definition. If the rule definition's name is ``"*"``, then a rule name will be chosen at random from within the category ``cat``.
:returns: gramfuzz.fields.Def
7,486
def write_networking_file(version, pairs): vmnets = OrderedDict(sorted(pairs.items(), key=lambda t: t[0])) try: with open(VMWARE_NETWORKING_FILE, "w", encoding="utf-8") as f: f.write(version) for key, value in vmnets.items(): f.write("answer {} {}\n".format(key, value)) except OSError as e: raise SystemExit("Cannot open {}: {}".format(VMWARE_NETWORKING_FILE, e)) if sys.platform.startswith("darwin"): if not os.path.exists("/Applications/VMware Fusion.app/Contents/Library/vmnet-cli"): raise SystemExit("VMware Fusion is not installed in Applications") os.system(r"/Applications/VMware\ Fusion.app/Contents/Library/vmnet-cli --configure") os.system(r"/Applications/VMware\ Fusion.app/Contents/Library/vmnet-cli --stop") os.system(r"/Applications/VMware\ Fusion.app/Contents/Library/vmnet-cli --start") else: os.system("vmware-networks --stop") os.system("vmware-networks --start")
Write the VMware networking file.
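A hedged usage sketch; the version string and answer keys follow VMware's networking-file format but the values here are hypothetical, and the call needs root privileges since it restarts the VMware networking services:

    version = "VERSION=1,0\n"
    pairs = {
        "VNET_1_HOSTONLY_NETMASK": "255.255.255.0",
        "VNET_1_HOSTONLY_SUBNET": "172.16.16.0",
    }
    write_networking_file(version, pairs)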
7,487
def _opcode_set(*names): s = set() for name in names: try: s.add(_opcode(name)) except KeyError: pass return s
Return a set of opcodes by the names in `names`.
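`_opcode` is not shown here, but presumably it is a thin wrapper over `dis.opmap`; under that assumption, the skip-on-KeyError behavior lets one set cover several Python versions:

    import dis

    def _opcode(name):
        # KeyError when the opcode does not exist on this Python version
        return dis.opmap[name]

    # 'JUMP_ABSOLUTE' is absent on Python 3.11+, so it is silently skipped there
    jumps = _opcode_set('JUMP_FORWARD', 'JUMP_ABSOLUTE')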
7,488
def request_client_list(self, req, msg):
    # one inform per connected client; the reply reports how many were sent
    clients = self._client_conns
    num_clients = len(clients)
    for conn in clients:
        addr = conn.address
        req.inform(addr)
    return req.make_reply('ok', str(num_clients))
Request the list of connected clients. The list of clients is sent as a sequence of #client-list informs. Informs ------- addr : str The address of the client as host:port with host in dotted quad notation. If the address of the client could not be determined (because, for example, the client disconnected suddenly) then a unique string representing the client is sent instead. Returns ------- success : {'ok', 'fail'} Whether sending the client list succeeded. informs : int Number of #client-list inform messages sent. Examples -------- :: ?client-list #client-list 127.0.0.1:53600 !client-list ok 1
7,489
def write_properties(self, properties, file_datetime): with self.__lock: absolute_file_path = self.__file_path make_directory_if_needed(os.path.dirname(absolute_file_path)) exists = os.path.exists(absolute_file_path) if exists: rewrite_zip(absolute_file_path, Utility.clean_dict(properties)) else: write_zip(absolute_file_path, None, Utility.clean_dict(properties)) tz_minutes = Utility.local_utcoffset_minutes(file_datetime) timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60 os.utime(absolute_file_path, (time.time(), timestamp))
Write properties to the zip file at this handler's file path.

:param properties: the dict to write to the file
:param file_datetime: the datetime recorded as the file's modification time

The properties param must not change during this method. Callers should take care to ensure this does not happen.
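The timestamp arithmetic converts a naive local datetime into a UTC epoch for os.utime; a rough sketch of the same math (the real Utility helper also accounts for DST, which time.timezone does not):

    import calendar, datetime, time

    file_datetime = datetime.datetime(2024, 1, 2, 3, 4, 5)  # naive local time
    tz_minutes = -time.timezone // 60                       # minutes east of UTC, ignoring DST
    timestamp = calendar.timegm(file_datetime.timetuple()) - tz_minutes * 60
    # os.utime(path, (time.time(), timestamp)) then stamps the file's mtime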
7,490
def validate_unwrap(self, value):
    if not isinstance(value, dict):
        self._fail_validation_type(value, dict)
    for k, v in value.items():
        self._validate_key_unwrap(k)
        try:
            self.value_type.validate_unwrap(v)
        except BadValueException as bve:
            # message literal elided in the source; reconstructed
            self._fail_validation(value, 'Bad value for key %s' % k, cause=bve)
Checks that value is a ``dict``, that every key is a valid MongoDB key, and that every value validates based on DictField.value_type
7,491
def delivery_note_pdf(self, delivery_note_id): return self._create_get_request(resource=DELIVERY_NOTES, billomat_id=delivery_note_id, command=PDF)
Opens a pdf of a delivery note :param delivery_note_id: the delivery note id :return: dict
7,492
def show_instance(name, session=None, call=None):
    # NOTE: string literals were elided in the source; the dict keys and
    # messages below are reconstructed from the XenAPI VM record schema.
    if call == 'function':
        raise SaltCloudException(
            'The show_instance action must be called with -a or --action.'
        )
    log.debug('show_instance-> name: %s session: %s', name, session)
    if session is None:
        session = _get_session()
    vm = _get_vm(name, session=session)
    record = session.xenapi.VM.get_record(vm)
    ret = {}  # initialized so templates/control domains do not raise NameError
    if not record['is_a_template'] and not record['is_control_domain']:
        try:
            base_template_name = record['other_config']['base_template_name']
        except Exception:
            base_template_name = None
        log.debug(
            'VM %s, base template: %s', record['name_label'], base_template_name
        )
        ret = {'id': record['uuid'],
               'image': base_template_name,
               'name': record['name_label'],
               'size': record['memory_dynamic_max'],
               'state': record['power_state'],
               'private_ips': get_vm_ip(name, session),
               'public_ips': None}

        __utils__['cloud.cache_node'](
            ret,
            __active_provider_name__,
            __opts__
        )
    return ret
Show information about a specific VM or template .. code-block:: bash salt-cloud -a show_instance xenvm01 .. note:: memory is memory_dynamic_max
7,493
def spksub(handle, descr, identin, begin, end, newh):
    assert len(descr) == 5  # SPK segment descriptors hold 5 doubles (ND=2, NI=6)
    handle = ctypes.c_int(handle)
    descr = stypes.toDoubleVector(descr)
    identin = stypes.stringToCharP(identin)
    begin = ctypes.c_double(begin)
    end = ctypes.c_double(end)
    newh = ctypes.c_int(newh)
    libspice.spksub_c(handle, descr, identin, begin, end, newh)
Extract a subset of the data in an SPK segment into a separate segment.

http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/spksub_c.html

:param handle: Handle of source segment.
:type handle: int
:param descr: Descriptor of source segment.
:type descr: 5-Element Array of floats
:param identin: Identifier of source segment.
:type identin: str
:param begin: Beginning (initial epoch) of subset.
:type begin: float
:param end: End (final epoch) of subset.
:type end: float
:param newh: Handle of new segment.
:type newh: int
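A hedged end-to-end sketch using spiceypy's DAF search to obtain the descriptor and identifier this wrapper needs; file names and epochs are hypothetical:

    import spiceypy as spice

    src = spice.dafopr("source.bsp")       # open source SPK read-only
    spice.dafbfs(src)                      # begin forward segment search
    assert spice.daffna(), "no segments"
    descr = spice.dafgs(n=5)               # SPK summaries hold 5 doubles (ND=2, NI=6)
    ident = spice.dafgn(41)                # segment identifier
    begin_et, end_et = 0.0, 86400.0        # hypothetical ET bounds within the segment
    new = spice.spkopn("subset.bsp", "subset", 0)
    spice.spksub(src, descr, ident, begin_et, end_et, new)
    spice.spkcls(new)
    spice.dafcls(src)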
7,494
def _dqdv_split_frames(cell, tidy=False, **kwargs): charge_dfs, cycles, minimum_v, maximum_v = _collect_capacity_curves( cell, direction="charge" ) ica_charge_dfs = _make_ica_charge_curves( charge_dfs, cycles, minimum_v, maximum_v, **kwargs, ) ica_charge_df = pd.concat( ica_charge_dfs, axis=1, keys=[k.name for k in ica_charge_dfs] ) dcharge_dfs, cycles, minimum_v, maximum_v = _collect_capacity_curves( cell, direction="discharge" ) ica_dcharge_dfs = _make_ica_charge_curves( dcharge_dfs, cycles, minimum_v, maximum_v, **kwargs, ) ica_discharge_df = pd.concat( ica_dcharge_dfs, axis=1, keys=[k.name for k in ica_dcharge_dfs] ) ica_charge_df.columns.names = ["cycle", "value"] ica_discharge_df.columns.names = ["cycle", "value"] if tidy: ica_charge_df = ica_charge_df.melt( "voltage", var_name="cycle", value_name="dq", col_level=0 ) ica_discharge_df = ica_discharge_df.melt( "voltage", var_name="cycle", value_name="dq", col_level=0 ) return ica_charge_df, ica_discharge_df
Returns dqdv data as pandas.DataFrames for all cycles.

Args:
    cell (CellpyData-object).
    tidy (bool): return in wide format if False (default), long (tidy) format if True.

Returns:
    (charge_ica_frame, discharge_ica_frame): pandas.DataFrames in which the first column is voltage ('v') and the following columns hold the incremental capacity for each cycle (multi-indexed, with cycle number on the top level).

Example:
    >>> from cellpy.utils import ica
    >>> charge_ica_df, dcharge_ica_df = ica.ica_frames(my_cell)
    >>> charge_ica_df.plot(x=("voltage", "v"))
7,495
def get_token(self, request): return stripe.Token.create( card={ "number": request.data["number"], "exp_month": request.data["exp_month"], "exp_year": request.data["exp_year"], "cvc": request.data["cvc"] } )
Create a stripe token for a card
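Outside the view, the equivalent direct call to Stripe's (legacy, since-deprecated) card token API looks like this; the key is hypothetical and the card number is Stripe's public test value:

    import stripe

    stripe.api_key = "sk_test_..."             # hypothetical test key
    token = stripe.Token.create(card={
        "number": "4242424242424242",          # Stripe's standard test card
        "exp_month": 12,
        "exp_year": 2030,
        "cvc": "123",
    })
    print(token.id)                            # e.g. 'tok_...'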
7,496
async def create_proof(self, proof_req: dict, briefs: Union[dict, Sequence[dict]], requested_creds: dict) -> str:
    # NOTE: log/exception message literals were elided in the source and are
    # reconstructed here; the cred_info keys follow the indy-sdk wallet schema.
    LOGGER.debug(
        'HolderProver.create_proof >>> proof_req: %s, briefs: %s, requested_creds: %s',
        proof_req, briefs, requested_creds)

    if not self.wallet.handle:
        LOGGER.debug('HolderProver.create_proof <!< Wallet %s is closed', self.name)
        raise WalletState('Wallet {} is closed'.format(self.name))

    label = await self._assert_link_secret('create_proof')

    # reject multiple briefs against any single credential definition
    cd_ids = set()
    x_cd_ids = set()
    for brief in iter_briefs(briefs):
        cd_id = brief['cred_info']['cred_def_id']
        if cd_id in cd_ids and cd_id not in x_cd_ids:
            x_cd_ids.add(cd_id)
        cd_ids.add(cd_id)
    if x_cd_ids:
        LOGGER.debug('HolderProver.create_proof <!< briefs duplicate cred def ids %s', x_cd_ids)
        raise CredentialFocus('Briefs duplicate cred def ids {}'.format(x_cd_ids))

    s_id2schema = {}
    cd_id2cred_def = {}
    rr_id2timestamp = {}
    rr_id2cr_id = {}
    for brief in iter_briefs(briefs):
        interval = brief.get('interval', None)
        cred_info = brief['cred_info']

        s_id = cred_info['schema_id']
        if not ok_schema_id(s_id):
            LOGGER.debug('HolderProver.create_proof <!< Bad schema id %s', s_id)
            raise BadIdentifier('Bad schema id {}'.format(s_id))
        if s_id not in s_id2schema:
            schema = json.loads(await self.get_schema(s_id))
            if not schema:
                LOGGER.debug('HolderProver.create_proof <!< absent schema %s', s_id)
                raise AbsentSchema('Absent schema {}'.format(s_id))
            s_id2schema[s_id] = schema

        cd_id = cred_info['cred_def_id']
        if not ok_cred_def_id(cd_id):
            LOGGER.debug('HolderProver.create_proof <!< Bad cred def id %s', cd_id)
            raise BadIdentifier('Bad cred def id {}'.format(cd_id))
        if cd_id not in cd_id2cred_def:
            cred_def = json.loads(await self.get_cred_def(cd_id))
            cd_id2cred_def[cd_id] = cred_def

        rr_id = cred_info['rev_reg_id']
        if rr_id:
            if not ok_rev_reg_id(rr_id):
                LOGGER.debug('HolderProver.create_proof <!< Bad rev reg id %s', rr_id)
                raise BadIdentifier('Bad rev reg id {}'.format(rr_id))
            await self._sync_revoc_for_proof(rr_id)

    # ... remainder truncated in the source: the original goes on to build
    # revocation states and call anoncreds.prover_create_proof() to set rv
    return rv
Create proof as HolderProver. Raise: * AbsentLinkSecret if link secret not set * CredentialFocus on attempt to create proof on no briefs or multiple briefs for a credential definition * AbsentTails if missing required tails file * | BadRevStateTime if a timestamp for a revocation registry state in the proof request | occurs before revocation registry creation * IndyError for any other indy-sdk error * AbsentInterval if briefs missing non-revocation interval, but cred def supports revocation * WalletState if the wallet is closed. :param proof_req: proof request as per Verifier.build_proof_req_json() :param briefs: cred-brief, sequence thereof, or mapping from wallet cred-id to briefs, to prove :param requested_creds: requested credentials data structure; i.e., :: { 'self_attested_attributes': {}, 'requested_attributes': { 'attr0_uuid': { 'cred_id': string, 'timestamp': integer, # for revocation state 'revealed': bool }, ... }, 'requested_predicates': { 'predicate0_uuid': { 'cred_id': string, 'timestamp': integer # for revocation state } } } :return: proof json
7,497
def transfer_sanity_check(name, consensus_hash):
    # NOTE: the message literals and the LENGTHS key were elided in the
    # source and are reconstructed as plausible values.
    if name is not None and (not is_b40(name) or "+" in name or name.count(".") > 1):
        raise Exception("Name '%s' has non-base-38 characters" % name)

    if name is not None and len(name) > LENGTHS['blockchain_id_name']:
        raise Exception("Name '%s' is too long; expected at most %s bytes" %
                        (name, LENGTHS['blockchain_id_name']))

    return True
Verify that data for a transfer is valid.

Return True on success.
Raise Exception on error.
7,498
def _fracRoiSparse(self): self.frac_roi_sparse = np.min([self.mask_1.frac_roi_sparse,self.mask_2.frac_roi_sparse],axis=0) return self.frac_roi_sparse
Calculate an approximate pixel coverage fraction from the two masks.

We have no way to know a priori how much the coverage of the two masks overlaps in a given pixel. For example, masks that each have frac = 0.5 could have a combined frac anywhere in [0.0, 0.5]. The limits are:

    max: min(frac1, frac2)
    min: max((frac1 + frac2) - 1, 0.0)

Sometimes we are lucky and our fracdet is actually already calculated for the two masks combined, so that the max condition is satisfied. That is what we will assume...
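A worked check of those bounds for two fracdet arrays:

    import numpy as np

    f1 = np.array([0.5, 0.9, 0.2])
    f2 = np.array([0.5, 0.8, 0.9])
    upper = np.min([f1, f2], axis=0)        # what the code above assumes
    lower = np.maximum(f1 + f2 - 1.0, 0.0)  # worst-case overlap
    # upper -> [0.5, 0.8, 0.2]; lower -> [0.0, 0.7, 0.1]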
7,499
def process_file(pyfile_name):
    # NOTE: most string literals in this function were elided in the source;
    # the keys and delimiters below are reconstructed, not verbatim.
    print('Processing ' + pyfile_name)
    with open(pyfile_name) as fpyfile:
        pyfile_str = fpyfile.readlines()

    file_dict = {'file': pyfile_name.replace('.py', '')}
    if pyfile_str[0].startswith('"""'):
        # the file-level docstring becomes the summary comment
        extract = extract_code('"""', pyfile_str[0], pyfile_str, 0)
        file_dict['summary_comment'] = extract['code']
    else:
        file_dict['summary_comment'] = pyfile_name

    file_dict['functions'] = []
    for line in pyfile_str:
        if line.startswith('def '):
            line_num = pyfile_str.index(line)
            fn_def = line[4:]
            fn_name = fn_def.split('(')[0]
            function_info = {'name': fn_name}

            extract = extract_code(':', fn_def, pyfile_str, line_num)
            function_info['def'] = extract['code']
            line_num = extract['line_num'] + 1

            doc_line = pyfile_str[line_num]
            if doc_line.lstrip().startswith('"""'):
                comment_str = doc_line.lstrip()
                extract = extract_code('"""', comment_str, pyfile_str, line_num)
                function_info['docstring'] = extract['code']

            file_dict['functions'].append(function_info)
    return file_dict
Process a Python source file with Google style docstring comments. Reads file header comment, function definitions, function docstrings. Returns dictionary encapsulation for subsequent writing. Args: pyfile_name (str): file name to read. Returns: Dictionary object containing summary comment, with a list of entries for each function.
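A usage sketch, assuming the reconstructed dict keys above and that extract_code is defined alongside this function; the file name is hypothetical:

    info = process_file('mymodule.py')
    print(info['file'])
    for fn in info['functions']:
        print(fn['name'])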