Dataset columns: Unnamed: 0 (int64, 0 to 389k), code (string, lengths 26 to 79.6k), docstring (string, lengths 1 to 46.9k).
380,800
def cnmfrm(cname, lenout=_default_len_out):
    lenout = ctypes.c_int(lenout)
    frname = stypes.stringToCharP(lenout)
    cname = stypes.stringToCharP(cname)
    found = ctypes.c_int()
    frcode = ctypes.c_int()
    libspice.cnmfrm_c(cname, lenout, ctypes.byref(frcode), frname, ctypes.byref(found))
    return frcode.value, stypes.toPythonString(frname), bool(found.value)
Retrieve frame ID code and name to associate with an object. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/cnmfrm_c.html :param cname: Name of the object to find a frame for. :type cname: str :param lenout: Maximum length available for frame name. :type lenout: int :return: The ID code of the frame associated with cname, the name of the frame with ID frcode, and a flag indicating whether a frame was found. :rtype: tuple
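A minimal usage sketch for the wrapper above, assuming the standard spiceypy package layout (built-in body-to-frame mappings in CSPICE resolve without loading kernels):

import spiceypy

# Look up the body-fixed frame associated with a named body.
frcode, frname, found = spiceypy.cnmfrm("MOON")
if found:
    print(frcode, frname)  # e.g. 10020 IAU_MOON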
380,801
def contains(self, point):
    return (point.x >= self.ul.x and point.x <= self.lr.x) and \
           (point.y >= self.ul.y and point.y <= self.lr.y)
contains(point) -> True | False Returns True if point is contained inside this Rectangle, False otherwise. Examples: >>> r = Rect( Point(-1, -1), Point(1, 1) ) >>> r.contains( Point(0, 0) ) True >>> r.contains( Point(2, 3) ) False
380,802
def get_model(self):
    model = BayesianModel()
    model.add_nodes_from(self.variables)
    model.add_edges_from(self.edges)
    model.name = self.model_name

    tabular_cpds = []
    for var, values in self.variable_CPD.items():
        evidence = values['CONDSET'] if 'CONDSET' in values else []
        cpd = values['DPIS']
        evidence_card = values['CARDINALITY'] if 'CARDINALITY' in values else []
        states = self.variables[var]['STATES']
        cpd = TabularCPD(var, len(states), cpd,
                         evidence=evidence,
                         evidence_card=evidence_card)
        tabular_cpds.append(cpd)

    model.add_cpds(*tabular_cpds)

    if nx.__version__.startswith('1'):
        for var, properties in self.variables.items():
            model.node[var] = properties
    else:
        for var, properties in self.variables.items():
            model._node[var] = properties

    return model
Returns an instance of Bayesian Model.
380,803
def container_clone(object_id, input_params={}, always_retry=False, **kwargs):
    return DXHTTPRequest('/%s/clone' % object_id, input_params, always_retry=always_retry, **kwargs)
Invokes the /container-xxxx/clone API method. For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Cloning#API-method%3A-%2Fclass-xxxx%2Fclone
380,804
def create(zone, brand, zonepath, force=False):
    ret = {'status': True}

    # write the temporary zone configuration
    cfg_file = salt.utils.files.mkstemp()
    with salt.utils.files.fpopen(cfg_file, 'w+', mode=0o600) as fp_:
        fp_.write("create -b -F\n" if force else "create -b\n")
        fp_.write("set brand={0}\n".format(_sanitize_value(brand)))
        fp_.write("set zonepath={0}\n".format(_sanitize_value(zonepath)))

    # create the zonepath if it is missing
    if not __salt__['file.directory_exists'](zonepath):
        __salt__['file.makedirs'](zonepath if zonepath[-1] == '/' else '{0}/'.format(zonepath), mode='0700')

    _dump_cfg(cfg_file)
    res = __salt__['cmd.run_all']('zonecfg -z {zone} -f {cfg}'.format(
        zone=zone,
        cfg=cfg_file,
    ))
    ret['status'] = res['retcode'] == 0
    ret['message'] = res['stdout'] if ret['status'] else res['stderr']
    if ret['message'] == '':
        del ret['message']
    else:
        ret['message'] = _clean_message(ret['message'])

    # cleanup the temporary config file
    if __salt__['file.file_exists'](cfg_file):
        __salt__['file.remove'](cfg_file)

    return ret
Create an in-memory configuration for the specified zone. zone : string name of zone brand : string brand name zonepath : string path of zone force : boolean overwrite configuration CLI Example: .. code-block:: bash salt '*' zonecfg.create deathscythe ipkg /zones/deathscythe
380,805
def default_help_formatter(quick_helps):
    # Separator literals below are plausible reconstructions; the originals
    # were lost in extraction.
    ret = ''
    for line in quick_helps:
        cmd_path, param_hlp, cmd_hlp = line
        ret += ' '.join(cmd_path) + ' '
        if param_hlp:
            ret += param_hlp + ' '
        ret += '- ' + cmd_hlp + '\n'
    return ret
Apply default formatting for help messages :param quick_helps: list of tuples containing help info
380,806
def get_port_at(self, tile_id, direction):
    for port in self.ports:
        if port.tile_id == tile_id and port.direction == direction:
            return port
    port = Port(tile_id, direction, PortType.none)
    self.ports.append(port)
    return port
If no port is found, a new none port is made and added to self.ports. Returns the port. :param tile_id: :param direction: :return: Port
380,807
def get_unique_nonzeros(arr):
    rois = np.unique(arr)
    rois = rois[np.nonzero(rois)]
    rois.sort()
    return rois
Return a sorted list of the non-zero unique values of arr. Parameters ---------- arr: numpy.ndarray The data array Returns ------- list of items of arr.
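A quick self-contained check of this helper:

import numpy as np

arr = np.array([[0, 3, 1],
                [3, 0, 2]])
print(get_unique_nonzeros(arr))  # [1 2 3]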
380,808
def upgrade():
    # The original table/column/index names were lost in extraction;
    # the identifiers below are placeholders.
    op.create_table(
        'resource',
        sa.Column('created', sa.DateTime(), nullable=False),
        sa.Column('updated', sa.DateTime(), nullable=False),
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('title', sa.String(length=255), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('extra', sa.Text(), nullable=True),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('name')
    )
    op.create_index(
        op.f('ix_resource_name'), 'resource', ['name'], unique=False
    )
Upgrade database.
380,809
def use_openssl(libcrypto_path, libssl_path, trust_list_path=None):
    if not isinstance(libcrypto_path, str_cls):
        raise ValueError('libcrypto_path must be a unicode string, not %s' % type_name(libcrypto_path))

    if not isinstance(libssl_path, str_cls):
        raise ValueError('libssl_path must be a unicode string, not %s' % type_name(libssl_path))

    if not os.path.exists(libcrypto_path):
        raise LibraryNotFoundError('libcrypto does not exist at %s' % libcrypto_path)

    if not os.path.exists(libssl_path):
        raise LibraryNotFoundError('libssl does not exist at %s' % libssl_path)

    if trust_list_path is not None:
        if not isinstance(trust_list_path, str_cls):
            raise ValueError('trust_list_path must be a unicode string, not %s' % type_name(trust_list_path))
        if not os.path.exists(trust_list_path):
            raise OSError('trust_list_path does not exist at %s' % trust_list_path)

    with _backend_lock:
        if _module_values['backend'] is not None:
            raise RuntimeError('Another backend has already been selected')

        _module_values['backend'] = 'openssl'
        _module_values['backend_config'] = {
            'libcrypto_path': libcrypto_path,
            'libssl_path': libssl_path,
            'trust_list_path': trust_list_path,
        }
Forces using OpenSSL dynamic libraries on OS X (.dylib) or Windows (.dll), or using a specific dynamic library on Linux/BSD (.so). This can also be used to configure oscrypto to use LibreSSL dynamic libraries. This method must be called before any oscrypto submodules are imported. :param libcrypto_path: A unicode string of the file path to the OpenSSL/LibreSSL libcrypto dynamic library. :param libssl_path: A unicode string of the file path to the OpenSSL/LibreSSL libssl dynamic library. :param trust_list_path: An optional unicode string of the path to a file containing OpenSSL-compatible CA certificates in PEM format. If this is not provided and the platform is OS X or Windows, the system trust roots will be exported from the OS and used for all TLS connections. :raises: ValueError - when one of the paths is not a unicode string OSError - when the trust_list_path does not exist on the filesystem oscrypto.errors.LibraryNotFoundError - when one of the path does not exist on the filesystem RuntimeError - when this function is called after another part of oscrypto has been imported
380,810
def on_failure(self, exc, task_id, args, kwargs, einfo):
    # The log format string and the einfo attribute name were lost in
    # extraction; plausible reconstructions are used here.
    log.error('Task {} failed: {}'.format(task_id, getattr(einfo, 'traceback', None)))
    super(LoggedTask, self).on_failure(exc, task_id, args, kwargs, einfo)
Capture the exception that caused the task to fail, if any.
380,811
def _walk_through(job_dir):
    serial = salt.payload.Serial(__opts__)

    for top in os.listdir(job_dir):
        t_path = os.path.join(job_dir, top)

        if not os.path.exists(t_path):
            continue

        for final in os.listdir(t_path):
            load_path = os.path.join(t_path, final, LOAD_P)

            if not os.path.isfile(load_path):
                continue

            with salt.utils.files.fopen(load_path, 'rb') as rfh:
                try:
                    job = serial.load(rfh)
                except Exception:
                    log.exception('Failed to deserialize %s', load_path)
                    continue
                if not job:
                    log.error('Deserialization of job succeeded but there is no data in %s', load_path)
                    continue
                jid = job['jid']
                yield jid, job, t_path, final
Walk through the jid dir and look for jobs
380,812
def save_csv(
        self,
        name,
        address=True,
        class_param=None,
        class_name=None,
        matrix_save=True,
        normalize=False):
    try:
        message = None
        classes = class_filter(self.classes, class_name)
        csv_file = open(name + ".csv", "w")
        csv_data = csv_print(
            classes, self.class_stat, self.digit, class_param)
        csv_file.write(csv_data)
        if matrix_save:
            matrix = self.table
            if normalize:
                matrix = self.normalized_table
            csv_matrix_file = open(name + "_matrix" + ".csv", "w")
            csv_matrix_data = csv_matrix_print(self.classes, matrix)
            csv_matrix_file.write(csv_matrix_data)
        if address:
            message = os.path.join(os.getcwd(), name + ".csv")
        return {"Status": True, "Message": message}
    except Exception as e:
        return {"Status": False, "Message": str(e)}
Save ConfusionMatrix in CSV file. :param name: filename :type name : str :param address: flag for address return :type address : bool :param class_param : class parameters list for save, Example : ["TPR","TNR","AUC"] :type class_param : list :param class_name : class name (sub set of classes), Example :[1,2,3] :type class_name : list :param matrix_save : save matrix flag :type matrix_save : bool :param normalize : save normalize matrix flag :type normalize : bool :return: saving Status as dict {"Status":bool , "Message":str}
380,813
def calc_ag_v1(self):
    flu = self.sequences.fluxes.fastaccess
    flu.ag = flu.am+flu.av[0]+flu.av[1]+flu.avr[0]+flu.avr[1]
Sum the flowed-through areas of the total cross section. Required flux sequences: |AM| |AV| |AVR| Calculated flux sequence: |AG| Example: >>> from hydpy.models.lstream import * >>> parameterstep() >>> fluxes.am = 1.0 >>> fluxes.av = 2.0, 3.0 >>> fluxes.avr = 4.0, 5.0 >>> model.calc_ag_v1() >>> fluxes.ag ag(15.0)
380,814
def refweights(self):
    return numpy.full(self.shape, 1./self.shape[0], dtype=float)
A |numpy| |numpy.ndarray| with equal weights for all segment junctions. >>> from hydpy.models.hstream import * >>> parameterstep('1d') >>> states.qjoints.shape = 5 >>> states.qjoints.refweights array([ 0.2, 0.2, 0.2, 0.2, 0.2])
380,815
def update_chain(graph, loc, du, ud):
    ins = graph.get_ins_from_loc(loc)
    for var in ins.get_used_vars():
        # Visit each definition point of the used variable.
        for def_loc in set(ud[var, loc]):
            du[var, def_loc].remove(loc)
            ud[var, loc].remove(def_loc)
            if not ud.get((var, loc)):
                ud.pop((var, loc))
            # If the definition is no longer used anywhere, it is dead.
            if def_loc >= 0 and not du[var, def_loc]:
                du.pop((var, def_loc))
                def_ins = graph.get_ins_from_loc(def_loc)
                if def_ins.is_call():
                    def_ins.remove_defined_var()
                elif def_ins.has_side_effect():
                    continue
                else:
                    update_chain(graph, def_loc, du, ud)
                    graph.remove_ins(def_loc)
Updates the DU chain of the instruction located at loc such that there is no more reference to it, so that we can remove it. When an instruction is found to be dead (i.e. it has no side effect and the register defined is not used) we have to update the DU chain of all the variables that may be used by the dead instruction.
380,816
def convert_args_to_list(args):
    list_of_pairs = []
    if len(args) == 0:
        return []

    if any(isinstance(arg, (list, tuple)) for arg in args):
        # Input is a list of lists, tuples, or a mix of both.
        if len(args) == 1 and \
                any(isinstance(arg, (list, tuple)) for arg in args[0]):
            for item in args[0]:
                list_of_pairs.append(list(item))
        else:
            for item in args:
                list_of_pairs.append(list(item))
    else:
        # Input is a single pair of values.
        if len(args) == 2:
            list_of_pairs.append(list(args))
        else:
            msg = "The argument type is invalid: {}".format(args)
            raise TypeError(msg)
    return list_of_pairs
Convert all iterable pairs of inputs into a list of lists.
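A short usage sketch showing the three accepted input shapes:

# A single pair passed as two positional arguments.
print(convert_args_to_list(1, 2))              # [[1, 2]]
# Several pairs passed as separate tuples.
print(convert_args_to_list((1, 2), (3, 4)))    # [[1, 2], [3, 4]]
# A single nested sequence of pairs.
print(convert_args_to_list([(1, 2), (3, 4)]))  # [[1, 2], [3, 4]]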
380,817
def load_font(self, font_path, font_size):
    self.__font_path = font_path
    self.__font_size = font_size
    if font_path != "":
        self.__font = pygame.font.Font(font_path, font_size)
    self.__set_text(self.__text)
Load the specified font from a file.
380,818
def _dens(self, R, z, phi=0., t=0.):
    return 1./(1.+(R**2.+z**2.)/self._a2)/4./nu.pi/self._a3
NAME: _dens PURPOSE: evaluate the density for this potential INPUT: R - Galactocentric cylindrical radius z - vertical height phi - azimuth t - time OUTPUT: the density HISTORY: 2015-12-04 - Started - Bovy (UofT)
380,819
def update_todo_menu(self):
    editorstack = self.get_current_editorstack()
    results = editorstack.get_todo_results()
    self.todo_menu.clear()
    filename = self.get_current_filename()
    for text, line0 in results:
        icon = ima.icon('todo')
        slot = lambda _checked, _l=line0: self.load(filename, goto=_l)
        action = create_action(self, text=text, icon=icon, triggered=slot)
        self.todo_menu.addAction(action)
    self.update_todo_actions()
Update todo list menu
380,820
def refresh():
    old_token = guard.read_token_from_header()
    new_token = guard.refresh_jwt_token(old_token)
    ret = {'access_token': new_token}
    return flask.jsonify(ret), 200
Refreshes an existing JWT by creating a new one that is a copy of the old except that it has a refreshed access expiration. .. example:: $ curl http://localhost:5000/refresh -X GET \ -H "Authorization: Bearer <your_token>"
380,821
def _set_remote(self, stream=False):
    # Log message literals below are plausible reconstructions; the
    # originals were lost in extraction.
    args = self._args_for_remote()
    if args is None:
        logger.debug('_args_for_remote() returned None; not configuring remote')
        return
    logger.warning('Setting terraform remote config: %s', ' '.join(args))
    args = ['config'] + args
    self._run_tf('remote', cmd_args=args, stream=stream)
    logger.info('Terraform remote configured.')
Call :py:meth:`~._args_for_remote`; if the return value is not None, execute 'terraform remote config' with those arguments and ensure it exits 0. :param stream: whether or not to stream TF output in realtime :type stream: bool
380,822
def _Dispatch(ps, server, SendResponse, SendFault, post, action, nsdict={}, **kw):
    # This is Python 2 code (ZSI); error message literals are plausible
    # reconstructions, the originals were lost in extraction.
    localURL = 'http://%s:%d%s' % (server.server_name, server.server_port, post)
    address = action
    service = server.getNode(post)
    isWSResource = False
    if isinstance(service, WSAResource):
        isWSResource = True
        service.setServiceURL(localURL)
        address = Address()
        try:
            address.parse(ps)
        except Exception, e:
            return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)
        if action and action != address.getAction():
            e = WSActionException('Action mismatch: HTTP action(%s) WS-Address action(%s)'
                                  % (action, address.getAction()))
            return SendFault(FaultFromException(e, 0, None), **kw)
        action = address.getAction()

    if isinstance(service, ServiceInterface) is False:
        e = NoSuchService('no service at POST(%s) in container: %s' % (post, server))
        return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)

    if not service.authorize(None, post, action):
        return SendFault(Fault(Fault.Server, "Not authorized"), code=401)

    try:
        method = service.getOperation(ps, address)
    except Exception, e:
        return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)

    try:
        if isWSResource is True:
            request, result = method(ps, address)
        else:
            request, result = method(ps)
    except Exception, e:
        return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)

    service.verify(ps)

    if result is None:
        return SendResponse('', **kw)

    sw = SoapWriter(nsdict=nsdict)
    try:
        sw.serialize(result)
    except Exception, e:
        return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)

    if isWSResource is True:
        action = service.getResponseAction(ps, action)
        addressRsp = Address(action=action)
        try:
            addressRsp.setResponseFromWSAddress(address, localURL)
            addressRsp.serialize(sw)
        except Exception, e:
            return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)

    service.sign(sw)

    try:
        soapdata = str(sw)
        return SendResponse(soapdata, **kw)
    except Exception, e:
        return SendFault(FaultFromException(e, 0, sys.exc_info()[2]), **kw)
Send ParsedSoap instance to ServiceContainer, which dispatches to appropriate service via post, and method via action. Response is a self-describing pyobj, which is passed to a SoapWriter. Call SendResponse or SendFault to send the reply back, appropriately. server -- ServiceContainer instance
380,823
def rst2html(rst_src, **kwargs):
    pub = rst2pub(rst_src, settings_overrides=kwargs, writer_name='html')
    return pub.writer.parts['fragment']
Convert a reStructuredText string into a unicode HTML fragment. For `kwargs`, see `default_rst_opts` and http://docutils.sourceforge.net/docs/user/config.html
380,824
def _include_module(self, module, mn):
    if mn in self.topology.include_packages:
        _debug.debug("_include_module:explicit using __include_packages: module=%s", mn)
        return True
    if '.' in mn:
        for include_package in self.topology.include_packages:
            if mn.startswith(include_package + '.'):
                _debug.debug("_include_module:explicit pattern using __include_packages: module=%s pattern=%s",
                             mn, include_package + '.')
                return True
    if mn in self.topology.exclude_packages:
        _debug.debug("_include_module:explicit using __exclude_packages: module=%s", mn)
        return False
    if '.' in mn:
        for exclude_package in self.topology.exclude_packages:
            if mn.startswith(exclude_package + '.'):
                _debug.debug("_include_module:explicit pattern using __exclude_packages: module=%s pattern=%s",
                             mn, exclude_package + '.')
                return False
    _debug.debug("_include_module:including: module=%s", mn)
    return True
See if a module should be included or excluded based upon included_packages and excluded_packages. As some packages have the following format: scipy.special.specfun scipy.linalg Where the top-level package name is just a prefix to a longer package name, we don't want to do a direct comparison. Instead, we want to exclude packages which are either exactly "<package_name>", or start with "<package_name>".
380,825
def _concat_datetimetz(to_concat, name=None):
    sample = to_concat[0]
    if isinstance(sample, ABCIndexClass):
        return sample._concat_same_dtype(to_concat, name=name)
    elif isinstance(sample, ABCDatetimeArray):
        return sample._concat_same_type(to_concat)
Concat DatetimeIndex with the same tz. All inputs must be DatetimeIndex; it is also used in DatetimeIndex.append.
380,826
def _get(self, url):
    return self.session.get(self.READ_URL_PRE + url).json()
Helper method: GET data from given URL on TBA's API. :param url: URL string to get data from. :return: Requested data in JSON format.
380,827
def zyz_decomposition(gate: Gate) -> Circuit:
    if gate.qubit_nb != 1:
        raise ValueError('Expected a 1-qubit gate')

    q, = gate.qubits

    U = asarray(gate.asoperator())
    U /= np.linalg.det(U) ** (1/2)  # convert to special unitary

    if abs(U[0, 0]) > abs(U[1, 0]):
        theta1 = 2 * np.arccos(min(abs(U[0, 0]), 1))
    else:
        theta1 = 2 * np.arcsin(min(abs(U[1, 0]), 1))

    cos_halftheta1 = np.cos(theta1/2)
    if not np.isclose(cos_halftheta1, 0.0):
        phase = U[1, 1] / cos_halftheta1
        theta0_plus_theta2 = 2 * np.arctan2(np.imag(phase), np.real(phase))
    else:
        theta0_plus_theta2 = 0.0

    sin_halftheta1 = np.sin(theta1/2)
    if not np.isclose(sin_halftheta1, 0.0):
        phase = U[1, 0] / sin_halftheta1
        theta0_sub_theta2 = 2 * np.arctan2(np.imag(phase), np.real(phase))
    else:
        theta0_sub_theta2 = 0.0

    theta0 = (theta0_plus_theta2 + theta0_sub_theta2) / 2
    theta2 = (theta0_plus_theta2 - theta0_sub_theta2) / 2

    t0 = theta0/np.pi
    t1 = theta1/np.pi
    t2 = theta2/np.pi

    circ1 = Circuit()
    circ1 += TZ(t2, q)
    circ1 += TY(t1, q)
    circ1 += TZ(t0, q)

    return circ1
Returns the Euler Z-Y-Z decomposition of a local 1-qubit gate.
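A hedged sanity check for the decomposition above; it assumes quantumflow (imported as qf) exposes the TX gate used elsewhere in this codebase and a Circuit.asgate() method to multiply the circuit out:

import numpy as np
import quantumflow as qf

gate = qf.TX(0.3, 0)                        # an arbitrary 1-qubit gate
circ = qf.zyz_decomposition(gate)
U = np.asarray(gate.asoperator())
V = np.asarray(circ.asgate().asoperator())  # assumed API
phase = U[0, 0] / V[0, 0]                   # compare up to global phase
assert np.allclose(U, phase * V, atol=1e-7)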
380,828
def register_type(klass, type_url=None):
    if type_url is None:
        type_url = _compute_type_url(klass)
    if type_url in _TYPE_URL_MAP:
        if _TYPE_URL_MAP[type_url] is not klass:
            raise ValueError("Conflict: %s" % (_TYPE_URL_MAP[type_url],))

    _TYPE_URL_MAP[type_url] = klass
Register a klass as the factory for a given type URL. :type klass: :class:`type` :param klass: class to be used as a factory for the given type :type type_url: str :param type_url: (Optional) URL naming the type. If not provided, infers the URL from the type descriptor. :raises ValueError: if a registration already exists for the URL.
380,829
def load_single_dict(pinyin_dict, style='default'):
    if style == 'tone2':
        for k, v in pinyin_dict.items():
            v = _replace_tone2_style_dict_to_default(v)
            PINYIN_DICT[k] = v
    else:
        PINYIN_DICT.update(pinyin_dict)

    mmseg.retrain(mmseg.seg)
Load a user-defined single-character pinyin dictionary. :param pinyin_dict: single-character pinyin dictionary, e.g. ``{0x963F: u"ā,ē"}`` :param style: the pinyin style of the values in pinyin_dict; supports 'default' and 'tone2' :type pinyin_dict: dict
380,830
def edit_message_reply_markup(
    self,
    chat_id: Union[int, str],
    message_id: int,
    reply_markup: "pyrogram.InlineKeyboardMarkup" = None
) -> "pyrogram.Message":
    r = self.send(
        functions.messages.EditMessage(
            peer=self.resolve_peer(chat_id),
            id=message_id,
            reply_markup=reply_markup.write() if reply_markup else None
        )
    )

    for i in r.updates:
        if isinstance(i, (types.UpdateEditMessage, types.UpdateEditChannelMessage)):
            return pyrogram.Message._parse(
                self, i.message,
                {i.id: i for i in r.users},
                {i.id: i for i in r.chats}
            )
Use this method to edit only the reply markup of messages sent by the bot or via the bot (for inline bots). Args: chat_id (``int`` | ``str``): Unique identifier (int) or username (str) of the target chat. For your personal cloud (Saved Messages) you can simply use "me" or "self". For a contact that exists in your Telegram address book you can use his phone number (str). message_id (``int``): Message identifier in the chat specified in chat_id. reply_markup (:obj:`InlineKeyboardMarkup`, *optional*): An InlineKeyboardMarkup object. Returns: On success, if edited message is sent by the bot, the edited :obj:`Message <pyrogram.Message>` is returned, otherwise True is returned. Raises: :class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
380,831
def create_classifier(self, name, positive_examples, negative_examples=None,
                      negative_examples_filename=None, **kwargs):
    if name is None:
        raise ValueError('name must be provided')
    if not positive_examples:
        raise ValueError('positive_examples must be provided')

    headers = {}
    if 'headers' in kwargs:
        headers.update(kwargs.get('headers'))
    sdk_headers = get_sdk_headers('watson_vision_combined', 'V3', 'create_classifier')
    headers.update(sdk_headers)

    params = {'version': self.version}

    form_data = {}
    form_data['name'] = (None, name, 'text/plain')
    for key in positive_examples.keys():
        part_name = '%s_positive_examples' % (key)
        value = positive_examples[key]
        filename = None
        if hasattr(value, 'name'):
            filename = basename(value.name)
        form_data[part_name] = (filename, value, 'application/octet-stream')
    if negative_examples:
        if not negative_examples_filename and hasattr(negative_examples, 'name'):
            negative_examples_filename = basename(negative_examples.name)
        if not negative_examples_filename:
            raise ValueError('negative_examples_filename must be provided')
        form_data['negative_examples'] = (negative_examples_filename,
                                          negative_examples,
                                          'application/octet-stream')

    url = '/v3/classifiers'
    response = self.request(
        method='POST',
        url=url,
        headers=headers,
        params=params,
        files=form_data,
        accept_json=True)
    return response
Create a classifier. Train a new multi-faceted classifier on the uploaded image data. Create your custom classifier with positive or negative examples. Include at least two sets of examples, either two positive example files or one positive and one negative file. You can upload a maximum of 256 MB per call. Encode all names in UTF-8 if they contain non-ASCII characters (.zip and image file names, and classifier and class names). The service assumes UTF-8 encoding if it encounters non-ASCII characters. :param str name: The name of the new classifier. Encode special characters in UTF-8. :param dict positive_examples: A dictionary that contains the value for each classname. The value is a .zip file of images that depict the visual subject of a class in the new classifier. You can include more than one positive example file in a call. Specify the parameter name by appending `_positive_examples` to the class name. For example, `goldenretriever_positive_examples` creates the class **goldenretriever**. Include at least 10 images in .jpg or .png format. The minimum recommended image resolution is 32X32 pixels. The maximum number of images is 10,000 images or 100 MB per .zip file. Encode special characters in the file name in UTF-8. :param file negative_examples: A .zip file of images that do not depict the visual subject of any of the classes of the new classifier. Must contain a minimum of 10 images. Encode special characters in the file name in UTF-8. :param str negative_examples_filename: The filename for negative_examples. :param dict headers: A `dict` containing the request headers :return: A `DetailedResponse` containing the result, headers and HTTP status code. :rtype: DetailedResponse
380,832
@tf.custom_gradient
def sqrt_with_finite_grads(x, name=None):
    with tf.compat.v1.name_scope(name, 'sqrt_with_finite_grads', [x]):
        x = tf.convert_to_tensor(value=x, name='x')
        if not x.dtype.is_floating:
            raise TypeError('Input `x` must be floating type.')
        def grad(grad_ys):
            large_float_like_x = np.sqrt(np.finfo(x.dtype.as_numpy_dtype()).max)
            safe_grads = tf.where(
                tf.equal(x, 0),
                tf.fill(tf.shape(input=x), large_float_like_x),
                0.5 * tf.math.rsqrt(x))
            return grad_ys * safe_grads
        return tf.sqrt(x), grad
A sqrt function whose gradient at zero is very large but finite. Args: x: a `Tensor` whose sqrt is to be computed. name: a Python `str` prefixed to all ops created by this function. Default `None` (i.e., "sqrt_with_finite_grads"). Returns: sqrt: the square root of `x`, with an overridden gradient at zero grad: a gradient function, which is the same as sqrt's gradient everywhere except at zero, where it is given a large finite value, instead of `inf`. Raises: TypeError: if `tf.convert_to_tensor(x)` is not a `float` type. Often in kernel functions, we need to compute the L2 norm of the difference between two vectors, `x` and `y`: `sqrt(sum_i((x_i - y_i) ** 2))`. In the case where `x` and `y` are identical, e.g., on the diagonal of a kernel matrix, we get `NaN`s when we take gradients with respect to the inputs. To see, this consider the forward pass: ``` [x_1 ... x_N] --> [x_1 ** 2 ... x_N ** 2] --> (x_1 ** 2 + ... + x_N ** 2) --> sqrt((x_1 ** 2 + ... + x_N ** 2)) ``` When we backprop through this forward pass, the `sqrt` yields an `inf` because `grad_z(sqrt(z)) = 1 / (2 * sqrt(z))`. Continuing the backprop to the left, at the `x ** 2` term, we pick up a `2 * x`, and when `x` is zero, we get `0 * inf`, which is `NaN`. We'd like to avoid these `NaN`s, since they infect the rest of the connected computation graph. Practically, when two inputs to a kernel function are equal, we are in one of two scenarios: 1. We are actually computing k(x, x), in which case norm(x - x) is identically zero, independent of x. In this case, we'd like the gradient to reflect this independence: it should be zero. 2. We are computing k(x, y), and x just *happens* to have the same value as y. The gradient at such inputs is in fact ill-defined (there is a cusp in the sqrt((x - y) ** 2) surface along the line x = y). There are, however, an infinite number of sub-gradients, all of which are valid at all such inputs. By symmetry, there is exactly one which is "special": zero, and we elect to use that value here. In practice, having two identical inputs to a kernel matrix is probably a pathological situation to be avoided, but that is better resolved at a higher level than this. To avoid the infinite gradient at zero, we use tf.custom_gradient to redefine the gradient at zero. We assign it to be a very large value, specifically the sqrt of the max value of the floating point dtype of the input. We use the sqrt (as opposed to just using the max floating point value) to avoid potential overflow when combining this value with others downstream.
380,833
def _shuffled(seq):
    fixed_random = random.Random()
    if six.PY2:
        fixed_random.seed(FIXED_RANDOM_SEED)
    else:
        fixed_random.seed(FIXED_RANDOM_SEED, version=1)

    seq = list(seq)
    random.shuffle(seq, random=fixed_random.random)
    return seq
Deterministically shuffle identically under both py2 + py3.
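A small usage sketch; FIXED_RANDOM_SEED is a module-level constant in the original source (any fixed integer works):

FIXED_RANDOM_SEED = 4  # assumed value of the module-level constant

print(_shuffled([1, 2, 3, 4, 5]))
print(_shuffled([1, 2, 3, 4, 5]))  # same order on every call and every run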
380,834
def coords(self):
    yy, xx = np.nonzero(self.data_cutout_ma)
    return (yy + self._slice[0].start, xx + self._slice[1].start)
A tuple of two `~numpy.ndarray` containing the ``y`` and ``x`` pixel coordinates of unmasked pixels within the source segment. Non-finite pixel values (e.g. NaN, infs) are excluded (automatically masked). If all pixels are masked, ``coords`` will be a tuple of two empty arrays.
380,835
def all(iterable=None, *, name=None, metric=call_default):
    if iterable is None:
        # used as a decorator
        return _iter_decorator(name, metric)
    else:
        return _do_all(iterable, name, metric)
Measure total time and item count for consuming an iterable :arg iterable: any iterable :arg function metric: f(name, count, total_time) :arg str name: name for the metric
380,836
def _set_range(self, v, load=False):
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # The vendor extension metadata dicts present in the generated
        # source were lost in extraction and are omitted; the namespace
        # below is an assumed value.
        t = YANGDynClass(
            v,
            base=YANGListType(
                "range_address range_mask", range.range,
                yang_name="range", rest_name="range", parent=self,
                is_container='list', user_ordered=False,
                path_helper=self._path_helper,
                yang_keys='range-address range-mask'),
            is_container='list', yang_name="range", rest_name="range",
            parent=self, path_helper=self._path_helper,
            extmethods=self._extmethods, register_paths=True,
            namespace='urn:brocade.com:mgmt:brocade-ospf',  # assumed
            defining_module='brocade-ospf', yang_type='list', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': 'range must be of a type compatible with list',
            'defined-type': 'list',
            'generated-type': 'YANGDynClass(...)',
        })
    self.__range = t
    if hasattr(self, '_set'):
        self._set()
Setter method for range, mapped from YANG variable /rbridge_id/router/ospf/area/range (list) If this variable is read-only (config: false) in the source YANG file, then _set_range is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_range() directly.
380,837
def hdel(self, name, *keys):
    with self.pipe as pipe:
        m_encode = self.memberparse.encode
        keys = [m_encode(m) for m in self._parse_values(keys)]
        return pipe.hdel(self.redis_key(name), *keys)
Delete one or more hash fields. :param name: str the name of the redis key :param keys: one or more members to remove from the key. :return: Future()
380,838
def create_awslambda(self):
    utils.banner("Creating Lambda Function")
    awslambdaobj = awslambda.LambdaFunction(
        app=self.app,
        env=self.env,
        region=self.region,
        prop_path=self.json_path)
    awslambdaobj.create_lambda_function()

    utils.banner("Creating Lambda Event")
    lambdaeventobj = awslambda.LambdaEvent(app=self.app,
                                           env=self.env,
                                           region=self.region,
                                           prop_path=self.json_path)
    lambdaeventobj.create_lambda_events()
Create the Lambda function and Lambda events as defined in the configs.
380,839
def copy_session(session: requests.Session) -> requests.Session:
    new = requests.Session()
    new.cookies = requests.utils.cookiejar_from_dict(
        requests.utils.dict_from_cookiejar(session.cookies))
    new.headers = session.headers.copy()
    return new
Duplicates a requests.Session.
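A quick usage check: the copy carries over headers and cookies but shares no state with the original session:

import requests

s = requests.Session()
s.headers['User-Agent'] = 'my-app/1.0'
s.cookies.set('session_id', 'abc123')

s2 = copy_session(s)
assert s2 is not s
assert s2.headers['User-Agent'] == 'my-app/1.0'
assert s2.cookies.get('session_id') == 'abc123'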
380,840
def get_auth(host, app_name, database_name):
    from .hooks import _get_auth_hook
    return _get_auth_hook(host, app_name, database_name)
Authentication hook to allow plugging in custom authentication credential providers
380,841
def _fire(self, layers, things, the_plot):
    # The original lookup of row/col was lost in extraction; a sprite's
    # own position is the natural source and is assumed here.
    row, col = self.position
    self._teleport((row - 1, col))
Launches a new bolt from the player.
380,842
def integer(
        element_name,
        attribute=None,
        required=True,
        alias=None,
        default=0,
        omit_empty=False,
        hooks=None
):
    value_parser = _number_parser(int)
    return _PrimitiveValue(
        element_name,
        value_parser,
        attribute,
        required,
        alias,
        default,
        omit_empty,
        hooks
    )
Create a processor for integer values. See also :func:`declxml.boolean`
380,843
def add_file(self, fileGrp, mimetype=None, url=None, ID=None, pageId=None,
             force=False, local_filename=None, **kwargs):
    if not ID:
        raise Exception("Must set ID of the mets:file")
    el_fileGrp = self._tree.getroot().find(".//mets:fileGrp[@USE='%s']" % (fileGrp), NS)
    if el_fileGrp is None:
        el_fileGrp = self.add_file_group(fileGrp)
    if ID is not None and self.find_files(ID=ID) != []:
        if not force:
            raise Exception("File with ID='%s' already exists" % ID)
        mets_file = self.find_files(ID=ID)[0]
    else:
        mets_file = OcrdFile(ET.SubElement(el_fileGrp, TAG_METS_FILE), mets=self)

    mets_file.url = url
    mets_file.mimetype = mimetype
    mets_file.ID = ID
    mets_file.pageId = pageId
    mets_file.local_filename = local_filename
    self._file_by_id[ID] = mets_file

    return mets_file
Add an `OcrdFile </../../ocrd_models/ocrd_models.ocrd_file.html>`_. Arguments: fileGrp (string): Add file to ``mets:fileGrp`` with this ``USE`` attribute mimetype (string): url (string): ID (string): pageId (string): force (boolean): Whether to add the file even if a ``mets:file`` with the same ``ID`` already exists. local_filename (string):
380,844
def constraint(self, n=-1, fid=0):
    c = self._getval("constr", fid)
    if n < 0 or n > self.deficiency(fid):
        return c
    else:
        raise RuntimeError("Not yet implemented")
Obtain the set of orthogonal equations that make the solution of the rank deficient normal equations possible. :param fid: the id of the sub-fitter (numerical)
380,845
def toJson(self, data=None, pretty=False):
    if data is None:
        data = self.attrs
    data = self.flatten(data)
    ret = json.dumps(data, indent=4, sort_keys=True)
    return ret
convert the flattened dictionary into json
380,846
def dump(rt, from_date, with_json=True, latest_only=False, **kwargs):
    return dict(id_remote_account=rt.id_remote_account,
                token_type=rt.token_type,
                access_token=rt.access_token,
                secret=rt.secret)
Dump the remote token as a dictionary. :param rt: Remote token to be dumped. :type rt: `invenio_oauthclient.models.RemoteToken [Invenio2.x]` :returns: Remote token serialized to dictionary. :rtype: dict
380,847
def clear(self):
    parent = self.parent
    if parent is not self and parent.is_built_coherence(self.graph) is Build.YES:
        # The original error message was lost in extraction; a plausible
        # one is used here.
        raise GPflowError('Clear method cannot be run for a node whose parent is built.')
    self._clear()
Calls `_clear` abstract method which must be implemented by descendants. :raises: GPflowError exception when parent of the node is built.
380,848
def _findOptionValueAdvAudit(option):
    # String literals below (context key, paths, CSV column names) are
    # plausible reconstructions; the originals were lost in extraction.
    if 'lgpo.adv_audit_data' not in __context__:
        system_root = os.environ.get('SystemRoot', 'C:\\Windows')
        f_audit = os.path.join(system_root, 'security', 'audit', 'audit.csv')
        f_audit_gpo = os.path.join(system_root, 'System32', 'GroupPolicy',
                                   'Machine', 'Microsoft', 'Windows NT',
                                   'Audit', 'audit.csv')
        if not __salt__['file.file_exists'](f_audit):
            if __salt__['file.file_exists'](f_audit_gpo):
                # Copy the GPO audit.csv to the system audit location
                __salt__['file.copy'](f_audit_gpo, f_audit)
            else:
                # Create a blank audit.csv containing just the field names
                __salt__['file.touch'](f_audit)
                __salt__['file.append'](f_audit, ','.join(field_names))
        audit_settings = {}
        with salt.utils.files.fopen(f_audit, mode='r') as csv_file:
            reader = csv.DictReader(csv_file)
            for row in reader:
                audit_settings.update(
                    {row['Subcategory']: row['Setting Value']})
        __context__['lgpo.adv_audit_data'] = audit_settings
    return __context__['lgpo.adv_audit_data'].get(option, None)
Get the Advanced Auditing policy as configured in ``C:\\Windows\\Security\\Audit\\audit.csv`` Args: option (str): The name of the setting as it appears in audit.csv Returns: the value of the specified setting if found, otherwise ``None``
380,849
def score_pairwise(aseq, bseq):
    assert len(aseq) == len(bseq)
    GAP_OPEN = -10.0
    GAP_EXTEND = -0.5
    GAP_CHARS = frozenset('-.')
    score = 0.0
    in_gap = True  # leading gaps do not incur the open penalty
    for ares, bres in zip(aseq.upper(), bseq.upper()):
        if ares in GAP_CHARS and bres in GAP_CHARS:
            continue
        match = blosum62.get((ares, bres), None)
        if match is None:
            assert GAP_CHARS.intersection((ares, bres)), \
                "Expected one gap in: " + str((ares, bres))
            if not in_gap:
                score += GAP_OPEN
                in_gap = True
            score += GAP_EXTEND
        else:
            in_gap = False
            score += match
    if in_gap:
        # remove the open penalty of a trailing gap
        score -= GAP_OPEN
    return score
Compute pairwise distances between two sequences (raw strings).
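A hedged usage sketch; blosum62 is assumed to come from Biopython's Bio.SubsMat.MatrixInfo, whose keys are residue-pair tuples:

from Bio.SubsMat.MatrixInfo import blosum62  # assumed source of the matrix

# One internal gap; all other columns are identical residues.
print(score_pairwise("HEAG-WGHEE", "HEAGAWGHEE"))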
380,850
def hessian(self, x, y, Rs, theta_Rs, r_core, center_x=0, center_y=0):
    if Rs < 0.0001:
        Rs = 0.0001
    x_ = x - center_x
    y_ = y - center_y

    R = np.sqrt(x_ ** 2 + y_ ** 2)
    rho0 = self._alpha2rho0(theta_Rs=theta_Rs, Rs=Rs, r_core=r_core)

    kappa = self.density_2d(x_, y_, Rs, rho0, r_core)
    gamma1, gamma2 = self.cBurkGamma(R, Rs, rho0, r_core, x_, y_)
    f_xx = kappa + gamma1
    f_yy = kappa - gamma1
    f_xy = gamma2

    return f_xx, f_yy, f_xy
:param x: x coordinate :param y: y coordinate :param Rs: scale radius :param rho0: central core density :param r_core: core radius :param center_x: :param center_y: :return:
380,851
async def monitor_status(self, alarm_status_callback=None,
                         zone_changed_callback=None,
                         output_changed_callback=None):
    self._alarm_status_callback = alarm_status_callback
    self._zone_changed_callback = zone_changed_callback
    self._output_changed_callback = output_changed_callback

    _LOGGER.info("Starting monitor_status loop")

    while not self.closed:
        _LOGGER.debug("Iteration... ")
        while not self.connected:
            _LOGGER.info("Not connected, re-connecting... ")
            await self.connect()
            if not self.connected:
                _LOGGER.warning("Not connected, sleeping for 10s... ")
                await asyncio.sleep(self._reconnection_timeout)
                continue
        await self.start_monitoring()
        if not self.connected:
            _LOGGER.warning("Start monitoring failed, sleeping for 10s...")
            await asyncio.sleep(self._reconnection_timeout)
            continue
        while True:
            await self._update_status()
            _LOGGER.debug("Got status!")
            if not self.connected:
                _LOGGER.info("Got connection broken, reconnecting!")
                break
    _LOGGER.info("Closed, quit monitoring.")
Start monitoring of the alarm status. Send command to satel integra to start sending updates. Read in a loop and call respective callbacks when received messages.
380,852
def register_site(self):
    if self.oxd_id:
        logger.info('Client is already registered. ID: %s', self.oxd_id)
        return self.oxd_id

    params = {
        "authorization_redirect_uri": self.authorization_redirect_uri,
        "oxd_rp_programming_language": "python",
    }

    for op in self.opt_params:
        if self.config.get("client", op):
            params[op] = self.config.get("client", op)

    for olp in self.opt_list_params:
        if self.config.get("client", olp):
            params[olp] = self.config.get("client", olp).split(",")

    logger.debug("Sending command `register_site` with params %s", params)
    response = self.msgr.request("register_site", **params)
    logger.debug("Received response: %s", response)

    if response['status'] == 'error':
        raise OxdServerError(response['data'])

    self.oxd_id = response["data"]["oxd_id"]
    self.config.set("oxd", "id", self.oxd_id)
    logger.info("Site registration successful. Oxd ID: %s", self.oxd_id)
    return self.oxd_id
Function to register the site and generate a unique ID for the site Returns: **string:** The ID of the site (also called client id) if the registration is successful Raises: **OxdServerError:** If the site registration fails.
380,853
def _process_file(self, obj, fobj, field):
    from uliweb import settings

    paths = []
    upload_to = self.upload_to or self._get_upload_path(field, 'upload_to', obj)
    if upload_to:
        self.fileserving.to_path = upload_to
    upload_to_sub = self.upload_to_sub or self._get_upload_path(field, 'upload_to_sub', obj)
    if upload_to_sub:
        paths.append(upload_to_sub)
    paths.append(fobj['filename'])

    return self.fileserving.save_file(os.path.join(*paths), fobj['file'],
                                      replace=self.file_replace,
                                      convert=self.file_convert)
obj is record object fobj is data field is FileField instance
380,854
def _set_foreign_attributes_for_create(self, model):
    model.set_attribute(self.get_plain_foreign_key(), self.get_parent_key())
    model.set_attribute(self.get_plain_morph_type(), self._morph_name)
Set the foreign ID and type for creation a related model.
380,855
def make_tf_example(features, pi, value):
    return tf.train.Example(features=tf.train.Features(feature={
        'x': tf.train.Feature(
            bytes_list=tf.train.BytesList(
                value=[features.tostring()])),
        'pi': tf.train.Feature(
            bytes_list=tf.train.BytesList(
                value=[pi.tostring()])),
        'outcome': tf.train.Feature(
            float_list=tf.train.FloatList(
                value=[value]))}))
Args: features: [N, N, FEATURE_DIM] nparray of uint8 pi: [N * N + 1] nparray of float32 value: float
380,856
def ping(proxy=None, hostport=None):
    # Schema field names and error message literals below are plausible
    # reconstructions; the originals were lost in extraction.
    schema = {
        'type': 'object',
        'properties': {
            'status': {
                'type': 'string'
            },
        },
        'required': [
            'status'
        ]
    }

    assert proxy or hostport, 'Need either a proxy handle or a hostport string'
    if proxy is None:
        proxy = connect_hostport(hostport)

    resp = {}
    try:
        resp = proxy.ping()
        resp = json_validate(schema, resp)
        if json_is_error(resp):
            return resp
        assert resp['status'] == 'alive'
    except ValidationError as e:
        if BLOCKSTACK_DEBUG:
            log.exception(e)
        resp = {'error': 'Server response did not match expected schema', 'http_status': 502}
        return resp
    except socket.timeout:
        log.error("Connection timed out")
        resp = {'error': 'Connection to remote host timed out', 'http_status': 503}
        return resp
    except socket.error as se:
        log.error("Connection error {}".format(se.errno))
        resp = {'error': 'Connection to remote host failed', 'http_status': 502}
        return resp
    except Exception as ee:
        if BLOCKSTACK_DEBUG:
            log.exception(ee)
        log.error("Caught exception while connecting to Blockstack node: {}".format(ee))
        resp = {'error': 'Failed to contact Blockstack node', 'http_status': 500}
        return resp

    return resp
rpc_ping Returns {'alive': True} on success Returns {'error': ...} on error
380,857
def paste_action_callback(self, *event):
    if react_to_event(self.view, self.oc_list_ctrl.tree_view, event) and \
            self.oc_list_ctrl.active_entry_widget is None:
        global_clipboard.paste(self.model, limited=['outcomes'])  # assumed key
        return True
Callback method for paste action
380,858
def TakeWhile(self: dict, f):
    if is_to_destruct(f):
        f = destruct_func(f)
    for e in self.items():
        if not f(e):
            break
        yield e
[ { 'self': [1, 2, 3, 4, 5], 'f': lambda x: x < 4, 'assert': lambda ret: list(ret) == [1, 2, 3] } ]
380,859
def get_pd_by_id(self, id):
    for pd in self.conn.protection_domains:
        if pd.id == id:
            return pd
    raise KeyError("Protection Domain with ID " + id + " not found")
Get ScaleIO ProtectionDomain object by its id :param id: ID of ProtectionDomain :return: ScaleIO ProtectionDomain object :raise KeyError: No ProtectionDomain with specified ID found :rtype: ProtectionDomain object
380,860
def call_jira_rest(self, url, user, password, method="GET", data=None):
    headers = {'Content-Type': 'application/json'}
    self._logger.debug('Calling JIRA REST method at {0}'.format(url))
    if method == "GET":
        response = requests.get(self.base_url + url,
                                auth=requests.auth.HTTPBasicAuth(user, password))
    elif method == "POST":
        response = requests.post(self.base_url + url, data=json.dumps(data),
                                 auth=requests.auth.HTTPBasicAuth(user, password),
                                 headers=headers)
    else:
        raise ValueError('method argument supports GET or POST values only')
    self._logger.debug('REST call completed')
    return response.json()
Make JIRA REST call :param data: data for rest call :param method: type of call: GET or POST for now :param url: url to call :param user: user for authentication :param password: password for authentication :return:
380,861
def min_cost(self):
    if self._min_cost:
        return self._min_cost
    self._min_cost = np.sum(self.c[np.arange(self.nx), self.solution])
    return self._min_cost
Returns the cost of the best assignment
380,862
def save_archive(archive):
    _assert_obj_type(archive, obj_type=DBArchive)
    _get_handler().store_object(archive)
    return archive.to_comm(light_request=True)
Save `archive` into database and into proper indexes. Attr: archive (obj): Instance of the :class:`.DBArchive`. Returns: obj: :class:`.DBArchive` without data. Raises: InvalidType: When the `archive` is not instance of :class:`.DBArchive`. UnindexablePublication: When there is no index (property) which can be used to index `archive` in database.
380,863
def show_popup(self, *args, **kwargs):
    self.mw = JB_MainWindow(parent=self, flags=QtCore.Qt.Dialog)
    self.mw.setWindowTitle(self.popuptitle)
    self.mw.setWindowModality(QtCore.Qt.ApplicationModal)
    w = QtGui.QWidget()
    self.mw.setCentralWidget(w)
    vbox = QtGui.QVBoxLayout(w)
    pte = QtGui.QPlainTextEdit()
    pte.setPlainText(self.get_popup_text())
    vbox.addWidget(pte)
    # move the window to the cursor position
    d = self.cursor().pos() - self.mw.mapToGlobal(self.mw.pos())
    self.mw.move(d)
    self.mw.show()
Show a popup with a textedit :returns: None :rtype: None :raises: None
380,864
def _proxy(self):
    if self._context is None:
        self._context = IpAccessControlListContext(
            self._version,
            account_sid=self._solution['account_sid'],
            sid=self._solution['sid'],
        )
    return self._context
Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: IpAccessControlListContext for this IpAccessControlListInstance :rtype: twilio.rest.api.v2010.account.sip.ip_access_control_list.IpAccessControlListContext
380,865
def eig_seg(mask, img_list, apply_segmentation_to_images=False, cthresh=0, smooth=1):
    maskvox = mask > 0
    maskseg = mask.clone()
    maskseg[maskvox] = 0
    if isinstance(img_list, np.ndarray):
        mydata = img_list
    elif isinstance(img_list, (tuple, list)):
        mydata = core.image_list_to_matrix(img_list, mask)

    if (smooth > 0):
        for i in range(mydata.shape[0]):
            temp_img = core.make_image(mask, mydata[i, :], pixeltype='float')
            temp_img = utils.smooth_image(temp_img, smooth,
                                          sigma_in_physical_coordinates=True)
            mydata[i, :] = temp_img[mask >= 0.5]

    segids = np.argmax(np.abs(mydata), axis=0) + 1
    segmax = np.max(np.abs(mydata), axis=0)
    maskseg[maskvox] = (segids * (segmax > 1e-09))

    if cthresh > 0:
        for kk in range(int(maskseg.max())):
            timg = utils.threshold_image(maskseg, kk, kk)
            timg = utils.label_clusters(timg, cthresh)
            timg = utils.threshold_image(timg, 1, 1e15) * float(kk)
            maskseg[maskseg == kk] = timg[maskseg == kk]

    if (apply_segmentation_to_images) and (not isinstance(img_list, np.ndarray)):
        for i in range(len(img_list)):
            img = img_list[i]
            img[maskseg != float(i)] = 0
            img_list[i] = img

    return maskseg
Segment a mask into regions based on the max value in an image list. At a given voxel the segmentation label will contain the index to the image that has the largest value. If the 3rd image has the greatest value, the segmentation label will be 3 at that voxel. Arguments --------- mask : ANTsImage D-dimensional mask > 0 defining segmentation region. img_list : collection of ANTsImage or np.ndarray images to use apply_segmentation_to_images : boolean determines if original image list is modified by the segmentation. cthresh : integer throw away isolated clusters smaller than this value smooth : float smooth the input data first by this value Returns ------- ANTsImage Example ------- >>> import ants >>> mylist = [ants.image_read(ants.get_ants_data('r16')), ants.image_read(ants.get_ants_data('r27')), ants.image_read(ants.get_ants_data('r85'))] >>> myseg = ants.eig_seg(ants.get_mask(mylist[0]), mylist)
380,866
def _fmt_metric(value, show_stdv=True):
    if len(value) == 2:
        return '%s:%g' % (value[0], value[1])
    if len(value) == 3:
        if show_stdv:
            return '%s:%g+%g' % (value[0], value[1], value[2])
        return '%s:%g' % (value[0], value[1])
    raise ValueError("wrong metric value")
format metric string
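For example, with the format strings above:

print(_fmt_metric(('auc', 0.8562)))               # auc:0.8562
print(_fmt_metric(('auc', 0.8562, 0.01)))         # auc:0.8562+0.01
print(_fmt_metric(('auc', 0.8562, 0.01), False))  # auc:0.8562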
380,867
def translify(text):
    try:
        res = translit.translify(smart_text(text, encoding))
    except Exception as err:
        # The original fallback template was lost in extraction; a plausible
        # one is used here.
        res = default_value % {'error': err, 'value': text}
    return res
Translify Russian text
380,868
def delete_keyvault_secret(access_token, vault_uri, secret_name):
    endpoint = ''.join([vault_uri,
                        '/secrets/', secret_name,
                        '?api-version=', KEY_VAULT_API])
    return do_delete(endpoint, access_token)
Deletes a secret from a key vault using the key vault URI. Args: access_token (str): A valid Azure authentication token. vault_uri (str): Vault URI e.g. https://myvault.azure.net. secret_name (str): Name of the secret to add. Returns: HTTP response. 200 OK.
380,869
def list_logical_volumes(select_criteria=None, path_mode=False):
    lv_diplay_attr = 'lv_name'
    if path_mode:
        # Parsing the output relies on this column ordering
        lv_diplay_attr = 'vg_name,' + lv_diplay_attr
    cmd = ['lvs', '--noheadings', '-o', lv_diplay_attr]
    if select_criteria:
        cmd.extend(['--select', select_criteria])
    lvs = []
    for lv in check_output(cmd).decode().splitlines():
        if not lv:
            continue
        if path_mode:
            lvs.append('/'.join(lv.strip().split()))
        else:
            lvs.append(lv.strip())
    return lvs
List logical volumes :param select_criteria: str: Limit list to those volumes matching this criteria (see 'lvs -S help' for more details) :param path_mode: bool: return logical volume name in 'vg/lv' format, this format is required for some commands like lvextend :returns: [str]: List of logical volumes
380,870
@classmethod
def fromseconds(cls, seconds):
    try:
        seconds = int(seconds)
    except TypeError:
        seconds = int(seconds.flatten()[0])
    return cls(datetime.timedelta(0, int(seconds)))
Return a |Period| instance based on a given number of seconds.
380,871
def emit(self, **kwargs):
    self._ensure_emit_kwargs(kwargs)
    for slot in self.slots:
        slot(**kwargs)
Emit signal by calling all connected slots. The arguments supplied have to match the signal definition. Args: kwargs: Keyword arguments to be passed to connected slots. Raises: :exc:`InvalidEmit`: If arguments don't match signal specification.
380,872
def limit_disk_io(self, uuid, media,
                  totalbytessecset=False, totalbytessec=0,
                  readbytessecset=False, readbytessec=0,
                  writebytessecset=False, writebytessec=0,
                  totaliopssecset=False, totaliopssec=0,
                  readiopssecset=False, readiopssec=0,
                  writeiopssecset=False, writeiopssec=0,
                  totalbytessecmaxset=False, totalbytessecmax=0,
                  readbytessecmaxset=False, readbytessecmax=0,
                  writebytessecmaxset=False, writebytessecmax=0,
                  totaliopssecmaxset=False, totaliopssecmax=0,
                  readiopssecmaxset=False, readiopssecmax=0,
                  writeiopssecmaxset=False, writeiopssecmax=0,
                  totalbytessecmaxlengthset=False, totalbytessecmaxlength=0,
                  readbytessecmaxlengthset=False, readbytessecmaxlength=0,
                  writebytessecmaxlengthset=False, writebytessecmaxlength=0,
                  totaliopssecmaxlengthset=False, totaliopssecmaxlength=0,
                  readiopssecmaxlengthset=False, readiopssecmaxlength=0,
                  writeiopssecmaxlengthset=False, writeiopssecmaxlength=0,
                  sizeiopssecset=False, sizeiopssec=0,
                  groupnameset=False, groupname=''):
    # Dictionary keys mirror the parameter names; the command name passed
    # to sync() is an assumed reconstruction.
    args = {
        'uuid': uuid,
        'media': media,
        'totalbytessecset': totalbytessecset,
        'totalbytessec': totalbytessec,
        'readbytessecset': readbytessecset,
        'readbytessec': readbytessec,
        'writebytessecset': writebytessecset,
        'writebytessec': writebytessec,
        'totaliopssecset': totaliopssecset,
        'totaliopssec': totaliopssec,
        'readiopssecset': readiopssecset,
        'readiopssec': readiopssec,
        'writeiopssecset': writeiopssecset,
        'writeiopssec': writeiopssec,
        'totalbytessecmaxset': totalbytessecmaxset,
        'totalbytessecmax': totalbytessecmax,
        'readbytessecmaxset': readbytessecmaxset,
        'readbytessecmax': readbytessecmax,
        'writebytessecmaxset': writebytessecmaxset,
        'writebytessecmax': writebytessecmax,
        'totaliopssecmaxset': totaliopssecmaxset,
        'totaliopssecmax': totaliopssecmax,
        'readiopssecmaxset': readiopssecmaxset,
        'readiopssecmax': readiopssecmax,
        'writeiopssecmaxset': writeiopssecmaxset,
        'writeiopssecmax': writeiopssecmax,
        'totalbytessecmaxlengthset': totalbytessecmaxlengthset,
        'totalbytessecmaxlength': totalbytessecmaxlength,
        'readbytessecmaxlengthset': readbytessecmaxlengthset,
        'readbytessecmaxlength': readbytessecmaxlength,
        'writebytessecmaxlengthset': writebytessecmaxlengthset,
        'writebytessecmaxlength': writebytessecmaxlength,
        'totaliopssecmaxlengthset': totaliopssecmaxlengthset,
        'totaliopssecmaxlength': totaliopssecmaxlength,
        'readiopssecmaxlengthset': readiopssecmaxlengthset,
        'readiopssecmaxlength': readiopssecmaxlength,
        'writeiopssecmaxlengthset': writeiopssecmaxlengthset,
        'writeiopssecmaxlength': writeiopssecmaxlength,
        'sizeiopssecset': sizeiopssecset,
        'sizeiopssec': sizeiopssec,
        'groupnameset': groupnameset,
        'groupname': groupname,
    }
    self._limit_disk_io_action_chk.check(args)

    self._client.sync('kvm.limit_disk_io', args)
Limit disk IO on a media of a kvm machine :param uuid: uuid of the kvm container (same as the one used in create) :param media: the media to limit the diskio :return:
380,873
def get_occurrence(event_id, occurrence_id=None, year=None, month=None,
                   day=None, hour=None, minute=None, second=None, tzinfo=None):
    if occurrence_id:
        occurrence = get_object_or_404(Occurrence, id=occurrence_id)
        event = occurrence.event
    elif None not in (year, month, day, hour, minute, second):
        event = get_object_or_404(Event, id=event_id)
        date = timezone.make_aware(
            datetime.datetime(int(year), int(month), int(day),
                              int(hour), int(minute), int(second)),
            tzinfo)
        occurrence = event.get_occurrence(date)
        if occurrence is None:
            raise Http404
    else:
        raise Http404
    return event, occurrence
Because occurrences don't have to be persisted, there must be two ways to retrieve them. both need an event, but if its persisted the occurrence can be retrieved with an id. If it is not persisted it takes a date to retrieve it. This function returns an event and occurrence regardless of which method is used.
380,874
def get_item_list(self, item_list_url):
    resp = self.api_request(str(item_list_url))
    return ItemList(resp['items'], self, str(item_list_url), resp['name'])
Retrieve an item list from the server as an ItemList object :type item_list_url: String or ItemList :param item_list_url: URL of the item list to retrieve, or an ItemList object :rtype: ItemList :returns: The ItemList :raises: APIError if the request was not successful
380,875
def fast_deepcopy(obj):
    with BytesIO() as buf:
        pickle.dump(obj, buf)
        buf.seek(0)
        obj_new = pickle.load(buf)
    return obj_new
This is a faster implementation of deepcopy via pickle. It is meant primarily for sets of Statements with complex hierarchies but can be used for any object.
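A quick usage check of the helper above:

nested = {"a": [1, 2, {"b": {3, 4}}]}
dup = fast_deepcopy(nested)
assert dup == nested and dup is not nested
assert dup["a"][2]["b"] is not nested["a"][2]["b"]  # deep, not shallow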
380,876
def set_lock_code(ctx, lock_code, new_lock_code, clear, generate, force):
    # Prompt and error message literals below are plausible reconstructions;
    # the originals were lost in extraction.
    dev = ctx.obj['dev']

    def prompt_new_lock_code():
        return prompt_lock_code(prompt='Enter your new lock code')

    def prompt_current_lock_code():
        return prompt_lock_code(prompt='Enter your current lock code')

    def change_lock_code(lock_code, new_lock_code):
        lock_code = _parse_lock_code(ctx, lock_code)
        new_lock_code = _parse_lock_code(ctx, new_lock_code)
        try:
            dev.write_config(
                device_config(config_lock=new_lock_code),
                reboot=True,
                lock_key=lock_code)
        except Exception as e:
            logger.error('Changing the lock code failed', exc_info=e)
            ctx.fail('Failed to change the lock code. Wrong current code?')

    def set_lock_code(new_lock_code):
        new_lock_code = _parse_lock_code(ctx, new_lock_code)
        try:
            dev.write_config(
                device_config(config_lock=new_lock_code),
                reboot=True)
        except Exception as e:
            logger.error('Setting the lock code failed', exc_info=e)
            ctx.fail('Failed to set the lock code.')

    if generate and new_lock_code:
        ctx.fail('Invalid options: --new-lock-code conflicts with --generate.')

    if clear:
        new_lock_code = CLEAR_LOCK_CODE

    if generate:
        new_lock_code = b2a_hex(os.urandom(16)).decode()
        click.echo('Using a randomly generated lock code: {}'.format(new_lock_code))
        force or click.confirm(
            'Lock the configuration with this lock code?', abort=True, err=True)

    if dev.config.configuration_locked:
        if lock_code:
            if new_lock_code:
                change_lock_code(lock_code, new_lock_code)
            else:
                new_lock_code = prompt_new_lock_code()
                change_lock_code(lock_code, new_lock_code)
        else:
            if new_lock_code:
                lock_code = prompt_current_lock_code()
                change_lock_code(lock_code, new_lock_code)
            else:
                lock_code = prompt_current_lock_code()
                new_lock_code = prompt_new_lock_code()
                change_lock_code(lock_code, new_lock_code)
    else:
        if lock_code:
            ctx.fail('There is no current lock code set. Use --new-lock-code to set one.')
        else:
            if new_lock_code:
                set_lock_code(new_lock_code)
            else:
                new_lock_code = prompt_new_lock_code()
                set_lock_code(new_lock_code)
Set or change the configuration lock code. A lock code may be used to protect the application configuration. The lock code must be a 32 characters (16 bytes) hex value.
380,877
def apply(self, vpc):
    assert vpc is not None
    # The enclosing iteration and group lookup were lost in extraction;
    # `rule`, `groups` and `group` below come from that lost context.
    logger.debug("Authorizing %s %s %s to address:%s name:%s",
                 rule.protocol, rule.from_port, rule.to_port,
                 rule.address, rule.group_name)
    group_to_authorize = groups.get(rule.group_name, None)
    try:
        group.authorize(rule.protocol, rule.from_port, rule.to_port,
                        rule.address, group_to_authorize, None)
    except Exception as e:
        print "could not authorize group %s" % group_to_authorize
        raise
    return self
returns a list of new security groups that will be added
380,878
def setup(self):
    if len(self.plugins_tabs) == 0:
        self.close()
        return
    self.list.clear()
    current_path = self.current_path
    filter_text = self.filter_text

    # A '@' in the filter text means the user is looking for symbols.
    trying_for_symbol = ('@' in self.filter_text)

    if trying_for_symbol:
        self.mode = self.SYMBOL_MODE
        self.setup_symbol_list(filter_text, current_path)
    else:
        self.mode = self.FILE_MODE
        self.setup_file_list(filter_text, current_path)
    self.set_dialog_position()
Setup list widget content.
380,879
def get_asset_from_edit_extension_draft(self, publisher_name, draft_id, asset_type, extension_name, **kwargs):
    route_values = {}
    if publisher_name is not None:
        route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str')
    if draft_id is not None:
        route_values['draftId'] = self._serialize.url('draft_id', draft_id, 'str')
    if asset_type is not None:
        route_values['assetType'] = self._serialize.url('asset_type', asset_type, 'str')
    query_parameters = {}
    if extension_name is not None:
        query_parameters['extensionName'] = self._serialize.query('extension_name', extension_name, 'str')
    # The location_id GUID and API version were lost in extraction; the
    # values below are placeholders.
    response = self._send(http_method='GET',
                          location_id='00000000-0000-0000-0000-000000000000',
                          version='5.0-preview.1',
                          route_values=route_values,
                          query_parameters=query_parameters,
                          accept_media_type='application/octet-stream')
    if "callback" in kwargs:
        callback = kwargs["callback"]
    else:
        callback = None
    return self._client.stream_download(response, callback=callback)
GetAssetFromEditExtensionDraft. [Preview API] :param str publisher_name: :param str draft_id: :param str asset_type: :param str extension_name: :rtype: object
380,880
def bitop_xor(self, dest, key, *keys):
    return self.execute(b'BITOP', b'XOR', dest, key, *keys)
Perform bitwise XOR operations between strings.
380,881
def persistent_object_context_changed(self):
    super().persistent_object_context_changed()

    def change_registration(registered_object, unregistered_object):
        if registered_object and registered_object.uuid == self.parent_uuid:
            self.__parent = registered_object

    if self.persistent_object_context:
        self.__registration_listener = self.persistent_object_context.registration_event.listen(change_registration)
        self.__parent = self.persistent_object_context.get_registered_object(self.parent_uuid)
Override from PersistentObject.
380,882
def best_training_job(self):
    self._ensure_last_tuning_job()
    tuning_job_describe_result = \
        self.estimator.sagemaker_session.sagemaker_client.describe_hyper_parameter_tuning_job(
            HyperParameterTuningJobName=self.latest_tuning_job.name)
    try:
        return tuning_job_describe_result['BestTrainingJob']['TrainingJobName']
    except KeyError:
        raise Exception('Best training job not available for tuning job: {}'.format(
            self.latest_tuning_job.name))
Return name of the best training job for the latest hyperparameter tuning job. Raises: Exception: If there is no best training job available for the hyperparameter tuning job.
380,883
def set_error_page(self, loadbalancer, html):
    uri = "/loadbalancers/%s/errorpage" % utils.get_id(loadbalancer)
    req_body = {"errorpage": {"content": html}}
    resp, body = self.api.method_put(uri, body=req_body)
    return body
A single custom error page may be added per account load balancer with an HTTP protocol. Page updates will override existing content. If a custom error page is deleted, or the load balancer is changed to a non-HTTP protocol, the default error page will be restored.
380,884
def structure_recursion(self, struct, folder):
    has_subfolder = False
    for name, substruct in struct.items():
        subfolder = os.path.join(folder, name)
        if not isinstance(substruct, dict):
            product_name, data_name = self._url_to_props(substruct)
            if '.' in data_name:
                data_type = MimeType(data_name.split('.')[-1])
                data_name = data_name.rsplit('.', 1)[0]
            else:
                data_type = MimeType.RAW
            if data_name in self.bands + self.metafiles:
                self.download_list.append(DownloadRequest(url=substruct,
                                                          filename=subfolder,
                                                          data_type=data_type,
                                                          data_name=data_name,
                                                          product_name=product_name))
        else:
            has_subfolder = True
            self.structure_recursion(substruct, subfolder)
    if not has_subfolder:
        self.folder_list.append(folder)
From nested dictionaries representing .SAFE structure it recursively extracts all the files that need to be downloaded and stores them into class attribute `download_list`. :param struct: nested dictionaries representing a part of .SAFE structure :type struct: dict :param folder: name of folder where this structure will be saved :type folder: str
380,885
def get_droplet(self, droplet_id):
    return Droplet.get_object(api_token=self.token, droplet_id=droplet_id)
Return a Droplet by its ID.
380,886
def get_inspector():
    global _INSPECTOR
    if _INSPECTOR:
        return _INSPECTOR
    else:
        bind = op.get_bind()
        _INSPECTOR = sa.engine.reflection.Inspector.from_engine(bind)
        return _INSPECTOR
Reuse inspector
380,887
def write_format_data(self, format_dict):
    # This is Python 2 code (note the print >> syntax).
    result_repr = format_dict['text/plain']
    if '\n' in result_repr:
        # So that multi-line strings line up with the left column of
        # the screen, instead of having the output prompt mess up
        # their first line.
        prompt_template = self.shell.prompt_manager.out_template
        if prompt_template and not prompt_template.endswith('\n'):
            # But avoid extraneous empty lines.
            result_repr = '\n' + result_repr

    print >>io.stdout, result_repr
Write the format data dict to the frontend. This default version of this method simply writes the plain text representation of the object to ``io.stdout``. Subclasses should override this method to send the entire `format_dict` to the frontends. Parameters ---------- format_dict : dict The format dict for the object passed to `sys.displayhook`.
380,888
async def wait_tasks(tasks, flatten=True):
    rets = await asyncio.gather(*tasks)
    # '__iter__' restored from context: flatten only when every result is iterable.
    if flatten and all(map(lambda x: hasattr(x, '__iter__'), rets)):
        rets = list(itertools.chain(*rets))
    return rets
Gather a list of asynchronous tasks and wait their completion. :param list tasks: A list of *asyncio* tasks wrapped in :func:`asyncio.ensure_future`. :param bool flatten: If ``True`` the returned results are flattened into one list if the tasks return iterable objects. The parameter does nothing if all the results are not iterable. :returns: The results of tasks as a list or as a flattened list
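A self-contained usage sketch; the `fetch` coroutine is invented for illustration:

import asyncio

async def fetch(n):
    await asyncio.sleep(0)
    return [n, n + 1]

async def main():
    tasks = [asyncio.ensure_future(fetch(i)) for i in (0, 2)]
    print(await wait_tasks(tasks))  # flattened: [0, 1, 2, 3]

asyncio.run(main())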
380,889
def create_database(self, dbname, partitioned=False, **kwargs):
    new_db = self._DATABASE_CLASS(self, dbname, partitioned=partitioned)
    try:
        # 'throw_on_exists' restored from the docstring below.
        new_db.create(kwargs.get('throw_on_exists', False))
    except CloudantDatabaseException as ex:
        if ex.status_code == 412:
            raise CloudantClientException(412, dbname)
    super(CouchDB, self).__setitem__(dbname, new_db)
    return new_db
Creates a new database on the remote server with the name provided and adds the new database object to the client's locally cached dictionary before returning it to the caller. The method will optionally throw a CloudantClientException if the database exists remotely. :param str dbname: Name used to create the database. :param bool throw_on_exists: Boolean flag dictating whether or not to throw a CloudantClientException when attempting to create a database that already exists. :param bool partitioned: Create as a partitioned database. Defaults to ``False``. :returns: The newly created database object
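A usage sketch against a local CouchDB; the credentials, URL, and database name are placeholders:

from cloudant.client import CouchDB

client = CouchDB('admin', 'pass', url='http://localhost:5984', connect=True)
db = client.create_database('orders', throw_on_exists=True)  # raises if 'orders' already exists
client.disconnect()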
380,890
def _cmp_models(self, m1, m2):
    # The original tuple of ignored keys was lost in extraction; 'description'
    # and 'title' are hypothetical stand-ins for documentation-only fields.
    ignored_keys = ('description', 'title')

    def _cleanup(d):
        for k in ignored_keys:
            if k in d:
                del d[k]
        for v in list(d.values()):
            if isinstance(v, dict):
                _cleanup(v)

    _cleanup(m1)
    _cleanup(m2)
    return not m1 == m2
Compare two models from different swagger APIs and tell if they are equal (return 0), or not (return != 0)
380,891
def get_canonical_and_alternates_urls(
        url, drop_ln=True, washed_argd=None, quote_path=False):
    dummy_scheme, dummy_netloc, path, dummy_params, query, fragment = urlparse(
        url)
    # 'CFG_SITE_URL', 'ln', and 'CFG_SITE_LANGS' restored from the docstring
    # and standard Invenio configuration names.
    canonical_scheme, canonical_netloc = urlparse(cfg.get('CFG_SITE_URL'))[0:2]
    parsed_query = washed_argd or parse_qsl(query)
    no_ln_parsed_query = [(key, value) for (key, value) in parsed_query
                          if key != 'ln']
    if drop_ln:
        canonical_parsed_query = no_ln_parsed_query
    else:
        canonical_parsed_query = parsed_query
    if quote_path:
        path = urllib.quote(path)
    canonical_query = urlencode(canonical_parsed_query)
    canonical_url = urlunparse(
        (canonical_scheme, canonical_netloc, path, dummy_params,
         canonical_query, fragment))
    alternate_urls = {}
    for ln in cfg.get('CFG_SITE_LANGS'):
        alternate_query = urlencode(no_ln_parsed_query + [('ln', ln)])
        alternate_url = urlunparse(
            (canonical_scheme, canonical_netloc, path, dummy_params,
             alternate_query, fragment))
        alternate_urls[ln] = alternate_url
    return canonical_url, alternate_urls
Given an Invenio URL returns a tuple with two elements. The first is the canonical URL, that is the original URL with CFG_SITE_URL prefix, and with the ln= argument stripped. The second element is a mapping, language code -> alternate URL @param quote_path: if True, the path section of the given C{url} is quoted according to RFC 2396
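An illustrative call; the site URL and language list depend on the Invenio configuration, and the values shown here are hypothetical:

# With cfg = {'CFG_SITE_URL': 'https://example.org', 'CFG_SITE_LANGS': ['en', 'fr']}:
canonical, alternates = get_canonical_and_alternates_urls(
    'https://example.org/record/1?ln=fr&of=hx')
# canonical  -> 'https://example.org/record/1?of=hx'
# alternates -> {'en': '...?of=hx&ln=en', 'fr': '...?of=hx&ln=fr'}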
380,892
def call_parallel(self, cdata, low):
    # Key names ('args', 'kwargs', 'name', '__id__') and the comment string
    # restored from the Salt source.
    name = (cdata.get('args') or [None])[0] or cdata['kwargs'].get('name')
    if not name:
        name = low.get('name', low.get('__id__'))
    proc = salt.utils.process.MultiprocessingProcess(
        target=self._call_parallel_target,
        args=(name, cdata, low))
    proc.start()
    ret = {'name': name,
           'result': None,
           'changes': {},
           'comment': 'Started in a separate process',
           'proc': proc}
    return ret
Call the state defined in the given cdata in parallel
380,893
def start(parallel, items, config, dirs=None, name=None, multiplier=1,
          max_multicore=None):
    if name:
        checkpoint_dir = utils.safe_makedir(os.path.join(dirs["work"], "checkpoints_parallel"))
        checkpoint_file = os.path.join(checkpoint_dir, "%s.done" % name)
    else:
        checkpoint_file = None
    sysinfo = system.get_info(dirs, parallel, config.get("resources", {}))
    items = [x for x in items if x is not None] if items else []
    max_multicore = int(max_multicore or sysinfo.get("cores", 1))
    parallel = resources.calculate(parallel, items, sysinfo, config,
                                   multiplier=multiplier,
                                   max_multicore=max_multicore)
    try:
        view = None
        if parallel["type"] == "ipython":
            if checkpoint_file and os.path.exists(checkpoint_file):
                logger.info("Running locally instead of distributed -- checkpoint passed: %s" % name)
                parallel["cores_per_job"] = 1
                parallel["num_jobs"] = 1
                parallel["checkpointed"] = True
                yield multi.runner(parallel, config)
            else:
                from bcbio.distributed import ipython
                with ipython.create(parallel, dirs, config) as view:
                    yield ipython.runner(view, parallel, dirs, config)
        else:
            yield multi.runner(parallel, config)
    except:
        if view is not None:
            from bcbio.distributed import ipython
            ipython.stop(view)
        raise
    else:
        for x in ["cores_per_job", "num_jobs", "mem"]:
            parallel.pop(x, None)
        if checkpoint_file:
            with open(checkpoint_file, "w") as out_handle:
                out_handle.write("done\n")
Start a parallel cluster or machines to be used for running remote functions. Returns a function used to process, in parallel, items with a given function. Allows sharing of a single cluster across multiple functions with identical resource requirements. Uses local execution for non-distributed clusters or completed jobs. A checkpoint directory keeps track of finished tasks, avoiding spinning up clusters for sections that have been previously processed. multiplier -- Number of expected jobs per initial input item. Used to avoid underscheduling cores when an item is split during processing. max_multicore -- The maximum number of cores to use for each process. Can be used to reduce multicore usage when jobs run faster on single cores.
380,894
def get_serializer(self, *args, **kwargs):
    serializer_class = self.get_serializer_class()
    kwargs['context'] = self.get_serializer_context()  # 'context' per the DRF GenericAPIView source
    return serializer_class(*args, **kwargs)
Return the serializer instance that should be used for validating and deserializing input, and for serializing output.
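A typical call site inside a Django REST framework generic view; the view name is invented and the serializer class is assumed to exist:

from rest_framework import generics, status
from rest_framework.response import Response

class SnippetList(generics.GenericAPIView):  # invented name
    serializer_class = SnippetSerializer     # assumed to exist

    def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        return Response(serializer.data, status=status.HTTP_201_CREATED)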
380,895
def list_icmp_block(zone, permanent=True):
    # Command fragments restored from firewall-cmd conventions used by this module.
    cmd = '--zone={0} --list-icmp-blocks'.format(zone)
    if permanent:
        cmd += ' --permanent'
    return __firewall_cmd(cmd).split()
List ICMP blocks on a zone .. versionadded:: 2015.8.0 CLI Example: .. code-block:: bash salt '*' firewalld.list_icmp_block zone
380,896
def ttl(self, response):
    if response.code != 200:
        return 0
    # Read-only method list restored from the docstring.
    if self.request.method not in ['GET', 'HEAD', 'OPTIONS']:
        return 0
    try:
        pragma = self.request.headers['Pragma']
        if pragma == 'no-cache':
            return 0
    except KeyError:
        pass
    try:
        cache_control = self.request.headers['Cache-Control']
        # Directive names restored from the docstring; any of these disables caching.
        for option in ['private', 'no-cache', 'no-store', 'must-revalidate', 'proxy-revalidate']:
            if cache_control.find(option) >= 0:  # bug fix: find() returns -1, not falsy, on a miss
                return 0
        options = parse_cache_control(cache_control)
        try:
            return int(options['s-maxage'])
        except KeyError:
            pass
        try:
            return int(options['max-age'])
        except KeyError:
            pass
        # The recovered source repeated the max-age/s-maxage lookups here to
        # clamp an uninitialised ``ttl``; that unreachable code is folded into
        # the two returns above.
    except KeyError:
        pass
    try:
        expires = self.request.headers['Expires']
        # Date format inferred from the docstring example ('Thu, 01 Dec 1983 20:00:00 GMT').
        return time.mktime(time.strptime(expires, '%a, %d %b %Y %H:%M:%S %Z')) - time.time()
    except KeyError:
        pass
    return 0  # no cache headers at all: default to no caching
Returns time to live in seconds. 0 means no caching. Criteria: - response code 200 - read-only method (GET, HEAD, OPTIONS) Plus http headers: - cache-control: option1, option2, ... where options are: private | public no-cache no-store max-age: seconds s-maxage: seconds must-revalidate proxy-revalidate - expires: Thu, 01 Dec 1983 20:00:00 GMT - pragma: no-cache (=cache-control: no-cache) See http://www.mobify.com/blog/beginners-guide-to-http-cache-headers/ TODO: tests
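The helper `parse_cache_control` is called above but not shown; a minimal sketch of what it might look like, returning directive values keyed by name:

def parse_cache_control(value):
    # 'max-age=60, public' -> {'max-age': '60', 'public': True}
    options = {}
    for part in value.split(','):
        part = part.strip()
        if '=' in part:
            key, _, val = part.partition('=')
            options[key.strip().lower()] = val.strip()
        elif part:
            options[part.lower()] = True
    return options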
380,897
def _send_event(self, event):
    flush = False
    if self._merge_data_trace_events(event):
        return
    if isinstance(event, events.TraceTimestamp):
        for ev in self._pending_events:
            ev.timestamp = event.timestamp
        flush = True
    else:
        self._pending_events.append(event)
    if isinstance(event, events.TraceOverflow):
        flush = True
    if flush:
        self._flush_events()
! @brief Process event objects and decide when to send to event sink. This method handles the logic to associate a timestamp event with the prior other event. A list of pending events is built up until either a timestamp or overflow event is generated, at which point all pending events are flushed to the event sink. If a timestamp is seen, the timestamp of all pending events is set prior to flushing.
380,898
def headerize(provenances):
    # The original six-entry mapping was lost in extraction; these entries are
    # representative guesses for title-cased words that need special casing.
    special_case = {
        'Gdal': 'GDAL',
        'Qgis': 'QGIS',
        'Pyqt': 'PyQt',
        'Os': 'OS',
        'Inasafe': 'InaSAFE',
        'Url': 'URL',
    }
    for key, value in list(provenances.items()):
        if '_' in key:
            header = key.replace('_', ' ').title()
        else:
            header = key.title()
        header_list = header.split()
        proper_word = None
        proper_word_index = None
        for index, word in enumerate(header_list):
            if word in list(special_case.keys()):
                proper_word = special_case[word]
                proper_word_index = index
        if proper_word:
            header_list[proper_word_index] = proper_word
            header = ' '.join(header_list)
        provenances.update({
            key: {
                # Dict keys and format string are assumptions; the literals were stripped.
                'header': '{header}'.format(header=header),
                'content': value,
            }
        })
    return provenances
Create a header for each keyword. :param provenances: The keywords. :type provenances: dict :return: New keywords with header for every keyword. :rtype: dict
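An illustrative before/after, using an invented keyword name (the exact header format string was lost in extraction, so plain headers are assumed):

provenances = {'analysis_extent': [0.0, 0.0, 1.0, 1.0]}
headerize(provenances)
# -> {'analysis_extent': {'header': 'Analysis Extent',
#                         'content': [0.0, 0.0, 1.0, 1.0]}}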
380,899
def dataset_search(self, dataset_returning_query):
    self._validate_search_query(dataset_returning_query)
    return self._execute_search_query(
        dataset_returning_query,
        DatasetSearchResult
    )
Run a dataset query against Citrination. :param dataset_returning_query: :class:`DatasetReturningQuery` to execute. :type dataset_returning_query: :class:`DatasetReturningQuery` :return: Dataset search result object with the results of the query. :rtype: :class:`DatasetSearchResult`
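A hedged usage sketch against the Citrination client; the API key is a placeholder and the exact client/query construction may differ by client version:

from citrination_client import CitrinationClient
from citrination_client.search import DatasetReturningQuery

client = CitrinationClient('YOUR_API_KEY')  # placeholder key
query = DatasetReturningQuery(size=10)
result = client.search.dataset_search(query)
print(result.total_num_hits)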