Dataset columns:
  Unnamed: 0 — int64, values 0 to 389k
  code — string, lengths 26 to 79.6k
  docstring — string, lengths 1 to 46.9k
376,800
def no_company_with_insufficient_companies_house_data(value):
    for prefix, name in company_types_with_insufficient_companies_house_data:
        if value.upper().startswith(prefix):
            raise ValidationError(
                MESSAGE_INSUFFICIENT_DATA,
                params={'name': name}  # params key stripped in extraction; 'name' is assumed
            )
Confirms that the company number is not for a company that Companies House does not hold information on. Args: value (string): The company number to check. Raises: django.forms.ValidationError
376,801
def isSuperTagSetOf(self, tagSet):
    if len(tagSet) < self.__lenOfSuperTags:
        return False
    return self.__superTags == tagSet[:self.__lenOfSuperTags]
Test type relationship against given *TagSet* The callee is considered to be a supertype of given *TagSet* tag-wise if all tags in *TagSet* are present in the callee and they are in the same order. Parameters ---------- tagSet: :class:`~pyasn1.type.tag.TagSet` *TagSet* object to evaluate against the callee Returns ------- : :py:class:`bool` `True` if callee is a supertype of *tagSet*
376,802
def _maybe_repeat(self, x):
    if isinstance(x, list):
        assert len(x) == self.n
        return x
    else:
        return [x] * self.n
Utility function for processing arguments that are singletons or lists. Args: x: either a list of self.n elements, or not a list. Returns: a list of self.n elements.
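A tiny sketch of the contract, assuming a host object whose n is 3 (the _Host class is invented for the example):

import types

class _Host:
    n = 3
    _maybe_repeat = _maybe_repeat  # reuse the module-level function above as a method

h = _Host()
assert h._maybe_repeat(7) == [7, 7, 7]          # singleton is repeated n times
assert h._maybe_repeat([1, 2, 3]) == [1, 2, 3]  # length-n list passes through unchanged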
376,803
def remove_object(self, name):
    if name not in self._object_map:
        # error message stripped in extraction; wording assumed
        raise RuntimeError('No object registered under name "{}"'.format(name))
    for fn_name in list(self._function_map.keys()):
        # the two separator literals were stripped in extraction; '.' and '#' are assumptions
        if fn_name.startswith(name + '.') or fn_name.startswith(name + '#'):
            self._remove_function(fn_name)
    del self._object_map[name]
Remove the object exposed under that name. If no object is registered under the supplied name, a RuntimeError is raised. :param name: Name of object to be removed.
376,804
def _base64_to_file(b64str, outfpath, writetostrio=False):
    # the body was largely lost in extraction; reconstructed from the docstring below
    try:
        filebytes = base64.b64decode(b64str)
        # writing back to a StringIO object
        if writetostrio:
            outobj = StrIO(filebytes)
            return outobj
        # otherwise, write to the output file path
        else:
            with open(outfpath, 'wb') as outfd:
                outfd.write(filebytes)
            if os.path.exists(outfpath):
                return outfpath
            return None
    except Exception:
        return None
This converts the base64 encoded string to a file. Parameters ---------- b64str : str A base64 encoded string that is the output of `base64.b64encode`. outfpath : str The path to where the file will be written. This should include an appropriate extension for the file (e.g. a base64 encoded string that represents a PNG should have its `outfpath` end in a '.png') so the OS can open these files correctly. writetostrio : bool If this is True, will return a StringIO object with the binary stream decoded from the base64-encoded input string `b64str`. This can be useful to embed these into other files without having to write them to disk. Returns ------- str or StringIO object If `writetostrio` is False, will return the output file's path as a str. If it is True, will return a StringIO object directly. If writing the file fails in either case, will return None.
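A usage sketch for the reconstructed helper above (the PNG bytes are a stand-in for real image data):

import base64
b64 = base64.b64encode(b'\x89PNG\r\n\x1a\n...')               # real image bytes in practice
_base64_to_file(b64, 'out.png')                               # writes out.png, returns its path
buf = _base64_to_file(b64, 'unused.png', writetostrio=True)   # in-memory StringIO instead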
376,805
def get_userids_for_address(self, address: Address) -> Set[str]:
    if not self.is_address_known(address):
        return set()
    return self._address_to_userids[address]
Return all known user ids for the given ``address``.
376,806
def rfc2426(self):
    if self.type:
        p = {"type": self.type}
    else:
        p = {}
    return rfc2425encode(self.name, self.cred, p)
RFC2426-encode the field content. :return: the field in the RFC 2426 format. :returntype: `str`
376,807
def get_model():
    # the attribute name and config keys were stripped in extraction; names below are assumptions
    if not hasattr(g, 'model'):
        g.model = load_model(current_app.config['MODEL_PATH'],
                             current_app.config['MODEL_TYPE'],
                             current_app.config['MODEL_ARGS'])
    return g.model
Get the NN model that's being analyzed from the request context. Put the model in the request context if it is not yet there. Returns: instance of :class:`.models.model.Model` or derived class
376,808
def append_id(expr, id_col='append_id'):  # default stripped in extraction; 'append_id' is assumed
    if hasattr(expr, '_xflow_append_id'):
        return expr._xflow_append_id(id_col)
    else:
        return _append_id(expr, id_col)
Append an ID column to the current DataFrame to form a new DataFrame. :param str id_col: name of the appended ID field. :return: DataFrame with ID field :rtype: DataFrame
376,809
def refresh_token():
    current_user = get_jwt_identity()
    if not current_user:
        return ErrorResponse(status=401, message="Not logged in")
    access_token = create_access_token(identity=current_user)
    # success message stripped in extraction; wording assumed
    return AuthResponse(status=201, message="Token refreshed",
                        access_token=access_token, auth=UserAuth())
Refreshes login token using refresh token # noqa: E501 :rtype: UserAuth
376,810
def vMh2_to_m2Lambda(v, Mh2, C, scale_high):
    # Wilson-coefficient keys and result keys were stripped in extraction;
    # 'phi', 'phiBox', 'phiD', 'v', 'Mh2', 'm2' and 'Lambda' are assumptions
    if C['phi'] == 0 and C['phiBox'] == 0 and C['phiD'] == 0:
        return _vMh2_to_m2Lambda_SM(v, Mh2)
    else:
        def f0(x):
            m2, Lambda = x
            d = m2Lambda_to_vMh2(m2=m2.real, Lambda=Lambda.real,
                                 C=C, scale_high=scale_high)
            return np.array([d['v'] - v, d['Mh2'] - Mh2])
        dSM = _vMh2_to_m2Lambda_SM(v, Mh2)
        x0 = np.array([dSM['m2'], dSM['Lambda']])
        try:
            xres = scipy.optimize.newton_krylov(f0, x0)
        except scipy.optimize.nonlin.NoConvergence:
            raise ValueError("No solution for m^2 and Lambda found")
        return {'m2': xres[0], 'Lambda': xres[1]}
Function to numerically determine the parameters of the Higgs potential given the physical Higgs VEV and mass.
376,811
def image_list(name=None, profile=None, **kwargs):
    conn = _auth(profile, **kwargs)
    return conn.image_list(name)
Return a list of available images (nova images-list + nova image-show) If a name is provided, only that image will be displayed. CLI Examples: .. code-block:: bash salt '*' nova.image_list salt '*' nova.image_list myimage
376,812
def check_datetime(method, dictionary, fields, label=None):
    improperly_formatted = []
    values = []
    for field in fields:
        if field in dictionary and dictionary[field] is not None:
            if (type(dictionary[field]) not in (datetime.datetime, datetime.date)
                    and not ISO_8601_REGEX.match(dictionary[field])):
                improperly_formatted.append(field)
                values.append(dictionary[field])
    if improperly_formatted:
        # message format strings were stripped in extraction; wording assumed
        error_label = ' in %s' % label if label else ''
        raise PyCronofyValidationError(
            'Improperly formatted datetime fields%s for method "%s": %s, %s' % (
                error_label, method, improperly_formatted, values),
            method, improperly_formatted, values
        )
Checks if the specified fields are formatted correctly if they have a value. Throws an exception on incorrectly formatted fields. :param dict dictionary: Dictionary to check. :param tuple fields: Fields to check. :param string label: Dictionary name.
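A brief usage sketch, assuming the validator and its ISO_8601_REGEX are in scope (the event payload is illustrative):

import datetime
event = {'start': '2019-03-01T09:00:00Z',
         'end': datetime.datetime(2019, 3, 1, 10, 0)}
# passes silently: 'end' is a datetime, 'start' matches ISO 8601
check_datetime('upsert_event', event, ('start', 'end'), label='event')
# a malformed value such as {'start': 'tomorrow'} would raise PyCronofyValidationError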
376,813
def do_fuzzy(self, word):
    word = list(preprocess_query(word))[0]
    print(white(make_fuzzy(word)))
Compute fuzzy extensions of word. FUZZY lilas
376,814
def set_foreground(self, fg, isRGBA=None):
    DEBUG_MSG("set_foreground()", 1, self)
    self.select()
    GraphicsContextBase.set_foreground(self, fg, isRGBA)
    self._pen.SetColour(self.get_wxcolour(self.get_rgb()))
    self.gfx_ctx.SetPen(self._pen)
    self.unselect()
Set the foreground color. fg can be a matlab format string, a html hex color string, an rgb unit tuple, or a float between 0 and 1. In the latter case, grayscale is used.
376,815
def expire(self, time=None):
    if time is None:
        time = self.__timer()
    root = self.__root
    curr = root.next
    links = self.__links
    cache_delitem = Cache.__delitem__
    while curr is not root and curr.expire < time:
        cache_delitem(self, curr.key)
        del links[curr.key]
        next = curr.next
        curr.unlink()
        curr = next
Remove expired items from the cache.
376,816
def weight_field(self, f):
    if f is None:
        raise ValueError("Field name cannot be None.")
    self._assert_ml_fields_valid(f)
    return _change_singleton_roles(self, {f: FieldRole.WEIGHT}, clear_feature=True)
Select one field as the weight field. Note that this field will be excluded from feature fields. :param f: Selected weight field :type f: str :rtype: DataFrame
376,817
def validate_full_path(cls, full_path, **kwargs):
    # kwarg name, error messages, dict keys and format strings were stripped
    # in extraction; the values below are assumptions
    _client = kwargs.pop('client', None) or cls._client or client
    full_path = full_path.strip()
    if not full_path:
        raise Exception('Invalid path (empty): {}'.format(full_path))
    match = cls.VAULT_PATH_RE.match(full_path)
    if not match:
        raise Exception('Invalid path: {}'.format(full_path))
    path_parts = match.groupdict()
    if path_parts.get('vault') == '~':
        path_parts = dict(domain=None, vault=None)
    if None in path_parts.values():
        user = _client.get('/v1/user', {})
        defaults = {
            'domain': user['account']['domain'],
            'vault': 'user-{0}'.format(user['id'])
        }
        path_parts = dict((k, v or defaults.get(k))
                          for k, v in path_parts.items())
    full_path = '{domain}:{vault}'.format(**path_parts)
    path_parts['full_path'] = full_path
    return full_path, path_parts
Helper method to return a full path from a full or partial path. If no domain, assumes user's account domain If the vault is "~", assumes personal vault. Valid vault paths include: domain:vault domain:vault:/path domain:vault/path vault:/path vault ~/ Invalid vault paths include: /vault/ /path / :/ Does not allow overrides for any vault path components.
376,818
def ensure_dir(path):
    os.makedirs(os.path.abspath(os.path.dirname(path)), exist_ok=True)
Create all parent directories of path if they don't exist. Args: path: Path-like object. Create parent dirs to this path. Return: None.
376,819
def termination_check(self):
    with self._transfer_lock:
        with self._disk_operation_lock:
            return (self._download_terminate or
                    len(self._exceptions) > 0 or
                    (self._all_remote_files_processed and
                     len(self._transfer_set) == 0 and
                     len(self._disk_set) == 0))
Check if terminated :param Downloader self: this :rtype: bool :return: if terminated
376,820
def Archimedes(L, rhof, rhop, mu, g=g):
    return L**3*rhof*(rhop - rhof)*g/mu**2
r'''Calculates Archimedes number, `Ar`, for a fluid and particle with the given densities, characteristic length (usually diameter of particle), viscosity, and gravity. .. math:: Ar = \frac{L^3 \rho_f(\rho_p-\rho_f)g}{\mu^2} Parameters ---------- L : float Characteristic length, typically particle diameter [m] rhof : float Density of fluid, [kg/m^3] rhop : float Density of particle, [kg/m^3] mu : float Viscosity of fluid, [Pa*s] g : float, optional Acceleration due to gravity, [m/s^2] Returns ------- Ar : float Archimedes number [] Notes ----- Used in fluid-particle interaction calculations. .. math:: Ar = \frac{\text{Gravitational force}}{\text{Viscous force}} Examples -------- >>> Archimedes(0.002, 2., 3000, 1E-3) 470.4053872 References ---------- .. [1] Green, Don, and Robert Perry. Perry's Chemical Engineers' Handbook, Eighth Edition. McGraw-Hill Professional, 2007. .. [2] Cengel, Yunus, and John Cimbala. Fluid Mechanics: Fundamentals and Applications. Boston: McGraw Hill Higher Education, 2006.
376,821
def chassis(name, chassis_name=None, password=None, datacenter=None,
            location=None, mode=None, idrac_launch=None, slot_names=None,
            blade_power_states=None):
    # most string literals (dict keys, chassis command names, messages) were
    # stripped in extraction; the values below are reconstructed from the
    # salt dellchassis state and are best-effort assumptions
    ret = {'name': chassis_name,
           'chassis_name': chassis_name,
           'result': True,
           'changes': {},
           'comment': ''}

    chassis_cmd = 'chassis.cmd'
    cfg_tuning = 'cfgRacTuning'
    mode_cmd = 'cfgRacTuneChassisMgmtAtServer'
    launch_cmd = 'cfgRacTuneIdracDNSLaunchEnable'

    inventory = __salt__[chassis_cmd]('inventory')

    if idrac_launch:
        idrac_launch = six.text_type(idrac_launch)

    current_name = __salt__[chassis_cmd]('get_chassis_name')
    if chassis_name != current_name:
        ret['changes'].update({'Name': {'Old': current_name, 'New': chassis_name}})

    current_dc = __salt__[chassis_cmd]('get_chassis_datacenter')
    if datacenter and datacenter != current_dc:
        ret['changes'].update({'Datacenter': {'Old': current_dc, 'New': datacenter}})

    if password:
        ret['changes'].update({'Password': {'Old': '******', 'New': '******'}})

    if location:
        current_location = __salt__[chassis_cmd]('get_chassis_location')
        if location != current_location:
            ret['changes'].update({'Location': {'Old': current_location, 'New': location}})

    if mode:
        current_mode = __salt__[chassis_cmd]('get_general', cfg_tuning, mode_cmd)
        if mode != current_mode:
            ret['changes'].update({'Management Mode': {'Old': current_mode, 'New': mode}})

    if idrac_launch:
        current_launch_method = __salt__[chassis_cmd]('get_general', cfg_tuning, launch_cmd)
        if idrac_launch != current_launch_method:
            ret['changes'].update({'iDrac Launch Method':
                                   {'Old': current_launch_method, 'New': idrac_launch}})

    if slot_names:
        current_slot_names = __salt__[chassis_cmd]('list_slotnames')
        for s in slot_names:
            key = s.keys()[0]
            new_name = s[key]
            if key.startswith('slot-'):
                key = key[5:]
            current_slot_name = current_slot_names.get(key).get('slotname')
            if current_slot_name != new_name:
                old = {key: current_slot_name}
                new = {key: new_name}
                if ret['changes'].get('Slot Names') is None:
                    ret['changes'].update({'Slot Names': {'Old': {}, 'New': {}}})
                ret['changes']['Slot Names']['Old'].update(old)
                ret['changes']['Slot Names']['New'].update(new)

    current_power_states = {}
    target_power_states = {}
    if blade_power_states:
        for b in blade_power_states:
            key = b.keys()[0]
            status = __salt__[chassis_cmd]('server_powerstatus', module=key)
            current_power_states[key] = status.get('status', -1)
            if b[key] == 'off':
                if current_power_states[key] != -1 and current_power_states[key]:
                    target_power_states[key] = 'powerdown'
            if b[key] == 'on':
                if current_power_states[key] != -1 and not current_power_states[key]:
                    target_power_states[key] = 'powerup'
            if b[key] == 'powercycle':
                if current_power_states[key] != -1 and not current_power_states[key]:
                    target_power_states[key] = 'powerup'
                if current_power_states[key] != -1 and current_power_states[key]:
                    target_power_states[key] = 'powercycle'
    for k, v in six.iteritems(target_power_states):
        old = {k: current_power_states[k]}
        new = {k: v}
        if ret['changes'].get('Blade Power States') is None:
            ret['changes'].update({'Blade Power States': {'Old': {}, 'New': {}}})
        ret['changes']['Blade Power States']['Old'].update(old)
        ret['changes']['Blade Power States']['New'].update(new)

    if ret['changes'] == {}:
        ret['comment'] = 'Dell chassis is already in the desired state.'
        return ret

    if __opts__['test']:
        ret['result'] = None
        ret['comment'] = 'Dell chassis configuration will change.'
        return ret

    # Finally, set the necessary configurations on the chassis.
    name = __salt__[chassis_cmd]('set_chassis_name', chassis_name)
    if location:
        location = __salt__[chassis_cmd]('set_chassis_location', location)
    pw_result = True
    if password:
        pw_single = True
        if __salt__[chassis_cmd]('change_password', username='root', uid=1,
                                 password=password):
            for blade in inventory['server']:
                pw_single = __salt__[chassis_cmd]('deploy_password',
                                                  username='root',
                                                  password=password,
                                                  module=blade)
                if not pw_single:
                    pw_result = False
        else:
            pw_result = False

    if datacenter:
        datacenter_result = __salt__[chassis_cmd]('set_chassis_datacenter', datacenter)
    if mode:
        mode = __salt__[chassis_cmd]('set_general', cfg_tuning, mode_cmd, mode)
    if idrac_launch:
        idrac_launch = __salt__[chassis_cmd]('set_general', cfg_tuning, launch_cmd, idrac_launch)
    if ret['changes'].get('Slot Names') is not None:
        slot_rets = []
        for s in slot_names:
            key = s.keys()[0]
            new_name = s[key]
            if key.startswith('slot-'):
                key = key[5:]
            slot_rets.append(__salt__[chassis_cmd]('set_slotname', key, new_name))
        if any(slot_rets) is False:
            slot_names = False
        else:
            slot_names = True

    powerchange_all_ok = True
    for k, v in six.iteritems(target_power_states):
        powerchange_ok = __salt__[chassis_cmd]('server_power', v, module=k)
        if not powerchange_ok:
            powerchange_all_ok = False

    if any([name, location, mode, idrac_launch, slot_names,
            powerchange_all_ok]) is False:
        ret['result'] = False
        ret['comment'] = 'There was an error setting the Dell chassis.'
        return ret

    ret['comment'] = 'Dell chassis was updated.'
    return ret
Manage a Dell Chassis. chassis_name The name of the chassis. datacenter The datacenter in which the chassis is located location The location of the chassis. password Password for the chassis. Note: If this password is set for the chassis, the current implementation of this state will set this password both on the chassis and the iDrac passwords on any configured blades. If the password for the blades should be distinct, they should be set separately with the blade_idrac function. mode The management mode of the chassis. Viable options are: - 0: None - 1: Monitor - 2: Manage and Monitor idrac_launch The iDRAC launch method of the chassis. Viable options are: - 0: Disabled (launch iDRAC using IP address) - 1: Enabled (launch iDRAC using DNS name) slot_names The names of the slots, provided as a list identified by their slot numbers. blade_power_states The power states of a blade server, provided as a list and identified by their server numbers. Viable options are: - on: Ensure the blade server is powered on. - off: Ensure the blade server is powered off. - powercycle: Power cycle the blade server. Example: .. code-block:: yaml my-dell-chassis: dellchassis.chassis: - chassis_name: my-dell-chassis - location: my-location - datacenter: london - mode: 2 - idrac_launch: 1 - slot_names: - 1: my-slot-name - 2: my-other-slot-name - blade_power_states: - server-1: on - server-2: off - server-3: powercycle
376,822
def make_symmetric_matrix_from_upper_tri(val):
    idx = [0, 3, 4, 1, 5, 2]
    val = np.array(val)[idx]
    mask = ~np.tri(3, k=-1, dtype=bool)
    out = np.zeros((3, 3), dtype=val.dtype)
    out[mask] = val
    out.T[mask] = val
    return out
Given a symmetric matrix in upper triangular matrix form as flat array indexes as: [A_xx,A_yy,A_zz,A_xy,A_xz,A_yz] This will generate the full matrix: [[A_xx,A_xy,A_xz],[A_xy,A_yy,A_yz],[A_xz,A_yz,A_zz]]
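A worked example of the index shuffling (NumPy only):

import numpy as np
flat = [1, 2, 3, 4, 5, 6]   # [A_xx, A_yy, A_zz, A_xy, A_xz, A_yz]
m = make_symmetric_matrix_from_upper_tri(flat)
# m == [[1, 4, 5],
#       [4, 2, 6],
#       [5, 6, 3]]
assert np.allclose(m, m.T)   # result is symmetric by construction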
376,823
def create_section(
        aggregation_summary, analysis_layer, postprocessor_fields,
        section_header, use_aggregation=True, units_label=None,
        use_rounding=True, extra_component_args=None):
    if use_aggregation:
        return create_section_with_aggregation(
            aggregation_summary, analysis_layer, postprocessor_fields,
            section_header, units_label=units_label,
            use_rounding=use_rounding,
            extra_component_args=extra_component_args)
    else:
        return create_section_without_aggregation(
            aggregation_summary, analysis_layer, postprocessor_fields,
            section_header, units_label=units_label,
            use_rounding=use_rounding,
            extra_component_args=extra_component_args)
Create demographic section context. :param aggregation_summary: Aggregation summary :type aggregation_summary: qgis.core.QgsVectorLayer :param analysis_layer: Analysis layer :type analysis_layer: qgis.core.QgsVectorLayer :param postprocessor_fields: Postprocessor fields to extract :type postprocessor_fields: list[dict] :param section_header: Section header text :type section_header: str :param use_aggregation: Flag, if using aggregation layer :type use_aggregation: bool :param units_label: Unit label for each column :type units_label: list[str] :param use_rounding: flag for rounding, affects number representations :type use_rounding: bool :param extra_component_args: extra_args passed from report component metadata :type extra_component_args: dict :return: context for gender section :rtype: dict .. versionadded:: 4.0
376,824
def searchResponse(self, queryType, query, vendorSpecific=None, **kwargs):
    # first path segment stripped in extraction; 'search' is assumed
    return self.GET(
        ['search', queryType, query], headers=vendorSpecific, query=kwargs
    )
CNRead.search(session, queryType, query) → ObjectList https://releases.dataone.org/online/api-documentation-v2.0.1/apis/CN_APIs.html#CNRead.search. Args: queryType: query: vendorSpecific: **kwargs: Returns:
376,825
def available_services():
    all_datas = ()
    data = ()
    for class_path in settings.TH_SERVICES:
        # split literals were stripped in extraction; '.' and 'Service' are
        # assumed from django-th naming conventions
        class_name = class_path.rsplit('.', 1)[1]
        data = (class_name, class_name.rsplit('Service', 1)[1])
        all_datas = (data,) + all_datas
    return all_datas
Get the available services to be activated. Reads the models dir to find the installed services to be added to the system by the administrator.
376,826
def get(self, names_to_get, extract_numpy_scalars=True, on_new_output=None):
    # several MATLAB command strings were stripped in extraction; the
    # 'save ... stdio' protocol strings below are assumptions
    self._check_open()
    single_item = isinstance(names_to_get, (unicode, str))
    if single_item:
        names_to_get = [names_to_get]
    if names_to_get == None:
        self.process.stdin.write('save stdio;\n')
    else:
        for name in names_to_get:
            self.eval('%s;' % name, print_expression=False,
                      on_new_output=on_new_output)
        self.process.stdin.write(
            "save('stdio', '%s', '-v7');\n" % "', '".join(names_to_get))
    self._read_until('start_binary\n', on_new_output=on_new_output)
    temp_str = self._sync_output(on_new_output=on_new_output)
    if self.matlab_version == (2010, 'a'):  # version suffix stripped in extraction; 'a' is assumed
        temp_str = temp_str[:-len(self.expected_output_end) - 6]
    else:
        temp_str = temp_str[:-len(self.expected_output_end) - 3]
    temp = StringIO(temp_str)
    ret = mlabio.loadmat(temp, chars_as_strings=True, squeeze_me=True)
    temp.close()
    if single_item:
        return ret.values()[0]
    for key in ret.iterkeys():
        while ret[key].shape and ret[key].shape[-1] == 1:
            ret[key] = ret[key][0]
        if extract_numpy_scalars:
            if isinstance(ret[key], np.ndarray) and not ret[key].shape:
                ret[key] = ret[key].tolist()
    return ret
Loads the requested variables from the matlab shell. names_to_get can be either a variable name, a list of variable names, or None. If it is a variable name, the values is returned. If it is a list, a dictionary of variable_name -> value is returned. If it is None, a dictionary with all variables is returned. If extract_numpy_scalars is true, the method will convert numpy scalars (0-dimension arrays) to a regular python variable.
376,827
def __output_thread(self):
    "Output thread"
    while self.alive:
        instructions = self.__get_instructions()
        self.__process_instructions(instructions)
Output thread
376,828
def create_user(self, name, password, is_super=False):
    statement = ddl.CreateUser(
        name=name, password=password, is_super=is_super
    )
    self._execute(statement)
Create a new MapD user Parameters ---------- name : string User name password : string Password is_super : bool if user is a superuser
376,829
def policy_net(rng_key, batch_observations_shape, num_actions,
               bottom_layers=None):
    if bottom_layers is None:
        bottom_layers = []
    bottom_layers.extend([layers.Dense(num_actions), layers.LogSoftmax()])
    net = layers.Serial(*bottom_layers)
    return net.initialize(batch_observations_shape, rng_key), net
A policy net function.
376,830
def _all_unique_texts(text, final):
    if not char_opening in text:
        if not text in final:
            final.append(text)
        return
    stack = []
    indexes = []
    for i, c in enumerate(text):
        if c == char_closing:
            if stack[-1] == char_opening:
                start_index = indexes.pop()
                # empty-string literal stripped in extraction; '' is assumed
                substring = '' if i == start_index + 1 else text[start_index:i + 1]
                combination = next(_choices(substring))
                new_text = text.replace(substring, combination)
                _all_unique_texts(new_text, final)
                return
        elif c == char_opening:
            stack.append(c)
            indexes.append(i)
Compute all the possible unique texts @type text: str @param text: Text written using spin syntax @type final: list @param final: An empty list where all the unique texts will be stored @return: Nothing. The result will be in the 'final' list
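A short sketch, assuming the module defines char_opening = '{' and char_closing = '}' as in common spin syntax:

final = []
_all_unique_texts('{Hello|Hi} world', final)
# final collects every expansion: ['Hello world', 'Hi world'] (order may vary)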
376,831
def Marginal(self, i, name=''):
    pmf = Pmf(name=name)
    for vs, prob in self.Items():
        pmf.Incr(vs[i], prob)
    return pmf
Gets the marginal distribution of the indicated variable. i: index of the variable we want Returns: Pmf
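A short usage sketch, assuming a thinkbayes-style Joint distribution (the Joint class and its Set method are assumptions from that library's conventions):

joint = Joint()
for pair in [(0, 0), (0, 1), (1, 0), (1, 1)]:
    joint.Set(pair, 0.25)        # uniform joint distribution over two bits
pmf = joint.Marginal(0)          # marginal of the first variable
# pmf now assigns probability 0.5 to 0 and 0.5 to 1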
376,832
def _get_user_ns_object(shell, path):
    parts = path.split('.', 1)
    name, attr = parts[0], parts[1:]
    if name in shell.user_ns:
        if attr:
            try:
                return _getattr(shell.user_ns[name], attr[0])
            except AttributeError:
                return None
        else:
            return shell.user_ns[name]
    return None
Get object from the user namespace, given a path containing zero or more dots. Return None if the path is not valid.
376,833
def get_types_by_attr(resource, template_id=None):
    resource_type_templates = []
    attr_ids = []
    for res_attr in resource.attributes:
        attr_ids.append(res_attr.attr_id)
    all_resource_attr_ids = set(attr_ids)
    # joinedload_all argument stripped in extraction; 'typeattrs' is assumed
    all_types = db.DBSession.query(TemplateType).options(
        joinedload_all('typeattrs')).filter(
        TemplateType.resource_type == resource.ref_key)
    if template_id is not None:
        all_types = all_types.filter(TemplateType.template_id == template_id)
    all_types = all_types.all()
    for ttype in all_types:
        type_attr_ids = []
        for typeattr in ttype.typeattrs:
            type_attr_ids.append(typeattr.attr_id)
        if set(type_attr_ids).issubset(all_resource_attr_ids):
            resource_type_templates.append(ttype)
    return resource_type_templates
Using the attributes of the resource, get all the types that this resource matches. @returns a list of TemplateType objects, each of whose attribute set is a subset of the resource's attributes.
376,834
def acquireConnection(self):
    self._logger.debug("Acquiring connection")
    dbConn = self._pool.connection(shareable=False)
    connWrap = ConnectionWrapper(dbConn=dbConn,
                                 cursor=dbConn.cursor(),
                                 releaser=self._releaseConnection,
                                 logger=self._logger)
    return connWrap
Get a connection from the pool. Parameters: ---------------------------------------------------------------- retval: A ConnectionWrapper instance. NOTE: Caller is responsible for calling the ConnectionWrapper instance's release() method or use it in a context manager expression (with ... as:) to release resources.
376,835
def process(self):
    if self.__ccore is True:
        ccore_metric = metric_wrapper.create_instance(self.__metric)
        self.__clusters, self.__medians = wrapper.kmedians(
            self.__pointer_data, self.__medians, self.__tolerance,
            self.__itermax, ccore_metric.get_pointer())
    else:
        changes = float('inf')  # literal stripped in extraction; 'inf' is assumed
        if len(self.__pointer_data[0]) != len(self.__medians[0]):
            # error message stripped in extraction; wording assumed
            raise NameError('Dimension of the input data and dimension of the initial medians must be equal.')
        iterations = 0
        while changes > self.__tolerance and iterations < self.__itermax:
            self.__clusters = self.__update_clusters()
            updated_centers = self.__update_medians()
            changes = max([self.__metric(self.__medians[index], updated_centers[index])
                           for index in range(len(updated_centers))])
            self.__medians = updated_centers
            iterations += 1
    return self
! @brief Performs cluster analysis in line with rules of K-Medians algorithm. @return (kmedians) Returns itself (K-Medians instance). @remark Results of clustering can be obtained using corresponding get methods. @see get_clusters() @see get_medians()
376,836
def mt_report(context, case_id, test, outpath=None):
    # many string literals and the per-sample loop were lost in extraction;
    # keys, messages and the loop that builds 'sample_lines' below are
    # reconstructions and should be treated as assumptions
    LOG.info('Exporting mitochondrial variants for case {}'.format(case_id))
    adapter = context.obj['adapter']
    query = {'chrom': 'MT'}
    case_obj = adapter.case(case_id=case_id)
    if not case_obj:
        LOG.warning('Case {} could not be found'.format(case_id))
        context.abort()
    samples = case_obj.get('individuals')
    mt_variants = list(adapter.variants(case_id=case_id, query=query,
                                        nr_of_variants=-1, sort_key='position'))
    if not mt_variants:
        LOG.warning('No MT variants found for case {}'.format(case_id))
        context.abort()
    today = datetime.datetime.now().strftime('%Y-%m-%d')
    if not outpath:
        outpath = str(os.getcwd())
    written_files = 0
    for sample in samples:
        sample_id = sample['individual_id']
        sample_lines = export_mt_variants(variants=mt_variants,
                                          sample_id=sample_id)
        document_name = '.'.join([case_id, sample_id, today]) + '.xlsx'
        workbook = Workbook(os.path.join(outpath, document_name))
        Report_Sheet = workbook.add_worksheet()
        if test and sample_lines and workbook:
            written_files += 1
            continue
        row = 0
        for col, field in enumerate(MT_EXPORT_HEADER):
            Report_Sheet.write(row, col, field)
        for row, line in enumerate(sample_lines, 1):
            for col, field in enumerate(line):
                Report_Sheet.write(row, col, field)
        workbook.close()
        if os.path.exists(os.path.join(outpath, document_name)):
            written_files += 1
    if test:
        LOG.info("Number of excel files that can be written to folder {0}: {1}".format(outpath, written_files))
    else:
        LOG.info("Number of excel files written to folder {0}: {1}".format(outpath, written_files))
    return written_files
Export all mitochondrial variants for each sample of a case and write them to an excel file Args: adapter(MongoAdapter) case_id(str) test(bool): True if the function is called for testing purposes outpath(str): path to output file Returns: written_files(int): number of written or simulated files
376,837
def _fetch_result(self):
    self._result = self.conn.query_single(self.object_type,
                                          self.url_params,
                                          self.query_params)
Fetch the queried object.
376,838
def parse(cls, msg):
    lines = msg.splitlines()
    method, uri, version = lines[0].split()
    # join separator stripped in extraction; '\r\n' is assumed for HTTP headers
    headers = cls.parse_headers('\r\n'.join(lines[1:]))
    return cls(version=version, uri=uri, method=method, headers=headers)
Parse message string to request object.
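A minimal sketch of how such a classmethod might be used, assuming the owning class (here called Request) stores the parsed fields:

msg = 'GET /index.html HTTP/1.1\r\nHost: example.com\r\nAccept: */*'
req = Request.parse(msg)
# req.method == 'GET', req.uri == '/index.html', req.version == 'HTTP/1.1'
# req.headers holds the parsed Host and Accept headers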
376,839
def make_generic_validator(validator: AnyCallable) -> 'AnyCallable':
    sig = signature(validator)
    args = list(sig.parameters.keys())
    first_arg = args.pop(0)
    if first_arg == 'self':
        raise ConfigError(
            f'Invalid signature for validator {validator}: {sig}, "self" not permitted as first argument, '
            f'should be: (cls, value, values, config, field)'
        )
    elif first_arg == 'cls':
        return wraps(validator)(_generic_validator_cls(validator, sig, set(args[1:])))
    else:
        return wraps(validator)(_generic_validator_basic(validator, sig, set(args)))
Make a generic function which calls a validator with the right arguments. Unfortunately other approaches (eg. return a partial of a function that builds the arguments) is slow, hence this laborious way of doing things. It's done like this so validators don't all need **kwargs in their signature, eg. any combination of the arguments "values", "fields" and/or "config" are permitted.
376,840
def compliance_schedule(self, column=None, value=None, **kwargs):
    # resolver table name stripped in extraction; 'PCS_CMPL_SCHD_EVT' is assumed
    return self._resolve_call('PCS_CMPL_SCHD_EVT', column, value, **kwargs)
A sequence of activities with associated milestones which pertains to a given permit. >>> PCS().compliance_schedule('cmpl_schd_evt', '62099')
376,841
def receive_message(self, message, data):
    if data[MESSAGE_TYPE] == TYPE_RECEIVER_STATUS:
        self._process_get_status(data)
        return True
    elif data[MESSAGE_TYPE] == TYPE_LAUNCH_ERROR:
        self._process_launch_error(data)
        return True
    return False
Called when a receiver-message has been received.
376,842
def lookup_providers(self, lookup):
    # string literals (keys, messages, separators) were stripped in
    # extraction; values below follow the salt-cloud source and are assumptions
    if lookup is None:
        lookup = 'all'
    if lookup == 'all':
        providers = set()
        for alias, drivers in six.iteritems(self.opts['providers']):
            for driver in drivers:
                providers.add((alias, driver))
        if not providers:
            raise SaltCloudSystemExit(
                'There are no cloud providers configured.'
            )
        return providers

    if ':' in lookup:
        alias, driver = lookup.split(':')
        if alias not in self.opts['providers'] or \
                driver not in self.opts['providers'][alias]:
            raise SaltCloudSystemExit(
                'No cloud providers matched \'{0}\'. Available: {1}'.format(
                    lookup, ', '.join(self.get_configured_providers())
                )
            )

    providers = set()
    for alias, drivers in six.iteritems(self.opts['providers']):
        for driver in drivers:
            if lookup in (alias, driver):
                providers.add((alias, driver))
    if not providers:
        raise SaltCloudSystemExit(
            'No cloud providers matched \'{0}\'. Available selections: {1}'.format(
                lookup, ', '.join(self.get_configured_providers())
            )
        )
    return providers
Get a dict describing the configured providers
376,843
def _deserialize(self, value, attr, data):
    if value:
        value = self._format_phone_number(value, attr)
    return super(PhoneNumberField, self)._deserialize(value, attr, data)
Format and validate the phone number using libphonenumber.
376,844
def titleCounts(readsAlignments):
    titles = defaultdict(int)
    for readAlignments in readsAlignments:
        for alignment in readAlignments:
            titles[alignment.subjectTitle] += 1
    return titles
Count the number of times each title in a readsAlignments instance is matched. This is useful for rapidly discovering what titles were matched and with what frequency. @param readsAlignments: A L{dark.alignments.ReadsAlignments} instance. @return: A C{dict} whose keys are titles and whose values are the integer counts of the number of reads that matched that title.
376,845
def _export_table(dataset, column_names=None, byteorder="=", shuffle=False,
                  selection=False, progress=None, virtual=True, sort=None,
                  ascending=True):
    column_names = column_names or dataset.get_column_names(virtual=virtual, strings=True)
    for name in column_names:
        if name not in dataset.columns:
            # warning message stripped in extraction; wording assumed
            warnings.warn('virtual column %r will be evaluated and materialized' % name)
    N = len(dataset) if not selection else dataset.selected_length(selection)
    if N == 0:
        raise ValueError("Cannot export empty table")
    if shuffle and sort:
        raise ValueError("Cannot shuffle and sort at the same time")
    if shuffle:
        random_index_column = "random_index"
        while random_index_column in dataset.get_column_names():
            random_index_column += "_new"
    partial_shuffle = shuffle and len(dataset) != N
    order_array = None
    if partial_shuffle:
        shuffle_array_full = np.random.choice(len(dataset), len(dataset), replace=False)
        shuffle_array = shuffle_array_full[shuffle_array_full < N]
        del shuffle_array_full
        order_array = shuffle_array
    elif shuffle:
        shuffle_array = np.random.choice(N, N, replace=False)
        order_array = shuffle_array
    if sort:
        if selection:
            raise ValueError("sorting selections not yet supported")
        logger.info("sorting...")
        indices = np.argsort(dataset.evaluate(sort))
        order_array = indices if ascending else indices[::-1]
        logger.info("sorting done")
    if selection:
        full_mask = dataset.evaluate_selection_mask(selection)
    else:
        full_mask = None
    arrow_arrays = []
    for column_name in column_names:
        mask = full_mask
        if selection:
            values = dataset.evaluate(column_name, filtered=False)
            values = values[mask]
        else:
            values = dataset.evaluate(column_name)
        if shuffle or sort:
            indices = order_array
            values = values[indices]
        arrow_arrays.append(arrow_array_from_numpy_array(values))
    if shuffle:
        arrow_arrays.append(arrow_array_from_numpy_array(order_array))
        column_names = column_names + [random_index_column]
    table = pa.Table.from_arrays(arrow_arrays, column_names)
    return table
:param DatasetLocal dataset: dataset to export :param str path: path for file :param list[str] column_names: list of column names to export or None for all columns :param str byteorder: = for native, < for little endian and > for big endian :param bool shuffle: export rows in random order :param bool selection: export selection or not :param progress: progress callback that gets a progress fraction as argument and should return True to continue, or a default progress bar when progress=True :param bool virtual: When True, export virtual columns :return:
376,846
def regex(self, *pattern, **kwargs):
    set_defaults(self._kwargs, kwargs)
    set_defaults(self._regex_defaults, kwargs)
    set_defaults(self._defaults, kwargs)
    pattern = self.rebulk.build_re(*pattern, **kwargs)
    part = ChainPart(self, pattern)
    self.parts.append(part)
    return part
Add re pattern :param pattern: :type pattern: :param kwargs: :type kwargs: :return: :rtype:
376,847
def record_big_endian(self):
    if not self._initialized:
        # error message stripped in extraction; wording assumed
        raise pycdlibexception.PyCdlibInternalError('Path Table Record not yet initialized')
    return self._record(utils.swab_32bit(self.extent_location),
                        utils.swab_16bit(self.parent_directory_num))
A method to generate a string representing the big endian version of this Path Table Record. Parameters: None. Returns: A string representing the big endian version of this Path Table Record.
376,848
def find_route_functions_taint_args(self):
    for definition in _get_func_nodes():
        if self.is_route_function(definition.node):
            yield self.get_func_cfg_with_tainted_args(definition)
Find all route functions and taint all of their arguments. Yields: CFG of each route function, with args marked as tainted.
376,849
def to_text(self, line):
    return getattr(self, self.ENTRY_TRANSFORMERS[line.__class__])(line)
Return the textual representation of the given `line`.
376,850
def build_dataset(instruction_dicts,
                  dataset_from_file_fn,
                  shuffle_files=False,
                  parallel_reads=64):
    if _no_examples_skipped(instruction_dicts):
        instruction_ds = tf.data.Dataset.from_tensor_slices([
            d["filepath"] for d in instruction_dicts
        ])
        build_ds_from_instruction = dataset_from_file_fn
    else:
        instruction_ds = _build_instruction_ds(instruction_dicts)
        build_ds_from_instruction = functools.partial(
            _build_ds_from_instruction,
            ds_from_file_fn=dataset_from_file_fn,
        )
    if shuffle_files:
        instruction_ds = instruction_ds.shuffle(len(instruction_dicts))
    ds = instruction_ds.interleave(
        build_ds_from_instruction,
        cycle_length=parallel_reads,
        num_parallel_calls=tf.data.experimental.AUTOTUNE)
    return ds
Constructs a `tf.data.Dataset` from TFRecord files. Args: instruction_dicts: `list` of {'filepath':, 'mask':, 'offset_mask':} containing the information about which files and which examples to use. The boolean mask will be repeated and zipped with the examples from filepath. dataset_from_file_fn: function returning a `tf.data.Dataset` given a filename. shuffle_files: `bool`, Whether to shuffle the input filenames. parallel_reads: `int`, how many files to read in parallel. Returns: `tf.data.Dataset`
376,851
def update(self, *args, **kwargs):
    preprocess = kwargs.get('preprocess')  # kwarg name restored from the docstring
    for s in args:
        for e in s:
            self._dict_set(preprocess(e) if preprocess else e, True)
Updates the set to include all arguments passed in. If the keyword argument preprocess is passed, then each element is preprocessed before being added.
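A small illustration of the preprocess hook, assuming the method lives on a set-like class (NormalizedSet is a hypothetical name):

s = NormalizedSet()
s.update([' Apple ', 'banana'], preprocess=lambda e: e.strip().lower())
# both elements are normalized before insertion: {'apple', 'banana'}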
376,852
def p0(self):
    if self._p0 is None:
        raise ValueError("initial positions not set; run set_p0")
    p0 = {param: self._p0[..., k]
          for (k, param) in enumerate(self.sampling_params)}
    return p0
A dictionary of the initial position of the walkers. This is set by using ``set_p0``. If not set yet, a ``ValueError`` is raised when the attribute is accessed.
376,853
def phrase_contains_special_keys(expansion: model.Expansion) -> bool:
    found_special_keys = KEY_FIND_RE.findall(expansion.string.lower())
    return bool(found_special_keys)
Determine if the expansion contains any special keys, including those resulting from any processed macros (<script>, <file>, etc). If any are found, the phrase cannot be undone. Python Zen: »In the face of ambiguity, refuse the temptation to guess.« The question 'What does the phrase expansion "<ctrl>+a<shift>+<insert>" do?' cannot be answered. Because the key bindings cannot be assumed to result in the actions "select all text, then replace with clipboard content", the undo operation can not be performed. Thus always disable undo, when special keys are found.
376,854
def html_to_cnxml(html_source, cnxml_source):
    # XSLT name, namespace mapping and XPath expression were stripped in
    # extraction; the values below are assumptions
    source = _string2io(html_source)
    xml = etree.parse(source)
    cnxml = etree.parse(_string2io(cnxml_source))
    xml = _transform('html-to-cnxml.xsl', xml)
    namespaces = {'c': 'http://cnx.rice.edu/cnxml'}
    xpath = etree.XPath('//c:content', namespaces=namespaces)
    replaceable_node = xpath(cnxml)[0]
    replaceable_node.getparent().replace(replaceable_node, xml.getroot())
    return etree.tostring(cnxml)
Transform the HTML to CNXML. We need the original CNXML content in order to preserve the metadata in the CNXML document.
376,855
def canonical_value(self, query):
    for d in self.descriptors:
        if query in d:
            return d.canonical_label
    return None
Return the canonical value corresponding to the given query value. Return ``None`` if the query value is not present in any descriptor of the group. :param str query: the descriptor value to be checked against
376,856
def load_model(model_name, epoch_num, data_shapes, label_shapes,
               label_names, gpus=''):
    sym, arg_params, aux_params = mx.model.load_checkpoint(model_name, epoch_num)
    mod = create_module(sym, data_shapes, label_shapes, label_names, gpus)
    mod.set_params(
        arg_params=arg_params,
        aux_params=aux_params,
        allow_missing=True
    )
    return mod
Returns a module loaded with the provided model. Parameters ---------- model_name: str Prefix of the MXNet model name as stored on the local directory. epoch_num : int Epoch number of model we would like to load. input_shape: tuple The shape of the input data in the form of (batch_size, channels, height, width) files: list of strings List of URLs pertaining to files that need to be downloaded in order to use the model. data_shapes: list of tuples. List of tuples where each tuple is a pair of input variable name and its shape. label_shapes: list of (str, tuple) Typically is ``data_iter.provide_label``. label_names: list of str Name of the output labels in the MXNet symbolic graph. gpus: str Comma separated string of gpu ids on which inferences are executed. E.g. 3,5,6 would refer to GPUs 3, 5 and 6. If empty, we use CPU. Returns ------- MXNet module
376,857
def derive_key(self, master_password):
    encoder = encoding.Encoder(self.charset)
    # format string stripped in extraction; a '%s:%s' separator is assumed
    bytes = ('%s:%s' % (master_password, self.name)).encode()
    start_time = time.clock()
    digest = scrypt.hash(bytes, self.salt, N=1 << 14, r=8, p=1)
    key = encoder.encode(digest, self.key_length)
    derivation_time_in_s = time.clock() - start_time
    # log message stripped in extraction; wording assumed
    _logger.debug('Key derivation took %.2f ms', derivation_time_in_s * 1000)
    return key
Computes the key from the salt and the master password.
376,858
def insert(self, before, name, attrs=None, data=None):
    if isinstance(before, self.__class__):
        if before.parent != self:
            # error message stripped in extraction; wording assumed
            raise ValueError('Cannot insert before an element that is not a child of this element')
        before = before.index
    before = min(max(0, before), len(self._children))
    elem = self.__class__(name, attrs, data, parent=self, index=before)
    self._children.insert(before, elem)
    for idx, c in enumerate(self._children):
        c.index = idx
    return elem
Inserts a new element as a child of this element, before the specified index or sibling. :param before: An :class:`XmlElement` or a numeric index to insert the new node before :param name: The tag name to add :param attrs: Attributes for the new tag :param data: CDATA for the new tag :returns: The newly-created element :rtype: :class:`XmlElement`
376,859
def _language_index_from_code(self, code, name_mode):
    if code < 1 or code > sum(
        _LANG_DICT[_] for _ in BMDATA[name_mode]['languages']  # key stripped in extraction; 'languages' is assumed
    ):
        return L_ANY
    if (code & (code - 1)) != 0:  # not a power of 2, i.e. more than one language bit set
        return L_ANY
    return code
Return the index value for a language code. This returns l_any if more than one code is specified or the code is out of bounds. Parameters ---------- code : int The language code to interpret name_mode : str The name mode of the algorithm: ``gen`` (default), ``ash`` (Ashkenazi), or ``sep`` (Sephardic) Returns ------- int Language code index
376,860
def send_config_set(
    self,
    config_commands=None,
    exit_config_mode=True,
    delay_factor=1,
    max_loops=150,
    strip_prompt=False,
    strip_command=False,
    config_mode_command=None,
):
    delay_factor = self.select_delay_factor(delay_factor)
    if config_commands is None:
        return ""
    elif isinstance(config_commands, string_types):
        config_commands = (config_commands,)
    if not hasattr(config_commands, "__iter__"):
        raise ValueError("Invalid argument passed into send_config_set")
    cfg_mode_args = (config_mode_command,) if config_mode_command else tuple()
    output = self.config_mode(*cfg_mode_args)
    for cmd in config_commands:
        self.write_channel(self.normalize_cmd(cmd))
        if self.fast_cli:
            pass
        else:
            time.sleep(delay_factor * 0.05)
    output += self._read_channel_timing(
        delay_factor=delay_factor, max_loops=max_loops
    )
    if exit_config_mode:
        output += self.exit_config_mode()
    output = self._sanitize_output(output)
    log.debug("{}".format(output))
    return output
Send configuration commands down the SSH channel. config_commands is an iterable containing all of the configuration commands. The commands will be executed one after the other. Automatically exits/enters configuration mode. :param config_commands: Multiple configuration commands to be sent to the device :type config_commands: list or string :param exit_config_mode: Determines whether or not to exit config mode after complete :type exit_config_mode: bool :param delay_factor: Factor to adjust delays :type delay_factor: int :param max_loops: Controls wait time in conjunction with delay_factor (default: 150) :type max_loops: int :param strip_prompt: Determines whether or not to strip the prompt :type strip_prompt: bool :param strip_command: Determines whether or not to strip the command :type strip_command: bool :param config_mode_command: The command to enter into config mode :type config_mode_command: str
376,861
def _translate_cond(self, c):
    if isinstance(c, claripy.ast.Base) and not c.singlevalued:
        raise SimFastMemoryError("size not supported")
    if c is None:
        return True
    else:
        return self.state.solver.eval_upto(c, 1)[0]
Checks whether this condition can be supported by FastMemory.
376,862
def register_event(self, event):
    # log message prefix stripped in extraction; wording assumed
    self.log('Registering event hook:', event.cmd, event.thing,
             pretty=True, lvl=verbose)
    self.hooks[event.cmd] = event.thing
Registers a new command line interface event hook as command
376,863
def find_one(cls, *args, **kwargs):
    d = cls.collection.find_one(*args, **kwargs)
    if d:
        return cls(**d)
Same as ``collection.find_one``, returns model object instead of dict.
376,864
def _normalize_helper(number, replacements, remove_non_matches):
    normalized_number = []
    for char in number:
        new_digit = replacements.get(char.upper(), None)
        if new_digit is not None:
            normalized_number.append(new_digit)
        elif not remove_non_matches:
            normalized_number.append(char)
    return U_EMPTY_STRING.join(normalized_number)
Normalizes a string of characters representing a phone number by replacing all characters found in the accompanying map with the values therein, and stripping all other characters if remove_non_matches is true. Arguments: number -- a string representing a phone number replacements -- a mapping of characters to what they should be replaced by in the normalized version of the phone number remove_non_matches -- indicates whether characters that are not able to be replaced should be stripped from the number. If this is False, they will be left unchanged in the number. Returns the normalized string version of the phone number.
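A quick illustration with a hand-rolled replacement map; the keypad-style mapping below is invented for the example, and the module's U_EMPTY_STRING constant (presumably u'') is assumed to be in scope:

repl = {'1': '1', '8': '8', '0': '0', 'A': '2', 'B': '2', 'C': '2'}
_normalize_helper('1-800-ABC', repl, remove_non_matches=True)   # -> '1800222'
_normalize_helper('1-800-ABC', repl, remove_non_matches=False)  # -> '1-800-222'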
376,865
def univariate(self, data: ['SASdata', str] = None,
               by: [str, list] = None,
               cdfplot: str = None,
               cls: [str, list] = None,
               freq: str = None,
               histogram: str = None,
               id: [str, list] = None,
               inset: str = None,
               output: [str, bool, 'SASdata'] = None,
               ppplot: str = None,
               probplot: str = None,
               qqplot: str = None,
               var: str = None,
               weight: str = None,
               procopts: str = None,
               stmtpassthrough: str = None,
               **kwargs: dict) -> 'SASresults':
    # saspy proc methods are decorated and carry only a docstring body;
    # the stripped type names ('SASdata', 'SASresults') are assumptions
    ...
Python method to call the UNIVARIATE procedure Documentation link: https://go.documentation.sas.com/?cdcId=pgmsascdc&cdcVersion=9.4_3.4&docsetId=procstat&docsetTarget=procstat_univariate_syntax.htm&locale=en The PROC UNIVARIATE statement invokes the procedure. The VAR statement specifies the numeric variables to be analyzed, and it is required if the OUTPUT statement is used to save summary statistics in an output data set. If you do not use the VAR statement, all numeric variables in the data set are analyzed. The plot statements (CDFPLOT, HISTOGRAM, PPPLOT, PROBPLOT, and QQPLOT) create graphical displays, and the INSET statement enhances these displays by adding a table of summary statistics directly on the graph. You can specify one or more of each of the plot statements, the INSET statement, and the OUTPUT statement. If you use a VAR statement, the variables listed in a plot statement must be a subset of the variables listed in the VAR statement. You can specify a BY statement to obtain separate analyses for each BY group. The FREQ statement specifies a variable whose values provide the frequency for each observation. The ID statement specifies one or more variables to identify the extreme observations. The WEIGHT statement specifies a variable whose values are used to weight certain statistics. You can use a CLASS statement to specify one or two variables that group the data into classification levels. The analysis is carried out for each combination of levels in the input data set, or within each BY group if you also specify a BY statement. You can use the CLASS statement with plot statements to create comparative displays, in which each cell contains a plot for one combination of classification levels. :param data: SASdata object or string. This parameter is required. :parm by: The by variable can be a string or list type. :parm cdfplot: The cdfplot variable can only be a string type. :parm cls: The cls variable can be a string or list type. It refers to the categorical, or nominal variables. :parm freq: The freq variable can only be a string type. :parm histogram: The histogram variable can only be a string type. :parm id: The id variable can be a string or list type. :parm inset: The inset variable can only be a string type. :parm output: The output variable can be a string, boolean or SASdata type. The member name for a boolean is "_output". :parm ppplot: The ppplot variable can only be a string type. :parm probplot: The probplot variable can only be a string type. :parm qqplot: The qqplot variable can only be a string type. :parm var: The var variable can only be a string type. :parm weight: The weight variable can only be a string type. :parm procopts: The procopts variable is a generic option available for advanced use. It can only be a string type. :parm stmtpassthrough: The stmtpassthrough variable is a generic option available for advanced use. It can only be a string type. :return: SAS Result Object
376,866
def qos(self, prefetch_size=0, prefetch_count=0, is_global=False):
    args = Writer()
    args.write_long(prefetch_size).\
        write_short(prefetch_count).\
        write_bit(is_global)
    self.send_frame(MethodFrame(self.channel_id, 60, 10, args))
    self.channel.add_synchronous_cb(self._recv_qos_ok)
Set QoS on this channel.
376,867
def SNM0(T, Tc, Vc, omega, delta_SRK=None):
    Tr = T/Tc
    m = 0.480 + 1.574*omega - 0.176*omega*omega
    alpha_SRK = (1. + m*(1. - Tr**0.5))**2
    tau = 1. - Tr/alpha_SRK
    rho0 = 1. + 1.169*tau**(1/3.) + 1.818*tau**(2/3.) - 2.658*tau + 2.161*tau**(4/3.)
    V0 = 1./rho0
    if not delta_SRK:
        return Vc*V0
    else:
        return Vc*V0/(1. + delta_SRK*(alpha_SRK - 1.)**(1/3.))
r'''Calculates saturated liquid density using the Mchaweh, Moshfeghian model [1]_. Designed for simple calculations. .. math:: V_s = V_c/(1+1.169\tau^{1/3}+1.818\tau^{2/3}-2.658\tau+2.161\tau^{4/3}) \tau = 1-\frac{(T/T_c)}{\alpha_{SRK}} \alpha_{SRK} = [1 + m(1-\sqrt{T/T_C})]^2 m = 0.480+1.574\omega-0.176\omega^2 If the fit parameter `delta_SRK` is provided, the following is used: .. math:: V_s = V_C/(1+1.169\tau^{1/3}+1.818\tau^{2/3}-2.658\tau+2.161\tau^{4/3}) /\left[1+\delta_{SRK}(\alpha_{SRK}-1)^{1/3}\right] Parameters ---------- T : float Temperature of fluid [K] Tc : float Critical temperature of fluid [K] Vc : float Critical volume of fluid [m^3/mol] omega : float Acentric factor for fluid, [-] delta_SRK : float, optional Fitting parameter [-] Returns ------- Vs : float Saturation liquid volume, [m^3/mol] Notes ----- 73 fit parameters have been gathered from the article. Examples -------- Argon, without the fit parameter and with it. Tabulated result in Perry's is 3.4613e-05. The fit increases the error on this occasion. >>> SNM0(121, 150.8, 7.49e-05, -0.004) 3.4402256402733416e-05 >>> SNM0(121, 150.8, 7.49e-05, -0.004, -0.03259620) 3.493288100008123e-05 References ---------- .. [1] Mchaweh, A., A. Alsaygh, Kh. Nasrifar, and M. Moshfeghian. "A Simplified Method for Calculating Saturated Liquid Densities." Fluid Phase Equilibria 224, no. 2 (October 1, 2004): 157-67. doi:10.1016/j.fluid.2004.06.054
376,868
def get_folder(self, folder):
    folder_id = obj_or_id(folder, "folder", (Folder,))
    response = self.__requester.request(
        'GET',
        'folders/{}'.format(folder_id)
    )
    return Folder(self.__requester, response.json())
Return the details for a folder :calls: `GET /api/v1/folders/:id \ <https://canvas.instructure.com/doc/api/files.html#method.folders.show>`_ :param folder: The object or ID of the folder to retrieve. :type folder: :class:`canvasapi.folder.Folder` or int :rtype: :class:`canvasapi.folder.Folder`
376,869
async def api_request(self, url, params):
    # log messages and response keys were stripped in extraction; the
    # 'error' payload keys below are assumptions
    request = None
    try:
        with async_timeout.timeout(DEFAULT_TIMEOUT, loop=self._event_loop):
            request = await self._api_session.get(url, params=params)
            if request.status != 200:
                _LOGGER.error("API request returned HTTP status %s", request.status)
                return None
            request_json = await request.json()
            if 'error' in request_json:
                _LOGGER.error("API returned error %s: %s",
                              request_json['error']['code'],
                              request_json['error']['info'])
                return None
            return request_json
    except (aiohttp.ClientError, asyncio.TimeoutError,
            ConnectionRefusedError) as err:
        _LOGGER.error("Failed to fetch from API: %s", err)
        return None
Make api fetch request.
376,870
def _get_column_ends(self):
    ends = collections.Counter()
    for line in self.text.splitlines():
        # regex stripped in extraction; matching runs of non-space text is assumed
        for matchobj in re.finditer(r'\S+', line.lstrip()):
            ends[matchobj.end()] += 1
    return ends
Guess where the ends of the columns lie.
376,871
def patch_namespaced_network_policy(self, name, namespace, body, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        return self.patch_namespaced_network_policy_with_http_info(name, namespace, body, **kwargs)
    else:
        (data) = self.patch_namespaced_network_policy_with_http_info(name, namespace, body, **kwargs)
        return data
partially update the specified NetworkPolicy This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_network_policy(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the NetworkPolicy (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param object body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :param str field_manager: fieldManager is a name associated with the actor or entity that is making these changes. The value must be less than or 128 characters long, and only contain printable characters, as defined by https://golang.org/pkg/unicode/#IsPrint. This field is required for apply requests (application/apply-patch) but optional for non-apply patch types (JsonPatch, MergePatch, StrategicMergePatch). :param bool force: Force is going to \"force\" Apply requests. It means user will re-acquire conflicting fields owned by other people. Force flag must be unset for non-apply patch requests. :return: V1beta1NetworkPolicy If the method is called asynchronously, returns the request thread.
376,872
def delete_os_dummy_rtr_nwk(self, rtr_id, net_id, subnet_id):
    subnet_lst = set()
    subnet_lst.add(subnet_id)
    ret = self.os_helper.delete_intf_router(None, None, rtr_id, subnet_lst)
    if not ret:
        return ret
    return self.os_helper.delete_network_all_subnets(net_id)
Delete the dummy interface to the router.
376,873
def _geom_series_uint32(r, n):
    if n == 0:
        return 0
    if n == 1 or r == 0:
        return 1
    m = 2**32
    # factor out powers of two from (r - 1) so the odd part can be inverted mod 2**32
    other_factors = r - 1
    common_factor = 1
    while (other_factors % 2) == 0:
        other_factors //= 2
        common_factor *= 2
    other_factors_inverse = pow(other_factors, m - 1, m)
    numerator = pow(r, n, common_factor * m) - 1
    return (numerator // common_factor * other_factors_inverse) % m
Unsigned integer calculation of sum of geometric series: 1 + r + r^2 + r^3 + ... r^(n-1) summed to n terms. Calculated modulo 2**32. Use the formula (r**n - 1) / (r - 1)
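The closed form can be checked against a direct sum; both lines below hold because 1 + 3 + 9 + 27 + 81 = 121:

assert _geom_series_uint32(3, 5) == 121          # (3**5 - 1) // (3 - 1)
assert _geom_series_uint32(2, 10) == 2**10 - 1   # 1023, since r - 1 == 1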
376,874
def timeout_thread_handler(timeout, stop_event):
    stop_happened = stop_event.wait(timeout)
    if stop_happened is False:
        print("Killing program due to %f second timeout" % timeout)
        os._exit(2)
A background thread to kill the process if it takes too long. Args: timeout (float): The number of seconds to wait before killing the process. stop_event (Event): An optional event to cleanly stop the background thread if required during testing.
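Typical wiring, as suggested by the docstring (the worker call is an illustrative placeholder):

import threading
stop_event = threading.Event()
watchdog = threading.Thread(target=timeout_thread_handler,
                            args=(30.0, stop_event), daemon=True)
watchdog.start()
do_work()          # the real workload; hypothetical placeholder
stop_event.set()   # finished in time, so the watchdog exits quietly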
376,875
def _check_status(status):
    if status != constants.STATUS_SUCCESS:
        exception = STATUS_TO_EXCEPTION.get(status, CairoError)
        status_name = ffi.string(ffi.cast("cairo_status_t", status))
        # message format string stripped in extraction; wording assumed
        message = 'cairo returned %s: %s' % (
            status_name, ffi.string(cairo.cairo_status_to_string(status)))
        raise exception(message, status)
Take a cairo status code and raise an exception if/as appropriate.
376,876
async def _chunked_upload(self, media, media_size,
                          path=None,
                          media_type=None,
                          media_category=None,
                          chunk_size=2**20,
                          **params):
    # response/status dict keys were stripped in extraction; the
    # 'media_id'/'processing_info' keys below follow the Twitter upload API
    if isinstance(media, bytes):
        media = io.BytesIO(media)
    chunk = media.read(chunk_size)
    is_coro = asyncio.iscoroutine(chunk)
    if is_coro:
        chunk = await chunk
    if media_type is None:
        media_metadata = await utils.get_media_metadata(chunk, path)
        media_type, media_category = media_metadata
    elif media_category is None:
        media_category = utils.get_category(media_type)
    response = await self.upload.media.upload.post(
        command="INIT",
        total_bytes=media_size,
        media_type=media_type,
        media_category=media_category,
        **params
    )
    media_id = response['media_id']
    i = 0
    while chunk:
        if is_coro:
            req = self.upload.media.upload.post(command="APPEND",
                                                media_id=media_id,
                                                media=chunk,
                                                segment_index=i)
            chunk, _ = await asyncio.gather(media.read(chunk_size), req)
        else:
            await self.upload.media.upload.post(command="APPEND",
                                                media_id=media_id,
                                                media=chunk,
                                                segment_index=i)
            chunk = media.read(chunk_size)
        i += 1
    status = await self.upload.media.upload.post(command="FINALIZE",
                                                 media_id=media_id)
    if 'processing_info' in status:
        while status['processing_info'].get('state') != "succeeded":
            processing_info = status['processing_info']
            if processing_info.get('state') == "failed":
                error = processing_info.get('error', {})
                message = error.get('message', str(status))
                raise exceptions.MediaProcessingError(data=status,
                                                      message=message,
                                                      **params)
            delay = processing_info['check_after_secs']
            await asyncio.sleep(delay)
            status = await self.upload.media.upload.get(
                command="STATUS",
                media_id=media_id,
                **params
            )
    return response
upload media in chunks Parameters ---------- media : file object a file object of the media media_size : int size of the media path : str, optional filename of the media media_type : str, optional mime type of the media media_category : str, optional twitter media category, must be used with ``media_type`` chunk_size : int, optional size of a chunk in bytes params : dict, optional additional parameters of the request Returns ------- .data_processing.PeonyResponse Response of the request
376,877
def get_wifi_packet(frame, no_rtap=False):
    _, packet = WiHelper._strip_rtap(frame)
    frame_control = struct.unpack('BB', packet[:2])  # format stripped in extraction; 'BB' is assumed
    cat = (frame_control[0] >> 2) & 0b0011
    s_type = frame_control[0] >> 4
    if cat not in _CATEGORIES_.keys():
        logging.warning("unknown category: %d" % (cat))
        return Unknown(frame, no_rtap)
    if s_type not in _SUBTYPES_[cat].keys():
        logging.warning("unknown subtype %d in %s category" % (s_type, _CATEGORIES_[cat]))
        return Unknown(frame, no_rtap)
    if cat == 0:
        if s_type == 4:
            return ProbeReq(frame, no_rtap)
        elif s_type == 5:
            return ProbeResp(frame, no_rtap)
        elif s_type == 8:
            return Beacon(frame, no_rtap)
        else:
            return Management(frame, no_rtap)
    elif cat == 1:
        if s_type == 11:
            return RTS(frame, no_rtap)
        elif s_type == 12:
            return CTS(frame, no_rtap)
        elif s_type == 9:
            return BACK(frame, no_rtap)
        else:
            return Control(frame, no_rtap)
    elif cat == 2:
        if s_type == 8:
            return QosData(frame, no_rtap, parse_amsdu=True)
        else:
            return Data(frame, no_rtap)
Discriminates Wi-Fi packet and creates packet object. :frame: ctypes.Structure :no_rtap: Bool :return: obj Wi-Fi packet
376,878
def get_search_page(self, query):
    query_web_page = Webpage(self.url + self.parse_query(query))
    query_web_page.get_html_source()
    return query_web_page.source
Gets HTML source :param query: query to search engine :return: HTML source of search page of given query
376,879
def find_same_between_dicts(dict1, dict2):
    # namedtuple name and field names restored from the docstring demo below
    Same_info = namedtuple('Same_info', ['item', 'key', 'value'])
    same_info = Same_info(set(dict1.items()) & set(dict2.items()),
                          set(dict1.keys()) & set(dict2.keys()),
                          set(dict1.values()) & set(dict2.values()))
    return same_info
Find the common elements of two dictionaries -- keys, values, and items; only hashable objects are supported. :param: * dict1: (dict) first dictionary to compare * dict2: (dict) second dictionary to compare :return: * same_info: (namedtuple) a named tuple holding the common items, keys, and values of the two dictionaries Example:: print('--- find_same_between_dicts demo---') dict1 = {'x':1, 'y':2, 'z':3} dict2 = {'w':10, 'x':1, 'y':2} res = find_same_between_dicts(dict1, dict2) print(res.item) print(res.key) print(res.value) print('---') Output:: --- find_same_between_dicts demo--- set([('x', 1)]) {'x', 'y'} {1} ---
376,880
def create_new(self, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('callback'):
        return self.create_new_with_http_info(**kwargs)
    else:
        (data) = self.create_new_with_http_info(**kwargs)
        return data
Creates a new License This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.create_new(callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param LicenseRest body: :return: LicenseSingleton If the method is called asynchronously, returns the request thread.
376,881
def _parse_url_and_validate(cls, url):
    parsed_url = urlparse(url)
    if parsed_url.scheme and parsed_url.netloc:
        final_url = parsed_url.geturl()
    else:
        raise BadURLException
    return final_url
Receives a URL string and validates it using urlparse.

Args:
    url: A URL string
Returns:
    parsed_url: A validated URL
Raises:
    BadURLException
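A quick illustration of the rule this method enforces — urlparse only yields both a scheme and a netloc for absolute URLs:

    from urllib.parse import urlparse

    assert urlparse('https://example.com/path').scheme == 'https'  # passes
    assert urlparse('example.com/path').netloc == ''               # would raise BadURLException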
376,882
def release(self, forceRelease=False):
    if not self.held:
        if forceRelease is False:
            return False
        else:
            self.held = True

    if not os.path.exists(self.lockPath):
        self.held = False
        self.acquiredAt = None
        return True

    if forceRelease is False:
        # If the lock has outlived its maximum age, someone else may own it now.
        if self.maxLockAge and time.time() > self.acquiredAt + self.maxLockAge:
            self.held = False
            self.acquiredAt = None
            return False

    self.acquiredAt = None

    try:
        os.rmdir(self.lockPath)
        self.held = False
        return True
    except Exception:
        self.held = False
        return False
release - Release the lock. @param forceRelease <bool> default False - If True, will release the lock even if we don't hold it. @return - True if lock is released, otherwise False
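The class releases the lock with os.rmdir, which points at a directory-based lock: os.mkdir is atomic, so whoever creates the directory holds the lock. A standalone sketch of that idea (the path is illustrative, and this is not this library's API):

    import os

    lock_path = '/tmp/example.lock'
    try:
        os.mkdir(lock_path)      # acquire: succeeds for exactly one process
        ...                      # critical section
    finally:
        os.rmdir(lock_path)      # release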
376,883
from numpy import array, r_

def arr_base10toN(anum10, aradix, *args):
    new_num_arr = array(())
    current = anum10
    while current != 0:
        remainder = current % aradix
        new_num_arr = r_[remainder, new_num_arr]
        current = current // aradix   # integer division (Python 3)
    forcelength = new_num_arr.size
    if len(args):
        forcelength = args[0]
    # Zero-pad on the left until the requested length is reached
    while new_num_arr.size < forcelength:
        new_num_arr = r_[0, new_num_arr]
    return new_num_arr
ARGS anum10 in number in base 10 aradix in convert <anum10> to number in base + <aradix> OPTIONAL forcelength in if nonzero, indicates the length + of the return array. Useful if + array needs to be zero padded. DESC Converts a scalar from base 10 to base radix. Return an array.
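A worked example (results are numpy float arrays, given the numpy imports above): 10 in base 2 is 1010, and forcing a length of 6 left-pads with zeros:

    arr_base10toN(10, 2)       # -> array([1., 0., 1., 0.])
    arr_base10toN(10, 2, 6)    # -> array([0., 0., 1., 0., 1., 0.])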
376,884
def bulk_copy(self, ids):
    schema = PackageSchema()
    return self.service.bulk_copy(self.base, self.RESOURCE, ids, schema)
Bulk copy a set of packages. :param ids: Int list of package IDs. :return: :class:`packages.Package <packages.Package>` list
376,885
def setup(self):
    self.blocks = self.config['block_comments']
    self.lines = self.config['line_comments']
    self.group_comments = self.config['group_comments']
    self.jsdocs = self.config['jsdocs']
    self.decode_escapes = self.config['decode_escapes']
    self.strings = self.config['strings']
    self.prefix = 'cpp'
Setup.
376,886
def size_to_content(self, horizontal_padding=None, vertical_padding=None):
    if horizontal_padding is None:
        horizontal_padding = 0
    if vertical_padding is None:
        vertical_padding = 0
    self.sizing.set_fixed_size(Geometry.IntSize(18 + 2 * horizontal_padding,
                                                18 + 2 * vertical_padding))
Size the canvas item to the text content.
376,887
def bulkdownload(database, **kwargs):
    if 'authtoken' in kwargs:
        raise InvalidRequestError(Message.ERROR_AUTHTOKEN_NOT_SUPPORTED)
    ApiKeyUtil.init_api_key_from_args(kwargs)
    # Default to the current working directory, per the docstring
    filename = kwargs.pop('filename', '.')
    return Database(database).bulk_download_to_file(filename, params=kwargs)
Downloads an entire database. :param str database: The database code to download :param str filename: The filename for the download. \ If not specified, will download to the current working directory :param str api_key: Most databases require api_key for bulk download :param str download_type: 'partial' or 'complete'. \ See: https://www.quandl.com/docs/api#database-metadata
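A hedged usage sketch matching the documented parameters; the database code, key, and filename are placeholders:

    import quandl

    quandl.bulkdownload('ZEA', api_key='YOUR_API_KEY', filename='ZEA.zip')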
376,888
from bisect import bisect_left, bisect_right

def sorted_exists(values, x):
    i = bisect_left(values, x)
    j = bisect_right(values, x)
    exists = x in values[i:j]
    return exists, i
For list, values, returns the insert position for item x and whether the item already exists in the list. This allows one function call to return either the index to overwrite an existing value in the list, or the index to insert a new item in the list and keep the list in sorted order. :param values: list :param x: item :return: (exists, index) tuple
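A worked example on a sorted list: 5 already exists at index 2, while 4 does not exist but would also be inserted at index 2:

    values = [1, 3, 5, 7]
    assert sorted_exists(values, 5) == (True, 2)
    assert sorted_exists(values, 4) == (False, 2)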
376,889
def css(self):
    css_list = [DEFAULT_MARK_CSS]
    for aes in self.aesthetics:
        css_list.extend(get_mark_css(aes, self.values[aes]))
    return '\n'.join(css_list)
Returns ------- str The CSS.
376,890
def coroutine(
    func: Callable[..., "Generator[Any, Any, _T]"]
) -> Callable[..., "Future[_T]"]:
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        future = _create_future()
        try:
            result = func(*args, **kwargs)
        except (Return, StopIteration) as e:
            result = _value_from_stopiteration(e)
        except Exception:
            future_set_exc_info(future, sys.exc_info())
            try:
                return future
            finally:
                # Avoid circular references
                future = None
        else:
            if isinstance(result, Generator):
                # Inline the first iteration so coroutines that never
                # yield avoid the cost of creating a Runner.
                try:
                    yielded = next(result)
                except (StopIteration, Return) as e:
                    future_set_result_unless_cancelled(
                        future, _value_from_stopiteration(e)
                    )
                except Exception:
                    future_set_exc_info(future, sys.exc_info())
                else:
                    # Keep a strong reference to the Runner for as long
                    # as the result future is alive.
                    runner = Runner(result, future, yielded)
                    future.add_done_callback(lambda _: runner)
                yielded = None
                try:
                    return future
                finally:
                    future = None
        future_set_result_unless_cancelled(future, result)
        return future

    wrapper.__wrapped__ = func
    wrapper.__tornado_coroutine__ = True
    return wrapper
Decorator for asynchronous generators. For compatibility with older versions of Python, coroutines may also "return" by raising the special exception `Return(value) <Return>`. Functions with this decorator return a `.Future`. .. warning:: When exceptions occur inside a coroutine, the exception information will be stored in the `.Future` object. You must examine the result of the `.Future` object, or the exception may go unnoticed by your code. This means yielding the function if called from another coroutine, using something like `.IOLoop.run_sync` for top-level calls, or passing the `.Future` to `.IOLoop.add_future`. .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned awaitable object instead.
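A short usage sketch of the decorator with Tornado's own AsyncHTTPClient (the URL is illustrative):

    from tornado import gen
    from tornado.httpclient import AsyncHTTPClient

    @gen.coroutine
    def fetch_len(url):
        client = AsyncHTTPClient()
        response = yield client.fetch(url)   # suspends until the Future resolves
        raise gen.Return(len(response.body))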
376,891
def stop(self):
    if self._disconnector:
        self._disconnector.stop()
    self.client.disconnect()
Stop this gateway agent.
376,892
def extract_metric_name(self, metric_name):
    for metric_type in self.supported_sar_types:
        if metric_type in metric_name:
            return metric_type
    logger.error('Section name %s does not contain a supported SAR metric type: %s',
                 metric_name, self.supported_sar_types)
    return None
Method to extract SAR metric names from the section given in the config. The SARMetric class assumes that the section name will contain the SAR types listed in self.supported_sar_types tuple :param str metric_name: Section name from the config :return: str which identifies what kind of SAR metric the section represents
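An illustrative call; the section name and the parser instance are hypothetical, assuming supported_sar_types contains 'SAR-cpuusage':

    # parser.supported_sar_types might include ('SAR-cpuusage', 'SAR-memory', ...)
    parser.extract_metric_name('SAR-cpuusage.host1')   # -> 'SAR-cpuusage'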
376,893
def dir(cls, label, children):
    return FSEntry(label=label, children=children, type=u"Directory", use=None)
Return ``FSEntry`` directory object.
376,894
def json(cls, message):
    if type(message) is OrderedDict:
        pprint(dict(message))
    else:
        pprint(message)
Print a nice JSON output Args: message: the message to print
376,895
def change(script, layer_num=None):
    if layer_num is None:
        if isinstance(script, mlx.FilterScript):
            layer_num = script.last_layer()
        else:
            layer_num = 0
    filter_xml = ''.join([
        '  <filter name="Change the current layer">\n',
        '    <Param name="mesh" ',
        'value="{:d}" '.format(layer_num),
        'description="Mesh" ',
        'type="RichMesh" ',
        '/>\n',
        '  </filter>\n'])
    util.write_filter(script, filter_xml)
    if isinstance(script, mlx.FilterScript):
        script.set_current_layer(layer_num)
    return None
Change the current layer by specifying the new layer number. Args: script: the mlx.FilterScript object or script filename to write the filter to. layer_num (int): the number of the layer to change to. Default is the last layer if script is a mlx.FilterScript object; if script is a filename the default is the first layer. Layer stack: Modifies current layer MeshLab versions: 2016.12 1.3.4BETA
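A hedged usage sketch; the input filename is a placeholder and the call assumes the meshlabxml layout (mlx.FilterScript, mlx.layers.change):

    import meshlabxml as mlx

    script = mlx.FilterScript(file_in='input.ply')   # placeholder filename
    mlx.layers.change(script, layer_num=0)           # make layer 0 current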
376,896
def factory(cls, registry):
    # Use str() to mirror cls.__name__ exactly on both Python 2 and 3
    cls_name = str(cls.__name__)
    MyMetricsHandler = type(cls_name, (cls, object),
                            {"registry": registry})
    return MyMetricsHandler
Returns a dynamic MetricsHandler class tied to the passed registry.
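A minimal sketch of serving a custom registry with the handler class this factory returns (the port is arbitrary):

    from http.server import HTTPServer
    from prometheus_client import CollectorRegistry
    from prometheus_client.exposition import MetricsHandler

    registry = CollectorRegistry()
    handler_cls = MetricsHandler.factory(registry)
    HTTPServer(('', 8000), handler_cls).serve_forever()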
376,897
def put(self, item, *args, **kwargs):
    if not self.enabled:
        return

    timeout = kwargs.pop('timeout', None)
    if timeout is None:
        timeout = self.default_timeout

    cache_key = self.make_key(args, kwargs)
    with self._cache_lock:
        # Store the expiry timestamp alongside the item
        self._cache[cache_key] = (time() + timeout, item)
Put an item into the cache, for this combination of args and kwargs.

Args:
    item: the object to cache.
    *args: any arguments.
    **kwargs: any keyword arguments. If ``timeout`` is specified as one
        of the keyword arguments, the item will remain available
        for retrieval for ``timeout`` seconds. If ``timeout`` is
        `None` or not specified, the ``default_timeout`` for this
        cache will be used. Specify a ``timeout`` of 0 (or ensure that
        the ``default_timeout`` for this cache is 0) if this item is
        not to be cached.
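A hedged usage sketch; cache, expensive_result, and the key arguments are hypothetical — note the cache key is built from the args/kwargs, not from the item itself:

    cache.put(expensive_result, 'user', 42, timeout=30)   # kept for 30 s
    cache.put(expensive_result, 'user', 43)               # default_timeout applies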
376,898
def load_app(config, **kwargs):
    from .configuration import _runtime_conf, set_config
    set_config(config, overwrite=True)

    for package_name in getattr(_runtime_conf.app, 'modules', []):
        module = __import__(package_name, fromlist=['app'])
        if hasattr(module, 'app') and hasattr(module.app, 'setup_app'):
            app = module.app.setup_app(_runtime_conf, **kwargs)
            app.config = _runtime_conf
            return app
    raise RuntimeError(
        'No app.setup_app found in any of the configured app.modules'
    )
Used to load a ``Pecan`` application and its environment based on passed
configuration.

:param config: Can be a dictionary containing configuration, or a string
    which represents a (relative) configuration filename

:returns: a pecan.Pecan object
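A brief usage sketch; 'config.py' is a placeholder and must name app.modules whose packages expose app.setup_app:

    from pecan import load_app

    application = load_app('config.py')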
376,899
def insertVariantSet(self, variantSet):
    metadataJson = json.dumps(
        [protocol.toJsonDict(metadata) for metadata in
         variantSet.getMetadata()])
    urlMapJson = json.dumps(variantSet.getReferenceToDataUrlIndexMap())
    try:
        models.Variantset.create(
            id=variantSet.getId(),
            datasetid=variantSet.getParentContainer().getId(),
            referencesetid=variantSet.getReferenceSet().getId(),
            name=variantSet.getLocalId(),
            created=datetime.datetime.now(),
            updated=datetime.datetime.now(),
            metadata=metadataJson,
            dataurlindexmap=urlMapJson,
            attributes=json.dumps(variantSet.getAttributes()))
    except Exception as e:
        raise exceptions.RepoManagerException(e)
    for callSet in variantSet.getCallSets():
        self.insertCallSet(callSet)
Inserts the specified variantSet into this repository.