Unnamed: 0: int64, values 0 to 389k
code: string, lengths 26 to 79.6k
docstring: string, lengths 1 to 46.9k
8,500
def log(*args, **kwargs): level = kwargs.pop('level', logging.INFO) logger.log(level, *args, **kwargs)
Log things with the global logger.
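A minimal runnable sketch of the pattern above, assuming the popped keyword is named 'level' (inferred from the subsequent call to logger.log):
import logging
logger = logging.getLogger(__name__)

def log(*args, **kwargs):
    # Pop the level so it is not forwarded twice to logger.log.
    level = kwargs.pop('level', logging.INFO)
    logger.log(level, *args, **kwargs)

log('loaded %d rows', 42, level=logging.DEBUG)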
8,501
def _relative_score(self, start_eot, end_eot, active, passive): active_start = self._score_eot_for_actor(start_eot, active) passive_start = self._score_eot_for_actor(start_eot, passive) active_end = self._score_eot_for_actor(end_eot, active) passive_end = self._score_eot_for_actor(end_eot, passive) return (active_end - passive_end) - (active_start - passive_start)
Return the balance of perception between the two nodes. A positive score indicates the result is relatively better for active.
8,502
def default_arguments(self): d = OrderedDict() for arg in self._default_args: d.update({arg.name: arg}) return d
:rtype: dict
8,503
def fetch_bug_details(self, bug_ids): params = {: } params[] = bug_ids try: response = self.session.get(settings.BZ_API_URL + , headers=self.session.headers, params=params, timeout=30) response.raise_for_status() except RequestException as e: logger.warning(.format(e)) return None if response.headers[] == : return None data = response.json() if not in data: return None return data[]
Fetches bug metadata from bugzilla and returns an encoded dict if successful, otherwise returns None.
8,504
def _init_metadata(self): QuestionTextFormRecord._init_metadata(self) QuestionFilesFormRecord._init_metadata(self) super(QuestionTextAndFilesMixin, self)._init_metadata()
stub
8,505
def gc_velocity_update(particle, social, state): gbest = state.swarm[gbest_idx(state.swarm)].position if not np.array_equal(gbest, particle.position): return std_velocity(particle, social, state) rho = state.params[] inertia = state.params[] v_max = state.params[] size = particle.position.size r2 = state.rng.uniform(0.0, 1.0, size) velocity = __gc_velocity_equation__(inertia, rho, r2, particle, gbest) return __clamp__(velocity, v_max)
Guaranteed convergence velocity update. Args: particle: cipy.algorithms.pso.Particle: Particle to update the velocity for. social: cipy.algorithms.pso.Particle: The social best for the particle. state: cipy.algorithms.pso.State: The state of the PSO algorithm. Returns: numpy.ndarray: the calculated velocity.
8,506
def parse_cache_control(self, headers): retval = {} cc_header = if in headers: cc_header = if cc_header in headers: parts = headers[cc_header].split() parts_with_args = [ tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=") ] parts_wo_args = [ (name.strip().lower(), 1) for name in parts if -1 == name.find("=") ] retval = dict(parts_with_args + parts_wo_args) return retval
Parse the cache control headers returning a dictionary with values for the different directives.
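A self-contained sketch of the same directive parsing; the delimiter the original splits on was stripped from this dump, so the comma used here is an assumption, and the header-name lookup is omitted:
def parse_cache_control_value(value):
    # Split "max-age=3600, no-cache" into {'max-age': '3600', 'no-cache': 1}.
    parts = [p.strip().lower() for p in value.split(',')]
    with_args = [tuple(x.strip() for x in p.split('=', 1)) for p in parts if '=' in p]
    without_args = [(p, 1) for p in parts if '=' not in p]
    return dict(with_args + without_args)

print(parse_cache_control_value('max-age=3600, No-Cache'))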
8,507
def new(localfile, jottapath, JFS): with open(localfile) as lf: _new = JFS.up(jottapath, lf) return _new
Upload a new file from local disk (doesn't exist on JottaCloud). Returns JottaFile object
8,508
def generate_secret(length=30): rand = random.SystemRandom() ascii_characters = string.ascii_letters + string.digits return .join(rand.choice(ascii_characters) for _ in range(length))
Generate an ASCII secret using random.SystemRandom. Based on oauthlib's common.generate_token function.
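A runnable version of the generator above; the empty join separator was stripped from the dump, so '' is assumed:
import random
import string

def generate_secret(length=30):
    rand = random.SystemRandom()  # CSPRNG backed by os.urandom
    alphabet = string.ascii_letters + string.digits
    return ''.join(rand.choice(alphabet) for _ in range(length))

print(generate_secret(16))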
8,509
def _check_jwt_claims(jwt_claims): current_time = time.time() expiration = jwt_claims[u"exp"] if not isinstance(expiration, INT_TYPES): raise suppliers.UnauthenticatedException(u) if current_time >= expiration: raise suppliers.UnauthenticatedException(u"The auth token has already expired") if u"nbf" not in jwt_claims: return not_before_time = jwt_claims[u"nbf"] if not isinstance(not_before_time, INT_TYPES): raise suppliers.UnauthenticatedException(u) if current_time < not_before_time: raise suppliers.UnauthenticatedException(u)
Checks whether the JWT claims should be accepted. Specifically, this method checks the "exp" claim and the "nbf" claim (if present), and raises UnauthenticatedException if 1) the current time is before the time identified by the "nbf" claim, or 2) the current time is equal to or after the time identified by the "exp" claim. Args: jwt_claims: the JWT claims whose expiration is to be checked. Raises: UnauthenticatedException: When the "exp" claim is malformed or the JWT has already expired.
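The core exp/nbf check can be sketched without the suppliers module; the exception type and messages below are placeholders, not the original ones:
import time

def check_exp_nbf(jwt_claims):
    # Accept the token only inside its validity window [nbf, exp).
    now = time.time()
    if now >= jwt_claims['exp']:
        raise ValueError('auth token has already expired')
    if 'nbf' in jwt_claims and now < jwt_claims['nbf']:
        raise ValueError('auth token is not valid yet')

check_exp_nbf({'exp': time.time() + 60, 'nbf': time.time() - 60})  # passes silently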
8,510
def clean_ret_type(ret_type): ret_type = get_printable(ret_type).strip() if ret_type == : ret_type = for bad in [ , , , , , , , ]: if bad in ret_type: ret_type = ret_type.replace(bad, ).strip() logging.debug(_(), bad) return ret_type
Clean the erroneously parsed return type.
8,511
def digest(instr, checksum='md5'): hashing_funcs = { : __salt__[], : __salt__[], : __salt__[], } hash_func = hashing_funcs.get(checksum) if hash_func is None: raise salt.exceptions.CommandExecutionError( "Hash func is not supported.".format(checksum)) return hash_func(instr)
Return a checksum digest for a string. instr: A string. checksum: ``md5`` The hashing algorithm to use to generate checksums. Valid options: md5, sha256, sha512. CLI Example: .. code-block:: bash salt '*' hashutil.digest 'get salted'
8,512
def render_to_response(self, context, indent=None): "Returns a JSON response containing 'context' as payload" return self.get_json_response(self.convert_context_to_json(context, indent=indent))
Returns a JSON response containing 'context' as payload
8,513
def deleteAllNetworkViews(self, networkId, verbose=None): response=api(url=self.___url++str(networkId)+, method="DELETE", verbose=verbose) return response
Deletes all Network Views available in the Network specified by the `networkId` parameter. Cytoscape can have multiple views per network model, but this feature is not exposed in the Cytoscape GUI. GUI access is limited to the first available view only. :param networkId: SUID of the Network :param verbose: print more :returns: default: successful operation
8,514
def as_indexable(array): if isinstance(array, ExplicitlyIndexed): return array if isinstance(array, np.ndarray): return NumpyIndexingAdapter(array) if isinstance(array, pd.Index): return PandasIndexAdapter(array) if isinstance(array, dask_array_type): return DaskIndexingAdapter(array) raise TypeError(.format(type(array)))
This function always returns an ExplicitlyIndexed subclass, so that vectorized indexing is always possible with the returned object.
8,515
def remove_line(self, section, line): try: s = self._get_section(section, create=False) except KeyError: return 0 return s.remove(line)
Remove all instances of a line. Returns: int: the number of lines removed
8,516
def vqa_attention_base(): hparams = common_hparams.basic_params1() hparams.batch_size = 128 hparams.use_fixed_batch_size = True, hparams.optimizer = "adam" hparams.optimizer_adam_beta1 = 0.9 hparams.optimizer_adam_beta2 = 0.999 hparams.optimizer_adam_epsilon = 1e-8 hparams.weight_decay = 0. hparams.clip_grad_norm = 0. hparams.initializer = "xavier" hparams.learning_rate = 0.5 hparams.learning_rate_schedule = "legacy" hparams.learning_rate_warmup_steps = 0 hparams.learning_rate_decay_scheme = "exp" hparams.learning_rate_decay_rate = 0.5 hparams.learning_rate_decay_steps = 50000 hparams.dropout = 0.5 hparams.summarize_grads = True hparams.summarize_vars = True hparams.label_smoothing = 0. hparams.multiply_embedding_mode = "" hparams.add_hparam("resize_side", 512) hparams.add_hparam("height", 448) hparams.add_hparam("width", 448) hparams.add_hparam("distort", True) hparams.add_hparam("train_resnet", False) hparams.add_hparam("rnn_type", "lstm") hparams.add_hparam("num_rnn_layers", 1) hparams.add_hparam("max_question_length", 15) hparams.hidden_size = 512 hparams.add_hparam("attn_dim", 512) hparams.add_hparam("num_glimps", 2) hparams.add_hparam("num_mlp_layers", 1) hparams.add_hparam("mlp_dim", 1024) hparams.add_hparam("image_input_type", "image") hparams.add_hparam("image_model_fn", "resnet_v1_152") hparams.add_hparam("image_feat_size", 0) hparams.norm_type = "layer" hparams.layer_preprocess_sequence = "n" hparams.layer_postprocess_sequence = "da" hparams.layer_prepostprocess_dropout = 0.3 hparams.attention_dropout = 0.1 hparams.relu_dropout = 0.1 hparams.image_hidden_size = 2048 hparams.add_hparam("num_encoder_layers", 1) hparams.add_hparam("num_heads", 8) hparams.add_hparam("attention_key_channels", 0) hparams.add_hparam("attention_value_channels", 0) hparams.add_hparam("image_filter_size", 1024) hparams.add_hparam("self_attention_type", "dot_product") hparams.add_hparam("scale_dotproduct", True) return hparams
VQA attention baseline hparams.
8,517
def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_tx_accepts(self, **kwargs): config = ET.Element("config") fcoe_get_interface = ET.Element("fcoe_get_interface") config = fcoe_get_interface output = ET.SubElement(fcoe_get_interface, "output") fcoe_intf_list = ET.SubElement(output, "fcoe-intf-list") fcoe_intf_fcoe_port_id_key = ET.SubElement(fcoe_intf_list, "fcoe-intf-fcoe-port-id") fcoe_intf_fcoe_port_id_key.text = kwargs.pop() fcoe_intf_tx_accepts = ET.SubElement(fcoe_intf_list, "fcoe-intf-tx-accepts") fcoe_intf_tx_accepts.text = kwargs.pop() callback = kwargs.pop(, self._callback) return callback(config)
Auto Generated Code
8,518
def serialize_with_sampled_logs(self, logs_limit=-1): return { : self.id, : self.path_name, : self.name, : self.is_unregistered, : [log.serialize for log in self.sampled_logs(logs_limit)], : self.args.serialize if self.args is not None else [], : [cmd.serialize for cmd in self.commands], : [cmd.serialize for cmd in self.snapshots], : self.log_modified_at.isoformat() }
serialize a result with up to `logs_limit` logs. If `logs_limit` is -1, this function will return a result with all its logs.
8,519
def parse(): parser = argparse.ArgumentParser( description=) parser.add_argument( , , help=) parser.add_argument( , action=, help=) parser.add_argument( , action=, help=) parser.add_argument( , action=, help=) parser.add_argument( , type=int, help=) parser.add_argument( , help=) parser.add_argument( , choices=[, , , ], help=) parser.add_argument( , help=( )) parser.add_argument( , action=, help=) parser.add_argument( , help="Override Boto configuration with the following AWS access key") parser.add_argument( , help="Override Boto configuration with the following AWS secret key") daemon_ag = parser.add_argument_group() daemon_ag.add_argument( , help=( )) daemon_ag.add_argument( , default=, help=( )) daemon_ag.add_argument( , default=, help=) dynamodb_ag = parser.add_argument_group() dynamodb_ag.add_argument( , , help=) dynamodb_ag.add_argument( , , help=( )) r_scaling_ag = parser.add_argument_group() r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=str, help=) r_scaling_ag.add_argument( , type=str, help=) r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=int, help=) r_scaling_ag.add_argument( , type=int, help=) w_scaling_ag = parser.add_argument_group() w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=str, help=) w_scaling_ag.add_argument( , type=str, help=) w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=int, help=) w_scaling_ag.add_argument( , type=int, help=) args = parser.parse_args() if args.version: internal_config_file = ConfigParser.RawConfigParser() internal_config_file.optionxform = lambda option: option internal_config_file.read( os.path.abspath( os.path.join( os.path.dirname(__file__), ))) print .format( internal_config_file.get(, )) sys.exit(0) configuration = {} for arg in args.__dict__: if args.__dict__.get(arg) is not None: configuration[arg] = args.__dict__.get(arg) return configuration
Parse command line options
8,520
def to_time(value, ctx): if isinstance(value, str): time = ctx.get_date_parser().time(value) if time is not None: return time elif isinstance(value, datetime.time): return value elif isinstance(value, datetime.datetime): return value.astimezone(ctx.timezone).time() raise EvaluationError("Can't convert '%s' to a time" % str(value))
Tries conversion of any value to a time
8,521
def recursive_glob(base_directory, regex=): files = glob(op.join(base_directory, regex)) for path, dirlist, filelist in os.walk(base_directory): for dir_name in dirlist: files.extend(glob(op.join(path, dir_name, regex))) return files
Uses glob to find all files or folders that match the regex starting from the base_directory. Parameters ---------- base_directory: str regex: str Returns ------- files: list
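An equivalent standalone walk, matching files only; fnmatch is used here instead of glob, and the default pattern '*' is an assumption since the original default was stripped from the dump:
import fnmatch
import os

def recursive_glob(base_directory, pattern='*'):
    # Walk the tree and keep every file whose name matches the pattern.
    matches = []
    for path, _dirs, files in os.walk(base_directory):
        matches.extend(os.path.join(path, name) for name in fnmatch.filter(files, pattern))
    return matches

print(recursive_glob('.', '*.py')[:5])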
8,522
def _get_summary_struct(self): sections = [] fields = [] _features = _precomputed_field(_internal_utils.pretty_print_list(self.features)) _exclude = _precomputed_field(_internal_utils.pretty_print_list(self.excluded_features)) header_fields = [("Features", "features"), ("Excluded Features", "excluded_features")] sections.append("Model Fields") fields.append(header_fields) if self.user_column_interpretations: sections.append("User Specified Interpretations") fields.append(list(sorted(self._get("user_column_interpretations").items()))) column_interpretations = self._get("column_interpretations") features = self._get("features") if self._get("fitted") and features is not None: n_rows = len(features) transform_info = [None]*n_rows for i, f in enumerate(features): interpretation = column_interpretations[f] input_type = self.input_types[f] description, output_type = _get_interpretation_description_and_output_type( interpretation, input_type) transform_info[i] = (f, input_type.__name__, interpretation, description, output_type.__name__) transform_table = _SFrame() transform_table["Column"] = [t[0] for t in transform_info] transform_table["Type"] = [t[1] for t in transform_info] transform_table["Interpretation"] = [t[2] for t in transform_info] transform_table["Transforms"] = [t[3] for t in transform_info] transform_table["Output Type"] = [t[4] for t in transform_info] fields[-1].append(transform_table) return fields, sections
Returns a structured description of the model, including (where relevant) the schema of the training data, description of the training data, training statistics, and model hyperparameters. Returns ------- sections : list (of list of tuples) A list of summary sections. Each section is a list. Each item in a section list is a tuple of the form: ('<feature>','<field>') section_titles: list A list of section titles. The order matches that of the 'sections' object.
8,523
def get(self, queue_get): if isinstance(queue_get, (tuple, list)): self.result.extend(queue_get)
Get states from a multiprocessing queue.
8,524
def find_and_convert(self, attr_name: str, attr_value: S, desired_attr_type: Type[T], logger: Logger, options: Dict[str, Dict[str, Any]]) -> T: if robust_isinstance(attr_value, desired_attr_type) and not is_collection(desired_attr_type): return attr_value else: generic, approx, exact = self.get_all_conversion_chains(type(attr_value), desired_attr_type) all_chains = generic + approx + exact if len(all_chains) > 0: all_errors = dict() for chain in reversed(all_chains): try: return chain.convert(desired_attr_type, attr_value, logger, options) except Exception as e: all_errors[chain] = e raise AttrConversionException.create(attr_name, attr_value, desired_attr_type, all_errors) else: raise NoConverterFoundForObjectType.create(self, attr_value, desired_attr_type)
Utility method to convert some value into the desired type. It relies on get_all_conversion_chains to find the converters and applies them in the correct order. :return:
8,525
def inference_q(self, next_action_arr): q_arr = next_action_arr.reshape((next_action_arr.shape[0], -1)) self.__q_arr_list.append(q_arr) while len(self.__q_arr_list) > self.__seq_len: self.__q_arr_list = self.__q_arr_list[1:] while len(self.__q_arr_list) < self.__seq_len: self.__q_arr_list.append(self.__q_arr_list[-1]) q_arr = np.array(self.__q_arr_list) q_arr = q_arr.transpose((1, 0, 2)) q_arr = self.__lstm_model.inference(q_arr) return q_arr[:, -1].reshape((q_arr.shape[0], 1))
Infer the Q-value. Args: next_action_arr: `np.ndarray` of actions. Returns: `np.ndarray` of Q-values.
8,526
def do_load_modules(self, modules): _ts = time.time() logger.info("Loading modules...") if self.modules_manager.load_and_init(modules): if self.modules_manager.instances: logger.info("I correctly loaded my modules: [%s]", .join([inst.name for inst in self.modules_manager.instances])) else: logger.info("I do not have any module") else: logger.error("Errors were encountered when checking and loading modules:") for msg in self.modules_manager.configuration_errors: logger.error(msg) if self.modules_manager.configuration_warnings: for msg in self.modules_manager.configuration_warnings: logger.warning(msg) statsmgr.gauge(, len(modules)) statsmgr.timer(, time.time() - _ts)
Wrapper for calling load_and_init method of modules_manager attribute :param modules: list of modules that should be loaded by the daemon :return: None
8,527
def skip(type_name, filename): report = [.format(type_name, filename)] report_stats = ReportStats(filename, report=report) return report_stats
Provide reporting statistics for a skipped file.
8,528
def set_selection(self, selection, name="default", executor=None): def create(current): return selection self._selection(create, name, executor=executor, execute_fully=True)
Sets the selection object :param selection: Selection object :param name: selection 'slot' :param executor: :return:
8,529
def load(self): print "Loading data for %s..." % self.getName() self._dataHandle = self._stream.data( since=self._since, until=self._until, limit=self._limit, aggregate=self._aggregate ) self._data = self._dataHandle.data() self._headers = self._dataHandle.headers() print "Loaded %i rows." % len(self)
Loads this stream by calling River View for data.
8,530
def get_property(self): scope = self def fget(self): value = scope.func(self) if value is None or value is undefined: return None return scope.validate(self, value) def fset(self, value): if scope.set_func is None: raise AttributeError() scope.set_func(self, scope.validate(self, value)) def fdel(self): if scope.del_func is None: raise AttributeError() scope.del_func(self) return property(fget=fget, fset=fset, fdel=fdel, doc=scope.sphinx())
Establishes the dynamic behavior of Property values
8,531
def _CreateNewSeasonDir(self, seasonNum): seasonDirName = "Season {0}".format(seasonNum) goodlogging.Log.Info("RENAMER", "Generated directory name: ".format(seasonDirName)) if self._skipUserInput is False: response = goodlogging.Log.Input("RENAMER", "Enter to accept this directory, to use base show directory, to skip this file or enter a new directory name to use: ") response = util.CheckEmptyResponse(response) else: response = if response.lower() == : return elif response.lower() == : return seasonDirName elif response.lower() == : return None else: return response
Creates a new season directory name in the form 'Season <NUM>'. If skipUserInput is True this will be accepted by default, otherwise the user can choose to accept this, use the base show directory or enter a different name. Parameters ---------- seasonNum : int Season number. Returns ---------- string or None If the user accepts the generated directory name or gives a new name this will be returned. If the user chooses to use the base directory an empty string is returned. If the user chooses to skip at this input stage None is returned.
8,532
def _write_particle_information(gsd_file, structure, xyz, ref_distance, ref_mass, ref_energy, rigid_bodies): gsd_file.particles.N = len(structure.atoms) gsd_file.particles.position = xyz / ref_distance types = [atom.name if atom.type == else atom.type for atom in structure.atoms] unique_types = list(set(types)) unique_types.sort(key=natural_sort) gsd_file.particles.types = unique_types typeids = np.array([unique_types.index(t) for t in types]) gsd_file.particles.typeid = typeids masses = np.array([atom.mass for atom in structure.atoms]) masses[masses==0] = 1.0 gsd_file.particles.mass = masses / ref_mass charges = np.array([atom.charge for atom in structure.atoms]) e0 = 2.39725e-4 charge_factor = (4.0*np.pi*e0*ref_distance*ref_energy)**0.5 gsd_file.particles.charge = charges / charge_factor if rigid_bodies: rigid_bodies = [-1 if body is None else body for body in rigid_bodies] gsd_file.particles.body = rigid_bodies
Write out the particle information.
8,533
def EnableEditingOnService(self, url, definition = None): adminFS = AdminFeatureService(url=url, securityHandler=self._securityHandler) if definition is None: definition = collections.OrderedDict() definition[] = False definition[] = True definition[] = {} definition[][] = False definition[][] = False definition[][] = True definition[][] = True definition[] = "Query,Editing,Create,Update,Delete" existingDef = {} existingDef[] = adminFS.capabilities existingDef[] = adminFS.allowGeometryUpdates enableResults = adminFS.updateDefinition(json_dict=definition) if in enableResults: return enableResults[] adminFS = None del adminFS print (enableResults) return existingDef
Enables editing capabilities on a feature service. Args: url (str): The URL of the feature service. definition (dict): A dictionary containing valid definition values. Defaults to ``None``. Returns: dict: The existing feature service definition capabilities. When ``definition`` is not provided (``None``), the following values are used by default: +------------------------------+------------------------------------------+ | Key | Value | +------------------------------+------------------------------------------+ | hasStaticData | ``False`` | +------------------------------+------------------------------------------+ | allowGeometryUpdates | ``True`` | +------------------------------+------------------------------------------+ | enableEditorTracking | ``False`` | +------------------------------+------------------------------------------+ | enableOwnershipAccessControl | ``False`` | +------------------------------+------------------------------------------+ | allowOthersToUpdate | ``True`` | +------------------------------+------------------------------------------+ | allowOthersToDelete | ``True`` | +------------------------------+------------------------------------------+ | capabilities | ``"Query,Editing,Create,Update,Delete"`` | +------------------------------+------------------------------------------+
8,534
def QA_util_get_trade_datetime(dt=datetime.datetime.now()): if QA_util_if_trade(str(dt.date())) and dt.time() < datetime.time(15, 0, 0): return str(dt.date()) else: return QA_util_get_real_date(str(dt.date()), trade_date_sse, 1)
The real trading date. Returns: [type] -- [description]
8,535
def load_mnist(): mnist = skdata.mnist.dataset.MNIST() mnist.meta def arr(n, dtype): arr = mnist.arrays[n] return arr.reshape((len(arr), -1)).astype(dtype) train_images = arr(, np.float32) / 128 - 1 train_labels = arr(, np.uint8) return ((train_images[:50000], train_labels[:50000, 0]), (train_images[50000:], train_labels[50000:, 0]))
Load the MNIST digits dataset.
8,536
def output_eol_literal_marker(self, m): marker = if m.group(1) is None else return self.renderer.eol_literal_marker(marker)
Pass through rest link.
8,537
def from_edgelist(self, edges, strict=True): for edge in edges: if len(edge) == 3: self.update(edge[1], edge[0], **edge[2]) elif len(edge) == 2: self.update(edge[1], edge[0]) elif strict: raise ValueError(.format(str(edge)))
Load transform data from an edge list into the current scene graph. Parameters ------------- edgelist : (n,) tuples (node_a, node_b, {key: value}) strict : bool If true, raise a ValueError when a malformed edge is passed in a tuple.
8,538
def get_summary_str(self, sec2d_nt): data = self.get_summary_data(sec2d_nt) return "{M} GO IDs placed into {N} sections; {U} unplaced GO IDs".format( N=len(data[]), M=len(data[]), U=len(data[]))
Get string describing counts of placed/unplaced GO IDs and count of sections.
8,539
def add_line_data(self, line_data): for filename, linenos in iitems(line_data): self.lines.setdefault(filename, {}).update(linenos)
Add executed line data. `line_data` is { filename: { lineno: None, ... }, ...}
8,540
def collect_from_bundles(self, bundles: List[Bundle]) -> Dict[str, Any]: all_objects = {} key_bundles = {} object_keys = set() for bundle in bundles: from_bundle = self.collect_from_bundle(bundle) if isinstance(bundle, AppBundle): all_objects.update(from_bundle) break from_bundle_keys = set(from_bundle.keys()) conflicts = object_keys.intersection(from_bundle_keys) if conflicts: msg = [f f] for key in conflicts: msg.append(f) raise NameCollisionError(.join(msg)) all_objects.update(from_bundle) object_keys = object_keys.union(from_bundle_keys) key_bundles.update({k: bundle for k in from_bundle_keys}) return all_objects
Collect objects where :meth:`type_check` returns ``True`` from bundles. Names (keys) are expected to be unique across bundles, except for the app bundle, which can override anything from other bundles.
8,541
def do_roles(self, service): if not self.has_cluster(): return None if not service: return None if service == "all": if not self.CACHED_SERVICES: self.services_autocomplete(, service, 0, 0) for s in self.CACHED_SERVICES: print("= " + s.upper() + " =") self.do_roles(s) return None try: service = api.get_cluster(self.cluster).get_service(service) headers = ["ROLE TYPE", "HOST", "ROLE NAME", "STATE", "HEALTH", "CONFIG"] align = ["ROLE TYPE", "ROLE NAME", "HOST"] rows = [] for roletype in service.get_role_types(): for role in service.get_roles_by_type(roletype): if role.configStale: config = "STALE" else: config = "UP TO DATE" rows.append([role.type, role.hostRef.hostId, role.name, role.roleState, role.healthSummary, config]) self.generate_output(headers, rows, align=align) except ApiException: print("Service not found")
Role information Usage: > roles <servicename> Display role information for service > roles all Display all role information for cluster
8,542
def load_pyproject_toml( use_pep517, pyproject_toml, setup_py, req_name ): has_pyproject = os.path.isfile(pyproject_toml) has_setup = os.path.isfile(setup_py) if has_pyproject: with io.open(pyproject_toml, encoding="utf-8") as f: pp_toml = pytoml.load(f) build_system = pp_toml.get("build-system") else: build_system = None assert use_pep517 is not None assert build_system is not None error_template = ( "{package} has a pyproject.toml file that does not comply " "with PEP 518: {reason}" ) if "requires" not in build_system: raise InstallationError( error_template.format(package=req_name, reason=( "it has a table but not " " which is mandatory in the table" )) ) requires = build_system["requires"] if not _is_list_of_str(requires): raise InstallationError(error_template.format( package=req_name, reason=" is not a list of strings.", )) backend = build_system.get("build-backend") check = [] if backend is None: backend = "setuptools.build_meta:__legacy__" check = ["setuptools>=40.8.0", "wheel"] return (requires, backend, check)
Load the pyproject.toml file. Parameters: use_pep517 - Has the user requested PEP 517 processing? None means the user hasn't explicitly specified. pyproject_toml - Location of the project's pyproject.toml file setup_py - Location of the project's setup.py file req_name - The name of the requirement we're processing (for error reporting) Returns: None if we should use the legacy code path, otherwise a tuple ( requirements from pyproject.toml, name of PEP 517 backend, requirements we should check are installed after setting up the build environment )
8,543
def set_source_nodes(self, source_nodes): r if max(source_nodes) >= self.__nodes or min(source_nodes) < 0: raise ValueError(.format(max(source_nodes), min(source_nodes), self.__nodes - 1)) for snode in source_nodes: self.__graph.add_tweights(int(snode), self.MAX, 0)
r""" Set multiple source nodes and compute their t-weights. Parameters ---------- source_nodes : sequence of integers Declare the source nodes via their ids. Raises ------ ValueError If a passed node id does not refer to any node of the graph (i.e. it is either higher than the initially set number of nodes or lower than zero). Notes ----- It does not get checked if one of the supplied source-nodes already has a weight assigned (e.g. by passing it to `set_sink_nodes`). This can occur when the foreground- and background-markers cover the same region. In this case the order of setting the terminal nodes can affect the graph and therefore the graph-cut result.
8,544
def exists(self, path): (bucket, key) = self._path_to_bucket_and_key(path) if self._is_root(key): return True if self._exists(bucket, key): return True if self.isdir(path): return True logger.debug(, path) return False
Does provided path exist on S3?
8,545
def read_preferences_file(self): user_data_dir = find_pmag_dir.find_user_data_dir("thellier_gui") if not user_data_dir: return {} if os.path.exists(user_data_dir): pref_file = os.path.join(user_data_dir, "thellier_gui_preferences.json") if os.path.exists(pref_file): with open(pref_file, "r") as pfile: return json.load(pfile) return {}
If json preferences file exists, read it in.
8,546
def load(self, filename): with open(filename, ) as fin: proxies = json.load(fin) for protocol in proxies: for proxy in proxies[protocol]: self.proxies[protocol][proxy[]] = Proxy( proxy[], proxy[], proxy[], proxy[]) self.addr_list[protocol].append(proxy[])
Load proxies from file
8,547
def in_git_clone(): gitdir = '.git' return os.path.isdir(gitdir) and ( os.path.isdir(os.path.join(gitdir, 'objects')) and os.path.isdir(os.path.join(gitdir, 'refs')) and os.path.exists(os.path.join(gitdir, 'HEAD')) )
Returns `True` if the current directory is a git repository Logic is 'borrowed' from :func:`git.repo.fun.is_git_dir`
8,548
def example_clinical_data(study_name, environment): odm = ODM("test system")( ClinicalData("Mediflex", "DEV")( SubjectData("MDSOL", "IJS TEST4", transaction_type="Insert")( StudyEventData("SUBJECT")( FormData("EN", transaction_type="Update")( ItemGroupData()( ItemData("SUBJINIT", "AAA")( AuditRecord(edit_point=AuditRecord.EDIT_DATA_MANAGEMENT, used_imputation_method= False, identifier=, include_file_oid=False)( UserRef("isparks"), LocationRef("MDSOL"), ReasonForChange("Data Entry Error"), DateTimeStamp(datetime(2015, 9, 11, 10, 15, 22, 80)) ), MdsolQuery(value="Subject initials should be 2 chars only.", recipient="Site from System", status=QueryStatusType.Open) ), ItemData("SUBJID", ) ) ) ) ) ) ) return odm
Test demonstrating building clinical data
8,549
def select_code(self, code): def _select_code(code): return self.data.loc[(slice(None), code), :] try: return self.new(_select_code(code), self.type, self.if_fq) except: raise ValueError(.format(code))
Select stocks by code. @2018/06/03 a pandas indexing issue (https://github.com/pandas-dev/pandas/issues/21299) required de-duplicating the index with set_index first; this affected selects, select_time, select_month and get_bar. @2018/06/04 raise ValueError when the selected time is out of range or the stock does not exist. @2018/06/04 the pandas indexing issue has been resolved, so everything has been restored.
8,550
def normalize(Y, normalization_type=): Y = np.asarray(Y, dtype=float) if np.max(Y.shape) != Y.size: raise NotImplementedError() if normalization_type == : Y_norm = Y - Y.mean() std = Y.std() if std > 0: Y_norm /= std elif normalization_type == : Y_norm = Y - Y.min() y_range = np.ptp(Y) if y_range > 0: Y_norm /= y_range Y_norm = 2 * (Y_norm - 0.5) else: raise ValueError(.format(normalization_type)) return Y_norm
Normalize the vector Y using statistics or its range. :param Y: Row or column vector that you want to normalize. :param normalization_type: String specifying the kind of normalization to use. Options are 'stats' to use mean and standard deviation, or 'maxmin' to use the range of function values. :return Y_normalized: The normalized vector.
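A condensed numpy sketch of the two modes described above ('stats' and 'maxmin'); as in the original, zero variance or zero range leaves the centred values unscaled:
import numpy as np

def normalize(y, kind='stats'):
    y = np.asarray(y, dtype=float)
    if kind == 'stats':              # zero mean, unit standard deviation
        out = y - y.mean()
        if y.std() > 0:
            out /= y.std()
    elif kind == 'maxmin':           # rescale the range to [-1, 1]
        out = y - y.min()
        if np.ptp(y) > 0:
            out /= np.ptp(y)
        out = 2 * (out - 0.5)
    else:
        raise ValueError('unknown normalization: %s' % kind)
    return out

print(normalize([1.0, 2.0, 3.0], 'maxmin'))   # [-1.  0.  1.]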
8,551
def set_network_connection(self, network): mode = network.mask if isinstance(network, self.ConnectionType) else network return self.ConnectionType(self._driver.execute( Command.SET_NETWORK_CONNECTION, { : , : {: mode}})[])
Set the network connection for the remote device. Example of setting airplane mode:: driver.mobile.set_network_connection(driver.mobile.AIRPLANE_MODE)
8,552
def site(self, site): if site is None: raise ValueError("Invalid value for `site`, must not be `None`") if site is not None and len(site) > 255: raise ValueError("Invalid value for `site`, length must be less than or equal to `255`") if site is not None and len(site) < 3: raise ValueError("Invalid value for `site`, length must be greater than or equal to `3`") if site is not None and not re.search(, site): raise ValueError("Invalid value for `site`, must be a follow pattern or equal to `/(?:[a-z](?:[-a-z0-9]{0,61}[a-z0-9])?\\.)+[a-z]{2,}(?:@[a-z0-9](?:[-.](?=[a-z0-9])|[a-z0-9]){0,29})?/`") self._site = site
Sets the site of this OauthTokenReference. :param site: The site of this OauthTokenReference. :type: str
8,553
def connect(self): self.serial = serial.Serial(port=self.port, baudrate=self.baudrate, timeout=self.timeout) self.alive = True self.rxThread = threading.Thread(target=self._readLoop) self.rxThread.daemon = True self.rxThread.start()
Connects to the device and starts the read thread
8,554
def get_games(ctx): username = ctx.obj[] games = User(username).get_games_owned() for game in sorted(games.values(), key=itemgetter()): click.echo( % (game[], game[])) click.secho( % (username, len(games)), fg=)
Prints out games owned by a Steam user.
8,555
def format(self, record): if record.levelno >= logging.ERROR: color = colorama.Fore.RED elif record.levelno >= logging.WARNING: color = colorama.Fore.YELLOW elif record.levelno >= logging.INFO: color = colorama.Fore.RESET else: color = colorama.Fore.CYAN format_template = ( ) if sys.stdout.isatty(): self._fmt = format_template.format( colorama.Style.BRIGHT, color, colorama.Fore.RESET, colorama.Style.RESET_ALL ) else: self._fmt = format_template.format(*[] * 4) if six.PY3: self._style._fmt = self._fmt return super(_LogColorFormatter, self).format(record)
Format the log record with timestamps and level based colors. Args: record: The log record to format. Returns: The formatted log record.
8,556
def notify_peer_message(self, message, sender_id): payload = message.SerializeToString() self._notify( "consensus_notifier_notify_peer_message", payload, len(payload), sender_id, len(sender_id))
A new message was received from a peer
8,557
def locked_get(self): credentials = None if self._cache: json = self._cache.get(self._key_name) if json: credentials = client.Credentials.new_from_json(json) if credentials is None: entity = self._get_entity() if entity is not None: credentials = getattr(entity, self._property_name) if self._cache: self._cache.set(self._key_name, credentials.to_json()) if credentials and hasattr(credentials, ): credentials.set_store(self) return credentials
Retrieve Credential from datastore. Returns: oauth2client.Credentials
8,558
def __configure_client(self, config): self.logger.info("Configuring p4 client...") client_dict = config.to_dict() client_dict[] = os.path.expanduser(config.get()) os.chdir(client_dict[]) client_dict[] = system.NODE client_dict[] = config[] % self.environment.target.get_context_dict() client = re.sub(, , p4client_template % client_dict) self.logger.info(lib.call("%s client -i" % self.p4_command, stdin=client, env=self.p4environ, cwd=client_dict[]))
write the perforce client
8,559
def iteritems(self): for m in self.mappings: yield self.indexes[m.clause][m.target], m
Iterates over all mappings Yields ------ (int,Mapping) The next pair (index, mapping)
8,560
def delay_for( self, wait: typing.Union[int, float], identifier: typing.Any, ) -> bool: raise NotImplementedError()
Defer the execution of a function for some number of seconds. Args: wait (typing.Union[int, float]): A numeric value that represents the number of seconds that must pass before the callback becomes available for execution. All given values must be positive. identifier (typing.Any): The identifier returned from a call to defer or defer_for. Returns: bool: True if the call is delayed. False if the identifier is invalid or if the deferred call is already executed.
8,561
def treat(request_body): if isinstance(request_body, six.binary_type): request_body = request_body.decode() try: data = json.loads(request_body) except ValueError: raise exceptions.UnknownAPIResource() unsafe_api_resource = APIResource.factory(data) try: consistent_api_resource = unsafe_api_resource.get_consistent_resource() except AttributeError: raise exceptions.UnknownAPIResource() return consistent_api_resource
Treat a notification and guarantee its authenticity. :param request_body: The request body in plain text. :type request_body: string :return: A safe APIResource :rtype: APIResource
8,562
def _select_features(example, feature_list=None): feature_list = feature_list or ["inputs", "targets"] return {f: example[f] for f in feature_list}
Select a subset of features from the example dict.
8,563
def parse_http_date(date): MONTHS = .split() __D = r __D2 = r __M = r __Y = r __Y2 = r __T = r RFC1123_DATE = re.compile(r % (__D, __M, __Y, __T)) RFC850_DATE = re.compile(r % (__D, __M, __Y2, __T)) ASCTIME_DATE = re.compile(r % (__M, __D2, __T, __Y)) for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE: m = regex.match(date) if m is not None: break else: raise ValueError("%r is not in a valid HTTP date format" % date) try: year = int(m.group()) if year < 100: if year < 70: year += 2000 else: year += 1900 month = MONTHS.index(m.group().lower()) + 1 day = int(m.group()) hour = int(m.group()) min = int(m.group()) sec = int(m.group()) result = datetime.datetime(year, month, day, hour, min, sec) return calendar.timegm(result.utctimetuple()) except Exception as exc: raise ValueError("%r is not a valid date" % date) from exc
Parse a date format as specified by HTTP RFC7231 section 7.1.1.1. The three formats allowed by the RFC are accepted, even if only the first one is still in widespread use. Return an integer expressed in seconds since the epoch, in UTC. Implementation copied from Django. https://github.com/django/django/blob/master/django/utils/http.py#L157 License: BSD 3-clause
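The regular expressions in the code above were lost in this dump; for the common RFC 1123 form the standard library can do the parsing. This is an alternative sketch, not the Django implementation:
import calendar
from email.utils import parsedate

def parse_rfc1123_date(value):
    # parsedate returns a 9-tuple (no timezone conversion), or None if it cannot parse.
    parsed = parsedate(value)
    if parsed is None:
        raise ValueError('%r is not a valid HTTP date' % value)
    return calendar.timegm(parsed)

print(parse_rfc1123_date('Sun, 06 Nov 1994 08:49:37 GMT'))   # 784111777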
8,564
def dtdQAttrDesc(self, elem, name, prefix): ret = libxml2mod.xmlGetDtdQAttrDesc(self._o, elem, name, prefix) if ret is None:raise treeError() __tmp = xmlAttribute(_obj=ret) return __tmp
Search the DTD for the description of this qualified attribute on this element.
8,565
def set_hyperparams(self, new_params): new_params = scipy.asarray(new_params, dtype=float) if len(new_params) == len(self.free_params): if self.enforce_bounds: for idx, new_param, bound in zip(range(0, len(new_params)), new_params, self.free_param_bounds): if bound[0] is not None and new_param < bound[0]: new_params[idx] = bound[0] elif bound[1] is not None and new_param > bound[1]: new_params[idx] = bound[1] self.params[~self.fixed_params] = new_params else: raise ValueError("Length of new_params must be %s!" % (len(self.free_params),))
Sets the free hyperparameters to the new parameter values in new_params. Parameters ---------- new_params : :py:class:`Array` or other Array-like, (len(:py:attr:`self.free_params`),) New parameter values, ordered as dictated by the docstring for the class.
8,566
def fix_multiple_files(filenames, options, output=None): filenames = find_files(filenames, options.recursive, options.exclude) if options.jobs > 1: import multiprocessing pool = multiprocessing.Pool(options.jobs) pool.map(_fix_file, [(name, options) for name in filenames]) else: for name in filenames: _fix_file((name, options, output))
Fix list of files. Optionally fix files recursively.
8,567
def edit_ipv6(self, ip6, descricao, id_ip): if not is_valid_int_param(id_ip): raise InvalidParameterError( u) if ip6 is None or ip6 == "": raise InvalidParameterError(u) ip_map = dict() ip_map[] = descricao ip_map[] = ip6 ip_map[] = id_ip url = "ipv6/edit/" code, xml = self.submit({: ip_map}, , url) return self.response(code, xml)
Edit an IPv6. :param ip6: An IPv6 available to save in format xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx:xxxx. :param descricao: IP description. :param id_ip: IPv6 identifier. Integer value and greater than zero. :return: None
8,568
def crop(self, start_timestamp, end_timestamp): output = {} for key, value in self.items(): if key >= start_timestamp and key <= end_timestamp: output[key] = value if output: return TimeSeries(output) else: raise ValueError()
Return a new TimeSeries object containing all the timestamps and values within the specified range. :param int start_timestamp: the start timestamp value :param int end_timestamp: the end timestamp value :return: :class:`TimeSeries` object.
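The filtering itself is a one-line dict comprehension; a plain-dict sketch of the behaviour described above:
def crop(series, start_timestamp, end_timestamp):
    # Keep only samples whose timestamp lies inside the inclusive range.
    kept = {ts: v for ts, v in series.items() if start_timestamp <= ts <= end_timestamp}
    if not kept:
        raise ValueError('no data points inside the requested range')
    return kept

print(crop({1: 'a', 5: 'b', 9: 'c'}, 2, 9))   # {5: 'b', 9: 'c'}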
8,569
def simplex_grid(m, n): r L = num_compositions_jit(m, n) if L == 0: raise ValueError(_msg_max_size_exceeded) out = np.empty((L, m), dtype=np.int_) x = np.zeros(m, dtype=np.int_) x[m-1] = n for j in range(m): out[0, j] = x[j] h = m for i in range(1, L): h -= 1 val = x[h] x[h] = 0 x[m-1] = val - 1 x[h-1] += 1 for j in range(m): out[i, j] = x[j] if val != 1: h = m return out
r""" Construct an array consisting of the integer points in the (m-1)-dimensional simplex :math:`\{x \mid x_0 + \cdots + x_{m-1} = n \}`, or equivalently, the m-part compositions of n, which are listed in lexicographic order. The total number of the points (hence the length of the output array) is L = (n+m-1)!/(n!*(m-1)!) (i.e., (n+m-1) choose (m-1)). Parameters ---------- m : scalar(int) Dimension of each point. Must be a positive integer. n : scalar(int) Number which the coordinates of each point sum to. Must be a nonnegative integer. Returns ------- out : ndarray(int, ndim=2) Array of shape (L, m) containing the integer points in the simplex, aligned in lexicographic order. Notes ----- A grid of the (m-1)-dimensional *unit* simplex with n subdivisions along each dimension can be obtained by `simplex_grid(m, n) / n`. Examples -------- >>> simplex_grid(3, 4) array([[0, 0, 4], [0, 1, 3], [0, 2, 2], [0, 3, 1], [0, 4, 0], [1, 0, 3], [1, 1, 2], [1, 2, 1], [1, 3, 0], [2, 0, 2], [2, 1, 1], [2, 2, 0], [3, 0, 1], [3, 1, 0], [4, 0, 0]]) >>> simplex_grid(3, 4) / 4 array([[ 0. , 0. , 1. ], [ 0. , 0.25, 0.75], [ 0. , 0.5 , 0.5 ], [ 0. , 0.75, 0.25], [ 0. , 1. , 0. ], [ 0.25, 0. , 0.75], [ 0.25, 0.25, 0.5 ], [ 0.25, 0.5 , 0.25], [ 0.25, 0.75, 0. ], [ 0.5 , 0. , 0.5 ], [ 0.5 , 0.25, 0.25], [ 0.5 , 0.5 , 0. ], [ 0.75, 0. , 0.25], [ 0.75, 0.25, 0. ], [ 1. , 0. , 0. ]]) References ---------- A. Nijenhuis and H. S. Wilf, Combinatorial Algorithms, Chapter 5, Academic Press, 1978.
8,570
def asRemoteException(ErrorType): RemoteException = _remoteExceptionCache.get(ErrorType) if RemoteException is None: RemoteException = _newRemoteException(ErrorType) _remoteExceptionCache.setdefault(ErrorType, RemoteException) _remoteExceptionCache.setdefault(RemoteException, RemoteException) return _remoteExceptionCache.get(ErrorType) return RemoteException
return the remote exception version of the error above you can catch errors as usally: >>> try: raise asRemoteException(ValueError) except ValueError: pass or you can catch the remote Exception >>> try: raise asRemoteException(ReferenceError)(ReferenceError(),'') except asRemoteException(ReferenceError): pass
8,571
def iter_descendants(self, strategy="levelorder", is_leaf_fn=None): for n in self.traverse(strategy=strategy, is_leaf_fn=is_leaf_fn): if n is not self: yield n
Returns an iterator over all descendant nodes.
8,572
def execute(self, args): import colorama self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args) cmd_tbl = self.commands_loader.load_command_table(args) command = self._rudimentary_get_command(args) self.cli_ctx.invocation.data[] = command self.commands_loader.load_arguments(command) self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, cmd_tbl=cmd_tbl) self.parser.load_command_table(self.commands_loader) self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, parser=self.parser) arg_check = [a for a in args if a not in [, ]] if not arg_check: self.cli_ctx.completion.enable_autocomplete(self.parser) subparser = self.parser.subparsers[tuple()] self.help.show_welcome(subparser) return CommandResultItem(None, exit_code=0) if args[0].lower() == : args[0] = self.cli_ctx.completion.enable_autocomplete(self.parser) self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args) parsed_args = self.parser.parse_args(args) self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args) self._validation(parsed_args) self.data[] = parsed_args.command cmd = parsed_args.func if hasattr(parsed_args, ): parsed_args.cmd = cmd deprecations = getattr(parsed_args, , []) if cmd.deprecate_info: deprecations.append(cmd.deprecate_info) params = self._filter_params(parsed_args) path_comps = cmd.name.split()[:-1] implicit_deprecate_info = None while path_comps and not implicit_deprecate_info: implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, .join(path_comps)) del path_comps[-1] if implicit_deprecate_info: deprecate_kwargs = implicit_deprecate_info.__dict__.copy() deprecate_kwargs[] = del deprecate_kwargs[] del deprecate_kwargs[] deprecations.append(ImplicitDeprecated(**deprecate_kwargs)) colorama.init() for d in deprecations: print(d.message, file=sys.stderr) colorama.deinit() cmd_result = parsed_args.func(params) cmd_result = todict(cmd_result) event_data = {: cmd_result} self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data) self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data) return CommandResultItem(event_data[], exit_code=0, table_transformer=cmd_tbl[parsed_args.command].table_transformer, is_query_active=self.data[])
Executes the command invocation :param args: The command arguments for this invocation :type args: list :return: The command result :rtype: knack.util.CommandResultItem
8,573
def get_schema(self, filename): table_set = self.read_file(filename) if table_set is None: return [] row_set = table_set.tables[0] offset, headers = headers_guess(row_set.sample) row_set.register_processor(headers_processor(headers)) row_set.register_processor(offset_processor(offset + 1)) types = type_guess(row_set.sample, strict=True) sample = next(row_set.sample) clean = lambda v: str(v) if not isinstance(v, str) else v schema = [] for i, h in enumerate(headers): schema.append([h, str(types[i]), clean(sample[i].value)]) return schema
Guess schema using messytables
8,574
def set_events_callback(self, call_back): logger.info("setting event callback") callback_wrap = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.POINTER(snap7.snap7types.SrvEvent), ctypes.c_int) def wrapper(usrptr, pevent, size): logger.info("callback event: " + self.event_text(pevent.contents)) call_back(pevent.contents) return 0 self._callback = callback_wrap(wrapper) usrPtr = ctypes.c_void_p() return self.library.Srv_SetEventsCallback(self.pointer, self._callback, usrPtr)
Sets the user callback that the Server object has to call when an event is created.
8,575
def _del_thread(self, dwThreadId): try: aThread = self.__threadDict[dwThreadId] del self.__threadDict[dwThreadId] except KeyError: aThread = None msg = "Unknown thread ID %d" % dwThreadId warnings.warn(msg, RuntimeWarning) if aThread: aThread.clear()
Private method to remove a thread object from the snapshot. @type dwThreadId: int @param dwThreadId: Global thread ID.
8,576
def macs_filtered_reads_plot(self): data = dict() req_cats = [, , , ] for s_name, d in self.macs_data.items(): if all([c in d for c in req_cats]): data[.format(s_name)] = dict() data[.format(s_name)] = dict() data[.format(s_name)][] = d[] - d[] data[.format(s_name)][] = d[] data[.format(s_name)][] = d[] - d[] data[.format(s_name)][] = d[] if len(data) == 0: return keys = OrderedDict() keys[] = { : , : } keys[] = { : , : } pconfig = { : , : , : , : , : False } self.add_section( plot = bargraph.plot(data, keys, pconfig) )
Plot of filtered reads for control and treatment samples
8,577
def validate_arguments(args): print print "Checking input...", semantic_tests = ["animals", "custom"] phonemic_tests = ["a", "p", "s", "f"] if args.similarity_file: print print "Custom similarity file was specified..." args.semantic = "custom" if args.threshold: try: args.threshold = float(args.threshold) except ValueError: raise VFClustException() if not (args.source_file_path.lower().endswith() or args.source_file_path.lower().endswith()): raise VFClustException( + args.source_file_path.lower()) if not os.path.isfile(args.source_file_path): raise VFClustException() if args.output_path == None: args.output_path = args.source_path if args.similarity_file is not None: if not os.path.isfile(args.similarity_file): raise VFClustException() if not args.threshold: raise VFClustException() try: args.threshold = float(args.threshold) except: raise VFClustException() args.similarity_file = os.path.abspath(args.similarity_file) print "OK!" print print "Parsed arguments:" print_table([(k, str(vars(args)[k])) for k in vars(args)]) return args
Makes sure arguments are valid, specified files exist, etc.
8,578
def read(self, n): while len(self.buf) < n: chunk = self.f.recv(4096) if not chunk: raise EndOfStreamError() self.buf += chunk res, self.buf = self.buf[:n], self.buf[n:] return res
Consume `n` characters from the stream.
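The same exact-read loop as a standalone function over a socket-like object; the 4096-byte receive size matches the code above, the rest is a sketch:
def read_exact(sock, n, buf=b''):
    # Receive until at least n bytes are buffered, then split off the result.
    while len(buf) < n:
        chunk = sock.recv(4096)
        if not chunk:
            raise EOFError('stream ended before %d bytes were available' % n)
        buf += chunk
    return buf[:n], buf[n:]   # (consumed bytes, remaining buffer)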
8,579
def _get_data_dtype(self): pkhrec = [ (, GSDTRecords.gp_pk_header), (, GSDTRecords.gp_pk_sh1) ] pk_head_dtype = np.dtype(pkhrec) def get_lrec(cols): lrec = [ ("gp_pk", pk_head_dtype), ("version", np.uint8), ("satid", np.uint16), ("time", (np.uint16, 5)), ("lineno", np.uint32), ("chan_id", np.uint8), ("acq_time", (np.uint16, 3)), ("line_validity", np.uint8), ("line_rquality", np.uint8), ("line_gquality", np.uint8), ("line_data", (np.uint8, cols)) ] return lrec visir_rec = get_lrec(int(self.mda[] * 1.25)) number_of_visir_channels = len( [s for s in self.mda[] if not s == ]) drec = [(, (visir_rec, number_of_visir_channels))] if self.mda[][]: hrv_rec = get_lrec(int(self.mda[] * 1.25)) drec.append((, (hrv_rec, 3))) return np.dtype(drec)
Get the dtype of the file based on the actual available channels
8,580
def clone(self) -> : "Mimic the behavior of torch.clone for `Image` objects." flow = FlowField(self.size, self.flow.flow.clone()) return self.__class__(flow, scale=False, y_first=False, labels=self.labels, pad_idx=self.pad_idx)
Mimic the behavior of torch.clone for `Image` objects.
8,581
def update_dashboard(self, id, **kwargs): kwargs[] = True if kwargs.get(): return self.update_dashboard_with_http_info(id, **kwargs) else: (data) = self.update_dashboard_with_http_info(id, **kwargs) return data
Update a specific dashboard # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.update_dashboard(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :param Dashboard body: Example Body: <pre>{ \"name\": \"Dashboard API example\", \"id\": \"api-example\", \"url\": \"api-example\", \"description\": \"Dashboard Description\", \"sections\": [ { \"name\": \"Section 1\", \"rows\": [ { \"charts\": [ { \"name\": \"Chart 1\", \"description\": \"description1\", \"sources\": [ { \"name\": \"Source1\", \"query\": \"ts()\" } ] } ] } ] } ] }</pre> :return: ResponseContainerDashboard If the method is called asynchronously, returns the request thread.
8,582
def summary(self, sortOn=None): titles = self if sortOn is None else self.sortTitles(sortOn) for title in titles: yield self[title].summary()
Summarize all the alignments for this title. @param sortOn: A C{str} attribute to sort titles on. One of 'length', 'maxScore', 'medianScore', 'readCount', or 'title'. @raise ValueError: If an unknown C{sortOn} value is given. @return: A generator that yields C{dict} instances as produced by C{TitleAlignments} (see class earlier in this file), sorted by C{sortOn}.
8,583
def store_result(self, message, result: Result, ttl: int) -> None: message_key = self.build_message_key(message) return self._store(message_key, result, ttl)
Store a result in the backend. Parameters: message(Message) result(object): Must be serializable. ttl(int): The maximum amount of time the result may be stored in the backend for.
8,584
def radiation_values(self, location, timestep=1): sp = Sunpath.from_location(location) altitudes = [] dates = self._get_datetimes(timestep) for t_date in dates: sun = sp.calculate_sun_from_date_time(t_date) altitudes.append(sun.altitude) dir_norm, diff_horiz = ashrae_clear_sky( altitudes, self._month, self._clearness) glob_horiz = [dhr + dnr * math.sin(math.radians(alt)) for alt, dnr, dhr in zip(altitudes, dir_norm, diff_horiz)] return dir_norm, diff_horiz, glob_horiz
Lists of direct normal, diffuse horizontal, and global horizontal radiation at each timestep.
8,585
def list_semod(): * helptext = __salt__[]().splitlines() semodule_version = for line in helptext: if line.strip().startswith(): semodule_version = if semodule_version == : mdata = __salt__[]().splitlines() ret = {} for line in mdata: if not line.strip(): continue comps = line.split() if len(comps) == 4: ret[comps[1]] = {: False, : None} else: ret[comps[1]] = {: True, : None} else: mdata = __salt__[]().splitlines() ret = {} for line in mdata: if not line.strip(): continue comps = line.split() if len(comps) == 3: ret[comps[0]] = {: False, : comps[1]} else: ret[comps[0]] = {: True, : comps[1]} return ret
Return a structure listing all of the selinux modules on the system and what state they are in CLI Example: .. code-block:: bash salt '*' selinux.list_semod .. versionadded:: 2016.3.0
8,586
def pp_hex(raw, reverse=True): if not reverse: return .join([.format(v) for v in bytearray(raw)]) return .join(reversed([.format(v) for v in bytearray(raw)]))
Return a pretty-printed (hex style) version of a binary string. Args: raw (bytes): any sequence of bytes reverse (bool): True if output should be in reverse order. Returns: Hex string corresponding to input byte sequence.
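The format specifier in the code above was stripped from the dump; '{:02x}' is the usual choice and is assumed here:
def pp_hex(raw, reverse=True):
    # Two lowercase hex digits per byte, optionally in reverse byte order.
    pairs = ['{:02x}'.format(b) for b in bytearray(raw)]
    return ''.join(reversed(pairs) if reverse else pairs)

print(pp_hex(b'\x01\x02\xff'))         # 'ff0201'
print(pp_hex(b'\x01\x02\xff', False))  # '0102ff'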
8,587
def default_branch(self, file): if isinstance(self.__default_branch__, str): return self.__default_branch__ elif self.__default_branch__ == GithubProxy.DEFAULT_BRANCH.NO: return self.master_upstream else: return file.sha[:8]
Decide the name of the default branch given the file and the configuration :param file: File with informations about it :return: Branch Name
8,588
def iterstraight(self, raw): rb = self.row_bytes a = array() recon = None for some in raw: a.extend(some) while len(a) >= rb + 1: filter_type = a[0] scanline = a[1:rb+1] del a[:rb+1] recon = self.undo_filter(filter_type, scanline, recon) yield recon if len(a) != 0: raise FormatError( ) assert len(a) == 0
Iterator that undoes the effect of filtering, and yields each row in serialised format (as a sequence of bytes). Assumes input is straightlaced. `raw` should be an iterable that yields the raw bytes in chunks of arbitrary size.
8,589
def fcoe_get_interface_output_fcoe_intf_list_fcoe_intf_last_counters_cleared(self, **kwargs): config = ET.Element("config") fcoe_get_interface = ET.Element("fcoe_get_interface") config = fcoe_get_interface output = ET.SubElement(fcoe_get_interface, "output") fcoe_intf_list = ET.SubElement(output, "fcoe-intf-list") fcoe_intf_fcoe_port_id_key = ET.SubElement(fcoe_intf_list, "fcoe-intf-fcoe-port-id") fcoe_intf_fcoe_port_id_key.text = kwargs.pop() fcoe_intf_last_counters_cleared = ET.SubElement(fcoe_intf_list, "fcoe-intf-last-counters-cleared") fcoe_intf_last_counters_cleared.text = kwargs.pop() callback = kwargs.pop(, self._callback) return callback(config)
Auto Generated Code
8,590
def chunk_count(self): c = 0 for r in self.iter_regions(): c += r.chunk_count() return c
Return a count of the chunks in this world folder.
8,591
def train_evaluate_model_from_config(config: Union[str, Path, dict], iterator: Union[DataLearningIterator, DataFittingIterator] = None, *, to_train: bool = True, evaluation_targets: Optional[Iterable[str]] = None, to_validate: Optional[bool] = None, download: bool = False, start_epoch_num: Optional[int] = None, recursive: bool = False) -> Dict[str, Dict[str, float]]: config = parse_config(config) if download: deep_download(config) if to_train and recursive: for subconfig in get_all_elems_from_json(config[], ): log.info(f) train_evaluate_model_from_config(subconfig, download=False, recursive=True) import_packages(config.get(, {}).get(, [])) if iterator is None: try: data = read_data_by_config(config) except ConfigError as e: to_train = False log.warning(f) else: iterator = get_iterator_from_config(config, data) if not in config: log.warning() train_config = config.get() if start_epoch_num is not None: train_config[] = start_epoch_num if not in train_config and ( in train_config or in train_config): log.warning( ) train_config[] = [] if train_config.pop(, True): train_config[].append() if train_config.pop(, True): train_config[].append() trainer_class = get_model(train_config.pop(, )) trainer = trainer_class(config[], **train_config) if to_train: trainer.train(iterator) res = {} if iterator is not None: if to_validate is not None: if evaluation_targets is None: log.warning( ) evaluation_targets = [] if to_validate: evaluation_targets.append() else: log.warn( ) res = trainer.evaluate(iterator, evaluation_targets, print_reports=True) trainer.get_chainer().destroy() res = {k: v[] for k, v in res.items()} return res
Make training and evaluation of the model described in corresponding configuration file.
8,592
def get_key_pair(self, alias_name): uri = self.URI + "/keypair/" + alias_name return self._client.get(uri)
Retrieves the public and private key pair associated with the specified alias name. Args: alias_name: Key pair associated with the RabbitMQ Returns: dict: RabbitMQ certificate
8,593
def delete_by_ids(self, ids): try: self.filter(id__in=ids).delete() return True except self.model.DoesNotExist: return False
Delete objects by ids. :param ids: list of objects ids to delete. :return: True if objects were deleted. Otherwise, return False if no objects were found or the delete was not successful.
8,594
def check_name(name): if type(name) not in [str, unicode]: return False if not is_name_valid(name): return False return True
Verify the name is well-formed >>> check_name(123) False >>> check_name('') False >>> check_name('abc') False >>> check_name('abc.def') True >>> check_name('abc.def.ghi') False >>> check_name('abc.d-ef') True >>> check_name('abc.d+ef') False >>> check_name('.abc') False >>> check_name('abc.') False >>> check_name('abcdabcdabcdabcdabcdabcdabcdabcdabcdabcdabcd.abcd') False >>> check_name('abcdabcdabcdabcdabcdabcdabcdabcdabc.d') True
8,595
def modify_snapshot(snapshot_id=None, description=None, userdata=None, cleanup=None, config="root"): if not snapshot_id: raise CommandExecutionError() snapshot = get_snapshot(config=config, number=snapshot_id) try: updated_opts = { : description if description is not None else snapshot[], : cleanup if cleanup is not None else snapshot[], : userdata if userdata is not None else snapshot[], } snapper.SetSnapshot(config, snapshot_id, updated_opts[], updated_opts[], updated_opts[]) return get_snapshot(config=config, number=snapshot_id) except dbus.DBusException as exc: raise CommandExecutionError(_dbus_exception_to_reason(exc, locals()))
Modify attributes of an existing snapshot. config Configuration name. (Default: root) snapshot_id ID of the snapshot to be modified. cleanup Change the cleanup method of the snapshot. (str) description Change the description of the snapshot. (str) userdata Change the userdata dictionary of the snapshot. (dict) CLI example: .. code-block:: bash salt '*' snapper.modify_snapshot 54 description="my snapshot description" salt '*' snapper.modify_snapshot 54 description="my snapshot description" salt '*' snapper.modify_snapshot 54 userdata='{"foo": "bar"}' salt '*' snapper.modify_snapshot snapshot_id=54 cleanup="number"
8,596
def router_connections(self): clients = [] for server in self._routers: if Servers().is_alive(server): client = self.create_connection(Servers().hostname(server)) clients.append(client) return clients
Return a list of MongoClients, one for each mongos.
8,597
def get_column_info(connection, table_name): cursor = connection.cursor() cursor.execute("SELECT sql FROM sqlite_master WHERE type == AND name == ?", (table_name,)) statement, = cursor.fetchone() coldefs = re.match(_sql_create_table_pattern, statement).groupdict()["coldefs"] return [(coldef.groupdict()["name"], coldef.groupdict()["type"]) for coldef in re.finditer(_sql_coldef_pattern, coldefs) if coldef.groupdict()["name"].upper() not in ("PRIMARY", "UNIQUE", "CHECK")]
Return an in order list of (name, type) tuples describing the columns in the given table.
8,598
def get_ips(self, interface=None, family=None, scope=None, timeout=0): kwargs = {} if interface: kwargs[] = interface if family: kwargs[] = family if scope: kwargs[] = scope ips = None timeout = int(os.environ.get(, timeout)) while not ips: ips = _lxc.Container.get_ips(self, **kwargs) if timeout == 0: break timeout -= 1 time.sleep(1) return ips
Get a tuple of IPs for the container.
8,599
def _disjoint_qubits(op1: ops.Operation, op2: ops.Operation) -> bool: return not set(op1.qubits) & set(op2.qubits)
Returns true only if the operations have qubits in common.