Columns: docstring (string, 52 to 499 chars), function (string, 67 to 35.2k chars), __index_level_0__ (int64, 52.6k to 1.16M).
Input a graph and output a DAG. The heuristic is to reverse the edge with the lowest score of the cycle if possible, else remove it. Args: g (networkx.DiGraph): Graph to modify to output a DAG Returns: networkx.DiGraph: DAG made out of the input graph.
def dagify_min_edge(g):
    while not nx.is_directed_acyclic_graph(g):
        cycle = next(nx.simple_cycles(g))
        scores = []
        edges = []
        # Score every edge along the cycle (consecutive node pairs,
        # wrapping around from the last node back to the first).
        for i, j in zip(cycle, cycle[1:] + cycle[:1]):
            edges.append((i, j))
            scores.append(g[i][j]['weight'])
        i, j = edges[scores.index(min(scores))]
        gc = deepcopy(g)
        gc.remove_edge(i, j)
        gc.add_edge(j, i)
        # Reverse the lowest-scoring edge only if doing so reduces the
        # number of cycles; otherwise just remove it.
        if len(list(nx.simple_cycles(gc))) < len(list(nx.simple_cycles(g))):
            g.add_edge(j, i, weight=min(scores))
        g.remove_edge(i, j)
    return g
226,646
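A minimal usage sketch (assuming networkx and dagify_min_edge are importable; the graph and its weights are made up):

.. code-block:: python

    import networkx as nx

    g = nx.DiGraph()
    g.add_edge('a', 'b', weight=0.9)
    g.add_edge('b', 'c', weight=0.8)
    g.add_edge('c', 'a', weight=0.1)  # weakest edge in the cycle

    dag = dagify_min_edge(g)
    assert nx.is_directed_acyclic_graph(dag)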
Apply causal discovery on observational data using CAM. Args: data (pandas.DataFrame): DataFrame containing the data Returns: networkx.DiGraph: Solution given by the CAM algorithm.
def create_graph_from_data(self, data, **kwargs):
    # Building setup w/ arguments.
    self.arguments['{SCORE}'] = self.scores[self.score]
    self.arguments['{CUTOFF}'] = str(self.cutoff)
    self.arguments['{VARSEL}'] = str(self.variablesel).upper()
    self.arguments['{SELMETHOD}'] = self.var_selection[self.selmethod]
    self.arguments['{PRUNING}'] = str(self.pruning).upper()
    self.arguments['{PRUNMETHOD}'] = self.var_selection[self.prunmethod]
    self.arguments['{NJOBS}'] = str(self.nb_jobs)
    self.arguments['{VERBOSE}'] = str(self.verbose).upper()
    results = self._run_cam(data, verbose=self.verbose)
    return nx.relabel_nodes(nx.DiGraph(results),
                            {idx: i for idx, i in enumerate(data.columns)})
226,694
For one variable, predict its neighbouring nodes. Args: df_features (pandas.DataFrame): data of the candidate neighbour variables. df_target (pandas.Series): data of the target variable. idx (int): (optional) for printing purposes kwargs (dict): additional options for algorithms Returns: list: scores of each feature relative to the target
def predict_features(self, df_features, df_target, idx=0, **kwargs):
    estimator = SVR(kernel='linear')
    selector = RFECV(estimator, step=1)
    selector = selector.fit(df_features.values, df_target.values[:, 0])
    return selector.grid_scores_
226,695
For one variable, predict its neighbouring nodes. Args: df_features (pandas.DataFrame): data of the candidate neighbour variables. df_target (pandas.Series): data of the target variable. idx (int): (optional) for printing purposes kwargs (dict): additional options for algorithms C (float): Penalty parameter of the error term Returns: list: scores of each feature relative to the target
def predict_features(self, df_features, df_target, idx=0, C=.1, **kwargs):
    lsvc = LinearSVR(C=C).fit(df_features.values, df_target.values)
    return np.abs(lsvc.coef_)
226,696
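A hedged sketch of how such a `predict_features` scorer might be called; the toy data and the `scorer` instance (an object of one of the classes above) are hypothetical:

.. code-block:: python

    import numpy as np
    import pandas as pd

    rng = np.random.RandomState(0)
    df_features = pd.DataFrame(rng.randn(200, 3), columns=['x1', 'x2', 'x3'])
    df_target = pd.DataFrame({'y': 2.0 * df_features['x1'] + 0.1 * rng.randn(200)})

    scores = scorer.predict_features(df_features, df_target)  # one score per feature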
For one variable, predict its neighbouring nodes. Args: df_features (pandas.DataFrame): data of the candidate neighbour variables. df_target (pandas.Series): data of the target variable. idx (int): (optional) for printing purposes kwargs (dict): additional options for algorithms Returns: list: scores of each feature relative to the target
def predict_features(self, df_features, df_target, idx=0, **kwargs):
    X = df_features.values
    y = df_target.values
    regressor = DecisionTreeRegressor()
    regressor.fit(X, y)
    return regressor.feature_importances_
226,697
For one variable, predict its neighbouring nodes. Args: df_features (pandas.DataFrame): data of the candidate neighbour variables. df_target (pandas.Series): data of the target variable. idx (int): (optional) for printing purposes kwargs (dict): additional options for algorithms Returns: list: scores of each feature relative to the target
def predict_features(self, df_features, df_target, idx=0, **kwargs):
    X = df_features.values
    y = df_target.values
    clf = ard(compute_score=True)
    clf.fit(X, y.ravel())
    return np.abs(clf.coef_)
226,698
For one variable, predict its neighbouring nodes. Args: df_features (pandas.DataFrame): data of the candidate neighbour variables. df_target (pandas.Series): data of the target variable. idx (int): (optional) for printing purposes kwargs (dict): additional options for algorithms Returns: list: scores of each feature relative to the target
def predict_features(self, df_features, df_target, idx=0, **kwargs):
    X = df_features.values
    y = df_target.values[:, 0]
    rr = ReliefF()
    rr.fit(X, y)
    return rr.feature_importances_
226,699
Fit the NCC model. Args: x_tr (pd.DataFrame): CEPC format dataframe containing the pairs y_tr (pd.DataFrame or np.ndarray): labels associated with the pairs epochs (int): number of train epochs batchsize (int): size of the batches used during training learning_rate (float): learning rate of Adam verbose (bool): verbosity (defaults to ``cdt.SETTINGS.verbose``) device (str): cuda or cpu device (defaults to ``cdt.SETTINGS.default_device``)
def fit(self, x_tr, y_tr, epochs=50, batchsize=32,
        learning_rate=0.01, verbose=None, device=None):
    if batchsize > len(x_tr):
        batchsize = len(x_tr)
    verbose, device = SETTINGS.get_default(('verbose', verbose), ('device', device))
    self.model = NCC_model()
    opt = th.optim.Adam(self.model.parameters(), lr=learning_rate)
    criterion = th.nn.BCEWithLogitsLoss()
    y = y_tr.values if isinstance(y_tr, pd.DataFrame) else y_tr
    y = th.Tensor(y) / 2 + .5  # map {-1, 1} labels onto {0, 1}
    self.model = self.model.to(device)
    y = y.to(device)
    # Stack each (A, B) pair into a tensor of shape (1, 2, n_samples).
    dataset = []
    for i, (idx, row) in enumerate(x_tr.iterrows()):
        a = row['A'].reshape((len(row['A']), 1))
        b = row['B'].reshape((len(row['B']), 1))
        m = np.hstack((a, b))
        m = m.astype('float32')
        m = th.from_numpy(m).t().unsqueeze(0)
        dataset.append(m)
    dataset = [m.to(device) for m in dataset]
    acc = [0]
    da = th.utils.data.DataLoader(Dataset(dataset, y), batch_size=batchsize, shuffle=True)
    data_per_epoch = len(dataset) // batchsize
    with trange(epochs, desc="Epochs", disable=not verbose) as te:
        for epoch in te:
            with trange(data_per_epoch, desc="Batches of {}".format(batchsize),
                        disable=not (verbose and batchsize == len(dataset))) as t:
                output = []
                labels = []
                for (batch, label), i in zip(da, t):
                    opt.zero_grad()
                    out = th.stack([self.model(m) for m in batch], 0).squeeze(2)
                    loss = criterion(out, label)
                    loss.backward()
                    t.set_postfix(loss=loss.item())
                    opt.step()
                    output.append(out)
                    labels.append(label)
                acc = th.where(th.cat(output, 0) > .5,
                               th.ones(len(output)),
                               th.zeros(len(output))) - th.cat(labels, 0)
                te.set_postfix(Acc=1 - acc.abs().mean().item())
226,703
Infer causal directions using the trained NCC pairwise model. Args: a (numpy.ndarray): Variable 1 b (numpy.ndarray): Variable 2 device (str): Device to run the algorithm on (defaults to ``cdt.SETTINGS.default_device``) Returns: float: Causation score (Value : 1 if a->b and -1 if b->a)
def predict_proba(self, a, b, device=None):
    device = SETTINGS.get_default(device=device)
    if self.model is None:
        raise ValueError('Model has to be trained before doing any predictions')
    if len(np.array(a).shape) == 1:
        a = np.array(a).reshape((-1, 1))
        b = np.array(b).reshape((-1, 1))
    m = np.hstack((a, b))
    m = scale(m)
    m = m.astype('float32')
    m = th.from_numpy(m).t().unsqueeze(0)
    if th.cuda.is_available():
        m = m.cuda()
    return (self.model(m).data.cpu().numpy() - .5) * 2
226,704
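A usage sketch for the two NCC methods above; the class name ``NCC`` and the synthetic pair are assumptions:

.. code-block:: python

    import numpy as np
    import pandas as pd

    rng = np.random.RandomState(1)
    a = rng.uniform(size=500)
    b = a ** 2 + 0.05 * rng.normal(size=500)

    x_tr = pd.DataFrame({'A': [a], 'B': [b]})  # one cause-effect pair per row
    y_tr = np.array([1])                       # label 1 means A -> B

    model = NCC()
    model.fit(x_tr, y_tr, epochs=5)
    print(model.predict_proba(a, b))  # positive suggests a -> b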
Sets the action to take when conflicts arise when attempting to insert/create a new row. Arguments: fields: The fields the conflicts can occur in. action: The action to take when the conflict occurs. index_predicate: The index predicate to satisfy an arbiter partial index (i.e. what partial index to use for checking conflicts)
def on_conflict(self, fields: List[Union[str, Tuple[str]]],
                action, index_predicate: str = None):
    self.conflict_target = fields
    self.conflict_action = action
    self.index_predicate = index_predicate
    return self
226,776
Creates a new record in the database. This allows specifying custom conflict behavior using .on_conflict(). If no special behavior was specified, this uses the normal Django create(..) Arguments: fields: The fields of the row to create. Returns: The primary key of the record that was created.
def insert(self, **fields):
    if self.conflict_target or self.conflict_action:
        compiler = self._build_insert_compiler([fields])
        rows = compiler.execute_sql(return_id=True)
        pk_field_name = self.model._meta.pk.name
        return rows[0][pk_field_name]
    # no special action required, use the standard Django create(..)
    return super().create(**fields).pk
226,778
Creates a new record in the database and then gets the entire row. This allows specifying custom conflict behavior using .on_conflict(). If no special behavior was specified, this uses the normal Django create(..) Arguments: fields: The fields of the row to create. Returns: The model instance representing the row that was created.
def insert_and_get(self, **fields):
    if not self.conflict_target and not self.conflict_action:
        # no special action required, use the standard Django create(..)
        return super().create(**fields)
    compiler = self._build_insert_compiler([fields])
    rows = compiler.execute_sql(return_id=False)
    columns = rows[0]
    # get a list of columns that are officially part of the model and
    # preserve the fact that the attribute name might differ from the
    # database column name
    model_columns = {}
    for field in self.model._meta.local_concrete_fields:
        model_columns[field.column] = field.attname
    # strip out any columns/fields returned by the db that
    # are not present in the model
    model_init_fields = {}
    for column_name, column_value in columns.items():
        try:
            model_init_fields[model_columns[column_name]] = column_value
        except KeyError:
            pass
    return self.model(**model_init_fields)
226,779
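A sketch of the conflict-handling insert API defined above; ``MyModel`` is a hypothetical model and ``ConflictAction`` is assumed importable from the same package:

.. code-block:: python

    pk = (
        MyModel.objects
        .on_conflict(['name'], ConflictAction.UPDATE)
        .insert(name='swen', job='developer')
    )

    obj = (
        MyModel.objects
        .on_conflict(['name'], ConflictAction.NOTHING)
        .insert_and_get(name='swen', job='developer')
    )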
Creates a set of new records or updates the existing ones with the specified data. Arguments: conflict_target: Fields to pass into the ON CONFLICT clause. rows: Rows to upsert. index_predicate: The index predicate to satisfy an arbiter partial index (i.e. what partial index to use for checking conflicts)
def bulk_upsert(self, conflict_target: List, rows: List[Dict],
                index_predicate: str = None):
    if not rows:
        return
    self.on_conflict(conflict_target, ConflictAction.UPDATE, index_predicate)
    return self.bulk_insert(rows)
226,782
Builds the SQL compiler for an insert query. Arguments: rows: A list of dictionaries, where each entry describes a record to insert. Returns: The SQL compiler for the insert.
def _build_insert_compiler(self, rows: List[Dict]):
    # create model objects; we also have to detect cases such as:
    #   [dict(first_name='swen'), dict(first_name='swen', last_name='kooij')]
    # we need to be certain that each row specifies the exact same
    # amount of fields/columns
    objs = []
    field_count = len(rows[0])
    for index, row in enumerate(rows):
        if field_count != len(row):
            raise SuspiciousOperation((
                'In bulk upserts, you cannot have rows with different field '
                'configurations. Row {0} has a different field config than '
                'the first row.'
            ).format(index))
        objs.append(self.model(**row))
    # indicate this query is going to perform a write
    self._for_write = True
    # get the fields to be used during update/insert
    insert_fields, update_fields = self._get_upsert_fields(rows[0])
    # build a normal insert query
    query = PostgresInsertQuery(self.model)
    query.conflict_action = self.conflict_action
    query.conflict_target = self.conflict_target
    query.index_predicate = self.index_predicate
    query.values(objs, insert_fields, update_fields)
    # use the postgresql insert query compiler to transform the insert
    # into a special postgresql insert
    connection = django.db.connections[self.db]
    compiler = PostgresInsertCompiler(query, connection, self.db)
    return compiler
226,783
Sets the action to take when conflicts arise when attempting to insert/create a new row. Arguments: fields: The fields the conflicts can occur in. action: The action to take when the conflict occurs. index_predicate: The index predicate to satisfy an arbiter partial index.
def on_conflict(self, fields: List[Union[str, Tuple[str]]],
                action, index_predicate: str = None):
    return self.get_queryset().on_conflict(fields, action, index_predicate)
226,787
Creates a new record or updates the existing one with the specified data. Arguments: conflict_target: Fields to pass into the ON CONFLICT clause. fields: Fields to insert/update. index_predicate: The index predicate to satisfy an arbiter partial index. Returns: The primary key of the row that was created/updated.
def upsert(self, conflict_target: List, fields: Dict,
           index_predicate: str = None) -> int:
    return self.get_queryset().upsert(conflict_target, fields, index_predicate)
226,788
Creates a new record or updates the existing one with the specified data and then gets the row. Arguments: conflict_target: Fields to pass into the ON CONFLICT clause. fields: Fields to insert/update. index_predicate: The index predicate to satisfy an arbiter partial index. Returns: The model instance representing the row that was created/updated.
def upsert_and_get(self, conflict_target: List, fields: Dict,
                   index_predicate: str = None):
    return self.get_queryset().upsert_and_get(conflict_target, fields, index_predicate)
226,789
Creates a set of new records or updates the existing ones with the specified data. Arguments: conflict_target: Fields to pass into the ON CONFLICT clause. index_predicate: The index predicate to satisfy an arbiter partial index. rows: Rows to upsert.
def bulk_upsert(self, conflict_target: List, rows: List[Dict],
                index_predicate: str = None):
    return self.get_queryset().bulk_upsert(conflict_target, rows, index_predicate)
226,790
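The manager methods above compose into one-liners; a hedged example (the model and its fields are hypothetical):

.. code-block:: python

    pk = MyModel.objects.upsert(
        conflict_target=['name'],
        fields=dict(name='henk', score=1),
    )

    MyModel.objects.bulk_upsert(
        conflict_target=['name'],
        rows=[dict(name='henk', score=2), dict(name='swen', score=3)],
    )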
Selects whichever field is not None, in the specified order. Arguments: fields: The fields to attempt to get a value from, in order. default: The value to return in case all values are None. Returns: A Case-When expression that tries each field and returns the specified default value when all of them are None.
def IsNotNone(*fields, default=None):
    when_clauses = [
        expressions.When(
            ~expressions.Q(**{field: None}),
            then=expressions.F(field)
        )
        for field in reversed(fields)
    ]
    return expressions.Case(
        *when_clauses,
        default=expressions.Value(default),
        output_field=CharField()
    )
226,793
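A usage sketch for IsNotNone; the model and field names are made up:

.. code-block:: python

    MyModel.objects.annotate(
        display_name=IsNotNone('nickname', 'first_name', default='anonymous'),
    )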
Initializes a new instance of :see:HStoreColumn. Arguments: alias: The table name. target: The field instance. hstore_key: The name of the hstore key to include in the expression.
def __init__(self, alias, target, hstore_key):
    super().__init__(alias, target, output_field=target)
    self.alias, self.target, self.hstore_key = alias, target, hstore_key
226,796
Initializes a new instance of :see:HStoreRef. Arguments: name: The name of the column/field to resolve. key: The name of the HStore key to select.
def __init__(self, name: str, key: str):
    super().__init__(name)
    self.key = key
226,800
Renames the aliases for the specified annotations: .annotate(myfield=F('somestuf__myfield')) .rename_annotations(myfield='field') Arguments: annotations: The annotations to rename. Mapping the old name to the new name.
def rename_annotations(self, annotations) -> None:
    for old_name, new_name in annotations.items():
        annotation = self.annotations.get(old_name)
        if not annotation:
            raise SuspiciousOperation((
                'Cannot rename annotation "{old_name}" to "{new_name}", because there'
                ' is no annotation named "{old_name}".'
            ).format(old_name=old_name, new_name=new_name))
        self._annotations = OrderedDict(
            [(new_name, v) if k == old_name else (k, v)
             for k, v in self._annotations.items()])
        if django.VERSION < (2, 0):
            self.set_annotation_mask(
                (new_name if v == old_name else v
                 for v in (self.annotation_select_mask or [])))
226,804
Gets the name for a CONSTRAINT that applies to a single hstore key. Arguments: table: The name of the table the field is a part of. field: The hstore field to create a UNIQUE INDEX for. key: The name of the hstore key to create the name for. Returns: The name for the UNIQUE index.
def _required_constraint_name(table: str, field, key):
    return '{table}_{field}_required_{postfix}'.format(
        table=table,
        field=field.column,
        postfix=key
    )
226,819
Rewrites a formed SQL INSERT query to include the ON CONFLICT clause. Arguments: sql: The SQL INSERT query to rewrite. params: The parameters passed to the query. return_id: If True, the `RETURNING` clause of the resulting query returns only the primary key; otherwise it returns all columns. Returns: A tuple of the rewritten SQL query and new params.
def _rewrite_insert(self, sql, params, return_id=False):
    returning = self.qn(self.query.model._meta.pk.attname) if return_id else '*'
    if self.query.conflict_action.value == 'UPDATE':
        return self._rewrite_insert_update(sql, params, returning)
    elif self.query.conflict_action.value == 'NOTHING':
        return self._rewrite_insert_nothing(sql, params, returning)
    raise SuspiciousOperation((
        '%s is not a valid conflict action, specify '
        'ConflictAction.UPDATE or ConflictAction.NOTHING.'
    ) % str(self.query.conflict_action))
226,842
Gets the field on a model with the specified name. Arguments: name: The name of the field to look for. This can be either the actual field name or the name of the column; both will work :) Returns: The field with the specified name or None if no such field exists.
def _get_model_field(self, name: str):
    field_name = self._normalize_field_name(name)
    # 'pk' has special meaning and always refers to the primary
    # key of a model; we have to respect this de-facto standard behaviour
    if field_name == 'pk' and self.query.model._meta.pk:
        return self.query.model._meta.pk
    for field in self.query.model._meta.local_concrete_fields:
        if field.name == field_name or field.column == field_name:
            return field
    return None
226,846
Formats a field's name for usage in SQL. Arguments: field_name: The field name to format. Returns: The specified field name formatted for usage in SQL.
def _format_field_name(self, field_name) -> str:
    field = self._get_model_field(field_name)
    return self.qn(field.column)
226,847
Formats a field's value for usage in SQL. Arguments: field_name: The name of the field to format the value of. Returns: The field's value formatted for usage in SQL.
def _format_field_value(self, field_name) -> str:
    field_name = self._normalize_field_name(field_name)
    field = self._get_model_field(field_name)
    return SQLInsertCompiler.prepare_value(
        self,
        field,
        # Note: this deliberately doesn't use `pre_save_val` as we don't
        # want things like auto_now on DateTimeField (etc.) to change the
        # value. We rely on pre_save having already been done by the
        # underlying compiler so that things like FileField have already
        # had the opportunity to save out their data.
        getattr(self.query.objs[0], field.attname)
    )
226,848
Normalizes a field name into a string by extracting the field name if it was specified as a reference to a HStore key (as a tuple). Arguments: field_name: The field name to normalize. Returns: The normalized field name.
def _normalize_field_name(self, field_name) -> str:
    if isinstance(field_name, tuple):
        field_name, _ = field_name
    return field_name
226,849
Iterates over the keys marked as "unique" in the specified field. Arguments: field: The field whose keys to iterate over.
def _iterate_uniqueness_keys(self, field):
    uniqueness = getattr(field, 'uniqueness', None)
    if not uniqueness:
        return
    for keys in uniqueness:
        composed_keys = self._compose_keys(keys)
        yield composed_keys
226,858
Adds an extra condition to this join. Arguments: field: The field that the condition will apply to. value: The value to compare.
def add_condition(self, field, value: Any) -> None:
    self.extra_conditions.append((field, value))
226,860
Creates a new :see:ConditionalJoin from the specified :see:Join object. Arguments: join: The :see:Join object to create the :see:ConditionalJoin object from. Returns: A :see:ConditionalJoin object created from the :see:Join object.
def from_join(cls, join: Join) -> 'ConditionalJoin':
    return cls(
        join.table_name,
        join.parent_alias,
        join.table_alias,
        join.join_type,
        join.join_field,
        join.nullable
    )
226,862
Get dashboard meta info for page `page` with page size `size`. Args: page: page number. size: page size. Returns: list of dicts containing each dash_id and its meta info. May be an empty list [] when page * size exceeds the total number of dashes in the db; that's reasonable.
def get(self, page=0, size=10):
    dash_list = r_db.zrevrange(config.DASH_ID_KEY, 0, -1, True)
    id_list = dash_list[page * size : page * size + size]
    dash_meta = []
    data = []
    if id_list:
        dash_meta = r_db.hmget(config.DASH_META_KEY, [i[0] for i in id_list])
        data = [json.loads(i) for i in dash_meta]
    return build_response(dict(data=data, code=200))
228,717
Just return the dashboard id in the rendering html. JS will do other work [ajax and rendering] according to the dash_id. Args: dash_id: dashboard id. Returns: rendered html.
def get(self, dash_id):
    return make_response(render_template('dashboard.html', dash_id=dash_id,
                                         api_root=config.app_host))
228,720
Read dashboard content. Args: dash_id: dashboard id. Returns: A dict containing the content of that dashboard, not include the meta info.
def get(self, dash_id):
    data = json.loads(r_db.hmget(config.DASH_CONTENT_KEY, dash_id)[0])
    return build_response(dict(data=data, code=200))
228,721
Update a dash meta and content, return updated dash content. Args: dash_id: dashboard id. Returns: A dict containing the updated content of that dashboard, not include the meta info.
def put(self, dash_id=0):
    data = request.get_json()
    updated = self._update_dash(dash_id, data)
    return build_response(dict(data=updated, code=200))
228,722
Delete a dash meta and content, returning the removed dash info. Actually, the data is just moved to a specified place in the database. Args: dash_id: dashboard id. Returns: A dict containing the removed dashboard's info.
def delete(self, dash_id):
    removed_info = dict(
        time_modified=r_db.zscore(config.DASH_ID_KEY, dash_id),
        meta=r_db.hget(config.DASH_META_KEY, dash_id),
        content=r_db.hget(config.DASH_CONTENT_KEY, dash_id))
    r_db.zrem(config.DASH_ID_KEY, dash_id)
    r_db.hdel(config.DASH_META_KEY, dash_id)
    r_db.hdel(config.DASH_CONTENT_KEY, dash_id)
    return {'removed_info': removed_info}
228,723
Fibonacci example function Args: n (int): integer Returns: int: n-th Fibonacci number
def fib(n):
    assert n > 0
    a, b = 1, 1
    for i in range(n - 1):
        a, b = b, a + b
    return a
228,780
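A quick sanity check of fib, using plain assertions and no extra dependencies:

.. code-block:: python

    assert [fib(n) for n in range(1, 8)] == [1, 1, 2, 3, 5, 8, 13]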
Parse command line parameters Args: args ([str]): command line parameters as list of strings Returns: :obj:`argparse.Namespace`: command line parameters namespace
def parse_args(args):
    parser = argparse.ArgumentParser(
        description="Just a Fibonacci demonstration")
    parser.add_argument(
        '--version',
        action='version',
        version='nlpia {ver}'.format(ver=__version__))
    parser.add_argument(
        dest="n",
        help="n-th Fibonacci number",
        type=int,
        metavar="INT")
    parser.add_argument(
        '-v',
        '--verbose',
        dest="loglevel",
        help="set loglevel to INFO",
        action='store_const',
        const=logging.INFO)
    parser.add_argument(
        '-vv',
        '--very-verbose',
        dest="loglevel",
        help="set loglevel to DEBUG",
        action='store_const',
        const=logging.DEBUG)
    return parser.parse_args(args)
228,781
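Parsing a sample argv list (assumes the module-level __version__ used above is defined):

.. code-block:: python

    args = parse_args(['-v', '42'])
    assert args.n == 42
    assert args.loglevel == logging.INFO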
Setup basic logging Args: loglevel (int): minimum loglevel for emitting messages
def setup_logging(loglevel):
    logformat = "[%(asctime)s] %(levelname)s:%(name)s:%(message)s"
    logging.basicConfig(level=loglevel, stream=sys.stdout,
                        format=logformat, datefmt="%Y-%m-%d %H:%M:%S")
228,782
Main entry point allowing external calls Args: args ([str]): command line parameter list
def main(args):
    args = parse_args(args)
    setup_logging(args.loglevel)
    _logger.debug("Starting crazy calculations...")
    print("The {}-th Fibonacci number is {}".format(args.n, fib(args.n)))
    _logger.info("Script ends here")
228,783
Convert a dataset in a local file (usually a CSV) into a Pandas DataFrame TODO: should be called read_named_dataset Args: `name` is assumed not to have an extension (like ".csv"); alternative extensions are tried automatically.
def read_named_csv(name, data_path=DATA_PATH, nrows=None, verbose=True):
    # First, treat `name` as a literal path and try each reader in turn.
    if os.path.isfile(name):
        try:
            return read_json(name)
        except (IOError, UnicodeDecodeError, json.JSONDecodeError):
            pass
        try:
            return read_csv(name, nrows=nrows)
        except (IOError, pd.errors.ParserError):
            pass
        try:
            return read_txt(name, nrows=nrows)
        except (IOError, UnicodeDecodeError):
            pass
    data_path = expand_filepath(data_path)
    if os.path.isfile(os.path.join(data_path, name)):
        return read_csv(os.path.join(data_path, name), nrows=nrows)
    # Then try to resolve `name` as a registered dataset short name.
    if name in DATASET_NAME2FILENAME:
        name = DATASET_NAME2FILENAME[name]
        if name.lower().endswith('.txt') or name.lower().endswith('.txt.gz'):
            return read_text(os.path.join(data_path, name), nrows=nrows)
        else:
            return read_csv(os.path.join(data_path, name), nrows=nrows)
    # Finally, try alternative extensions under data_path.
    try:
        return read_csv(os.path.join(data_path, name + '.csv.gz'), nrows=nrows)
    except IOError:
        pass
    try:
        return read_csv(os.path.join(data_path, name + '.csv'), nrows=nrows)
    except IOError:
        pass
    try:
        return read_json(os.path.join(data_path, name + '.json'))
    except IOError:
        pass
    try:
        return read_txt(os.path.join(data_path, name + '.txt'), verbose=verbose)
    except IOError:
        pass
    # FIXME: mapping from short name to uncompressed filename
    # BIGDATA files are usually not loadable into dataframes
    try:
        return KeyedVectors.load_word2vec_format(
            os.path.join(BIGDATA_PATH, name + '.bin.gz'), binary=True)
    except (IOError, ValueError):
        pass
    try:
        return read_txt(os.path.join(BIGDATA_PATH, name + '.txt'), verbose=verbose)
    except IOError:
        pass
228,878
One-hot encode the tokens. Args: dataset: list of lists of tokens. char_indices: dictionary of {key=character, value=index to use in the encoding vector}. maxlen (int): length of each sample. Return: np array of shape (samples, tokens, encoding length)
def onehot_encode(dataset, char_indices, maxlen):
    X = np.zeros((len(dataset), maxlen, len(char_indices.keys())))
    for i, sentence in enumerate(dataset):
        for t, char in enumerate(sentence):
            X[i, t, char_indices[char]] = 1
    return X
228,913
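A tiny worked example over a 3-character alphabet:

.. code-block:: python

    char_indices = {'a': 0, 'b': 1, 'c': 2}
    X = onehot_encode([['a', 'b'], ['c']], char_indices, maxlen=2)
    assert X.shape == (2, 2, 3)   # (samples, tokens, encoding length)
    assert X[0, 1, 1] == 1        # 'b' at position 1 of sample 0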
Use bitly or a similar minifier to shrink all URLs in text files within a folder structure. Used for the NLPIA manuscript directory for Manning Publishing. bitly API: https://dev.bitly.com/links.html Args: filepath (str): Directory or file path ext (str): File name extension to filter text files by. default='asc' output_ext (str): Extension to append to filenames of altered files. default='' (in-place replacement of URLs) FIXME: NotImplementedError! Untested!
def minify_urls(filepath, ext='asc', url_regex=None, output_ext='.urls_minified',
                access_token=None):
    access_token = access_token or secrets.bitly.access_token
    output_ext = output_ext or ''
    url_regex = regex.compile(url_regex) if isinstance(url_regex, str) else url_regex
    filemetas = []
    for filemeta in find_files(filepath, ext=ext):
        filemetas += [filemeta]
        altered_text = ''
        with open(filemeta['path'], 'rt') as fin:
            text = fin.read()
        end = 0
        for match in url_regex.finditer(text):
            url = match.group()
            start = match.start()
            # append the text between the previous URL and this one
            altered_text += text[end:start]
            resp = requests.get(
                'https://api-ssl.bitly.com/v3/shorten?access_token={}&longUrl={}'.format(
                    access_token, url),
                allow_redirects=True, timeout=5)
            js = resp.json()
            short_url = js['shortUrl']
            altered_text += short_url
            end = start + len(url)
        altered_text += text[end:]
        with open(filemeta['path'] + (output_ext or ''), 'wt') as fout:
            fout.write(altered_text)
    return altered_text
228,920
Count the words in a corpus and return a TfidfVectorizer() as well as all the TFIDF vectors for the corpus Args: docs (iterable of strs): a sequence of documents (strings) Returns: (TfidfVectorizer, tfidf_vectors)
def tfidf_corpus(docs=CORPUS):
    vectorizer = TfidfVectorizer()
    vectorizer = vectorizer.fit(docs)
    return vectorizer, vectorizer.transform(docs)
228,937
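A usage sketch with a throwaway corpus:

.. code-block:: python

    docs = ['the cat sat', 'the dog sat', 'the cat ran']
    vectorizer, tfidf = tfidf_corpus(docs)
    print(tfidf.shape)                     # (3, number of distinct terms)
    print(sorted(vectorizer.vocabulary_))  # the learned vocabulary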
Setup basic logging Args: loglevel (int): minimum loglevel for emitting messages
def stdout_logging(loglevel=logging.INFO):
    logformat = "[%(asctime)s] %(levelname)s:%(name)s:%(lineno)d: %(message)s"
    # logging.config.dictConfig() takes a dict schema; these keyword
    # arguments belong to logging.basicConfig()
    logging.basicConfig(level=loglevel, stream=sys.stdout,
                        format=logformat, datefmt="%Y-%m-%d %H:%M:%S")
228,956
Convert UTF8 characters in a CSV file or dataframe into ASCII Args: df (DataFrame or str): DataFrame or path or url to CSV
def clean_df(df, header=None, **read_csv_kwargs):
    df = read_csv(df, header=header, **read_csv_kwargs)
    df = df.fillna(' ')
    for col in df.columns:
        df[col] = df[col].apply(unicode2ascii)
    return df
228,980
Tap element identified by ``locator``. Args: - ``x_offset`` - (optional) x coordinate to tap, relative to the top left corner of the element. - ``y_offset`` - (optional) y coordinate. If y is used, x must also be set, and vice versa - ``count`` - can be used for multiple times of tap on that element
def tap(self, locator, x_offset=None, y_offset=None, count=1):
    driver = self._current_application()
    el = self._element_find(locator, True, True)
    action = TouchAction(driver)
    action.tap(el, x_offset, y_offset, count).perform()
229,138
Create a new SpiderCrawl(er). Args: protocol: A :class:`~kademlia.protocol.KademliaProtocol` instance. node: A :class:`~kademlia.node.Node` representing the key we're looking for. peers: A list of :class:`~kademlia.node.Node` instances that provide the entry point for the network. ksize: The value for k based on the paper. alpha: The value for alpha based on the paper.
def __init__(self, protocol, node, peers, ksize, alpha):
    self.protocol = protocol
    self.ksize = ksize
    self.alpha = alpha
    self.node = node
    self.nearest = NodeHeap(self.node, self.ksize)
    self.last_ids_crawled = []
    log.info("creating spider with peers: %s", peers)
    self.nearest.push(peers)
229,463
Create a server instance. This will start listening on the given port. Args: ksize (int): The k parameter from the paper alpha (int): The alpha parameter from the paper node_id: The id for this node on the network. storage: An instance that implements :interface:`~kademlia.storage.IStorage`
def __init__(self, ksize=20, alpha=3, node_id=None, storage=None):
    self.ksize = ksize
    self.alpha = alpha
    self.storage = storage or ForgetfulStorage()
    self.node = Node(node_id or digest(random.getrandbits(255)))
    self.transport = None
    self.protocol = None
    self.refresh_loop = None
    self.save_state_loop = None
229,470
Bootstrap the server by connecting to other known nodes in the network. Args: addrs: A `list` of (ip, port) `tuple` pairs. Note that only IP addresses are acceptable - hostnames will cause an error.
async def bootstrap(self, addrs):
    log.debug("Attempting to bootstrap node with %i initial contacts", len(addrs))
    cos = list(map(self.bootstrap_node, addrs))
    gathered = await asyncio.gather(*cos)
    nodes = [node for node in gathered if node is not None]
    spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
                             self.ksize, self.alpha)
    return await spider.find()
229,477
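A hedged sketch of standing up a node and bootstrapping it; the ``listen()`` coroutine and the peer address are assumptions, not shown in the rows above:

.. code-block:: python

    import asyncio

    async def run():
        server = Server()
        await server.listen(8468)                    # assumed companion method
        await server.bootstrap([('1.2.3.4', 8468)])  # hypothetical known peer

    asyncio.get_event_loop().run_until_complete(run())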
Save the state of node with a given regularity to the given filename. Args: fname: File name to save regularly to. frequency: Frequency in seconds that the state should be saved. By default, 10 minutes.
def save_state_regularly(self, fname, frequency=600):
    self.save_state(fname)
    loop = asyncio.get_event_loop()
    self.save_state_loop = loop.call_later(frequency,
                                           self.save_state_regularly,
                                           fname,
                                           frequency)
229,484
Render the template with the context given. Arguments: template_name_or_list: Template name to render, or a list of possible template names. context: The variables to pass to the template.
async def render_template(template_name_or_list: Union[str, List[str]],
                          **context: Any) -> str:
    await current_app.update_template_context(context)
    template = current_app.jinja_env.get_or_select_template(template_name_or_list)
    return await _render(template, context)
229,628
Render the template source with the context given. Arguments: source: The template source code. context: The variables to pass to the template.
async def render_template_string(source: str, **context: Any) -> str:
    await current_app.update_template_context(context)
    template = current_app.jinja_env.from_string(source)
    return await _render(template, context)
229,629
Create a Quart specific Jinja2 Environment. Arguments: app: The Quart app to bind to. options: The standard Jinja2 Environment options.
def __init__(self, app: 'Quart', **options: Any) -> None:
    if 'loader' not in options:
        options['loader'] = app.create_global_jinja_loader()
    options['enable_async'] = True
    super().__init__(**options)
229,632
Return a blueprint setup state instance. Arguments: first_registration: True if this is the first registration of this blueprint on the app. url_prefix: An optional prefix to all rules
def make_setup_state(
        self,
        app: 'Quart',
        first_registration: bool,
        *,
        url_prefix: Optional[str] = None,
) -> 'BlueprintSetupState':
    return BlueprintSetupState(self, app, first_registration, url_prefix=url_prefix)
229,677
Convert the multidict to a plain dictionary. Arguments: flat: If True only return a single value for each key, if False return all values as lists.
def to_dict(self, flat: bool = True) -> Dict[Any, Any]:
    if flat:
        return {key: value for key, value in self.items()}  # type: ignore
    else:
        return {key: self.getall(key) for key in self}
229,687
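The flat flag in action, assuming ``md`` is an instance of the multidict class this method belongs to:

.. code-block:: python

    md.add('a', 1)
    md.add('a', 2)
    md.to_dict(flat=False)  # {'a': [1, 2]}
    md.to_dict()            # {'a': ...} a single value per key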
Save the file to the destination. Arguments: destination: A filename (str) or file object to write to. buffer_size: Buffer size as used as length in :func:`shutil.copyfileobj`.
def save(self, destination: BinaryIO, buffer_size: int = 16384) -> None:
    close_destination = False
    if isinstance(destination, str):
        destination = open(destination, 'wb')
        close_destination = True
    try:
        copyfileobj(self.stream, destination, buffer_size)
    finally:
        if close_destination:
            destination.close()
229,691
Create a request object. Arguments: path: The full unquoted path of the request. query_string: The raw bytes for the query string part. scheme: The scheme used for the request. headers: The request headers. subprotocols: The subprotocols requested. receive: Returns an awaitable of the current data accept: Idempotent callable to accept the websocket connection.
def __init__(
        self,
        path: str,
        query_string: bytes,
        scheme: str,
        headers: CIMultiDict,
        subprotocols: List[str],
        receive: Callable,
        send: Callable,
        accept: Callable,
) -> None:
    super().__init__('GET', scheme, path, query_string, headers)
    self._accept = accept
    self._receive = receive
    self._send = send
    self._subprotocols = subprotocols
229,747
Manually choose to accept the websocket connection. Arguments: headers: Additional headers to send with the acceptance response. subprotocol: The chosen subprotocol, optional.
async def accept(
        self,
        headers: Optional[Union[dict, CIMultiDict, Headers]] = None,
        subprotocol: Optional[str] = None,
) -> None:
    if headers is None:
        headers_ = Headers()
    else:
        headers_ = Headers(headers)
    await self._accept(headers_, subprotocol)
229,749
Update the provided template context. This adds additional context from the various template context processors. Arguments: context: The context to update (mutate).
async def update_template_context(self, context: dict) -> None:
    processors = self.template_context_processors[None]
    if has_request_context():
        blueprint = _request_ctx_stack.top.request.blueprint
        if blueprint is not None and blueprint in self.template_context_processors:
            processors = chain(processors, self.template_context_processors[blueprint])  # type: ignore # noqa
    extra_context: dict = {}
    for processor in processors:
        extra_context.update(await processor())
    original = context.copy()
    context.update(extra_context)
    context.update(original)
229,773
Register a function as an endpoint. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.endpoint('name') def endpoint(): ... Arguments: endpoint: The endpoint name to use.
def endpoint(self, endpoint: str) -> Callable:
    def decorator(func: Callable) -> Callable:
        handler = ensure_coroutine(func)
        self.view_functions[endpoint] = handler
        return func
    return decorator
229,776
Register a function as an error handler. This is designed to be used on the application directly. An example usage, .. code-block:: python def error_handler(): return "Error", 500 app.register_error_handler(500, error_handler) Arguments: error: The error code or Exception to handle. func: The function to handle the error. name: Optional blueprint key name.
def register_error_handler(
        self,
        error: Union[Type[Exception], int],
        func: Callable,
        name: AppOrBlueprintKey = None,
) -> None:
    handler = ensure_coroutine(func)
    if isinstance(error, int):
        error = all_http_exceptions[error]
    self.error_handler_spec[name][error] = handler
229,777
Add a template filter. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.template_filter('name') def to_upper(value): return value.upper() Arguments: name: The filter name (defaults to function name).
def template_filter(self, name: Optional[str] = None) -> Callable:
    def decorator(func: Callable) -> Callable:
        self.add_template_filter(func, name=name)
        return func
    return decorator
229,778
Add a template filter. This is designed to be used on the application directly. An example usage, .. code-block:: python def to_upper(value): return value.upper() app.add_template_filter(to_upper) Arguments: func: The function that is the filter. name: The filter name (defaults to function name).
def add_template_filter(self, func: Callable, name: Optional[str] = None) -> None:
    self.jinja_env.filters[name or func.__name__] = func
229,779
Add a template test. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.template_test('name') def is_upper(value): return value.isupper() Arguments: name: The test name (defaults to function name).
def template_test(self, name: Optional[str] = None) -> Callable:
    def decorator(func: Callable) -> Callable:
        self.add_template_test(func, name=name)
        return func
    return decorator
229,780
Add a template test. This is designed to be used on the application directly. An example usage, .. code-block:: python def is_upper(value): return value.isupper() app.add_template_test(is_upper) Arguments: func: The function that is the test. name: The test name (defaults to function name).
def add_template_test(self, func: Callable, name: Optional[str] = None) -> None:
    self.jinja_env.tests[name or func.__name__] = func
229,781
Add a template global. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.template_global('name') def five(): return 5 Arguments: name: The global name (defaults to function name).
def template_global(self, name: Optional[str] = None) -> Callable:
    def decorator(func: Callable) -> Callable:
        self.add_template_global(func, name=name)
        return func
    return decorator
229,782
Add a template global. This is designed to be used on the application directly. An example usage, .. code-block:: python def five(): return 5 app.add_template_global(five) Arguments: func: The function that is the global. name: The global name (defaults to function name).
def add_template_global(self, func: Callable, name: Optional[str] = None) -> None:
    self.jinja_env.globals[name or func.__name__] = func
229,783
Add a before request function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.before_request def func(): ... Arguments: func: The before request function itself. name: Optional blueprint key name.
def before_request(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    handler = ensure_coroutine(func)
    self.before_request_funcs[name].append(handler)
    return func
229,796
Add a before websocket function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.before_websocket def func(): ... Arguments: func: The before websocket function itself. name: Optional blueprint key name.
def before_websocket(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    handler = ensure_coroutine(func)
    self.before_websocket_funcs[name].append(handler)
    return func
229,797
Add a before **first** request function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.before_first_request def func(): ... Arguments: func: The before first request function itself. name: Optional blueprint key name.
def before_first_request(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    handler = ensure_coroutine(func)
    self.before_first_request_funcs.append(handler)
    return func
229,798
Add a before serving function. This will allow the function provided to be called once before anything is served (before any byte is received). This is designed to be used as a decorator. An example usage, .. code-block:: python @app.before_serving def func(): ... Arguments: func: The function itself.
def before_serving(self, func: Callable) -> Callable:
    handler = ensure_coroutine(func)
    self.before_serving_funcs.append(handler)
    return func
229,799
Add an after request function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.after_request def func(response): return response Arguments: func: The after request function itself. name: Optional blueprint key name.
def after_request(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    handler = ensure_coroutine(func)
    self.after_request_funcs[name].append(handler)
    return func
229,800
Add an after websocket function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.after_websocket def func(response): return response Arguments: func: The after websocket function itself. name: Optional blueprint key name.
def after_websocket(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    handler = ensure_coroutine(func)
    self.after_websocket_funcs[name].append(handler)
    return func
229,801
Add an after serving function. This will allow the function provided to be called once after anything is served (after the last byte is sent). This is designed to be used as a decorator. An example usage, .. code-block:: python @app.after_serving def func(): ... Arguments: func: The function itself.
def after_serving(self, func: Callable) -> Callable:
    handler = ensure_coroutine(func)
    self.after_serving_funcs.append(handler)
    return func
229,802
Add a teardown request function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.teardown_request def func(): ... Arguments: func: The teardown request function itself. name: Optional blueprint key name.
def teardown_request(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    handler = ensure_coroutine(func)
    self.teardown_request_funcs[name].append(handler)
    return func
229,803
Add a teardown websocket function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.teardown_websocket def func(): ... Arguments: func: The teardown websocket function itself. name: Optional blueprint key name.
def teardown_websocket(self, func: Callable, name: AppOrBlueprintKey = None) -> Callable:
    handler = ensure_coroutine(func)
    self.teardown_websocket_funcs[name].append(handler)
    return func
229,804
Add a teardown app (context) function. This is designed to be used as a decorator. An example usage, .. code-block:: python @app.teardown_appcontext def func(): ... Arguments: func: The teardown function itself.
def teardown_appcontext(self, func: Callable) -> Callable:
    handler = ensure_coroutine(func)
    self.teardown_appcontext_funcs.append(handler)
    return func
229,805
Register a blueprint on the app. This results in the blueprint's routes, error handlers etc... being added to the app. Arguments: blueprint: The blueprint to register. url_prefix: Optional prefix to apply to all paths.
def register_blueprint(self, blueprint: Blueprint,
                       url_prefix: Optional[str] = None) -> None:
    first_registration = False
    if blueprint.name in self.blueprints and self.blueprints[blueprint.name] is not blueprint:
        raise RuntimeError(
            f"Blueprint name '{blueprint.name}' "
            f"is already registered by {self.blueprints[blueprint.name]}. "
            "Blueprints must have unique names",
        )
    else:
        self.blueprints[blueprint.name] = blueprint
        first_registration = True
    blueprint.register(self, first_registration, url_prefix=url_prefix)
229,806
Teardown the request, calling the teardown functions. Arguments: exc: Any exception not handled that has caused the request to teardown. request_context: The request context, optional as Flask omits this argument.
async def do_teardown_request(
        self,
        exc: Optional[BaseException],
        request_context: Optional[RequestContext] = None,
) -> None:
    request_ = (request_context or _request_ctx_stack.top).request
    functions = self.teardown_request_funcs[None]
    blueprint = request_.blueprint
    if blueprint is not None:
        functions = chain(functions, self.teardown_request_funcs[blueprint])  # type: ignore
    for function in functions:
        await function(exc=exc)
    await request_tearing_down.send(self, exc=exc)
229,809
Teardown the websocket, calling the teardown functions. Arguments: exc: Any exception not handled that has caused the websocket to teardown. websocket_context: The websocket context, optional as Flask omits this argument.
async def do_teardown_websocket(
        self,
        exc: Optional[BaseException],
        websocket_context: Optional[WebsocketContext] = None,
) -> None:
    websocket_ = (websocket_context or _websocket_ctx_stack.top).websocket
    functions = self.teardown_websocket_funcs[None]
    blueprint = websocket_.blueprint
    if blueprint is not None:
        functions = chain(functions, self.teardown_websocket_funcs[blueprint])  # type: ignore
    for function in functions:
        await function(exc=exc)
    await websocket_tearing_down.send(self, exc=exc)
229,810
Adds pre and post processing to the request dispatching. Arguments: request_context: The request context, optional as Flask omits this argument.
async def full_dispatch_request(
        self, request_context: Optional[RequestContext] = None,
) -> Response:
    await self.try_trigger_before_first_request_functions()
    await request_started.send(self)
    try:
        result = await self.preprocess_request(request_context)
        if result is None:
            result = await self.dispatch_request(request_context)
    except Exception as error:
        result = await self.handle_user_exception(error)
    return await self.finalize_request(result, request_context)
229,818
Preprocess the request i.e. call before_request functions. Arguments: request_context: The request context, optional as Flask omits this argument.
async def preprocess_request(
        self, request_context: Optional[RequestContext] = None,
) -> Optional[ResponseReturnValue]:
    request_ = (request_context or _request_ctx_stack.top).request
    blueprint = request_.blueprint
    processors = self.url_value_preprocessors[None]
    if blueprint is not None:
        processors = chain(processors, self.url_value_preprocessors[blueprint])  # type: ignore
    for processor in processors:
        processor(request.endpoint, request.view_args)
    functions = self.before_request_funcs[None]
    if blueprint is not None:
        functions = chain(functions, self.before_request_funcs[blueprint])  # type: ignore
    for function in functions:
        result = await function()
        if result is not None:
            return result
    return None
229,819
Dispatch the request to the view function. Arguments: request_context: The request context, optional as Flask omits this argument.
async def dispatch_request(
        self, request_context: Optional[RequestContext] = None,
) -> ResponseReturnValue:
    request_ = (request_context or _request_ctx_stack.top).request
    if request_.routing_exception is not None:
        raise request_.routing_exception
    if request_.method == 'OPTIONS' and request_.url_rule.provide_automatic_options:
        return await self.make_default_options_response()
    handler = self.view_functions[request_.url_rule.endpoint]
    return await handler(**request_.view_args)
229,820
Turns the view response return value into a response. Arguments: result: The result of the request to finalize into a response. request_context: The request context, optional as Flask omits this argument.
async def finalize_request(
        self,
        result: ResponseReturnValue,
        request_context: Optional[RequestContext] = None,
        from_error_handler: bool = False,
) -> Response:
    response = await self.make_response(result)
    try:
        response = await self.process_response(response, request_context)
        await request_finished.send(self, response=response)
    except Exception:
        if not from_error_handler:
            raise
        self.logger.exception('Request finalizing errored')
    return response
229,821
Postprocess the request acting on the response. Arguments: response: The response after the request is finalized. request_context: The request context, optional as Flask omits this argument.
async def process_response(
        self,
        response: Response,
        request_context: Optional[RequestContext] = None,
) -> Response:
    request_ = (request_context or _request_ctx_stack.top).request
    functions = (request_context or _request_ctx_stack.top)._after_request_functions
    blueprint = request_.blueprint
    if blueprint is not None:
        functions = chain(functions, self.after_request_funcs[blueprint])
    functions = chain(functions, self.after_request_funcs[None])
    for function in functions:
        response = await function(response)
    session_ = (request_context or _request_ctx_stack.top).session
    if not self.session_interface.is_null_session(session_):
        await self.save_session(session_, response)
    return response
229,822
Adds pre and post processing to the websocket dispatching. Arguments: websocket_context: The websocket context, optional to match the Flask convention.
async def full_dispatch_websocket(
        self, websocket_context: Optional[WebsocketContext] = None,
) -> Optional[Response]:
    await self.try_trigger_before_first_request_functions()
    await websocket_started.send(self)
    try:
        result = await self.preprocess_websocket(websocket_context)
        if result is None:
            result = await self.dispatch_websocket(websocket_context)
    except Exception as error:
        result = await self.handle_user_exception(error)
    return await self.finalize_websocket(result, websocket_context)
229,824
Preprocess the websocket i.e. call before_websocket functions. Arguments: websocket_context: The websocket context, optional as Flask omits this argument.
async def preprocess_websocket(
        self, websocket_context: Optional[WebsocketContext] = None,
) -> Optional[ResponseReturnValue]:
    websocket_ = (websocket_context or _websocket_ctx_stack.top).websocket
    blueprint = websocket_.blueprint
    processors = self.url_value_preprocessors[None]
    if blueprint is not None:
        processors = chain(processors, self.url_value_preprocessors[blueprint])  # type: ignore
    for processor in processors:
        processor(websocket_.endpoint, websocket_.view_args)
    functions = self.before_websocket_funcs[None]
    if blueprint is not None:
        functions = chain(functions, self.before_websocket_funcs[blueprint])  # type: ignore
    for function in functions:
        result = await function()
        if result is not None:
            return result
    return None
229,825
Dispatch the websocket to the view function. Arguments: websocket_context: The websocket context, optional to match the Flask convention.
async def dispatch_websocket(
        self, websocket_context: Optional[WebsocketContext] = None,
) -> None:
    websocket_ = (websocket_context or _websocket_ctx_stack.top).websocket
    if websocket_.routing_exception is not None:
        raise websocket_.routing_exception
    handler = self.view_functions[websocket_.url_rule.endpoint]
    return await handler(**websocket_.view_args)
229,826
Turns the view response return value into a response. Arguments: result: The result of the websocket to finalize into a response. websocket_context: The websocket context, optional as Flask omits this argument.
async def finalize_websocket(
        self,
        result: ResponseReturnValue,
        websocket_context: Optional[WebsocketContext] = None,
        from_error_handler: bool = False,
) -> Optional[Response]:
    if result is not None:
        response = await self.make_response(result)
    else:
        response = None
    try:
        response = await self.postprocess_websocket(response, websocket_context)
        await websocket_finished.send(self, response=response)
    except Exception:
        if not from_error_handler:
            raise
        self.logger.exception('Request finalizing errored')
    return response
229,827
Postprocess the websocket acting on the response. Arguments: response: The response after the websocket is finalized. websocket_context: The websocket context, optional as Flask omits this argument.
async def postprocess_websocket(
        self,
        response: Optional[Response],
        websocket_context: Optional[WebsocketContext] = None,
) -> Response:
    websocket_ = (websocket_context or _websocket_ctx_stack.top).websocket
    functions = (websocket_context or _websocket_ctx_stack.top)._after_websocket_functions
    blueprint = websocket_.blueprint
    if blueprint is not None:
        functions = chain(functions, self.after_websocket_funcs[blueprint])
    functions = chain(functions, self.after_websocket_funcs[None])
    for function in functions:
        response = await function(response)
    # use the websocket context stack here, not the request context stack
    session_ = (websocket_context or _websocket_ctx_stack.top).session
    if not self.session_interface.is_null_session(session_):
        if response is None and isinstance(session_, SecureCookieSession) and session_.modified:
            self.logger.exception(
                "Secure Cookie Session modified during websocket handling. "
                "These modifications will be lost as a cookie cannot be set."
            )
        else:
            await self.save_session(session_, response)
    return response
229,828
Make the response conditional on the requested range. Arguments: request_range: The range as requested by the request. max_partial_size: The maximum length the server is willing to serve in a single response. Defaults to unlimited.
async def make_conditional(
        self, request_range: Range, max_partial_size: Optional[int] = None,
) -> None:
    self.accept_ranges = "bytes"  # Advertise this ability
    if len(request_range.ranges) == 0:  # Not a conditional request
        return
    if request_range.units != "bytes" or len(request_range.ranges) > 1:
        from ..exceptions import RequestRangeNotSatisfiable
        raise RequestRangeNotSatisfiable()
    begin, end = request_range.ranges[0]
    try:
        complete_length = await self.response.make_conditional(  # type: ignore
            begin, end, max_partial_size,
        )
    except AttributeError:
        self.response = self.data_body_class(await self.response.convert_to_sequence())
        return await self.make_conditional(request_range, max_partial_size)
    else:
        self.content_length = self.response.end - self.response.begin  # type: ignore
        if self.content_length != complete_length:
            self.content_range = ContentRange(
                request_range.units,
                self.response.begin,  # type: ignore
                self.response.end - 1,  # type: ignore
                complete_length,
            )
            self.status_code = 206
229,893
Parses the body data as JSON and returns it. Arguments: force: Force JSON parsing even if the mimetype is not JSON. silent: Do not trigger error handling if parsing fails, without this the :meth:`on_json_loading_failed` will be called on error. cache: Cache the parsed JSON on this request object.
async def get_json(
        self, force: bool = False, silent: bool = False, cache: bool = True,
) -> Any:
    if cache and self._cached_json is not sentinel:
        return self._cached_json
    if not (force or self.is_json):
        return None
    data = await self._load_json_data()
    try:
        result = loads(data)
    except ValueError as error:
        if silent:
            result = None
        else:
            self.on_json_loading_failed(error)
    if cache:
        self._cached_json = result
    return result
229,946
Create a request or websocket base object. Arguments: method: The HTTP verb. scheme: The scheme used for the request. path: The full unquoted path of the request. query_string: The raw bytes for the query string part. headers: The request headers. Attributes: args: The query string arguments. scheme: The URL scheme, http or https.
def __init__(
        self,
        method: str,
        scheme: str,
        path: str,
        query_string: bytes,
        headers: CIMultiDict,
) -> None:
    super().__init__(headers)
    self.args = MultiDict()
    for key, values in parse_qs(query_string.decode('ascii'), keep_blank_values=True).items():
        for value in values:
            self.args.add(key, value)
    self.path = path
    self.query_string = query_string
    self.scheme = scheme
    self.method = method
229,949
Load the configuration from a Python object. This can be used to reference modules or objects within modules for example, .. code-block:: python app.config.from_object('module') app.config.from_object('module.instance') from module import instance app.config.from_object(instance) are valid. Arguments: instance: Either a str referencing a python object or the object itself.
def from_object(self, instance: Union[object, str]) -> None:
    if isinstance(instance, str):
        try:
            path, config = instance.rsplit('.', 1)
        except ValueError:
            # no dot in the reference: import the whole module
            path = instance
            instance = importlib.import_module(path)
        else:
            # import the module, then pull the named object from it
            module = importlib.import_module(path)
            instance = getattr(module, config)
    for key in dir(instance):
        if key.isupper():
            self[key] = getattr(instance, key)
229,969
Load the configuration values from a JSON formatted file. This allows configuration to be loaded as so .. code-block:: python app.config.from_json('config.json') Arguments: filename: The filename which when appended to :attr:`root_path` gives the path to the file. silent: If True any errors will fail silently.
def from_json(self, filename: str, silent: bool = False) -> None:
    file_path = self.root_path / filename
    try:
        with open(file_path) as file_:
            data = json.loads(file_.read())
    except (FileNotFoundError, IsADirectoryError):
        if not silent:
            raise
    else:
        self.from_mapping(data)
229,970
Load the configuration values from a mapping. This allows either a mapping to be directly passed or as keyword arguments, for example, .. code-block:: python config = {'FOO': 'bar'} app.config.from_mapping(config) app.config.from_mapping(FOO='bar') Arguments: mapping: Optionally a mapping object. kwargs: Optionally a collection of keyword arguments to form a mapping.
def from_mapping(self, mapping: Optional[Mapping[str, Any]] = None, **kwargs: Any) -> None:
    mappings: Dict[str, Any] = {}
    if mapping is not None:
        mappings.update(mapping)
    mappings.update(kwargs)
    for key, value in mappings.items():
        if key.isupper():
            self[key] = value
229,971
Send a file from a given directory. Arguments: directory: Directory that when combined with file_name gives the file path. file_name: File name that when combined with directory gives the file path. See :func:`send_file` for the other arguments.
async def send_from_directory(
        directory: FilePath,
        file_name: str,
        *,
        mimetype: Optional[str] = None,
        as_attachment: bool = False,
        attachment_filename: Optional[str] = None,
        add_etags: bool = True,
        cache_timeout: Optional[int] = None,
        conditional: bool = True,
        last_modified: Optional[datetime] = None,
) -> Response:
    file_path = safe_join(directory, file_name)
    if not file_path.is_file():
        raise NotFound()
    return await send_file(
        file_path,
        mimetype=mimetype,
        as_attachment=as_attachment,
        attachment_filename=attachment_filename,
        add_etags=add_etags,
        cache_timeout=cache_timeout,
        conditional=conditional,
        last_modified=last_modified,
    )
230,003
Construct ellipse coordinates from two arrays of numbers. Args: X (1D array_like) Y (1D array_like) Returns: float: The mean of `X`. float: The mean of `Y`. float: The width of the ellipse. float: The height of the ellipse. float: The angle of orientation of the ellipse.
def build_ellipse(X, Y):
    x_mean = np.mean(X)
    y_mean = np.mean(Y)
    cov_matrix = np.cov(np.vstack((X, Y)))
    U, s, V = linalg.svd(cov_matrix, full_matrices=False)
    chi_95 = np.sqrt(4.61)  # 90% quantile of the chi-square distribution (2 dof)
    width = np.sqrt(cov_matrix[0][0]) * chi_95 * 2
    height = np.sqrt(cov_matrix[1][1]) * chi_95 * 2
    eigenvector = V.T[0]
    angle = np.arctan(eigenvector[1] / eigenvector[0])
    return x_mean, y_mean, width, height, angle
230,663
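A usage sketch on correlated Gaussian samples (the data is synthetic):

.. code-block:: python

    import numpy as np

    rng = np.random.RandomState(0)
    X = rng.normal(size=500)
    Y = 0.5 * X + rng.normal(scale=0.3, size=500)

    x0, y0, width, height, angle = build_ellipse(X, Y)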
Mix colors. Args: ca (gdk.RGBA): first color cb (gdk.RGBA): second color xb (float): mix fraction between 0.0 and 1.0 Return: gdk.RGBA: linear interpolation between ca and cb; 0 or 1 returns the unaltered 1st or 2nd color respectively, as in CSS.
def mix(self, ca, cb, xb):
    r = (1 - xb) * ca.red + xb * cb.red
    g = (1 - xb) * ca.green + xb * cb.green
    b = (1 - xb) * ca.blue + xb * cb.blue
    a = (1 - xb) * ca.alpha + xb * cb.alpha
    return gdk.RGBA(red=r, green=g, blue=b, alpha=a)
231,406
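Mixing red and blue halfway; ``obj`` stands in for an instance of the class defining mix, and gdk is assumed to be the Gdk binding referenced above:

.. code-block:: python

    red = gdk.RGBA(red=1.0, green=0.0, blue=0.0, alpha=1.0)
    blue = gdk.RGBA(red=0.0, green=0.0, blue=1.0, alpha=1.0)
    purple = obj.mix(red, blue, 0.5)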