Column                           Type           Range / classes
identifier                       stringlengths  1 – 155
parameters                       stringlengths  2 – 6.09k
docstring                        stringlengths  11 – 63.4k
docstring_summary                stringlengths  0 – 63.4k
function                         stringlengths  29 – 99.8k
function_tokens                  sequence
start_point                      sequence
end_point                        sequence
language                         stringclasses  1 value
docstring_language               stringlengths  2 – 7
docstring_language_predictions   stringlengths  18 – 23
is_langid_reliable               stringclasses  2 values
BlockifyUI.start
(self)
Start blockify and the main update routine.
Start blockify and the main update routine.
def start(self):
    "Start blockify and the main update routine."
    # Try to find a Spotify process in the current DBus session.
    self.connect_dbus()
    # Load the blocklist, start blockify, trap some signals and unmute.
    blocklist = blockify.Blocklist()
    self.b = blockify.Blockify(blocklist)
    self.bind_signals()
    self.b.toggle_mute()
    # Start and loop the main update routine once every 250ms.
    # To influence responsiveness or CPU usage, decrease/increase ms here.
    glib.timeout_add(self.update_interval, self.update)
    log.info("Blockify-UI started.")
[ "def", "start", "(", "self", ")", ":", "# Try to find a Spotify process in the current DBus session.", "self", ".", "connect_dbus", "(", ")", "# Load the blocklist, start blockify, trap some signals and unmute.", "blocklist", "=", "blockify", ".", "Blocklist", "(", ")", "self", ".", "b", "=", "blockify", ".", "Blockify", "(", "blocklist", ")", "self", ".", "bind_signals", "(", ")", "self", ".", "b", ".", "toggle_mute", "(", ")", "# Start and loop the main update routine once every 250ms.", "# To influence responsiveness or CPU usage, decrease/increase ms here.", "glib", ".", "timeout_add", "(", "self", ".", "update_interval", ",", "self", ".", "update", ")", "log", ".", "info", "(", "\"Blockify-UI started.\"", ")" ]
[ 301, 4 ]
[ 314, 40 ]
python
en
['en', 'en', 'en']
True
BlockifyUI.bind_signals
(self)
Binds SIGTERM, SIGINT, SIGUSR1 and SIGUSR2 to custom actions.
Binds SIGTERM, SIGINT, SIGUSR1 and SIGUSR2 to custom actions.
def bind_signals(self):
    "Binds SIGTERM, SIGINT, SIGUSR1 and SIGUSR2 to custom actions."
    signal.signal(signal.SIGUSR1, lambda sig, hdl: self.b.block_current())
    signal.signal(signal.SIGUSR2, lambda sig, hdl: self.b.unblock_current())
    signal.signal(signal.SIGTERM, lambda sig, hdl: self.stop())
    signal.signal(signal.SIGINT, lambda sig, hdl: self.stop())
[ "def", "bind_signals", "(", "self", ")", ":", "signal", ".", "signal", "(", "signal", ".", "SIGUSR1", ",", "lambda", "sig", ",", "hdl", ":", "self", ".", "b", ".", "block_current", "(", ")", ")", "signal", ".", "signal", "(", "signal", ".", "SIGUSR2", ",", "lambda", "sig", ",", "hdl", ":", "self", ".", "b", ".", "unblock_current", "(", ")", ")", "signal", ".", "signal", "(", "signal", ".", "SIGTERM", ",", "lambda", "sig", ",", "hdl", ":", "self", ".", "stop", "(", ")", ")", "signal", ".", "signal", "(", "signal", ".", "SIGINT", ",", "lambda", "sig", ",", "hdl", ":", "self", ".", "stop", "(", ")", ")" ]
[ 316, 4 ]
[ 321, 66 ]
python
en
['en', 'en', 'en']
True
BlockifyUI.stop
(self, *args)
Cleanly shut down, unmuting sound and saving the blocklist.
Cleanly shut down, unmuting sound and saving the blocklist.
def stop(self, *args):
    "Cleanly shut down, unmuting sound and saving the blocklist."
    self.b.stop()
    log.debug("Exiting GUI.")
    gtk.main_quit()
[ "def", "stop", "(", "self", ",", "*", "args", ")", ":", "self", ".", "b", ".", "stop", "(", ")", "log", ".", "debug", "(", "\"Exiting GUI.\"", ")", "gtk", ".", "main_quit", "(", ")" ]
[ 323, 4 ]
[ 327, 23 ]
python
en
['en', 'en', 'en']
True
KeyValueStorage.get
(self, key)
Get a value identified by the given key
:param key: The unique identifier
:return: The value identified by key or None if no value was found
Get a value identified by the given key
def get(self, key):
    """
    Get a value identified by the given key
    :param key: The unique identifier
    :return: The value identified by key or None if no value was found
    """
    raise NotImplementedError
[ "def", "get", "(", "self", ",", "key", ")", ":", "raise", "NotImplementedError" ]
[ 10, 4 ]
[ 18, 33 ]
python
en
['en', 'error', 'th']
False
KeyValueStorage.set
(self, key, value)
Store the value identified by the key
:param key: The unique identifier
:param value: Value to store
:return: bool True on success or False on failure
Store the value identified by the key
def set(self, key, value):
    """
    Store the value identified by the key
    :param key: The unique identifier
    :param value: Value to store
    :return: bool True on success or False on failure
    """
    raise NotImplementedError
[ "def", "set", "(", "self", ",", "key", ",", "value", ")", ":", "raise", "NotImplementedError" ]
[ 21, 4 ]
[ 30, 33 ]
python
en
['en', 'error', 'th']
False
KeyValueStorage.delete
(self, key)
Deletes item by key
:param key: The unique identifier
:return: bool True on success or False on failure
Deletes item by key
def delete(self, key):
    """
    Deletes item by key
    :param key: The unique identifier
    :return: bool True on success or False on failure
    """
    raise NotImplementedError
[ "def", "delete", "(", "self", ",", "key", ")", ":", "raise", "NotImplementedError" ]
[ 33, 4 ]
[ 41, 33 ]
python
en
['en', 'error', 'th']
False
KeyValueStorage.clear
(self)
Clears all entries
:return: bool True on success or False on failure
Clears all entries
def clear(self):
    """
    Clears all entries
    :return: bool True on success or False on failure
    """
    raise NotImplementedError
[ "def", "clear", "(", "self", ")", ":", "raise", "NotImplementedError" ]
[ 44, 4 ]
[ 50, 33 ]
python
en
['en', 'error', 'th']
False
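The four abstract methods above define a key-value contract; the sketch below is a minimal in-memory implementation, assuming the KeyValueStorage class above is importable (the DictStorage name is illustrative, not from the source).

class DictStorage(KeyValueStorage):
    """Hypothetical dict-backed implementation of the interface above."""

    def __init__(self):
        self._data = {}

    def get(self, key):
        # None signals a missing key, matching the interface contract.
        return self._data.get(key)

    def set(self, key, value):
        self._data[key] = value
        return True

    def delete(self, key):
        return self._data.pop(key, None) is not None

    def clear(self):
        self._data.clear()
        return True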
Bytecode_compat.__iter__
(self)
Yield '(op,arg)' pair for each operation in code object 'code'
Yield '(op,arg)' pair for each operation in code object 'code'
def __iter__(self):
    """Yield '(op,arg)' pair for each operation in code object 'code'"""
    bytes = array.array('b', self.code.co_code)
    eof = len(self.code.co_code)

    ptr = 0
    extended_arg = 0

    while ptr < eof:
        op = bytes[ptr]
        if op >= dis.HAVE_ARGUMENT:
            arg = bytes[ptr + 1] + bytes[ptr + 2] * 256 + extended_arg
            ptr += 3
            if op == dis.EXTENDED_ARG:
                long_type = six.integer_types[-1]
                extended_arg = arg * long_type(65536)
                continue
        else:
            arg = None
            ptr += 1
        yield OpArg(op, arg)
[ "def", "__iter__", "(", "self", ")", ":", "bytes", "=", "array", ".", "array", "(", "'b'", ",", "self", ".", "code", ".", "co_code", ")", "eof", "=", "len", "(", "self", ".", "code", ".", "co_code", ")", "ptr", "=", "0", "extended_arg", "=", "0", "while", "ptr", "<", "eof", ":", "op", "=", "bytes", "[", "ptr", "]", "if", "op", ">=", "dis", ".", "HAVE_ARGUMENT", ":", "arg", "=", "bytes", "[", "ptr", "+", "1", "]", "+", "bytes", "[", "ptr", "+", "2", "]", "*", "256", "+", "extended_arg", "ptr", "+=", "3", "if", "op", "==", "dis", ".", "EXTENDED_ARG", ":", "long_type", "=", "six", ".", "integer_types", "[", "-", "1", "]", "extended_arg", "=", "arg", "*", "long_type", "(", "65536", ")", "continue", "else", ":", "arg", "=", "None", "ptr", "+=", "1", "yield", "OpArg", "(", "op", ",", "arg", ")" ]
[ 20, 4 ]
[ 47, 32 ]
python
en
['en', 'en', 'en']
True
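The loop above hand-decodes legacy two-byte bytecode arguments, folding EXTENDED_ARG in as a 65536 multiplier. On Python 3.4+ the stdlib's dis module exposes the same (op, arg) view directly; a minimal sketch:

import dis

def op_arg_pairs(code):
    # Stdlib equivalent of the (op, arg) iteration above (Python 3.4+).
    for ins in dis.get_instructions(code):
        yield ins.opcode, ins.arg

def sample(x):
    return x + 1

for op, arg in op_arg_pairs(sample.__code__):
    print(dis.opname[op], arg)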
get_action_mapper
(name: str)
Get action mapper instance by name.
Get action mapper instance by name.
def get_action_mapper(name: str) -> 'ActionMapper':
    """Get action mapper instance by name."""
    return ACTION_MAPPERS[name]()
[ "def", "get_action_mapper", "(", "name", ":", "str", ")", "->", "'ActionMapper'", ":", "return", "ACTION_MAPPERS", "[", "name", "]", "(", ")" ]
[ 43, 0 ]
[ 45, 33 ]
python
en
['en', 'en', 'en']
True
_compute_relations
(p: np.ndarray, p1: np.ndarray, p2: np.ndarray)
Compute relation between vector p1->p2 and point p. Returns distance from p1->p2 to p and whether p between p1 and p2.
Compute relation between vector p1->p2 and point p.
def _compute_relations(p: np.ndarray, p1: np.ndarray,
                       p2: np.ndarray) -> Tuple[float, bool]:
    """Compute relation between vector p1->p2 and point p.

    Returns distance from p1->p2 to p and whether p between p1 and p2.
    """
    vector = p2 - p1
    vector_len = (vector * vector).sum()**.5
    proj_len = ((p - p1) * (p2 - p1)).sum() / vector_len
    between = 0 <= proj_len <= vector_len
    perp = p - p1 - vector * (proj_len / vector_len)
    return (perp * perp).sum()**.5, between
[ "def", "_compute_relations", "(", "p", ":", "np", ".", "ndarray", ",", "p1", ":", "np", ".", "ndarray", ",", "p2", ":", "np", ".", "ndarray", ")", "->", "Tuple", "[", "float", ",", "bool", "]", ":", "vector", "=", "p2", "-", "p1", "vector_len", "=", "(", "vector", "*", "vector", ")", ".", "sum", "(", ")", "**", ".5", "proj_len", "=", "(", "(", "p", "-", "p1", ")", "*", "(", "p2", "-", "p1", ")", ")", ".", "sum", "(", ")", "/", "vector_len", "between", "=", "0", "<=", "proj_len", "<=", "vector_len", "perp", "=", "p", "-", "p1", "-", "vector", "*", "(", "proj_len", "/", "vector_len", ")", "return", "(", "perp", "*", "perp", ")", ".", "sum", "(", ")", "**", ".5", ",", "between" ]
[ 266, 0 ]
[ 277, 43 ]
python
en
['en', 'en', 'en']
True
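A quick check of _compute_relations, assuming the function above is in scope: projecting (1, 1) onto the segment (0, 0) -> (2, 0) lands between the endpoints at perpendicular distance 1.0.

import numpy as np

# Point (1, 1) against the segment (0, 0) -> (2, 0).
dist, between = _compute_relations(np.array([1.0, 1.0]),
                                   np.array([0.0, 0.0]),
                                   np.array([2.0, 0.0]))
print(dist, between)  # 1.0 True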
ActionMapper.action_to_user_input
(self, action: GeneralizedAction )
Converts actions to points and is_valid flags.
Args:
    action: A list or an array representing a single action
Returns:
    A pair (user_input, is_valid).
    * user_input: scene_if.User that corresponds to the action.
    * is_valid: a boolean flag indicating whether the action is in valid
      range, i.e., a ball is within the scene. Note that if the action is
      not valid, the function should return empty point_list
Converts actions to points and is_valid flags.
def action_to_user_input(self, action: GeneralizedAction
                         ) -> Tuple[scene_if.UserInput, bool]:
    """Converts actions to points and is_valid flags.

    Args:
        action: A list or an array representing a single action

    Returns:
        A pair (user_input, is_valid).
        * user_input: scene_if.User that corresponds to the action.
        * is_valid: a boolean flag indicating whether the action is in
          valid range, i.e., a ball is within the scene. Note that if
          the action is not valid, the function should return empty
          point_list
    """
[ "def", "action_to_user_input", "(", "self", ",", "action", ":", "GeneralizedAction", ")", "->", "Tuple", "[", "scene_if", ".", "UserInput", ",", "bool", "]", ":" ]
[ 92, 4 ]
[ 107, 11 ]
python
en
['en', 'en', 'en']
True
_keep
(window, windows)
Helper function for creating rolling windows.
Helper function for creating rolling windows.
def _keep(window, windows):
  """Helper function for creating rolling windows."""
  windows.append(window.copy())
  return -1.
[ "def", "_keep", "(", "window", ",", "windows", ")", ":", "windows", ".", "append", "(", "window", ".", "copy", "(", ")", ")", "return", "-", "1." ]
[ 14, 0 ]
[ 17, 12 ]
python
en
['en', 'en', 'en']
True
create_rolling_features_label
(series, window_size, pred_offset, pred_n=1)
Computes rolling window of the series and creates rolling window of label.

Args:
  series: A Pandas Series. The indices are datetimes and the values are
    numeric type.
  window_size: integer; steps of historical data to use for features.
  pred_offset: integer; steps into the future for prediction.
  pred_n: integer; window size of label.

Returns:
  Pandas dataframe where the index is the datetime predicting at. The columns
  beginning with "-" indicate windows N steps before the prediction time.

Examples:
  >>> series = pd.Series(np.random.random(6),
                         index=pd.date_range(start='1/1/2018', end='1/06/2018'))
  # Example #1:
  >>> series
  2018-01-01    0.803948
  2018-01-02    0.269849
  2018-01-03    0.971984
  2018-01-04    0.809718
  2018-01-05    0.324454
  2018-01-06    0.229447
  >>> window_size = 3  # get 3 months of historical data
  >>> pred_offset = 1  # predict starting next month
  >>> pred_n = 1  # for predicting a single month
  >>> utils.create_rolling_features_label(series, window_size, pred_offset, pred_n)
  pred_datetime  -3_steps  -2_steps  -1_steps     label
  2018-01-04     0.803948  0.269849  0.971984  0.809718
  2018-01-05     0.269849  0.971984  0.809718  0.324454
  2018-01-06     0.971984  0.809718  0.324454  0.229447
  # Example #2:
  >>> window_size = 3  # get 3 months of historical data
  >>> pred_offset = 2  # predict starting 2 months into future
  >>> pred_n = 1  # for predicting a single month
  >>> utils.create_rolling_features_label(series, window_size, pred_offset, pred_n)
  pred_datetime  -4_steps  -3_steps  -2_steps     label
  2018-01-05     0.803948  0.269849  0.971984  0.324454
  2018-01-06     0.269849  0.971984  0.809718  0.229447
  # Example #3:
  >>> window_size = 3  # get 3 months of historical data
  >>> pred_offset = 1  # predict starting next month
  >>> pred_n = 2  # for predicting multiple months
  >>> utils.create_rolling_features_label(series, window_size, pred_offset, pred_n)
  pred_datetime  -3_steps  -2_steps  -1_steps  label_0_steps  label_1_steps
  2018-01-04     0.803948  0.269849  0.971984  0.809718       0.324454
  2018-01-05     0.269849  0.971984  0.809718  0.324454       0.229447
Computes rolling window of the series and creates rolling window of label.
def create_rolling_features_label(series, window_size, pred_offset, pred_n=1):
  """Computes rolling window of the series and creates rolling window of label.

  Args:
    series: A Pandas Series. The indices are datetimes and the values are
      numeric type.
    window_size: integer; steps of historical data to use for features.
    pred_offset: integer; steps into the future for prediction.
    pred_n: integer; window size of label.

  Returns:
    Pandas dataframe where the index is the datetime predicting at. The columns
    beginning with "-" indicate windows N steps before the prediction time.

  Examples:
    >>> series = pd.Series(np.random.random(6),
                           index=pd.date_range(start='1/1/2018', end='1/06/2018'))
    # Example #1:
    >>> series
    2018-01-01    0.803948
    2018-01-02    0.269849
    2018-01-03    0.971984
    2018-01-04    0.809718
    2018-01-05    0.324454
    2018-01-06    0.229447
    >>> window_size = 3  # get 3 months of historical data
    >>> pred_offset = 1  # predict starting next month
    >>> pred_n = 1  # for predicting a single month
    >>> utils.create_rolling_features_label(series, window_size, pred_offset, pred_n)
    pred_datetime  -3_steps  -2_steps  -1_steps     label
    2018-01-04     0.803948  0.269849  0.971984  0.809718
    2018-01-05     0.269849  0.971984  0.809718  0.324454
    2018-01-06     0.971984  0.809718  0.324454  0.229447
    # Example #2:
    >>> window_size = 3  # get 3 months of historical data
    >>> pred_offset = 2  # predict starting 2 months into future
    >>> pred_n = 1  # for predicting a single month
    >>> utils.create_rolling_features_label(series, window_size, pred_offset, pred_n)
    pred_datetime  -4_steps  -3_steps  -2_steps     label
    2018-01-05     0.803948  0.269849  0.971984  0.324454
    2018-01-06     0.269849  0.971984  0.809718  0.229447
    # Example #3:
    >>> window_size = 3  # get 3 months of historical data
    >>> pred_offset = 1  # predict starting next month
    >>> pred_n = 2  # for predicting multiple months
    >>> utils.create_rolling_features_label(series, window_size, pred_offset, pred_n)
    pred_datetime  -3_steps  -2_steps  -1_steps  label_0_steps  label_1_steps
    2018-01-04     0.803948  0.269849  0.971984  0.809718       0.324454
    2018-01-05     0.269849  0.971984  0.809718  0.324454       0.229447
  """
  if series.isnull().sum() > 0:
    raise ValueError('Series must not contain missing values.')
  if pred_n < 1:
    raise ValueError('pred_n must not be < 1.')
  if len(series) < (window_size + pred_offset + pred_n):
    raise ValueError('window_size + pred_offset + pred_n must not be greater '
                     'than series length.')
  total_steps = len(series)

  def compute_rolling_window(series, window_size):
    # Accumulate series into list.
    windows = []
    series.rolling(window_size)\
        .apply(_keep, args=(windows,))
    return np.array(windows)

  features_start = 0
  features_end = total_steps - (pred_offset - 1) - pred_n
  historical_windows = compute_rolling_window(
      series[features_start:features_end], window_size)
  # Get label pred_offset steps into the future.
  label_start, label_end = window_size + pred_offset - 1, total_steps
  label_series = series[label_start:label_end]
  y = compute_rolling_window(label_series, pred_n)
  if pred_n == 1:
    # TODO(crawles): remove this if statement/label name. It's for backwards
    # compatibility.
    columns = ['label']
  else:
    columns = ['label_{}_steps'.format(i) for i in range(pred_n)]
  # Make dataframe. Combine features and labels.
  label_ix = label_series.index[0:len(label_series) + 1 - pred_n]
  df = pd.DataFrame(y, columns=columns, index=label_ix)
  df.index.name = 'pred_date'
  # Populate dataframe with past sales.
  for day in range(window_size - 1, -1, -1):
    day_rel_label = pred_offset + window_size - day - 1
    df.insert(0, '-{}_steps'.format(day_rel_label), historical_windows[:, day])
  return df
[ "def", "create_rolling_features_label", "(", "series", ",", "window_size", ",", "pred_offset", ",", "pred_n", "=", "1", ")", ":", "if", "series", ".", "isnull", "(", ")", ".", "sum", "(", ")", ">", "0", ":", "raise", "ValueError", "(", "'Series must not contain missing values.'", ")", "if", "pred_n", "<", "1", ":", "raise", "ValueError", "(", "'pred_n must not be < 1.'", ")", "if", "len", "(", "series", ")", "<", "(", "window_size", "+", "pred_offset", "+", "pred_n", ")", ":", "raise", "ValueError", "(", "'window_size + pred_offset + pred_n must not be greater '", "'than series length.'", ")", "total_steps", "=", "len", "(", "series", ")", "def", "compute_rolling_window", "(", "series", ",", "window_size", ")", ":", "# Accumulate series into list.", "windows", "=", "[", "]", "series", ".", "rolling", "(", "window_size", ")", ".", "apply", "(", "_keep", ",", "args", "=", "(", "windows", ",", ")", ")", "return", "np", ".", "array", "(", "windows", ")", "features_start", "=", "0", "features_end", "=", "total_steps", "-", "(", "pred_offset", "-", "1", ")", "-", "pred_n", "historical_windows", "=", "compute_rolling_window", "(", "series", "[", "features_start", ":", "features_end", "]", ",", "window_size", ")", "# Get label pred_offset steps into the future.", "label_start", ",", "label_end", "=", "window_size", "+", "pred_offset", "-", "1", ",", "total_steps", "label_series", "=", "series", "[", "label_start", ":", "label_end", "]", "y", "=", "compute_rolling_window", "(", "label_series", ",", "pred_n", ")", "if", "pred_n", "==", "1", ":", "# TODO(crawles): remove this if statement/label name. It's for backwards", "# compatibility.", "columns", "=", "[", "'label'", "]", "else", ":", "columns", "=", "[", "'label_{}_steps'", ".", "format", "(", "i", ")", "for", "i", "in", "range", "(", "pred_n", ")", "]", "# Make dataframe. Combine features and labels.", "label_ix", "=", "label_series", ".", "index", "[", "0", ":", "len", "(", "label_series", ")", "+", "1", "-", "pred_n", "]", "df", "=", "pd", ".", "DataFrame", "(", "y", ",", "columns", "=", "columns", ",", "index", "=", "label_ix", ")", "df", ".", "index", ".", "name", "=", "'pred_date'", "# Populate dataframe with past sales.", "for", "day", "in", "range", "(", "window_size", "-", "1", ",", "-", "1", ",", "-", "1", ")", ":", "day_rel_label", "=", "pred_offset", "+", "window_size", "-", "day", "-", "1", "df", ".", "insert", "(", "0", ",", "'-{}_steps'", ".", "format", "(", "day_rel_label", ")", ",", "historical_windows", "[", ":", ",", "day", "]", ")", "return", "df" ]
[ 20, 0 ]
[ 122, 11 ]
python
en
['en', 'en', 'en']
True
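A small demo of the rolling-window builder, assuming the function and its _keep helper above are in scope; it reproduces the column layout of Example #1 from the docstring.

import numpy as np
import pandas as pd

# Six days of toy data, mirroring Example #1.
series = pd.Series(np.arange(6, dtype=float),
                   index=pd.date_range('2018-01-01', periods=6))
df = create_rolling_features_label(series, window_size=3,
                                   pred_offset=1, pred_n=1)
print(df.columns.tolist())  # ['-3_steps', '-2_steps', '-1_steps', 'label']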
add_aggregate_features
(df, time_series_col_names)
Compute summary statistic features for every row of dataframe.
Compute summary statistic features for every row of dataframe.
def add_aggregate_features(df, time_series_col_names):
  """Compute summary statistic features for every row of dataframe."""
  x = df[time_series_col_names]
  features = {}
  features['mean'] = x.mean(axis=1)
  features['std'] = x.std(axis=1)
  features['min'] = x.min(axis=1)
  features['max'] = x.max(axis=1)
  percentiles = range(10, 100, 20)
  for p in percentiles:
    features['{}_per'.format(p)] = np.percentile(x, p, axis=1)
  df_features = pd.DataFrame(features, index=x.index)
  return df_features.merge(df, left_index=True, right_index=True)
[ "def", "add_aggregate_features", "(", "df", ",", "time_series_col_names", ")", ":", "x", "=", "df", "[", "time_series_col_names", "]", "features", "=", "{", "}", "features", "[", "'mean'", "]", "=", "x", ".", "mean", "(", "axis", "=", "1", ")", "features", "[", "'std'", "]", "=", "x", ".", "std", "(", "axis", "=", "1", ")", "features", "[", "'min'", "]", "=", "x", ".", "min", "(", "axis", "=", "1", ")", "features", "[", "'max'", "]", "=", "x", ".", "max", "(", "axis", "=", "1", ")", "percentiles", "=", "range", "(", "10", ",", "100", ",", "20", ")", "for", "p", "in", "percentiles", ":", "features", "[", "'{}_per'", ".", "format", "(", "p", ")", "]", "=", "np", ".", "percentile", "(", "x", ",", "p", ",", "axis", "=", "1", ")", "df_features", "=", "pd", ".", "DataFrame", "(", "features", ",", "index", "=", "x", ".", "index", ")", "return", "df_features", ".", "merge", "(", "df", ",", "left_index", "=", "True", ",", "right_index", "=", "True", ")" ]
[ 125, 0 ]
[ 137, 65 ]
python
en
['en', 'en', 'en']
True
is_between_dates
(dates, start=None, end=None)
Return boolean indices indicating if dates occur between start and end.
Return boolean indices indicating if dates occur between start and end.
def is_between_dates(dates, start=None, end=None):
  """Return boolean indices indicating if dates occur between start and end."""
  if start is None:
    start = pd.to_datetime(0)
  if end is None:
    end = pd.to_datetime(sys.maxsize)
  date_series = pd.Series(pd.to_datetime(dates))
  return date_series.between(start, end).values
[ "def", "is_between_dates", "(", "dates", ",", "start", "=", "None", ",", "end", "=", "None", ")", ":", "if", "start", "is", "None", ":", "start", "=", "pd", ".", "to_datetime", "(", "0", ")", "if", "end", "is", "None", ":", "end", "=", "pd", ".", "to_datetime", "(", "sys", ".", "maxsize", ")", "date_series", "=", "pd", ".", "Series", "(", "pd", ".", "to_datetime", "(", "dates", ")", ")", "return", "date_series", ".", "between", "(", "start", ",", "end", ")", ".", "values" ]
[ 146, 0 ]
[ 153, 47 ]
python
en
['en', 'en', 'en']
True
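Usage sketch, assuming is_between_dates is in scope; pandas coerces the string bounds when comparing against the datetime series.

dates = ['2018-01-01', '2018-06-15', '2019-03-01']
print(is_between_dates(dates, start='2018-01-01', end='2018-12-31'))
# [ True  True False]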
_count_holidays
(dates, months, weeks)
Count number of holidays spanned in prediction windows.
Count number of holidays spanned in prediction windows.
def _count_holidays(dates, months, weeks):
  """Count number of holidays spanned in prediction windows."""
  cal = calendar()
  holidays = cal.holidays(start=dates.min(), end=dates.max())

  def count_holidays_during_month(date):
    n_holidays = 0
    beg = date
    end = date + pd.DateOffset(months=months, weeks=weeks)
    for h in holidays:
      if beg <= h < end:
        n_holidays += 1
    return n_holidays

  return pd.Series(dates).apply(count_holidays_during_month)
[ "def", "_count_holidays", "(", "dates", ",", "months", ",", "weeks", ")", ":", "cal", "=", "calendar", "(", ")", "holidays", "=", "cal", ".", "holidays", "(", "start", "=", "dates", ".", "min", "(", ")", ",", "end", "=", "dates", ".", "max", "(", ")", ")", "def", "count_holidays_during_month", "(", "date", ")", ":", "n_holidays", "=", "0", "beg", "=", "date", "end", "=", "date", "+", "pd", ".", "DateOffset", "(", "months", "=", "months", ",", "weeks", "=", "weeks", ")", "for", "h", "in", "holidays", ":", "if", "beg", "<=", "h", "<", "end", ":", "n_holidays", "+=", "1", "return", "n_holidays", "return", "pd", ".", "Series", "(", "dates", ")", ".", "apply", "(", "count_holidays_during_month", ")" ]
[ 156, 0 ]
[ 170, 60 ]
python
en
['en', 'en', 'en']
True
_get_day_of_month
(x)
From a datetime object, extract day of month.
From a datetime object, extract day of month.
def _get_day_of_month(x):
  """From a datetime object, extract day of month."""
  return int(x.strftime('%d'))
[ "def", "_get_day_of_month", "(", "x", ")", ":", "return", "int", "(", "x", ".", "strftime", "(", "'%d'", ")", ")" ]
[ 173, 0 ]
[ 175, 30 ]
python
en
['en', 'en', 'en']
True
add_date_features
(df, dates, months, weeks, inplace=False)
Create features using date that is being predicted on.
Create features using date that is being predicted on.
def add_date_features(df, dates, months, weeks, inplace=False):
  """Create features using date that is being predicted on."""
  if not inplace:
    df = df.copy()
  df['doy'] = dates.dayofyear
  df['dom'] = dates.map(_get_day_of_month)
  df['month'] = dates.month
  df['year'] = dates.year
  df['n_holidays'] = _count_holidays(dates, months, weeks).values
  return df
[ "def", "add_date_features", "(", "df", ",", "dates", ",", "months", ",", "weeks", ",", "inplace", "=", "False", ")", ":", "if", "not", "inplace", ":", "df", "=", "df", ".", "copy", "(", ")", "df", "[", "'doy'", "]", "=", "dates", ".", "dayofyear", "df", "[", "'dom'", "]", "=", "dates", ".", "map", "(", "_get_day_of_month", ")", "df", "[", "'month'", "]", "=", "dates", ".", "month", "df", "[", "'year'", "]", "=", "dates", ".", "year", "df", "[", "'n_holidays'", "]", "=", "_count_holidays", "(", "dates", ",", "months", ",", "weeks", ")", ".", "values", "return", "df" ]
[ 178, 0 ]
[ 187, 11 ]
python
en
['en', 'en', 'en']
True
Metrics.calculate_rmse
(self, residuals)
Root mean squared error.
Root mean squared error.
def calculate_rmse(self, residuals):
  """Root mean squared error."""
  return np.sqrt(np.mean(np.square(residuals)))
[ "def", "calculate_rmse", "(", "self", ",", "residuals", ")", ":", "return", "np", ".", "sqrt", "(", "np", ".", "mean", "(", "np", ".", "square", "(", "residuals", ")", ")", ")" ]
[ 201, 2 ]
[ 203, 49 ]
python
en
['en', 'en', 'en']
True
Metrics.calculate_mae
(self, residuals)
Mean absolute error.
Mean absolute error.
def calculate_mae(self, residuals):
  """Mean absolute error."""
  return np.mean(np.abs(residuals))
[ "def", "calculate_mae", "(", "self", ",", "residuals", ")", ":", "return", "np", ".", "mean", "(", "np", ".", "abs", "(", "residuals", ")", ")" ]
[ 205, 2 ]
[ 207, 37 ]
python
en
['et', 'gd', 'en']
False
Metrics.calculate_malr
(self, y_true, predictions)
Mean absolute log ratio.
Mean absolute log ratio.
def calculate_malr(self, y_true, predictions):
  """Mean absolute log ratio."""
  return np.mean(np.abs(np.log(1 + predictions) - np.log(1 + y_true)))
[ "def", "calculate_malr", "(", "self", ",", "y_true", ",", "predictions", ")", ":", "return", "np", ".", "mean", "(", "np", ".", "abs", "(", "np", ".", "log", "(", "1", "+", "predictions", ")", "-", "np", ".", "log", "(", "1", "+", "y_true", ")", ")", ")" ]
[ 209, 2 ]
[ 211, 72 ]
python
en
['en', 'en', 'en']
True
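A toy run of the three metrics above; this assumes Metrics can be instantiated with no arguments, which the excerpt does not show.

import numpy as np

y_true = np.array([100.0, 150.0, 200.0])
predictions = np.array([110.0, 140.0, 195.0])
residuals = predictions - y_true

m = Metrics()  # assumed no-arg constructor
print(m.calculate_rmse(residuals))            # ~8.66
print(m.calculate_mae(residuals))             # ~8.33
print(m.calculate_malr(y_true, predictions))  # ~0.063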
get_word_alignment
(num, force_arch=64, _machine_word_size=MACHINE_WORD_SIZE)
Returns alignment details for the given number based on the platform
Python is running on.
:param num: Unsigned integral number.
:param force_arch: If you don't want to use 64-bit unsigned chunks, set
    this to anything other than 64. 32-bit chunks will be preferred then.
    Default 64 will be used when on a 64-bit machine.
:param _machine_word_size: (Internal) The machine word size used for
    alignment.
:returns: 4-tuple:: (word_bits, word_bytes, max_uint, packing_format_type)
Returns alignment details for the given number based on the platform Python is running on.
def get_word_alignment(num, force_arch=64, _machine_word_size=MACHINE_WORD_SIZE):
    """
    Returns alignment details for the given number based on the platform
    Python is running on.

    :param num: Unsigned integral number.
    :param force_arch: If you don't want to use 64-bit unsigned chunks, set
        this to anything other than 64. 32-bit chunks will be preferred then.
        Default 64 will be used when on a 64-bit machine.
    :param _machine_word_size: (Internal) The machine word size used for
        alignment.
    :returns: 4-tuple::
        (word_bits, word_bytes, max_uint, packing_format_type)
    """
    max_uint64 = 0xffffffffffffffff
    max_uint32 = 0xffffffff
    max_uint16 = 0xffff
    max_uint8 = 0xff

    if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32:
        # 64-bit unsigned integer.
        return 64, 8, max_uint64, "Q"
    elif num > max_uint16:
        # 32-bit unsigned integer
        return 32, 4, max_uint32, "L"
    elif num > max_uint8:
        # 16-bit unsigned integer.
        return 16, 2, max_uint16, "H"
    else:
        # 8-bit unsigned integer.
        return 8, 1, max_uint8, "B"
[ "def", "get_word_alignment", "(", "num", ",", "force_arch", "=", "64", ",", "_machine_word_size", "=", "MACHINE_WORD_SIZE", ")", ":", "max_uint64", "=", "0xffffffffffffffff", "max_uint32", "=", "0xffffffff", "max_uint16", "=", "0xffff", "max_uint8", "=", "0xff", "if", "force_arch", "==", "64", "and", "_machine_word_size", ">=", "64", "and", "num", ">", "max_uint32", ":", "# 64-bit unsigned integer.", "return", "64", ",", "8", ",", "max_uint64", ",", "\"Q\"", "elif", "num", ">", "max_uint16", ":", "# 32-bit unsigned integer", "return", "32", ",", "4", ",", "max_uint32", ",", "\"L\"", "elif", "num", ">", "max_uint8", ":", "# 16-bit unsigned integer.", "return", "16", ",", "2", ",", "max_uint16", ",", "\"H\"", "else", ":", "# 8-bit unsigned integer.", "return", "8", ",", "1", ",", "max_uint8", ",", "\"B\"" ]
[ 37, 0 ]
[ 73, 35 ]
python
en
['en', 'error', 'th']
False
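Usage sketch, assuming get_word_alignment is in scope: the returned format character plugs straight into struct.pack.

import struct

bits, nbytes, max_uint, fmt = get_word_alignment(70000)
print(bits, nbytes, fmt)              # 32 4 L  (70000 needs 32 bits)
print(struct.pack('>' + fmt, 70000))  # 4 big-endian bytes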
endswith_cr
(line)
Return True if line (a text or bytestring) ends with '\r'.
Return True if line (a text or bytestring) ends with '\r'.
def endswith_cr(line):
    """Return True if line (a text or bytestring) ends with '\r'."""
    return line.endswith('\r' if isinstance(line, str) else b'\r')
[ "def", "endswith_cr", "(", "line", ")", ":", "return", "line", ".", "endswith", "(", "'\\r'", "if", "isinstance", "(", "line", ",", "str", ")", "else", "b'\\r'", ")" ]
[ 147, 0 ]
[ 149, 66 ]
python
en
['en', 'en', 'en']
True
endswith_lf
(line)
Return True if line (a text or bytestring) ends with '\n'.
Return True if line (a text or bytestring) ends with '\n'.
def endswith_lf(line):
    """Return True if line (a text or bytestring) ends with '\n'."""
    return line.endswith('\n' if isinstance(line, str) else b'\n')
[ "def", "endswith_lf", "(", "line", ")", ":", "return", "line", ".", "endswith", "(", "'\\n'", "if", "isinstance", "(", "line", ",", "str", ")", "else", "b'\\n'", ")" ]
[ 152, 0 ]
[ 154, 66 ]
python
en
['en', 'en', 'en']
True
equals_lf
(line)
Return True if line (a text or bytestring) equals '\n'.
Return True if line (a text or bytestring) equals '\n'.
def equals_lf(line):
    """Return True if line (a text or bytestring) equals '\n'."""
    return line == ('\n' if isinstance(line, str) else b'\n')
[ "def", "equals_lf", "(", "line", ")", ":", "return", "line", "==", "(", "'\\n'", "if", "isinstance", "(", "line", ",", "str", ")", "else", "b'\\n'", ")" ]
[ 157, 0 ]
[ 159, 61 ]
python
en
['en', 'en', 'en']
True
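The three helpers above accept either str or bytes; a quick demonstration, assuming they are in scope:

print(endswith_cr('line\r'))   # True
print(endswith_lf(b'line\n'))  # True
print(equals_lf('\n'))         # True
print(endswith_lf('line\r'))   # False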
File.chunks
(self, chunk_size=None)
Read the file and yield chunks of ``chunk_size`` bytes (defaults to ``File.DEFAULT_CHUNK_SIZE``).
Read the file and yield chunks of ``chunk_size`` bytes (defaults to ``File.DEFAULT_CHUNK_SIZE``).
def chunks(self, chunk_size=None):
    """
    Read the file and yield chunks of ``chunk_size`` bytes (defaults to
    ``File.DEFAULT_CHUNK_SIZE``).
    """
    chunk_size = chunk_size or self.DEFAULT_CHUNK_SIZE
    try:
        self.seek(0)
    except (AttributeError, UnsupportedOperation):
        pass

    while True:
        data = self.read(chunk_size)
        if not data:
            break
        yield data
[ "def", "chunks", "(", "self", ",", "chunk_size", "=", "None", ")", ":", "chunk_size", "=", "chunk_size", "or", "self", ".", "DEFAULT_CHUNK_SIZE", "try", ":", "self", ".", "seek", "(", "0", ")", "except", "(", "AttributeError", ",", "UnsupportedOperation", ")", ":", "pass", "while", "True", ":", "data", "=", "self", ".", "read", "(", "chunk_size", ")", "if", "not", "data", ":", "break", "yield", "data" ]
[ 47, 4 ]
[ 62, 22 ]
python
en
['en', 'error', 'th']
False
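File here is a Django-style file wrapper, but the seek-then-read loop in chunks() works on any file object. A self-contained sketch of the same pattern (the sha256_in_chunks name is illustrative) that hashes a stream without loading it whole:

import hashlib
import io

def sha256_in_chunks(fileobj, chunk_size=64 * 1024):
    # Same loop shape as File.chunks: rewind if possible, then read
    # fixed-size chunks until read() returns an empty result.
    digest = hashlib.sha256()
    try:
        fileobj.seek(0)
    except (AttributeError, io.UnsupportedOperation):
        pass
    while True:
        data = fileobj.read(chunk_size)
        if not data:
            break
        digest.update(data)
    return digest.hexdigest()

print(sha256_in_chunks(io.BytesIO(b'hello world')))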
File.multiple_chunks
(self, chunk_size=None)
Return ``True`` if you can expect multiple chunks. NB: If a particular file representation is in memory, subclasses should always return ``False`` -- there's no good reason to read from memory in chunks.
Return ``True`` if you can expect multiple chunks.
def multiple_chunks(self, chunk_size=None):
    """
    Return ``True`` if you can expect multiple chunks.

    NB: If a particular file representation is in memory, subclasses should
    always return ``False`` -- there's no good reason to read from memory in
    chunks.
    """
    return self.size > (chunk_size or self.DEFAULT_CHUNK_SIZE)
[ "def", "multiple_chunks", "(", "self", ",", "chunk_size", "=", "None", ")", ":", "return", "self", ".", "size", ">", "(", "chunk_size", "or", "self", ".", "DEFAULT_CHUNK_SIZE", ")" ]
[ 64, 4 ]
[ 72, 66 ]
python
en
['en', 'error', 'th']
False
get_parquet_schema
(project, dataset, table)
Return parquet schema for specified table.
Return parquet schema for specified table.
def get_parquet_schema(project, dataset, table):
    '''Return parquet schema for specified table.'''
    project_id = project
    dataset_id = dataset
    table_id = table

    data_type_mapping = {
        'STRING': pyarrow.string(),
        'BYTES': pyarrow.string(),
        'INTEGER': pyarrow.int64(),
        'FLOAT': pyarrow.float64(),
        'BOOLEAN': pyarrow.bool_(),
        'TIMESTAMP': pyarrow.timestamp(unit='s'),
        'DATE': pyarrow.date64(),
        'DATETIME': pyarrow.timestamp(unit='s'),
        ## Uncomment lines ONLY if you have nested and repeated fields present.
        # 'ARRAY': pyarrow.list_(),
        # 'RECORD': pyarrow.dictionary()
    }

    client = bigquery.Client(project=project_id)
    dataset_ref = client.dataset(dataset_id, project=project)
    table_ref = dataset_ref.table(table_id)
    table = client.get_table(table_ref)

    parquet_schema = pyarrow.schema([])
    for column in table.schema:
        parquet_schema = parquet_schema.append(
            pyarrow.field(column.name, data_type_mapping[column.field_type]))
    return parquet_schema
[ "def", "get_parquet_schema", "(", "project", ",", "dataset", ",", "table", ")", ":", "project_id", "=", "project", "dataset_id", "=", "dataset", "table_id", "=", "table", "data_type_mapping", "=", "{", "'STRING'", ":", "pyarrow", ".", "string", "(", ")", ",", "'BYTES'", ":", "pyarrow", ".", "string", "(", ")", ",", "'INTEGER'", ":", "pyarrow", ".", "int64", "(", ")", ",", "'FLOAT'", ":", "pyarrow", ".", "float64", "(", ")", ",", "'BOOLEAN'", ":", "pyarrow", ".", "bool_", "(", ")", ",", "'TIMESTAMP'", ":", "pyarrow", ".", "timestamp", "(", "unit", "=", "'s'", ")", ",", "'DATE'", ":", "pyarrow", ".", "date64", "(", ")", ",", "'DATETIME'", ":", "pyarrow", ".", "timestamp", "(", "unit", "=", "'s'", ")", ",", "## Uncomment lines ONLY if you have nested and repeated fields present.", "# 'ARRAY': pyarrow.list_(),", "# 'RECORD': pyarrow.dictionary()", "}", "client", "=", "bigquery", ".", "Client", "(", "project", "=", "project_id", ")", "dataset_ref", "=", "client", ".", "dataset", "(", "dataset_id", ",", "project", "=", "project", ")", "table_ref", "=", "dataset_ref", ".", "table", "(", "table_id", ")", "table", "=", "client", ".", "get_table", "(", "table_ref", ")", "parquet_schema", "=", "pyarrow", ".", "schema", "(", "[", "]", ")", "for", "column", "in", "table", ".", "schema", ":", "parquet_schema", "=", "parquet_schema", ".", "append", "(", "pyarrow", ".", "field", "(", "column", ".", "name", ",", "data_type_mapping", "[", "column", ".", "field_type", "]", ")", ")", "return", "parquet_schema" ]
[ 24, 0 ]
[ 50, 25 ]
python
en
['en', 'en', 'en']
True
run
(argv=None)
Main entry point: defines and runs the BQ extraction pipeline
Main entry point: defines and runs the BQ extraction pipeline
def run(argv=None):
    '''Main entry point: defines and runs the BQ extraction pipeline'''
    parser = argparse.ArgumentParser()

    # Custom arguments for BigQuery SQL and GCS output location
    parser.add_argument('--bql',
                        dest='bql',
                        help='BigQuery Standard SQL statement to define data to be extracted.')
    parser.add_argument('--output',
                        dest='output',
                        help='GCS output location for parquet files.')
    known_args, pipeline_args = parser.parse_known_args(argv)

    bq_table_source = known_args.bql.split('`')[1].split('.')
    parquet_schema = get_parquet_schema(bq_table_source[0], bq_table_source[1],
                                        bq_table_source[2])

    options = PipelineOptions(pipeline_args)

    # Instantiate a pipeline with all the pipeline options
    p = beam.Pipeline(options=options)

    # Processing and structure of pipeline
    p \
        | 'Input: Query BQ Table' >> beam.io.Read(beam.io.BigQuerySource(
            query=known_args.bql,
            use_standard_sql=True)) \
        | 'Output: Export to Parquet' >> beam.io.parquetio.WriteToParquet(
            file_path_prefix=known_args.output,
            schema=parquet_schema,
            file_name_suffix='.parquet',
            num_shards=1  # Remove this line for larger datasets and concatenate downstream.
        )

    result = p.run()
    result.wait_until_finish()  # Makes job to display all the logs. Remove this line if you
[ "def", "run", "(", "argv", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "# Custom arguments for BigQuery SQL and GCS output location", "parser", ".", "add_argument", "(", "'--bql'", ",", "dest", "=", "'bql'", ",", "help", "=", "'BigQuery Standard SQL statement to define data to be extracted.'", ")", "parser", ".", "add_argument", "(", "'--output'", ",", "dest", "=", "'output'", ",", "help", "=", "'GCS output location for parquet files.'", ")", "known_args", ",", "pipeline_args", "=", "parser", ".", "parse_known_args", "(", "argv", ")", "bq_table_source", "=", "known_args", ".", "bql", ".", "split", "(", "'`'", ")", "[", "1", "]", ".", "split", "(", "'.'", ")", "parquet_schema", "=", "get_parquet_schema", "(", "bq_table_source", "[", "0", "]", ",", "bq_table_source", "[", "1", "]", ",", "bq_table_source", "[", "2", "]", ")", "options", "=", "PipelineOptions", "(", "pipeline_args", ")", "# Instantiate a pipeline with all the pipeline options", "p", "=", "beam", ".", "Pipeline", "(", "options", "=", "options", ")", "# Processing and structure of pipeline", "p", "|", "'Input: Query BQ Table'", ">>", "beam", ".", "io", ".", "Read", "(", "beam", ".", "io", ".", "BigQuerySource", "(", "query", "=", "known_args", ".", "bql", ",", "use_standard_sql", "=", "True", ")", ")", "|", "'Output: Export to Parquet'", ">>", "beam", ".", "io", ".", "parquetio", ".", "WriteToParquet", "(", "file_path_prefix", "=", "known_args", ".", "output", ",", "schema", "=", "parquet_schema", ",", "file_name_suffix", "=", "'.parquet'", ",", "num_shards", "=", "1", "# Remove this line for larger datasets and concatenate downstream.", ")", "result", "=", "p", ".", "run", "(", ")", "result", ".", "wait_until_finish", "(", ")", "# Makes job to display all the logs. Remove this line if you" ]
[ 53, 0 ]
[ 83, 92 ]
python
en
['en', 'en', 'en']
True
parse_args
()
Parse arguments.
Parse arguments.
def parse_args():
    """Parse arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--cfg', type=str, required=True,
                        help='Overrides config file')
    parser.add_argument('-l', '--local', action='store_true',
                        help='Run locally instead of launching to cluster')
    parser.add_argument('-lc', '--local_carl', action='store_true',
                        help='Run locally but with a local large GPU cluster')
    parser.add_argument('-v', '--vis', action='store_true',
                        help='Generate visualizations when testing')
    parser.add_argument('-t', '--test', action='store_true',
                        help='Run testing mode (will pick the last ckpt)')
    parser.add_argument('-b', '--tb', action='store_true',
                        help='Run tensorboard on this directory')
    parser.add_argument('-f', '--fl', action='store_true',
                        help='View the folder (run a python server)')
    parser.add_argument('-d', '--delete', action='store_true',
                        help='Delete the folder')
    parser.add_argument('-p', '--profile', action='store_true',
                        help='Run with kernprof. Decorate fn with @profile')
    parser.add_argument('--cls', action='store_true',
                        help='Gen classification file and run that')
    parser.add_argument('--run_id', type=int, default=None,
                        help='Run for this specific run_id, if known')
    parser.add_argument('--num_gpu', type=int, default=1,
                        help='Number of gpus override')
    parser.add_argument('rest', nargs=argparse.REMAINDER)
    args = parser.parse_args()
    return args
[ "def", "parse_args", "(", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'-c'", ",", "'--cfg'", ",", "type", "=", "str", ",", "required", "=", "True", ",", "help", "=", "'Overrides config file'", ")", "parser", ".", "add_argument", "(", "'-l'", ",", "'--local'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Run locally instead of launching to cluster'", ")", "parser", ".", "add_argument", "(", "'-lc'", ",", "'--local_carl'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Run locally but with a local large GPU cluster'", ")", "parser", ".", "add_argument", "(", "'-v'", ",", "'--vis'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Generate visualizations when testing'", ")", "parser", ".", "add_argument", "(", "'-t'", ",", "'--test'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Run testing mode (will pick the last ckpt)'", ")", "parser", ".", "add_argument", "(", "'-b'", ",", "'--tb'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Run tensorboard on this directory'", ")", "parser", ".", "add_argument", "(", "'-f'", ",", "'--fl'", ",", "action", "=", "'store_true'", ",", "help", "=", "'View the folder (run a python server)'", ")", "parser", ".", "add_argument", "(", "'-d'", ",", "'--delete'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Delete the folder'", ")", "parser", ".", "add_argument", "(", "'-p'", ",", "'--profile'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Run with kernprof. Decorate fn with @profile'", ")", "parser", ".", "add_argument", "(", "'--cls'", ",", "action", "=", "'store_true'", ",", "help", "=", "'Gen classification file and run that'", ")", "parser", ".", "add_argument", "(", "'--run_id'", ",", "type", "=", "int", ",", "default", "=", "None", ",", "help", "=", "'Run for this specific run_id, if known'", ")", "parser", ".", "add_argument", "(", "'--num_gpu'", ",", "type", "=", "int", ",", "default", "=", "1", ",", "help", "=", "'Number of gpus override'", ")", "parser", ".", "add_argument", "(", "'rest'", ",", "nargs", "=", "argparse", ".", "REMAINDER", ")", "args", "=", "parser", ".", "parse_args", "(", ")", "return", "args" ]
[ 41, 0 ]
[ 93, 15 ]
python
en
['en', 'fr', 'en']
False
get_sweep_param_from_combinations
(clis)
Returns: [(run_id, overrides_dict)]. The run_id can be None if unsure what hydra would use.
Returns: [(run_id, overrides_dict)]. The run_id can be None if unsure what hydra would use.
def get_sweep_param_from_combinations(clis):
    """
    Returns: [(run_id, overrides_dict)]. The run_id can be None if unsure
    what hydra would use.
    """
    sweep_params = OrderedDict()
    final_clis = {}
    for cli in clis:
        config_key, config_vals = cli.split('=')
        if ',' not in config_vals:
            final_clis[config_key] = config_vals
            continue
        sweep_params[config_key] = config_vals.split(',')
    if len(sweep_params) == 0:
        return [(None, final_clis)]
    # Cross product
    combos = itertools.product(*list(sweep_params.values()))
    combos = [
        dict([(list(sweep_params.keys())[i], el)
              for i, el in enumerate(elts)]) for elts in combos
    ]
    # Using None since don't know which run_id will be assigned to which
    # config by hydra
    return [(None, {**final_clis, **extra_conf}) for extra_conf in combos]
[ "def", "get_sweep_param_from_combinations", "(", "clis", ")", ":", "sweep_params", "=", "OrderedDict", "(", ")", "final_clis", "=", "{", "}", "for", "cli", "in", "clis", ":", "config_key", ",", "config_vals", "=", "cli", ".", "split", "(", "'='", ")", "if", "','", "not", "in", "config_vals", ":", "final_clis", "[", "config_key", "]", "=", "config_vals", "continue", "sweep_params", "[", "config_key", "]", "=", "config_vals", ".", "split", "(", "','", ")", "if", "len", "(", "sweep_params", ")", "==", "0", ":", "return", "[", "(", "None", ",", "final_clis", ")", "]", "# Cross product", "combos", "=", "itertools", ".", "product", "(", "*", "list", "(", "sweep_params", ".", "values", "(", ")", ")", ")", "combos", "=", "[", "dict", "(", "[", "(", "list", "(", "sweep_params", ".", "keys", "(", ")", ")", "[", "i", "]", ",", "el", ")", "for", "i", ",", "el", "in", "enumerate", "(", "elts", ")", "]", ")", "for", "elts", "in", "combos", "]", "# Using None since don't know which run_id will be assigned to which", "# config by hydra", "return", "[", "(", "None", ",", "{", "*", "*", "final_clis", ",", "*", "*", "extra_conf", "}", ")", "for", "extra_conf", "in", "combos", "]" ]
[ 96, 0 ]
[ 120, 74 ]
python
en
['en', 'error', 'th']
False
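Usage sketch, assuming the function above is in scope: comma-separated values fan out into a cross product, while single values are shared across all combinations.

clis = ['lr=0.1,0.01', 'batch_size=32', 'depth=2,3']
for run_id, overrides in get_sweep_param_from_combinations(clis):
    print(run_id, overrides)
# Four combinations; run_id is always None here, e.g.:
# None {'batch_size': '32', 'lr': '0.1', 'depth': '2'}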
subselect_dict_keys_diff
(run_id_param_dicts)
Select keys from the param_dicts that actually change between configs.
Select keys from the param_dicts that actually change between configs.
def subselect_dict_keys_diff(run_id_param_dicts):
    """Select keys from the param_dicts that actually change between configs."""
    key_vals = {}
    for _, param_dict in run_id_param_dicts:
        for key, val in param_dict.items():
            if key not in key_vals:
                key_vals[key] = []
            key_vals[key].append(val)
    keys_to_keep = [
        key for key, vals in key_vals.items() if len(set(vals)) > 1
    ]
    return [(el[0], {key: el[1][key] for key in keys_to_keep})
            for el in run_id_param_dicts]
[ "def", "subselect_dict_keys_diff", "(", "run_id_param_dicts", ")", ":", "key_vals", "=", "{", "}", "for", "_", ",", "param_dict", "in", "run_id_param_dicts", ":", "for", "key", ",", "val", "in", "param_dict", ".", "items", "(", ")", ":", "if", "key", "not", "in", "key_vals", ":", "key_vals", "[", "key", "]", "=", "[", "]", "key_vals", "[", "key", "]", ".", "append", "(", "val", ")", "keys_to_keep", "=", "[", "key", "for", "key", ",", "vals", "in", "key_vals", ".", "items", "(", ")", "if", "len", "(", "set", "(", "vals", ")", ")", ">", "1", "]", "return", "[", "(", "el", "[", "0", "]", ",", "{", "key", ":", "el", "[", "1", "]", "[", "key", "]", "for", "key", "in", "keys_to_keep", "}", ")", "for", "el", "in", "run_id_param_dicts", "]" ]
[ 136, 0 ]
[ 148, 76 ]
python
en
['en', 'en', 'en']
True
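Usage sketch, assuming the function above is in scope: only keys whose values differ across configs survive.

runs = [
    (0, {'lr': '0.1',  'bs': '32', 'depth': '2'}),
    (1, {'lr': '0.01', 'bs': '32', 'depth': '2'}),
]
print(subselect_dict_keys_diff(runs))
# [(0, {'lr': '0.1'}), (1, {'lr': '0.01'})]  (only 'lr' varies)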
choose_single_run
(clis, fpath, run_id)
clis are a list of flags provided in the config overrides file.
Args:
    clis: List of clis from the txt file
    run_id: If known which model to run locally, the run_id of that sweep
clis are a list of flags provided in the config overrides file. Args: clis: List of clis from the txt file run_id: If known which model to run locally, the run_id of that sweep
def choose_single_run(clis, fpath, run_id):
    """
    clis are a list of flags provided in the config overrides file.
    Args:
        clis: List of clis from the txt file
        run_id: If known which model to run locally, the run_id of that sweep
    """
    # Check if this has been run before, then we can pick the overrides from
    # the .hydra folder. Else, will have to manually construct potential
    # combinations that will be run by hydra
    run_id_param_dicts = get_sweep_param_from_runs(fpath)
    if len(run_id_param_dicts) == 0:
        run_id_param_dicts = get_sweep_param_from_combinations(clis)
    if len(run_id_param_dicts) == 1:
        final_run_id, param_dict = run_id_param_dicts[0]
        assert run_id is None or run_id == final_run_id
    elif run_id is not None:
        final_run_id = run_id
        param_dicts = [el[1] for el in run_id_param_dicts if el[0] == run_id]
        assert len(param_dicts) == 1, 'run_id not found, or multiple found'
        param_dict = param_dicts[0]
    else:
        # Show options to the user and let her pick
        run_id_param_dicts_diff = subselect_dict_keys_diff(run_id_param_dicts)
        print('Choose from: \n' +
              '\n'.join([str(el) for el in run_id_param_dicts_diff]))
        qst = [
            inquirer.List(
                'r',
                message='Which sweep config to use?',
                choices=range(len(run_id_param_dicts)),
                carousel=True,
            ),
        ]
        final_run_id, param_dict = run_id_param_dicts[inquirer.prompt(qst)
                                                      ['r']]
    return final_run_id, [
        f'{key}={val}' for key, val in param_dict.items()
    ]
[ "def", "choose_single_run", "(", "clis", ",", "fpath", ",", "run_id", ")", ":", "# Check if this has been run before, then we can pick the overrides from", "# the .hydra folder. Else, will have to manually construct potential", "# combinations that will be run by hydra", "run_id_param_dicts", "=", "get_sweep_param_from_runs", "(", "fpath", ")", "if", "len", "(", "run_id_param_dicts", ")", "==", "0", ":", "run_id_param_dicts", "=", "get_sweep_param_from_combinations", "(", "clis", ")", "if", "len", "(", "run_id_param_dicts", ")", "==", "1", ":", "final_run_id", ",", "param_dict", "=", "run_id_param_dicts", "[", "0", "]", "assert", "run_id", "is", "None", "or", "run_id", "==", "final_run_id", "elif", "run_id", "is", "not", "None", ":", "final_run_id", "=", "run_id", "param_dicts", "=", "[", "el", "[", "1", "]", "for", "el", "in", "run_id_param_dicts", "if", "el", "[", "0", "]", "==", "run_id", "]", "assert", "len", "(", "param_dicts", ")", "==", "1", ",", "'run_id not found, or multiple found'", "param_dict", "=", "param_dicts", "[", "0", "]", "else", ":", "# Show options to the user and let her pick", "run_id_param_dicts_diff", "=", "subselect_dict_keys_diff", "(", "run_id_param_dicts", ")", "print", "(", "'Choose from: \\n'", "+", "'\\n'", ".", "join", "(", "[", "str", "(", "el", ")", "for", "el", "in", "run_id_param_dicts_diff", "]", ")", ")", "qst", "=", "[", "inquirer", ".", "List", "(", "'r'", ",", "message", "=", "'Which sweep config to use?'", ",", "choices", "=", "range", "(", "len", "(", "run_id_param_dicts", ")", ")", ",", "carousel", "=", "True", ",", ")", ",", "]", "final_run_id", ",", "param_dict", "=", "run_id_param_dicts", "[", "inquirer", ".", "prompt", "(", "qst", ")", "[", "'r'", "]", "]", "return", "final_run_id", ",", "[", "f'{key}={val}'", "for", "key", ",", "val", "in", "param_dict", ".", "items", "(", ")", "]" ]
[ 151, 0 ]
[ 189, 5 ]
python
en
['en', 'error', 'th']
False
read_file_into_cli
(fpath, running_local=False, run_id=None)
Read cli from file into a string.
Read cli from file into a string.
def read_file_into_cli(fpath, running_local=False, run_id=None):
    """Read cli from file into a string."""
    res = []
    with open(fpath, 'r') as fin:
        for line in fin:
            args = line.split('#')[0].strip()
            if len(args) == 0:
                continue
            res.append(args)
    if running_local:
        final_run_id, res = choose_single_run(res, fpath, run_id)
    else:
        final_run_id = None  # not local, launch all, so run_id is irrelevant
    return final_run_id, res
[ "def", "read_file_into_cli", "(", "fpath", ",", "running_local", "=", "False", ",", "run_id", "=", "None", ")", ":", "res", "=", "[", "]", "with", "open", "(", "fpath", ",", "'r'", ")", "as", "fin", ":", "for", "line", "in", "fin", ":", "args", "=", "line", ".", "split", "(", "'#'", ")", "[", "0", "]", ".", "strip", "(", ")", "if", "len", "(", "args", ")", "==", "0", ":", "continue", "res", ".", "append", "(", "args", ")", "if", "running_local", ":", "final_run_id", ",", "res", "=", "choose_single_run", "(", "res", ",", "fpath", ",", "run_id", ")", "else", ":", "final_run_id", "=", "None", "# not local, launch all, so run_id is irrelevant", "return", "final_run_id", ",", "res" ]
[ 192, 0 ]
[ 205, 28 ]
python
en
['en', 'en', 'en']
True
get_models_dir
(dpath)
Go inside the dpath to get the model dir.
Go inside the dpath to get the model dir.
def get_models_dir(dpath):
    """Go inside the dpath to get the model dir."""
    runs = sorted([el for el in next(os.walk(dpath))[1] if el.isdigit()])
    if len(runs) > 1:
        # Ask which run to use
        question = [
            inquirer.List(
                'run',
                message='Which run to use?',
                choices=runs,
            ),
        ]
        answers = inquirer.prompt(question)
    else:
        answers = dict(run=runs[0])
    return dpath + '/' + answers['run']
[ "def", "get_models_dir", "(", "dpath", ")", ":", "runs", "=", "sorted", "(", "[", "el", "for", "el", "in", "next", "(", "os", ".", "walk", "(", "dpath", ")", ")", "[", "1", "]", "if", "el", ".", "isdigit", "(", ")", "]", ")", "if", "len", "(", "runs", ")", ">", "1", ":", "# Ask which run to use", "question", "=", "[", "inquirer", ".", "List", "(", "'run'", ",", "message", "=", "'Which run to use?'", ",", "choices", "=", "runs", ",", ")", ",", "]", "answers", "=", "inquirer", ".", "prompt", "(", "question", ")", "else", ":", "answers", "=", "dict", "(", "run", "=", "runs", "[", "0", "]", ")", "return", "dpath", "+", "'/'", "+", "answers", "[", "'run'", "]" ]
[ 208, 0 ]
[ 223, 39 ]
python
en
['en', 'en', 'en']
True
construct_cmd
(args)
Construct the cmd as provided in args.
Construct the cmd as provided in args.
def construct_cmd(args):
    """Construct the cmd as provided in args."""
    if args.cfg:
        assert args.cfg.startswith('expts'), 'Must be wrt this directory'
    agent_folder = '{}/{}'.format(BASE_RUN_DIR,
                                  args.cfg if args.cfg else 'default')
    if (args.local or args.local_carl) and not args.test:
        # If args.test, then might be testing a model from other dir
        agent_folder = os.path.join(agent_folder, 'local')
    if args.tb:  # Run tensorboard only
        # Clear the cli and just run tensorboard
        cli = ('cd {agent_folder}; tensorboard --logdir . --port {port} '
               '--max_reload_threads 10 --window_title {name} ').format(
                   agent_folder=agent_folder,
                   port=get_free_port(),
                   name=args.cfg)
        return cli
    if args.fl:  # Visualize the folder only
        # Clear the cli and just run tensorboard
        cli = 'cd {}; python -m http.server {}'.format(agent_folder,
                                                       get_free_port())
        return cli
    if args.delete:
        cli = 'rm -r {f}/* {f}/.*'.format(f=agent_folder)
        shall = input("Run %s (y/N) " % cli).lower() == 'y'
        if shall:
            return cli
        return ''
    # Else, it is the general train command
    cli = ' {} train2.py hydra.run.dir={} '.format(
        'kernprof -l ' if args.profile else 'python ', agent_folder)
    run_id, cli_stuff = read_file_into_cli(args.cfg,
                                           running_local=args.local
                                           or args.local_carl,
                                           run_id=args.run_id)
    cli_stuff = [f"'{el}'" for el in cli_stuff]
    cli += ' '.join(cli_stuff)
    if args.vis:
        cli += (' eval.store_vis=True '
                ' eval.frames_per_clip=20 '
                ' eval.n_fwd_times=20 '
                ' force_eval=true ')
    if args.test:
        # wts_folder = get_models_dir(agent_folder) if args.local else 'last'
        wts_folder = (os.path.join(agent_folder, str(run_id)) if
                      (args.local or args.local_carl) else 'last')
        cli += ' agent.weights_folder={} '.format(wts_folder)
    if args.local:
        cli += (' num_gpus=4 train.batch_size=2 '
                ' eval.batch_size=4 '
                ' train.data_loader.num_workers=0 '
                ' eval.data_loader.num_workers=0 ')
    if args.num_gpu:
        cli += (f' num_gpus={int(args.num_gpu)}')
    cli += ' ' + ' '.join(args.rest)
    # This must go at the end, the other args must go before
    if not (args.local or args.local_carl):
        cli += ' -m '
    return cli
[ "def", "construct_cmd", "(", "args", ")", ":", "if", "args", ".", "cfg", ":", "assert", "args", ".", "cfg", ".", "startswith", "(", "'expts'", ")", ",", "'Must be wrt this directory'", "agent_folder", "=", "'{}/{}'", ".", "format", "(", "BASE_RUN_DIR", ",", "args", ".", "cfg", "if", "args", ".", "cfg", "else", "'default'", ")", "if", "(", "args", ".", "local", "or", "args", ".", "local_carl", ")", "and", "not", "args", ".", "test", ":", "# If args.test, then might be testing a model from other dir", "agent_folder", "=", "os", ".", "path", ".", "join", "(", "agent_folder", ",", "'local'", ")", "if", "args", ".", "tb", ":", "# Run tensorboard only", "# Clear the cli and just run tensorboard", "cli", "=", "(", "'cd {agent_folder}; tensorboard --logdir . --port {port} '", "'--max_reload_threads 10 --window_title {name} '", ")", ".", "format", "(", "agent_folder", "=", "agent_folder", ",", "port", "=", "get_free_port", "(", ")", ",", "name", "=", "args", ".", "cfg", ")", "return", "cli", "if", "args", ".", "fl", ":", "# Visualize the folder only", "# Clear the cli and just run tensorboard", "cli", "=", "'cd {}; python -m http.server {}'", ".", "format", "(", "agent_folder", ",", "get_free_port", "(", ")", ")", "return", "cli", "if", "args", ".", "delete", ":", "cli", "=", "'rm -r {f}/* {f}/.*'", ".", "format", "(", "f", "=", "agent_folder", ")", "shall", "=", "input", "(", "\"Run %s (y/N) \"", "%", "cli", ")", ".", "lower", "(", ")", "==", "'y'", "if", "shall", ":", "return", "cli", "return", "''", "# Else, it is the general train command", "cli", "=", "' {} train2.py hydra.run.dir={} '", ".", "format", "(", "'kernprof -l '", "if", "args", ".", "profile", "else", "'python '", ",", "agent_folder", ")", "run_id", ",", "cli_stuff", "=", "read_file_into_cli", "(", "args", ".", "cfg", ",", "running_local", "=", "args", ".", "local", "or", "args", ".", "local_carl", ",", "run_id", "=", "args", ".", "run_id", ")", "cli_stuff", "=", "[", "f\"'{el}'\"", "for", "el", "in", "cli_stuff", "]", "cli", "+=", "' '", ".", "join", "(", "cli_stuff", ")", "if", "args", ".", "vis", ":", "cli", "+=", "(", "' eval.store_vis=True '", "' eval.frames_per_clip=20 '", "' eval.n_fwd_times=20 '", "' force_eval=true '", ")", "if", "args", ".", "test", ":", "# wts_folder = get_models_dir(agent_folder) if args.local else 'last'", "wts_folder", "=", "(", "os", ".", "path", ".", "join", "(", "agent_folder", ",", "str", "(", "run_id", ")", ")", "if", "(", "args", ".", "local", "or", "args", ".", "local_carl", ")", "else", "'last'", ")", "cli", "+=", "' agent.weights_folder={} '", ".", "format", "(", "wts_folder", ")", "if", "args", ".", "local", ":", "cli", "+=", "(", "' num_gpus=4 train.batch_size=2 '", "' eval.batch_size=4 '", "' train.data_loader.num_workers=0 '", "' eval.data_loader.num_workers=0 '", ")", "if", "args", ".", "num_gpu", ":", "cli", "+=", "(", "f' num_gpus={int(args.num_gpu)}'", ")", "cli", "+=", "' '", "+", "' '", ".", "join", "(", "args", ".", "rest", ")", "# This must go at the end, the other args must go before", "if", "not", "(", "args", ".", "local", "or", "args", ".", "local_carl", ")", ":", "cli", "+=", "' -m '", "return", "cli" ]
[ 240, 0 ]
[ 308, 14 ]
python
en
['en', 'en', 'en']
True
_const_compare_digest_backport
(a, b)
Compare two digests of equal length in constant time. The digests must be of type str/bytes. Returns True if the digests match, and False otherwise.
Compare two digests of equal length in constant time.
def _const_compare_digest_backport(a, b): """ Compare two digests of equal length in constant time. The digests must be of type str/bytes. Returns True if the digests match, and False otherwise. """ result = abs(len(a) - len(b)) for left, right in zip(bytearray(a), bytearray(b)): result |= left ^ right return result == 0
[ "def", "_const_compare_digest_backport", "(", "a", ",", "b", ")", ":", "result", "=", "abs", "(", "len", "(", "a", ")", "-", "len", "(", "b", ")", ")", "for", "left", ",", "right", "in", "zip", "(", "bytearray", "(", "a", ")", ",", "bytearray", "(", "b", ")", ")", ":", "result", "|=", "left", "^", "right", "return", "result", "==", "0" ]
[ 29, 0 ]
[ 39, 22 ]
python
en
['en', 'error', 'th']
False
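A quick sanity check of the XOR-accumulator trick above; on Pythons that ship ``hmac.compare_digest`` the stdlib function is used instead, so this sketch only assumes the backport exactly as defined in this record.

import hmac

def _const_compare_digest_backport(a, b):
    # Accumulate differences bit-by-bit so timing does not leak the mismatch position.
    result = abs(len(a) - len(b))
    for left, right in zip(bytearray(a), bytearray(b)):
        result |= left ^ right
    return result == 0

assert _const_compare_digest_backport(b"abc", b"abc")
assert not _const_compare_digest_backport(b"abc", b"abd")
# Agrees with the stdlib implementation on equal-length inputs:
assert hmac.compare_digest(b"abc", b"abc") == _const_compare_digest_backport(b"abc", b"abc")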
assert_fingerprint
(cert, fingerprint)
Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons.
Checks if given fingerprint matches the supplied certificate.
def assert_fingerprint(cert, fingerprint): """ Checks if given fingerprint matches the supplied certificate. :param cert: Certificate as bytes object. :param fingerprint: Fingerprint as string of hexdigits, can be interspersed by colons. """ fingerprint = fingerprint.replace(":", "").lower() digest_length = len(fingerprint) hashfunc = HASHFUNC_MAP.get(digest_length) if not hashfunc: raise SSLError("Fingerprint of invalid length: {0}".format(fingerprint)) # We need encode() here for py32; works on py2 and py33. fingerprint_bytes = unhexlify(fingerprint.encode()) cert_digest = hashfunc(cert).digest() if not _const_compare_digest(cert_digest, fingerprint_bytes): raise SSLError( 'Fingerprints did not match. Expected "{0}", got "{1}".'.format( fingerprint, hexlify(cert_digest) ) )
[ "def", "assert_fingerprint", "(", "cert", ",", "fingerprint", ")", ":", "fingerprint", "=", "fingerprint", ".", "replace", "(", "\":\"", ",", "\"\"", ")", ".", "lower", "(", ")", "digest_length", "=", "len", "(", "fingerprint", ")", "hashfunc", "=", "HASHFUNC_MAP", ".", "get", "(", "digest_length", ")", "if", "not", "hashfunc", ":", "raise", "SSLError", "(", "\"Fingerprint of invalid length: {0}\"", ".", "format", "(", "fingerprint", ")", ")", "# We need encode() here for py32; works on py2 and p33.", "fingerprint_bytes", "=", "unhexlify", "(", "fingerprint", ".", "encode", "(", ")", ")", "cert_digest", "=", "hashfunc", "(", "cert", ")", ".", "digest", "(", ")", "if", "not", "_const_compare_digest", "(", "cert_digest", ",", "fingerprint_bytes", ")", ":", "raise", "SSLError", "(", "'Fingerprints did not match. Expected \"{0}\", got \"{1}\".'", ".", "format", "(", "fingerprint", ",", "hexlify", "(", "cert_digest", ")", ")", ")" ]
[ 181, 0 ]
[ 207, 9 ]
python
en
['en', 'error', 'th']
False
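A minimal pinning sketch, assuming ``assert_fingerprint`` is importable from ``urllib3.util.ssl_`` (the module this record comes from). The ``cert`` below is stand-in bytes rather than a real DER certificate, which is enough here because the function only hashes whatever bytes it is given.

import hashlib
from urllib3.util.ssl_ import assert_fingerprint

cert = b"stand-in DER bytes"  # hypothetical certificate body
fp = hashlib.sha256(cert).hexdigest()
fp = ":".join(fp[i:i + 2] for i in range(0, len(fp), 2))  # colons are stripped internally

assert_fingerprint(cert, fp)  # returns silently on a match
# assert_fingerprint(cert, "00" * 32)  # same length, wrong digest: would raise SSLError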
resolve_cert_reqs
(candidate)
Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_REQUIRED`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbreviation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.) If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket.
Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_REQUIRED`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbreviation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.) If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket.
def resolve_cert_reqs(candidate): """ Resolves the argument to a numeric constant, which can be passed to the wrap_socket function/method from the ssl module. Defaults to :data:`ssl.CERT_REQUIRED`. If given a string it is assumed to be the name of the constant in the :mod:`ssl` module or its abbreviation. (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.) If it's neither `None` nor a string we assume it is already the numeric constant which can directly be passed to wrap_socket. """ if candidate is None: return CERT_REQUIRED if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, "CERT_" + candidate) return res return candidate
[ "def", "resolve_cert_reqs", "(", "candidate", ")", ":", "if", "candidate", "is", "None", ":", "return", "CERT_REQUIRED", "if", "isinstance", "(", "candidate", ",", "str", ")", ":", "res", "=", "getattr", "(", "ssl", ",", "candidate", ",", "None", ")", "if", "res", "is", "None", ":", "res", "=", "getattr", "(", "ssl", ",", "\"CERT_\"", "+", "candidate", ")", "return", "res", "return", "candidate" ]
[ 210, 0 ]
[ 230, 20 ]
python
en
['en', 'error', 'th']
False
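A few illustrative calls, assuming the function is importable from ``urllib3.util.ssl_``; they just exercise the three branches described above.

import ssl
from urllib3.util.ssl_ import resolve_cert_reqs

assert resolve_cert_reqs(None) == ssl.CERT_REQUIRED               # the default
assert resolve_cert_reqs("CERT_NONE") == ssl.CERT_NONE            # full constant name
assert resolve_cert_reqs("REQUIRED") == ssl.CERT_REQUIRED         # abbreviation gets CERT_ prefixed
assert resolve_cert_reqs(ssl.CERT_OPTIONAL) == ssl.CERT_OPTIONAL  # numeric constant passes through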
resolve_ssl_version
(candidate)
like resolve_cert_reqs
like resolve_cert_reqs
def resolve_ssl_version(candidate): """ like resolve_cert_reqs """ if candidate is None: return PROTOCOL_TLS if isinstance(candidate, str): res = getattr(ssl, candidate, None) if res is None: res = getattr(ssl, "PROTOCOL_" + candidate) return res return candidate
[ "def", "resolve_ssl_version", "(", "candidate", ")", ":", "if", "candidate", "is", "None", ":", "return", "PROTOCOL_TLS", "if", "isinstance", "(", "candidate", ",", "str", ")", ":", "res", "=", "getattr", "(", "ssl", ",", "candidate", ",", "None", ")", "if", "res", "is", "None", ":", "res", "=", "getattr", "(", "ssl", ",", "\"PROTOCOL_\"", "+", "candidate", ")", "return", "res", "return", "candidate" ]
[ 233, 0 ]
[ 246, 20 ]
python
en
['en', 'error', 'th']
False
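The same resolution pattern with the ``PROTOCOL_`` prefix; a short sketch under the same import assumption (newer Pythons may emit deprecation warnings for these protocol constants).

import ssl
from urllib3.util.ssl_ import resolve_ssl_version

assert resolve_ssl_version(None) == ssl.PROTOCOL_TLS
assert resolve_ssl_version("PROTOCOL_TLSv1_2") == ssl.PROTOCOL_TLSv1_2
assert resolve_ssl_version("TLSv1_2") == ssl.PROTOCOL_TLSv1_2  # abbreviation gets PROTOCOL_ prefixed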
create_urllib3_context
( ssl_version=None, cert_reqs=None, options=None, ciphers=None )
All arguments have the same meaning as ``ssl_wrap_socket``. By default, this function does a lot of the same work that ``ssl.create_default_context`` does on Python 3.4+. It: - Disables SSLv2, SSLv3, and compression - Sets a restricted set of server ciphers If you wish to enable SSLv3, you can do:: from pip._vendor.urllib3.util import ssl_ context = ssl_.create_urllib3_context() context.options &= ~ssl_.OP_NO_SSLv3 You can do the same to enable compression (substituting ``COMPRESSION`` for ``SSLv3`` in the last line above). :param ssl_version: The desired protocol version to use. This defaults to PROTOCOL_TLS_CLIENT (PROTOCOL_TLS itself being deprecated), which will negotiate the highest protocol that both the server and your installation of OpenSSL support. :param cert_reqs: Whether to require the certificate verification. This defaults to ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. :param ciphers: Which cipher suites to allow the server to select. :returns: Constructed SSLContext object with specified options :rtype: SSLContext
All arguments have the same meaning as ``ssl_wrap_socket``.
def create_urllib3_context( ssl_version=None, cert_reqs=None, options=None, ciphers=None ): """All arguments have the same meaning as ``ssl_wrap_socket``. By default, this function does a lot of the same work that ``ssl.create_default_context`` does on Python 3.4+. It: - Disables SSLv2, SSLv3, and compression - Sets a restricted set of server ciphers If you wish to enable SSLv3, you can do:: from pip._vendor.urllib3.util import ssl_ context = ssl_.create_urllib3_context() context.options &= ~ssl_.OP_NO_SSLv3 You can do the same to enable compression (substituting ``COMPRESSION`` for ``SSLv3`` in the last line above). :param ssl_version: The desired protocol version to use. This will default to PROTOCOL_SSLv23 which will negotiate the highest protocol that both the server and your installation of OpenSSL support. :param cert_reqs: Whether to require the certificate verification. This defaults to ``ssl.CERT_REQUIRED``. :param options: Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``, ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``, and ``ssl.OP_NO_TICKET``. :param ciphers: Which cipher suites to allow the server to select. :returns: Constructed SSLContext object with specified options :rtype: SSLContext """ # PROTOCOL_TLS is deprecated in Python 3.10 if not ssl_version or ssl_version == PROTOCOL_TLS: ssl_version = PROTOCOL_TLS_CLIENT context = SSLContext(ssl_version) context.set_ciphers(ciphers or DEFAULT_CIPHERS) # Setting the default here, as we may have no ssl module on import cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs if options is None: options = 0 # SSLv2 is easily broken and is considered harmful and dangerous options |= OP_NO_SSLv2 # SSLv3 has several problems and is now dangerous options |= OP_NO_SSLv3 # Disable compression to prevent CRIME attacks for OpenSSL 1.0+ # (issue #309) options |= OP_NO_COMPRESSION # TLSv1.2 only. Unless set explicitly, do not request tickets. # This may save some bandwidth on wire, and although the ticket is encrypted, # there is a risk associated with it being on wire, # if the server is not rotating its ticketing keys properly. options |= OP_NO_TICKET context.options |= options # Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is # necessary for conditional client cert authentication with TLS 1.3. # The attribute is None for OpenSSL <= 1.1.0 or does not exist in older # versions of Python. We only enable on Python 3.7.4+ or if certificate # verification is enabled to work around Python issue #37428 # See: https://bugs.python.org/issue37428 if (cert_reqs == ssl.CERT_REQUIRED or sys.version_info >= (3, 7, 4)) and getattr( context, "post_handshake_auth", None ) is not None: context.post_handshake_auth = True def disable_check_hostname(): if ( getattr(context, "check_hostname", None) is not None ): # Platform-specific: Python 3.2 # We do our own verification, including fingerprints and alternative # hostnames. So disable it here context.check_hostname = False # The order of the below lines setting verify_mode and check_hostname # matter due to safe-guards SSLContext has to prevent an SSLContext with # check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more # complex because we don't know whether PROTOCOL_TLS_CLIENT will be used # or not so we don't know the initial state of the freshly created SSLContext. 
if cert_reqs == ssl.CERT_REQUIRED: context.verify_mode = cert_reqs disable_check_hostname() else: disable_check_hostname() context.verify_mode = cert_reqs # Enable logging of TLS session keys via defacto standard environment variable # 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values. if hasattr(context, "keylog_filename"): sslkeylogfile = os.environ.get("SSLKEYLOGFILE") if sslkeylogfile: context.keylog_filename = sslkeylogfile return context
[ "def", "create_urllib3_context", "(", "ssl_version", "=", "None", ",", "cert_reqs", "=", "None", ",", "options", "=", "None", ",", "ciphers", "=", "None", ")", ":", "# PROTOCOL_TLS is deprecated in Python 3.10", "if", "not", "ssl_version", "or", "ssl_version", "==", "PROTOCOL_TLS", ":", "ssl_version", "=", "PROTOCOL_TLS_CLIENT", "context", "=", "SSLContext", "(", "ssl_version", ")", "context", ".", "set_ciphers", "(", "ciphers", "or", "DEFAULT_CIPHERS", ")", "# Setting the default here, as we may have no ssl module on import", "cert_reqs", "=", "ssl", ".", "CERT_REQUIRED", "if", "cert_reqs", "is", "None", "else", "cert_reqs", "if", "options", "is", "None", ":", "options", "=", "0", "# SSLv2 is easily broken and is considered harmful and dangerous", "options", "|=", "OP_NO_SSLv2", "# SSLv3 has several problems and is now dangerous", "options", "|=", "OP_NO_SSLv3", "# Disable compression to prevent CRIME attacks for OpenSSL 1.0+", "# (issue #309)", "options", "|=", "OP_NO_COMPRESSION", "# TLSv1.2 only. Unless set explicitly, do not request tickets.", "# This may save some bandwidth on wire, and although the ticket is encrypted,", "# there is a risk associated with it being on wire,", "# if the server is not rotating its ticketing keys properly.", "options", "|=", "OP_NO_TICKET", "context", ".", "options", "|=", "options", "# Enable post-handshake authentication for TLS 1.3, see GH #1634. PHA is", "# necessary for conditional client cert authentication with TLS 1.3.", "# The attribute is None for OpenSSL <= 1.1.0 or does not exist in older", "# versions of Python. We only enable on Python 3.7.4+ or if certificate", "# verification is enabled to work around Python issue #37428", "# See: https://bugs.python.org/issue37428", "if", "(", "cert_reqs", "==", "ssl", ".", "CERT_REQUIRED", "or", "sys", ".", "version_info", ">=", "(", "3", ",", "7", ",", "4", ")", ")", "and", "getattr", "(", "context", ",", "\"post_handshake_auth\"", ",", "None", ")", "is", "not", "None", ":", "context", ".", "post_handshake_auth", "=", "True", "def", "disable_check_hostname", "(", ")", ":", "if", "(", "getattr", "(", "context", ",", "\"check_hostname\"", ",", "None", ")", "is", "not", "None", ")", ":", "# Platform-specific: Python 3.2", "# We do our own verification, including fingerprints and alternative", "# hostnames. So disable it here", "context", ".", "check_hostname", "=", "False", "# The order of the below lines setting verify_mode and check_hostname", "# matter due to safe-guards SSLContext has to prevent an SSLContext with", "# check_hostname=True, verify_mode=NONE/OPTIONAL. This is made even more", "# complex because we don't know whether PROTOCOL_TLS_CLIENT will be used", "# or not so we don't know the initial state of the freshly created SSLContext.", "if", "cert_reqs", "==", "ssl", ".", "CERT_REQUIRED", ":", "context", ".", "verify_mode", "=", "cert_reqs", "disable_check_hostname", "(", ")", "else", ":", "disable_check_hostname", "(", ")", "context", ".", "verify_mode", "=", "cert_reqs", "# Enable logging of TLS session keys via defacto standard environment variable", "# 'SSLKEYLOGFILE', if the feature is available (Python 3.8+). Skip empty values.", "if", "hasattr", "(", "context", ",", "\"keylog_filename\"", ")", ":", "sslkeylogfile", "=", "os", ".", "environ", ".", "get", "(", "\"SSLKEYLOGFILE\"", ")", "if", "sslkeylogfile", ":", "context", ".", "keylog_filename", "=", "sslkeylogfile", "return", "context" ]
[ 249, 0 ]
[ 351, 18 ]
python
en
['en', 'en', 'en']
True
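A minimal usage sketch; the assertions reflect the behavior of the 1.26-line implementation shown above (verification required by default, legacy protocols disabled, hostname checking delegated to urllib3's own matching), so treat them as version-dependent rather than guaranteed.

import ssl
from urllib3.util.ssl_ import create_urllib3_context

ctx = create_urllib3_context()
assert ctx.verify_mode == ssl.CERT_REQUIRED
assert ctx.options & ssl.OP_NO_SSLv3   # SSLv3 disabled by default
assert ctx.check_hostname is False     # urllib3 does its own hostname matching

lax = create_urllib3_context(cert_reqs=ssl.CERT_NONE)  # opt out of verification
assert lax.verify_mode == ssl.CERT_NONE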
ssl_wrap_socket
( sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None, ciphers=None, ssl_context=None, ca_cert_dir=None, key_password=None, ca_cert_data=None, tls_in_tls=False, )
All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. :param server_hostname: When SNI is supported, the expected hostname of the certificate :param ssl_context: A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: A string of ciphers we wish the client to support. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). :param key_password: Optional password if the keyfile is encrypted. :param ca_cert_data: Optional string containing CA certificates in PEM format suitable for passing as the cadata parameter to SSLContext.load_verify_locations() :param tls_in_tls: Use SSLTransport to wrap the existing socket.
All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`.
def ssl_wrap_socket( sock, keyfile=None, certfile=None, cert_reqs=None, ca_certs=None, server_hostname=None, ssl_version=None, ciphers=None, ssl_context=None, ca_cert_dir=None, key_password=None, ca_cert_data=None, tls_in_tls=False, ): """ All arguments except for server_hostname, ssl_context, and ca_cert_dir have the same meaning as they do when using :func:`ssl.wrap_socket`. :param server_hostname: When SNI is supported, the expected hostname of the certificate :param ssl_context: A pre-made :class:`SSLContext` object. If none is provided, one will be created using :func:`create_urllib3_context`. :param ciphers: A string of ciphers we wish the client to support. :param ca_cert_dir: A directory containing CA certificates in multiple separate files, as supported by OpenSSL's -CApath flag or the capath argument to SSLContext.load_verify_locations(). :param key_password: Optional password if the keyfile is encrypted. :param ca_cert_data: Optional string containing CA certificates in PEM format suitable for passing as the cadata parameter to SSLContext.load_verify_locations() :param tls_in_tls: Use SSLTransport to wrap the existing socket. """ context = ssl_context if context is None: # Note: This branch of code and all the variables in it are no longer # used by urllib3 itself. We should consider deprecating and removing # this code. context = create_urllib3_context(ssl_version, cert_reqs, ciphers=ciphers) if ca_certs or ca_cert_dir or ca_cert_data: try: context.load_verify_locations(ca_certs, ca_cert_dir, ca_cert_data) except (IOError, OSError) as e: raise SSLError(e) elif ssl_context is None and hasattr(context, "load_default_certs"): # try to load OS default certs; works well on Windows (require Python3.4+) context.load_default_certs() # Attempt to detect if we get the goofy behavior of the # keyfile being encrypted and OpenSSL asking for the # passphrase via the terminal and instead error out. if keyfile and key_password is None and _is_key_file_encrypted(keyfile): raise SSLError("Client private key is encrypted, password is required") if certfile: if key_password is None: context.load_cert_chain(certfile, keyfile) else: context.load_cert_chain(certfile, keyfile, key_password) try: if hasattr(context, "set_alpn_protocols"): context.set_alpn_protocols(ALPN_PROTOCOLS) except NotImplementedError: # Defensive: in CI, we always have set_alpn_protocols pass # If we detect server_hostname is an IP address then the SNI # extension should not be used according to RFC3546 Section 3.1 use_sni_hostname = server_hostname and not is_ipaddress(server_hostname) # SecureTransport uses server_hostname in certificate verification. send_sni = (use_sni_hostname and HAS_SNI) or ( IS_SECURETRANSPORT and server_hostname ) # Do not warn the user if server_hostname is an invalid SNI hostname. if not HAS_SNI and use_sni_hostname: warnings.warn( "An HTTPS request has been made, but the SNI (Server Name " "Indication) extension to TLS is not available on this platform. " "This may cause the server to present an incorrect TLS " "certificate, which can cause validation failures. You can upgrade to " "a newer version of Python to solve this. For more information, see " "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html" "#ssl-warnings", SNIMissingWarning, ) if send_sni: ssl_sock = _ssl_wrap_socket_impl( sock, context, tls_in_tls, server_hostname=server_hostname ) else: ssl_sock = _ssl_wrap_socket_impl(sock, context, tls_in_tls) return ssl_sock
[ "def", "ssl_wrap_socket", "(", "sock", ",", "keyfile", "=", "None", ",", "certfile", "=", "None", ",", "cert_reqs", "=", "None", ",", "ca_certs", "=", "None", ",", "server_hostname", "=", "None", ",", "ssl_version", "=", "None", ",", "ciphers", "=", "None", ",", "ssl_context", "=", "None", ",", "ca_cert_dir", "=", "None", ",", "key_password", "=", "None", ",", "ca_cert_data", "=", "None", ",", "tls_in_tls", "=", "False", ",", ")", ":", "context", "=", "ssl_context", "if", "context", "is", "None", ":", "# Note: This branch of code and all the variables in it are no longer", "# used by urllib3 itself. We should consider deprecating and removing", "# this code.", "context", "=", "create_urllib3_context", "(", "ssl_version", ",", "cert_reqs", ",", "ciphers", "=", "ciphers", ")", "if", "ca_certs", "or", "ca_cert_dir", "or", "ca_cert_data", ":", "try", ":", "context", ".", "load_verify_locations", "(", "ca_certs", ",", "ca_cert_dir", ",", "ca_cert_data", ")", "except", "(", "IOError", ",", "OSError", ")", "as", "e", ":", "raise", "SSLError", "(", "e", ")", "elif", "ssl_context", "is", "None", "and", "hasattr", "(", "context", ",", "\"load_default_certs\"", ")", ":", "# try to load OS default certs; works well on Windows (require Python3.4+)", "context", ".", "load_default_certs", "(", ")", "# Attempt to detect if we get the goofy behavior of the", "# keyfile being encrypted and OpenSSL asking for the", "# passphrase via the terminal and instead error out.", "if", "keyfile", "and", "key_password", "is", "None", "and", "_is_key_file_encrypted", "(", "keyfile", ")", ":", "raise", "SSLError", "(", "\"Client private key is encrypted, password is required\"", ")", "if", "certfile", ":", "if", "key_password", "is", "None", ":", "context", ".", "load_cert_chain", "(", "certfile", ",", "keyfile", ")", "else", ":", "context", ".", "load_cert_chain", "(", "certfile", ",", "keyfile", ",", "key_password", ")", "try", ":", "if", "hasattr", "(", "context", ",", "\"set_alpn_protocols\"", ")", ":", "context", ".", "set_alpn_protocols", "(", "ALPN_PROTOCOLS", ")", "except", "NotImplementedError", ":", "# Defensive: in CI, we always have set_alpn_protocols", "pass", "# If we detect server_hostname is an IP address then the SNI", "# extension should not be used according to RFC3546 Section 3.1", "use_sni_hostname", "=", "server_hostname", "and", "not", "is_ipaddress", "(", "server_hostname", ")", "# SecureTransport uses server_hostname in certificate verification.", "send_sni", "=", "(", "use_sni_hostname", "and", "HAS_SNI", ")", "or", "(", "IS_SECURETRANSPORT", "and", "server_hostname", ")", "# Do not warn the user if server_hostname is an invalid SNI hostname.", "if", "not", "HAS_SNI", "and", "use_sni_hostname", ":", "warnings", ".", "warn", "(", "\"An HTTPS request has been made, but the SNI (Server Name \"", "\"Indication) extension to TLS is not available on this platform. \"", "\"This may cause the server to present an incorrect TLS \"", "\"certificate, which can cause validation failures. You can upgrade to \"", "\"a newer version of Python to solve this. For more information, see \"", "\"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html\"", "\"#ssl-warnings\"", ",", "SNIMissingWarning", ",", ")", "if", "send_sni", ":", "ssl_sock", "=", "_ssl_wrap_socket_impl", "(", "sock", ",", "context", ",", "tls_in_tls", ",", "server_hostname", "=", "server_hostname", ")", "else", ":", "ssl_sock", "=", "_ssl_wrap_socket_impl", "(", "sock", ",", "context", ",", "tls_in_tls", ")", "return", "ssl_sock" ]
[ 354, 0 ]
[ 453, 19 ]
python
en
['en', 'error', 'th']
False
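A network-dependent sketch of wrapping a plain TCP socket; ``www.python.org`` is only a placeholder TLS host, and the context setup mirrors the defaults above with OS trust roots loaded so CERT_REQUIRED can succeed.

import socket
from urllib3.util.ssl_ import create_urllib3_context, ssl_wrap_socket

hostname = "www.python.org"  # placeholder; any reachable TLS host works
ctx = create_urllib3_context()
ctx.load_default_certs()     # trust roots needed for CERT_REQUIRED

sock = socket.create_connection((hostname, 443))
ssl_sock = ssl_wrap_socket(sock, server_hostname=hostname, ssl_context=ctx)
print(ssl_sock.version())    # e.g. 'TLSv1.3'
ssl_sock.close()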
is_ipaddress
(hostname)
Detects whether the hostname given is an IPv4 or IPv6 address. Also detects IPv6 addresses with Zone IDs. :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise.
Detects whether the hostname given is an IPv4 or IPv6 address. Also detects IPv6 addresses with Zone IDs.
def is_ipaddress(hostname): """Detects whether the hostname given is an IPv4 or IPv6 address. Also detects IPv6 addresses with Zone IDs. :param str hostname: Hostname to examine. :return: True if the hostname is an IP address, False otherwise. """ if not six.PY2 and isinstance(hostname, bytes): # IDN A-label bytes are ASCII compatible. hostname = hostname.decode("ascii") return bool(IPV4_RE.match(hostname) or BRACELESS_IPV6_ADDRZ_RE.match(hostname))
[ "def", "is_ipaddress", "(", "hostname", ")", ":", "if", "not", "six", ".", "PY2", "and", "isinstance", "(", "hostname", ",", "bytes", ")", ":", "# IDN A-label bytes are ASCII compatible.", "hostname", "=", "hostname", ".", "decode", "(", "\"ascii\"", ")", "return", "bool", "(", "IPV4_RE", ".", "match", "(", "hostname", ")", "or", "BRACELESS_IPV6_ADDRZ_RE", ".", "match", "(", "hostname", ")", ")" ]
[ 456, 0 ]
[ 466, 83 ]
python
en
['en', 'en', 'en']
True
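A few spot checks of the recognizer, under the same ``urllib3.util.ssl_`` import assumption; the zone-ID case is the one the docstring above calls out.

from urllib3.util.ssl_ import is_ipaddress

assert is_ipaddress("127.0.0.1")
assert is_ipaddress("::1")                 # bare IPv6
assert is_ipaddress("fe80::1%eth0")        # IPv6 with a zone ID
assert not is_ipaddress("www.example.com")
assert is_ipaddress(b"127.0.0.1")          # IDN A-label bytes are decoded first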
_is_key_file_encrypted
(key_file)
Detects if a key file is encrypted or not.
Detects if a key file is encrypted or not.
def _is_key_file_encrypted(key_file): """Detects if a key file is encrypted or not.""" with open(key_file, "r") as f: for line in f: # Look for Proc-Type: 4,ENCRYPTED if "ENCRYPTED" in line: return True return False
[ "def", "_is_key_file_encrypted", "(", "key_file", ")", ":", "with", "open", "(", "key_file", ",", "\"r\"", ")", "as", "f", ":", "for", "line", "in", "f", ":", "# Look for Proc-Type: 4,ENCRYPTED", "if", "\"ENCRYPTED\"", "in", "line", ":", "return", "True", "return", "False" ]
[ 469, 0 ]
[ 477, 16 ]
python
en
['en', 'en', 'en']
True
TestNet.draw_graph
(self, model_name)
Draw the graphs. :return: TBD
Draw the graphs. :return: TBD
def draw_graph(self, model_name): """ Draw the graphs. :return: TBD """ import inspect import os import matplotlib matplotlib.use('Agg') here = "/".join(inspect.stack()[0][1].split("/")[:-2]) data_dir = os.path.join(here, "data") data_graphs_dir = os.path.join(data_dir, "graphs") pred_app_dir = os.path.join(here, "pred_app") pred_app_graphs_dir = os.path.join(pred_app_dir, "static", "graphs") os.makedirs(data_graphs_dir, exist_ok=True) os.makedirs(pred_app_graphs_dir, exist_ok=True) from matplotlib import pyplot as plt if hasattr(self, 'hist'): plt.subplot(1, 2, 1) plt.plot(self.hist.history['acc'], label='acc') plt.plot(self.hist.history['val_acc'], linestyle='dashed', label='val_acc') plt.title('acc') plt.legend() plt.subplot(1, 2, 2) plt.plot(self.hist.history['loss'], label='loss') plt.plot(self.hist.history['val_loss'], linestyle='dashed', label='val_loss') plt.title('loss') plt.legend() plt.savefig(os.path.join(data_graphs_dir, "{}.png".format(model_name.split('.')[0]))) plt.savefig(os.path.join(pred_app_graphs_dir, "{}.png".format(model_name.split('.')[0])))
[ "def", "draw_graph", "(", "self", ",", "model_name", ")", ":", "import", "inspect", "import", "os", "import", "matplotlib", "matplotlib", ".", "use", "(", "'Agg'", ")", "here", "=", "\"/\"", ".", "join", "(", "inspect", ".", "stack", "(", ")", "[", "0", "]", "[", "1", "]", ".", "split", "(", "\"/\"", ")", "[", ":", "-", "2", "]", ")", "data_dir", "=", "os", ".", "path", ".", "join", "(", "here", ",", "\"data\"", ")", "data_graphs_dir", "=", "os", ".", "path", ".", "join", "(", "data_dir", ",", "\"graphs\"", ")", "pred_app_dir", "=", "os", ".", "path", ".", "join", "(", "here", ",", "\"pred_app\"", ")", "pred_app_graphs_dir", "=", "os", ".", "path", ".", "join", "(", "pred_app_dir", ",", "\"static\"", ",", "\"graphs\"", ")", "os", ".", "makedirs", "(", "data_graphs_dir", ",", "exist_ok", "=", "True", ")", "os", ".", "makedirs", "(", "pred_app_graphs_dir", ",", "exist_ok", "=", "True", ")", "from", "matplotlib", "import", "pyplot", "as", "plt", "if", "hasattr", "(", "self", ",", "'hist'", ")", ":", "plt", ".", "subplot", "(", "1", ",", "2", ",", "1", ")", "plt", ".", "plot", "(", "self", ".", "hist", ".", "history", "[", "'acc'", "]", ",", "label", "=", "'acc'", ")", "plt", ".", "plot", "(", "self", ".", "hist", ".", "history", "[", "'val_acc'", "]", ",", "linestyle", "=", "'dashed'", ",", "label", "=", "'val_acc'", ")", "plt", ".", "title", "(", "'acc'", ")", "plt", ".", "legend", "(", ")", "plt", ".", "subplot", "(", "1", ",", "2", ",", "2", ")", "plt", ".", "plot", "(", "self", ".", "hist", ".", "history", "[", "'loss'", "]", ",", "label", "=", "'loss'", ")", "plt", ".", "plot", "(", "self", ".", "hist", ".", "history", "[", "'val_loss'", "]", ",", "linestyle", "=", "'dashed'", ",", "label", "=", "'val_loss'", ")", "plt", ".", "title", "(", "'loss'", ")", "plt", ".", "legend", "(", ")", "plt", ".", "savefig", "(", "os", ".", "path", ".", "join", "(", "data_graphs_dir", ",", "\"{}.png\"", ".", "format", "(", "model_name", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ")", ")", "plt", ".", "savefig", "(", "os", ".", "path", ".", "join", "(", "pred_app_graphs_dir", ",", "\"{}.png\"", ".", "format", "(", "model_name", ".", "split", "(", "'.'", ")", "[", "0", "]", ")", ")", ")" ]
[ 68, 4 ]
[ 97, 101 ]
python
en
['en', 'error', 'th']
False
fax_sent
()
Define a handler for when the fax is initially sent.
Define a handler for when the fax is initially sent.
def fax_sent(): """Define a handler for when the fax is initially sent.""" # Let's manually build some TwiML. We can choose to receive the # fax with <Receive>, or reject with <Reject>. twiml = """ <Response> <Receive action="/fax/received"/> </Response> """ return Response(twiml, mimetype='text/xml')
[ "def", "fax_sent", "(", ")", ":", "# Let's manually build some TwiML. We can choose to receive the", "# fax with <Receive>, or reject with <Reject>.", "twiml", "=", "\"\"\"\n <Response>\n <Receive action=\"/fax/received\"/>\n </Response>\n \"\"\"", "return", "Response", "(", "twiml", ",", "mimetype", "=", "'text/xml'", ")" ]
[ 10, 0 ]
[ 20, 47 ]
python
en
['en', 'en', 'en']
True
fax_received
()
Define a handler for when the fax finished sending to us.
Define a handler for when the fax finished sending to us.
def fax_received(): """Define a handler for when the fax finished sending to us.""" # We will have a URL to the contents of the fax at this point # log the URL of the PDF received in the fax print(request.form.get('MediaUrl')) # Respond with empty 200/OK to Twilio return '', 200
[ "def", "fax_received", "(", ")", ":", "# We will have a URL to the contents of the fax at this point", "# log the URL of the PDF received in the fax", "print", "(", "request", ".", "form", ".", "get", "(", "'MediaUrl'", ")", ")", "# Respond with empty 200/OK to Twilio", "return", "''", ",", "200" ]
[ 24, 0 ]
[ 31, 18 ]
python
en
['en', 'en', 'en']
True
get_path_info
(environ)
Return the HTTP request's PATH_INFO as a string.
Return the HTTP request's PATH_INFO as a string.
def get_path_info(environ): """Return the HTTP request's PATH_INFO as a string.""" path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '/') return repercent_broken_unicode(path_info).decode()
[ "def", "get_path_info", "(", "environ", ")", ":", "path_info", "=", "get_bytes_from_wsgi", "(", "environ", ",", "'PATH_INFO'", ",", "'/'", ")", "return", "repercent_broken_unicode", "(", "path_info", ")", ".", "decode", "(", ")" ]
[ 151, 0 ]
[ 155, 55 ]
python
en
['en', 'en', 'en']
True
get_script_name
(environ)
Return the equivalent of the HTTP request's SCRIPT_NAME environment variable. If Apache mod_rewrite is used, return what would have been the script name prior to any rewriting (so it's the script name as seen from the client's perspective), unless the FORCE_SCRIPT_NAME setting is set (to anything).
Return the equivalent of the HTTP request's SCRIPT_NAME environment variable. If Apache mod_rewrite is used, return what would have been the script name prior to any rewriting (so it's the script name as seen from the client's perspective), unless the FORCE_SCRIPT_NAME setting is set (to anything).
def get_script_name(environ): """ Return the equivalent of the HTTP request's SCRIPT_NAME environment variable. If Apache mod_rewrite is used, return what would have been the script name prior to any rewriting (so it's the script name as seen from the client's perspective), unless the FORCE_SCRIPT_NAME setting is set (to anything). """ if settings.FORCE_SCRIPT_NAME is not None: return settings.FORCE_SCRIPT_NAME # If Apache's mod_rewrite had a whack at the URL, Apache set either # SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any # rewrites. Unfortunately not every Web server (lighttpd!) passes this # information through all the time, so FORCE_SCRIPT_NAME, above, is still # needed. script_url = get_bytes_from_wsgi(environ, 'SCRIPT_URL', '') or get_bytes_from_wsgi(environ, 'REDIRECT_URL', '') if script_url: if b'//' in script_url: # mod_wsgi squashes multiple successive slashes in PATH_INFO, # do the same with script_url before manipulating paths (#17133). script_url = _slashes_re.sub(b'/', script_url) path_info = get_bytes_from_wsgi(environ, 'PATH_INFO', '') script_name = script_url[:-len(path_info)] if path_info else script_url else: script_name = get_bytes_from_wsgi(environ, 'SCRIPT_NAME', '') return script_name.decode()
[ "def", "get_script_name", "(", "environ", ")", ":", "if", "settings", ".", "FORCE_SCRIPT_NAME", "is", "not", "None", ":", "return", "settings", ".", "FORCE_SCRIPT_NAME", "# If Apache's mod_rewrite had a whack at the URL, Apache set either", "# SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any", "# rewrites. Unfortunately not every Web server (lighttpd!) passes this", "# information through all the time, so FORCE_SCRIPT_NAME, above, is still", "# needed.", "script_url", "=", "get_bytes_from_wsgi", "(", "environ", ",", "'SCRIPT_URL'", ",", "''", ")", "or", "get_bytes_from_wsgi", "(", "environ", ",", "'REDIRECT_URL'", ",", "''", ")", "if", "script_url", ":", "if", "b'//'", "in", "script_url", ":", "# mod_wsgi squashes multiple successive slashes in PATH_INFO,", "# do the same with script_url before manipulating paths (#17133).", "script_url", "=", "_slashes_re", ".", "sub", "(", "b'/'", ",", "script_url", ")", "path_info", "=", "get_bytes_from_wsgi", "(", "environ", ",", "'PATH_INFO'", ",", "''", ")", "script_name", "=", "script_url", "[", ":", "-", "len", "(", "path_info", ")", "]", "if", "path_info", "else", "script_url", "else", ":", "script_name", "=", "get_bytes_from_wsgi", "(", "environ", ",", "'SCRIPT_NAME'", ",", "''", ")", "return", "script_name", ".", "decode", "(", ")" ]
[ 158, 0 ]
[ 186, 31 ]
python
en
['en', 'error', 'th']
False
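A self-contained sketch of the SCRIPT_URL/PATH_INFO arithmetic; it assumes a Django install and configures minimal settings so FORCE_SCRIPT_NAME resolves (to its default of None).

from django.conf import settings
settings.configure()  # FORCE_SCRIPT_NAME defaults to None

from django.core.handlers.wsgi import get_script_name

environ = {
    "SCRIPT_URL": "/prefix/app/page",  # full client-facing URL before rewriting
    "PATH_INFO": "/app/page",          # what the rewrite left for the application
}
print(get_script_name(environ))        # '/prefix'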
get_bytes_from_wsgi
(environ, key, default)
Get a value from the WSGI environ dictionary as bytes. key and default should be strings.
Get a value from the WSGI environ dictionary as bytes.
def get_bytes_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as bytes. key and default should be strings. """ value = environ.get(key, default) # Non-ASCII values in the WSGI environ are arbitrarily decoded with # ISO-8859-1. This is wrong for Django websites where UTF-8 is the default. # Re-encode to recover the original bytestring. return value.encode('iso-8859-1')
[ "def", "get_bytes_from_wsgi", "(", "environ", ",", "key", ",", "default", ")", ":", "value", "=", "environ", ".", "get", "(", "key", ",", "default", ")", "# Non-ASCII values in the WSGI environ are arbitrarily decoded with", "# ISO-8859-1. This is wrong for Django websites where UTF-8 is the default.", "# Re-encode to recover the original bytestring.", "return", "value", ".", "encode", "(", "'iso-8859-1'", ")" ]
[ 189, 0 ]
[ 199, 37 ]
python
en
['en', 'error', 'th']
False
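The ISO-8859-1 round trip in isolation: WSGI servers decode the raw request bytes with latin-1, so re-encoding with the same codec recovers the exact bytes, which can then be decoded as UTF-8.

raw = "caf\u00e9".encode("utf-8")                  # bytes as sent by the client
environ = {"PATH_INFO": raw.decode("iso-8859-1")}  # how a WSGI server stores them

value = environ["PATH_INFO"].encode("iso-8859-1")  # what get_bytes_from_wsgi does
assert value == raw
print(value.decode("utf-8"))                       # café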
get_str_from_wsgi
(environ, key, default)
Get a value from the WSGI environ dictionary as str. key and default should be str objects.
Get a value from the WSGI environ dictionary as str.
def get_str_from_wsgi(environ, key, default): """ Get a value from the WSGI environ dictionary as str. key and default should be str objects. """ value = get_bytes_from_wsgi(environ, key, default) return value.decode(errors='replace')
[ "def", "get_str_from_wsgi", "(", "environ", ",", "key", ",", "default", ")", ":", "value", "=", "get_bytes_from_wsgi", "(", "environ", ",", "key", ",", "default", ")", "return", "value", ".", "decode", "(", "errors", "=", "'replace'", ")" ]
[ 202, 0 ]
[ 209, 41 ]
python
en
['en', 'error', 'th']
False
voice
()
Respond to incoming phone calls with a 'Hello world' message
Respond to incoming phone calls with a 'Hello world' message
def voice(): """Respond to incoming phone calls with a 'Hello world' message""" # Start our TwiML response resp = VoiceResponse() # Read a message aloud to the caller resp.say("hello world!", voice='alice') return str(resp)
[ "def", "voice", "(", ")", ":", "# Start our TwiML response", "resp", "=", "VoiceResponse", "(", ")", "# Read a message aloud to the caller", "resp", ".", "say", "(", "\"hello world!\"", ",", "voice", "=", "'alice'", ")", "return", "str", "(", "resp", ")" ]
[ 7, 0 ]
[ 15, 20 ]
python
en
['en', 'en', 'en']
True
_looks_like_bpo_44860
()
The resolution to bpo-44860 will change this incorrect platlib. See <https://bugs.python.org/issue44860>.
The resolution to bpo-44860 will change this incorrect platlib.
def _looks_like_bpo_44860() -> bool: """The resolution to bpo-44860 will change this incorrect platlib. See <https://bugs.python.org/issue44860>. """ from distutils.command.install import INSTALL_SCHEMES # type: ignore try: unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"] except KeyError: return False return unix_user_platlib == "$usersite"
[ "def", "_looks_like_bpo_44860", "(", ")", "->", "bool", ":", "from", "distutils", ".", "command", ".", "install", "import", "INSTALL_SCHEMES", "# type: ignore", "try", ":", "unix_user_platlib", "=", "INSTALL_SCHEMES", "[", "\"unix_user\"", "]", "[", "\"platlib\"", "]", "except", "KeyError", ":", "return", "False", "return", "unix_user_platlib", "==", "\"$usersite\"" ]
[ 48, 0 ]
[ 59, 43 ]
python
en
['en', 'en', 'en']
True
_looks_like_red_hat_lib
()
Red Hat patches platlib in unix_prefix and unix_home, but not purelib. This is the only way I can see to tell a Red Hat-patched Python.
Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
def _looks_like_red_hat_lib() -> bool: """Red Hat patches platlib in unix_prefix and unix_home, but not purelib. This is the only way I can see to tell a Red Hat-patched Python. """ from distutils.command.install import INSTALL_SCHEMES # type: ignore return all( k in INSTALL_SCHEMES and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k]) for k in ("unix_prefix", "unix_home") )
[ "def", "_looks_like_red_hat_lib", "(", ")", "->", "bool", ":", "from", "distutils", ".", "command", ".", "install", "import", "INSTALL_SCHEMES", "# type: ignore", "return", "all", "(", "k", "in", "INSTALL_SCHEMES", "and", "_looks_like_red_hat_patched_platlib_purelib", "(", "INSTALL_SCHEMES", "[", "k", "]", ")", "for", "k", "in", "(", "\"unix_prefix\"", ",", "\"unix_home\"", ")", ")" ]
[ 71, 0 ]
[ 82, 5 ]
python
en
['en', 'en', 'en']
True
_looks_like_debian_scheme
()
Debian adds two additional schemes.
Debian adds two additional schemes.
def _looks_like_debian_scheme() -> bool: """Debian adds two additional schemes.""" from distutils.command.install import INSTALL_SCHEMES # type: ignore return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
[ "def", "_looks_like_debian_scheme", "(", ")", "->", "bool", ":", "from", "distutils", ".", "command", ".", "install", "import", "INSTALL_SCHEMES", "# type: ignore", "return", "\"deb_system\"", "in", "INSTALL_SCHEMES", "and", "\"unix_local\"", "in", "INSTALL_SCHEMES" ]
[ 86, 0 ]
[ 90, 78 ]
python
en
['en', 'en', 'en']
True
_looks_like_red_hat_scheme
()
Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``. Red Hat's ``00251-change-user-install-location.patch`` changes the install command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is (fortunately?) done quite unconditionally, so we create a default command object without any configuration to detect this.
Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
def _looks_like_red_hat_scheme() -> bool: """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``. Red Hat's ``00251-change-user-install-location.patch`` changes the install command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is (fortunately?) done quite unconditionally, so we create a default command object without any configuration to detect this. """ from distutils.command.install import install from distutils.dist import Distribution cmd: Any = install(Distribution()) cmd.finalize_options() return ( cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local" and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local" )
[ "def", "_looks_like_red_hat_scheme", "(", ")", "->", "bool", ":", "from", "distutils", ".", "command", ".", "install", "import", "install", "from", "distutils", ".", "dist", "import", "Distribution", "cmd", ":", "Any", "=", "install", "(", "Distribution", "(", ")", ")", "cmd", ".", "finalize_options", "(", ")", "return", "(", "cmd", ".", "exec_prefix", "==", "f\"{os.path.normpath(sys.exec_prefix)}/local\"", "and", "cmd", ".", "prefix", "==", "f\"{os.path.normpath(sys.prefix)}/local\"", ")" ]
[ 94, 0 ]
[ 110, 5 ]
python
en
['en', 'en', 'en']
True
_looks_like_msys2_mingw_scheme
()
MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is likely going to be included in their 3.10 release, so we ignore the warning. See msys2/MINGW-packages#9319. MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, and is missing the final ``"site-packages"``.
MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
def _looks_like_msys2_mingw_scheme() -> bool: """MSYS2 patches distutils and sysconfig to use a UNIX-like scheme. However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is likely going to be included in their 3.10 release, so we ignore the warning. See msys2/MINGW-packages#9319. MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase, and is missing the final ``"site-packages"``. """ paths = sysconfig.get_paths("nt", expand=False) return all( "Lib" not in p and "lib" in p and not p.endswith("site-packages") for p in (paths[key] for key in ("platlib", "purelib")) )
[ "def", "_looks_like_msys2_mingw_scheme", "(", ")", "->", "bool", ":", "paths", "=", "sysconfig", ".", "get_paths", "(", "\"nt\"", ",", "expand", "=", "False", ")", "return", "all", "(", "\"Lib\"", "not", "in", "p", "and", "\"lib\"", "in", "p", "and", "not", "p", ".", "endswith", "(", "\"site-packages\"", ")", "for", "p", "in", "(", "paths", "[", "key", "]", "for", "key", "in", "(", "\"platlib\"", ",", "\"purelib\"", ")", ")", ")" ]
[ 114, 0 ]
[ 128, 5 ]
python
en
['en', 'en', 'en']
True
_looks_like_deb_system_dist_packages
(value: str)
Check if the value is Debian's APT-controlled dist-packages. Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the default package path controlled by APT, but does not patch ``sysconfig`` to do the same. This is similar to the bug worked around in ``get_scheme()``, but here the default is ``deb_system`` instead of ``unix_local``. Ultimately we can't do anything about this Debian bug, and this detection allows us to skip the warning when needed.
Check if the value is Debian's APT-controlled dist-packages.
def _looks_like_deb_system_dist_packages(value: str) -> bool: """Check if the value is Debian's APT-controlled dist-packages. Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the default package path controlled by APT, but does not patch ``sysconfig`` to do the same. This is similar to the bug worked around in ``get_scheme()``, but here the default is ``deb_system`` instead of ``unix_local``. Ultimately we can't do anything about this Debian bug, and this detection allows us to skip the warning when needed. """ if not _looks_like_debian_scheme(): return False if value == "/usr/lib/python3/dist-packages": return True return False
[ "def", "_looks_like_deb_system_dist_packages", "(", "value", ":", "str", ")", "->", "bool", ":", "if", "not", "_looks_like_debian_scheme", "(", ")", ":", "return", "False", "if", "value", "==", "\"/usr/lib/python3/dist-packages\"", ":", "return", "True", "return", "False" ]
[ 346, 0 ]
[ 360, 16 ]
python
en
['en', 'en', 'en']
True
get_purelib
()
Return the default pure-Python lib location.
Return the default pure-Python lib location.
def get_purelib() -> str: """Return the default pure-Python lib location.""" old = _distutils.get_purelib() new = _sysconfig.get_purelib() if _looks_like_deb_system_dist_packages(old): return old if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"): _log_context() return old
[ "def", "get_purelib", "(", ")", "->", "str", ":", "old", "=", "_distutils", ".", "get_purelib", "(", ")", "new", "=", "_sysconfig", ".", "get_purelib", "(", ")", "if", "_looks_like_deb_system_dist_packages", "(", "old", ")", ":", "return", "old", "if", "_warn_if_mismatch", "(", "pathlib", ".", "Path", "(", "old", ")", ",", "pathlib", ".", "Path", "(", "new", ")", ",", "key", "=", "\"purelib\"", ")", ":", "_log_context", "(", ")", "return", "old" ]
[ 363, 0 ]
[ 371, 14 ]
python
en
['en', 'hmn', 'en']
True
get_platlib
()
Return the default platform-shared lib location.
Return the default platform-shared lib location.
def get_platlib() -> str: """Return the default platform-shared lib location.""" old = _distutils.get_platlib() new = _sysconfig.get_platlib() if _looks_like_deb_system_dist_packages(old): return old if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"): _log_context() return old
[ "def", "get_platlib", "(", ")", "->", "str", ":", "old", "=", "_distutils", ".", "get_platlib", "(", ")", "new", "=", "_sysconfig", ".", "get_platlib", "(", ")", "if", "_looks_like_deb_system_dist_packages", "(", "old", ")", ":", "return", "old", "if", "_warn_if_mismatch", "(", "pathlib", ".", "Path", "(", "old", ")", ",", "pathlib", ".", "Path", "(", "new", ")", ",", "key", "=", "\"platlib\"", ")", ":", "_log_context", "(", ")", "return", "old" ]
[ 374, 0 ]
[ 382, 14 ]
python
en
['en', 'sv', 'en']
True
get_prefixed_libs
(prefix: str)
Return the lib locations under ``prefix``.
Return the lib locations under ``prefix``.
def get_prefixed_libs(prefix: str) -> List[str]: """Return the lib locations under ``prefix``.""" old_pure, old_plat = _distutils.get_prefixed_libs(prefix) new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix) warned = [ _warn_if_mismatch( pathlib.Path(old_pure), pathlib.Path(new_pure), key="prefixed-purelib", ), _warn_if_mismatch( pathlib.Path(old_plat), pathlib.Path(new_plat), key="prefixed-platlib", ), ] if any(warned): _log_context(prefix=prefix) if old_pure == old_plat: return [old_pure] return [old_pure, old_plat]
[ "def", "get_prefixed_libs", "(", "prefix", ":", "str", ")", "->", "List", "[", "str", "]", ":", "old_pure", ",", "old_plat", "=", "_distutils", ".", "get_prefixed_libs", "(", "prefix", ")", "new_pure", ",", "new_plat", "=", "_sysconfig", ".", "get_prefixed_libs", "(", "prefix", ")", "warned", "=", "[", "_warn_if_mismatch", "(", "pathlib", ".", "Path", "(", "old_pure", ")", ",", "pathlib", ".", "Path", "(", "new_pure", ")", ",", "key", "=", "\"prefixed-purelib\"", ",", ")", ",", "_warn_if_mismatch", "(", "pathlib", ".", "Path", "(", "old_plat", ")", ",", "pathlib", ".", "Path", "(", "new_plat", ")", ",", "key", "=", "\"prefixed-platlib\"", ",", ")", ",", "]", "if", "any", "(", "warned", ")", ":", "_log_context", "(", "prefix", "=", "prefix", ")", "if", "old_pure", "==", "old_plat", ":", "return", "[", "old_pure", "]", "return", "[", "old_pure", ",", "old_plat", "]" ]
[ 385, 0 ]
[ 407, 31 ]
python
en
['en', 'mt', 'en']
True
_bit_list_to_bytes
(bit_list)
Converts an iterable of 1's and 0's to bytes. Combines the list 8 at a time, treating each group of 8 bits as a single byte.
Converts an iterable of 1's and 0's to bytes.
def _bit_list_to_bytes(bit_list): """Converts an iterable of 1's and 0's to bytes. Combines the list 8 at a time, treating each group of 8 bits as a single byte. """ num_bits = len(bit_list) byte_vals = bytearray() for start in six.moves.xrange(0, num_bits, 8): curr_bits = bit_list[start:start + 8] char_val = sum(val * digit for val, digit in zip(_POW2, curr_bits)) byte_vals.append(char_val) return bytes(byte_vals)
[ "def", "_bit_list_to_bytes", "(", "bit_list", ")", ":", "num_bits", "=", "len", "(", "bit_list", ")", "byte_vals", "=", "bytearray", "(", ")", "for", "start", "in", "six", ".", "moves", ".", "xrange", "(", "0", ",", "num_bits", ",", "8", ")", ":", "curr_bits", "=", "bit_list", "[", "start", ":", "start", "+", "8", "]", "char_val", "=", "sum", "(", "val", "*", "digit", "for", "val", ",", "digit", "in", "zip", "(", "_POW2", ",", "curr_bits", ")", ")", "byte_vals", ".", "append", "(", "char_val", ")", "return", "bytes", "(", "byte_vals", ")" ]
[ 48, 0 ]
[ 61, 27 ]
python
en
['en', 'en', 'en']
True
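A Python 3 re-sketch of the packing loop (the record itself uses ``six.moves.xrange``); the ``_POW2`` value is assumed to be the descending powers of two from the same module, so each group of 8 bits becomes one byte, most significant bit first.

_POW2 = (128, 64, 32, 16, 8, 4, 2, 1)  # assumed module constant

def _bit_list_to_bytes(bit_list):
    byte_vals = bytearray()
    for start in range(0, len(bit_list), 8):
        curr_bits = bit_list[start:start + 8]
        byte_vals.append(sum(val * digit for val, digit in zip(_POW2, curr_bits)))
    return bytes(byte_vals)

bits = [0, 1, 0, 0, 0, 0, 0, 1]  # 0b01000001 == 65 == ord('A')
assert _bit_list_to_bytes(bits) == b"A"
assert _bit_list_to_bytes(bits * 2) == b"AA"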
RsaVerifier.verify
(self, message, signature)
Verifies a message against a signature. Args: message: string or bytes, The message to verify. If string, will be encoded to bytes as utf-8. signature: string or bytes, The signature on the message. If string, will be encoded to bytes as utf-8. Returns: True if message was signed by the private key associated with the public key that this object was constructed with.
Verifies a message against a signature.
def verify(self, message, signature): """Verifies a message against a signature. Args: message: string or bytes, The message to verify. If string, will be encoded to bytes as utf-8. signature: string or bytes, The signature on the message. If string, will be encoded to bytes as utf-8. Returns: True if message was signed by the private key associated with the public key that this object was constructed with. """ message = _helpers._to_bytes(message, encoding='utf-8') try: return rsa.pkcs1.verify(message, signature, self._pubkey) except (ValueError, rsa.pkcs1.VerificationError): return False
[ "def", "verify", "(", "self", ",", "message", ",", "signature", ")", ":", "message", "=", "_helpers", ".", "_to_bytes", "(", "message", ",", "encoding", "=", "'utf-8'", ")", "try", ":", "return", "rsa", ".", "pkcs1", ".", "verify", "(", "message", ",", "signature", ",", "self", ".", "_pubkey", ")", "except", "(", "ValueError", ",", "rsa", ".", "pkcs1", ".", "VerificationError", ")", ":", "return", "False" ]
[ 74, 4 ]
[ 91, 24 ]
python
en
['en', 'fr', 'en']
True
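``verify`` is a thin wrapper over ``rsa.pkcs1.verify`` that turns failures into False; a round trip with the ``rsa`` package directly shows the underlying calls (the 512-bit key is for brevity only and far too small for real use).

import rsa

pub, priv = rsa.newkeys(512)  # demo-sized key only
message = b"attack at dawn"
signature = rsa.pkcs1.sign(message, priv, "SHA-256")

assert rsa.pkcs1.verify(message, signature, pub)  # truthy on success
try:
    rsa.pkcs1.verify(b"tampered", signature, pub)
except rsa.pkcs1.VerificationError:
    print("tampering detected")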
RsaVerifier.from_string
(cls, key_pem, is_x509_cert)
Construct an RsaVerifier instance from a string. Args: key_pem: string, public key in PEM format. is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is expected to be an RSA key in PEM format. Returns: RsaVerifier instance. Raises: ValueError: if the key_pem can't be parsed. In either case, error will begin with 'No PEM start marker'. If ``is_x509_cert`` is True, will fail to find the "-----BEGIN CERTIFICATE-----" error, otherwise fails to find "-----BEGIN RSA PUBLIC KEY-----".
Construct an RsaVerifier instance from a string.
def from_string(cls, key_pem, is_x509_cert): """Construct an RsaVerifier instance from a string. Args: key_pem: string, public key in PEM format. is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it is expected to be an RSA key in PEM format. Returns: RsaVerifier instance. Raises: ValueError: if the key_pem can't be parsed. In either case, error will begin with 'No PEM start marker'. If ``is_x509_cert`` is True, will fail to find the "-----BEGIN CERTIFICATE-----" error, otherwise fails to find "-----BEGIN RSA PUBLIC KEY-----". """ key_pem = _helpers._to_bytes(key_pem) if is_x509_cert: der = rsa.pem.load_pem(key_pem, 'CERTIFICATE') asn1_cert, remaining = decoder.decode(der, asn1Spec=Certificate()) if remaining != b'': raise ValueError('Unused bytes', remaining) cert_info = asn1_cert['tbsCertificate']['subjectPublicKeyInfo'] key_bytes = _bit_list_to_bytes(cert_info['subjectPublicKey']) pubkey = rsa.PublicKey.load_pkcs1(key_bytes, 'DER') else: pubkey = rsa.PublicKey.load_pkcs1(key_pem, 'PEM') return cls(pubkey)
[ "def", "from_string", "(", "cls", ",", "key_pem", ",", "is_x509_cert", ")", ":", "key_pem", "=", "_helpers", ".", "_to_bytes", "(", "key_pem", ")", "if", "is_x509_cert", ":", "der", "=", "rsa", ".", "pem", ".", "load_pem", "(", "key_pem", ",", "'CERTIFICATE'", ")", "asn1_cert", ",", "remaining", "=", "decoder", ".", "decode", "(", "der", ",", "asn1Spec", "=", "Certificate", "(", ")", ")", "if", "remaining", "!=", "b''", ":", "raise", "ValueError", "(", "'Unused bytes'", ",", "remaining", ")", "cert_info", "=", "asn1_cert", "[", "'tbsCertificate'", "]", "[", "'subjectPublicKeyInfo'", "]", "key_bytes", "=", "_bit_list_to_bytes", "(", "cert_info", "[", "'subjectPublicKey'", "]", ")", "pubkey", "=", "rsa", ".", "PublicKey", ".", "load_pkcs1", "(", "key_bytes", ",", "'DER'", ")", "else", ":", "pubkey", "=", "rsa", ".", "PublicKey", ".", "load_pkcs1", "(", "key_pem", ",", "'PEM'", ")", "return", "cls", "(", "pubkey", ")" ]
[ 94, 4 ]
[ 124, 26 ]
python
en
['en', 'lb', 'en']
True
RsaSigner.sign
(self, message)
Signs a message. Args: message: bytes, Message to be signed. Returns: string, The signature of the message for the given key.
Signs a message.
def sign(self, message): """Signs a message. Args: message: bytes, Message to be signed. Returns: string, The signature of the message for the given key. """ message = _helpers._to_bytes(message, encoding='utf-8') return rsa.pkcs1.sign(message, self._key, 'SHA-256')
[ "def", "sign", "(", "self", ",", "message", ")", ":", "message", "=", "_helpers", ".", "_to_bytes", "(", "message", ",", "encoding", "=", "'utf-8'", ")", "return", "rsa", ".", "pkcs1", ".", "sign", "(", "message", ",", "self", ".", "_key", ",", "'SHA-256'", ")" ]
[ 137, 4 ]
[ 147, 60 ]
python
en
['en', 'en', 'en']
True
RsaSigner.from_string
(cls, key, password='notasecret')
Construct an RsaSigner instance from a string. Args: key: string, private key in PEM format. password: string, password for private key file. Unused for PEM files. Returns: RsaSigner instance. Raises: ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in PEM format.
Construct an RsaSigner instance from a string.
def from_string(cls, key, password='notasecret'): """Construct an RsaSigner instance from a string. Args: key: string, private key in PEM format. password: string, password for private key file. Unused for PEM files. Returns: RsaSigner instance. Raises: ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in PEM format. """ key = _helpers._from_bytes(key) # pem expects str in Py3 marker_id, key_bytes = pem.readPemBlocksFromFile( six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER) if marker_id == 0: pkey = rsa.key.PrivateKey.load_pkcs1(key_bytes, format='DER') elif marker_id == 1: key_info, remaining = decoder.decode( key_bytes, asn1Spec=_PKCS8_SPEC) if remaining != b'': raise ValueError('Unused bytes', remaining) pkey_info = key_info.getComponentByName('privateKey') pkey = rsa.key.PrivateKey.load_pkcs1(pkey_info.asOctets(), format='DER') else: raise ValueError('No key could be detected.') return cls(pkey)
[ "def", "from_string", "(", "cls", ",", "key", ",", "password", "=", "'notasecret'", ")", ":", "key", "=", "_helpers", ".", "_from_bytes", "(", "key", ")", "# pem expects str in Py3", "marker_id", ",", "key_bytes", "=", "pem", ".", "readPemBlocksFromFile", "(", "six", ".", "StringIO", "(", "key", ")", ",", "_PKCS1_MARKER", ",", "_PKCS8_MARKER", ")", "if", "marker_id", "==", "0", ":", "pkey", "=", "rsa", ".", "key", ".", "PrivateKey", ".", "load_pkcs1", "(", "key_bytes", ",", "format", "=", "'DER'", ")", "elif", "marker_id", "==", "1", ":", "key_info", ",", "remaining", "=", "decoder", ".", "decode", "(", "key_bytes", ",", "asn1Spec", "=", "_PKCS8_SPEC", ")", "if", "remaining", "!=", "b''", ":", "raise", "ValueError", "(", "'Unused bytes'", ",", "remaining", ")", "pkey_info", "=", "key_info", ".", "getComponentByName", "(", "'privateKey'", ")", "pkey", "=", "rsa", ".", "key", ".", "PrivateKey", ".", "load_pkcs1", "(", "pkey_info", ".", "asOctets", "(", ")", ",", "format", "=", "'DER'", ")", "else", ":", "raise", "ValueError", "(", "'No key could be detected.'", ")", "return", "cls", "(", "pkey", ")" ]
[ 150, 4 ]
[ 183, 24 ]
python
en
['en', 'en', 'en']
True
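A PEM round trip through both classes; the import path ``oauth2client._pure_python_crypt`` is assumed from the surrounding records, and ``save_pkcs1()`` emits exactly the PKCS#1 markers that ``from_string`` looks for.

import rsa
from oauth2client._pure_python_crypt import RsaSigner, RsaVerifier

pub, priv = rsa.newkeys(1024)
signer = RsaSigner.from_string(priv.save_pkcs1())  # takes the PKCS#1 PEM branch
verifier = RsaVerifier.from_string(pub.save_pkcs1(), is_x509_cert=False)

sig = signer.sign("hello")
assert verifier.verify("hello", sig)
assert not verifier.verify("goodbye", sig)  # VerificationError is caught, returns False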
setup_firebase
()
Instantiates the Firebase DB access object. :return: the instantiated Firebase DB object.
Instantiates the Firebase DB access object.
def setup_firebase(): """ Instantiates the Firebase DB access object. :return: the instantiated Firebase DB object. """ config = { "apiKey": environment_vars.FIREBASE_API_KEY, "authDomain": environment_vars.FIREBASE_PROJECT_ID, "databaseURL": environment_vars.FIREBASE_DB_URL, "storageBucket": environment_vars.FIREBASE_PROJECT_ID, "serviceAccount": "./bandex_services_account.json" } try: service_account = environment_vars.FIREBASE_SERVICE_ACCOUNT f = open('./bandex_services_account.json', 'w') f.write(service_account) f.close() except Exception as e: print("Erro ao escrever no arquivo de service account: ", e) else: print("Service account configurado com sucesso.") firebase = pyrebase.initialize_app(config) db = firebase.database() return db
[ "def", "setup_firebase", "(", ")", ":", "config", "=", "{", "\"apiKey\"", ":", "environment_vars", ".", "FIREBASE_API_KEY", ",", "\"authDomain\"", ":", "environment_vars", ".", "FIREBASE_PROJECT_ID", ",", "\"databaseURL\"", ":", "environment_vars", ".", "FIREBASE_DB_URL", ",", "\"storageBucket\"", ":", "environment_vars", ".", "FIREBASE_PROJECT_ID", ",", "\"serviceAccount\"", ":", "\"./bandex_services_account.json\"", "}", "try", ":", "service_account", "=", "environment_vars", ".", "FIREBASE_SERVICE_ACCOUNT", "f", "=", "open", "(", "'./bandex_services_account.json'", ",", "'w'", ")", "f", ".", "write", "(", "service_account", ")", "f", ".", "close", "(", ")", "except", "Exception", "as", "e", ":", "print", "(", "\"Erro ao escrever no arquivo de service account: \"", ",", "e", ")", "else", ":", "print", "(", "\"Service account configurado com sucesso.\"", ")", "firebase", "=", "pyrebase", ".", "initialize_app", "(", "config", ")", "db", "=", "firebase", ".", "database", "(", ")", "return", "db" ]
[ 10, 0 ]
[ 38, 13 ]
python
en
['en', 'error', 'th']
False
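A hedged usage sketch of the returned handle; `menus` is a hypothetical node name, and this assumes the `pyrebase` package plus valid Firebase credentials in the environment:

db = setup_firebase()

# child() builds a path and get() fetches a snapshot (standard Pyrebase calls).
snapshot = db.child("menus").get()
for item in snapshot.each() or []:   # each() may be None for an empty node
    print(item.key(), item.val())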
sms_ahoy_reply
()
Respond to incoming messages with a friendly SMS.
Respond to incoming messages with a friendly SMS.
def sms_ahoy_reply():
    """Respond to incoming messages with a friendly SMS."""
    # Start our response
    resp = MessagingResponse()

    # Add a message
    resp.message("Ahoy! Thanks so much for your message.")

    return str(resp)
[ "def", "sms_ahoy_reply", "(", ")", ":", "# Start our response", "resp", "=", "MessagingResponse", "(", ")", "# Add a message", "resp", ".", "message", "(", "\"Ahoy! Thanks so much for your message.\"", ")", "return", "str", "(", "resp", ")" ]
[ 8, 0 ]
[ 16, 20 ]
python
en
['en', 'en', 'en']
True
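The view above only builds the TwiML reply; to act as a Twilio webhook it has to be mounted on a web application. A minimal Flask wiring sketch (the route path and debug flag are assumptions, not from the original source):

from flask import Flask
from twilio.twiml.messaging_response import MessagingResponse

app = Flask(__name__)

@app.route("/sms", methods=["GET", "POST"])
def sms_ahoy_reply():
    """Respond to incoming messages with a friendly SMS."""
    resp = MessagingResponse()
    resp.message("Ahoy! Thanks so much for your message.")
    return str(resp)

if __name__ == "__main__":
    app.run(debug=True)  # expose via a public tunnel so Twilio can reach it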
file_move_safe
(old_file_name, new_file_name, chunk_size=1024 * 64, allow_overwrite=False)
Move a file from one location to another in the safest way possible. First, try ``os.rename``, which is simple but will break across filesystems. If that fails, stream manually from one file to another in pure Python. If the destination file exists and ``allow_overwrite`` is ``False``, raise ``FileExistsError``.
Move a file from one location to another in the safest way possible.
def file_move_safe(old_file_name, new_file_name, chunk_size=1024 * 64, allow_overwrite=False):
    """
    Move a file from one location to another in the safest way possible.

    First, try ``os.rename``, which is simple but will break across filesystems.
    If that fails, stream manually from one file to another in pure Python.

    If the destination file exists and ``allow_overwrite`` is ``False``, raise
    ``FileExistsError``.
    """
    # There's no reason to move if we don't have to.
    if _samefile(old_file_name, new_file_name):
        return

    try:
        if not allow_overwrite and os.access(new_file_name, os.F_OK):
            raise FileExistsError('Destination file %s exists and allow_overwrite is False.' % new_file_name)
        os.rename(old_file_name, new_file_name)
        return
    except OSError:
        # OSError happens with os.rename() if moving to another filesystem or
        # when moving opened files on certain operating systems.
        pass

    # first open the old file, so that it won't go away
    with open(old_file_name, 'rb') as old_file:
        # now open the new file, not forgetting allow_overwrite
        fd = os.open(new_file_name, (os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) |
                                     (os.O_EXCL if not allow_overwrite else 0)))
        try:
            locks.lock(fd, locks.LOCK_EX)
            current_chunk = None
            while current_chunk != b'':
                current_chunk = old_file.read(chunk_size)
                os.write(fd, current_chunk)
        finally:
            locks.unlock(fd)
            os.close(fd)

    try:
        copystat(old_file_name, new_file_name)
    except PermissionError as e:
        # Certain filesystems (e.g. CIFS) fail to copy the file's metadata if
        # the type of the destination filesystem isn't the same as the source
        # filesystem; ignore that.
        if e.errno != errno.EPERM:
            raise

    try:
        os.remove(old_file_name)
    except PermissionError as e:
        # Certain operating systems (Cygwin and Windows)
        # fail when deleting opened files, ignore it.  (For the
        # systems where this happens, temporary files will be auto-deleted
        # on close anyway.)
        if getattr(e, 'winerror', 0) != 32:
            raise
[ "def", "file_move_safe", "(", "old_file_name", ",", "new_file_name", ",", "chunk_size", "=", "1024", "*", "64", ",", "allow_overwrite", "=", "False", ")", ":", "# There's no reason to move if we don't have to.", "if", "_samefile", "(", "old_file_name", ",", "new_file_name", ")", ":", "return", "try", ":", "if", "not", "allow_overwrite", "and", "os", ".", "access", "(", "new_file_name", ",", "os", ".", "F_OK", ")", ":", "raise", "FileExistsError", "(", "'Destination file %s exists and allow_overwrite is False.'", "%", "new_file_name", ")", "os", ".", "rename", "(", "old_file_name", ",", "new_file_name", ")", "return", "except", "OSError", ":", "# OSError happens with os.rename() if moving to another filesystem or", "# when moving opened files on certain operating systems.", "pass", "# first open the old file, so that it won't go away", "with", "open", "(", "old_file_name", ",", "'rb'", ")", "as", "old_file", ":", "# now open the new file, not forgetting allow_overwrite", "fd", "=", "os", ".", "open", "(", "new_file_name", ",", "(", "os", ".", "O_WRONLY", "|", "os", ".", "O_CREAT", "|", "getattr", "(", "os", ",", "'O_BINARY'", ",", "0", ")", "|", "(", "os", ".", "O_EXCL", "if", "not", "allow_overwrite", "else", "0", ")", ")", ")", "try", ":", "locks", ".", "lock", "(", "fd", ",", "locks", ".", "LOCK_EX", ")", "current_chunk", "=", "None", "while", "current_chunk", "!=", "b''", ":", "current_chunk", "=", "old_file", ".", "read", "(", "chunk_size", ")", "os", ".", "write", "(", "fd", ",", "current_chunk", ")", "finally", ":", "locks", ".", "unlock", "(", "fd", ")", "os", ".", "close", "(", "fd", ")", "try", ":", "copystat", "(", "old_file_name", ",", "new_file_name", ")", "except", "PermissionError", "as", "e", ":", "# Certain filesystems (e.g. CIFS) fail to copy the file's metadata if", "# the type of the destination filesystem isn't the same as the source", "# filesystem; ignore that.", "if", "e", ".", "errno", "!=", "errno", ".", "EPERM", ":", "raise", "try", ":", "os", ".", "remove", "(", "old_file_name", ")", "except", "PermissionError", "as", "e", ":", "# Certain operating systems (Cygwin and Windows)", "# fail when deleting opened files, ignore it. (For the", "# systems where this happens, temporary files will be auto-deleted", "# on close anyway.)", "if", "getattr", "(", "e", ",", "'winerror'", ",", "0", ")", "!=", "32", ":", "raise" ]
[ 29, 0 ]
[ 86, 17 ]
python
en
['en', 'error', 'th']
False
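A short usage sketch; `file_move_safe` ships in `django.core.files.move` and needs no configured settings, so this should run standalone (the file names are illustrative):

import os
import tempfile
from django.core.files.move import file_move_safe

src = tempfile.NamedTemporaryFile(delete=False)
src.write(b"payload")
src.close()

dst = src.name + ".moved"
file_move_safe(src.name, dst, allow_overwrite=True)
assert os.path.exists(dst) and not os.path.exists(src.name)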
geos_version
()
Return the string version of the GEOS library.
Return the string version of the GEOS library.
def geos_version():
    """Return the string version of the GEOS library."""
    return lgeos.GEOSversion()
[ "def", "geos_version", "(", ")", ":", "return", "lgeos", ".", "GEOSversion", "(", ")" ]
[ 164, 0 ]
[ 166, 30 ]
python
en
['en', 'en', 'en']
True
geos_version_tuple
()
Return the GEOS version as a tuple (major, minor, subminor).
Return the GEOS version as a tuple (major, minor, subminor).
def geos_version_tuple():
    """Return the GEOS version as a tuple (major, minor, subminor)."""
    return get_version_tuple(geos_version().decode())
[ "def", "geos_version_tuple", "(", ")", ":", "return", "get_version_tuple", "(", "geos_version", "(", ")", ".", "decode", "(", ")", ")" ]
[ 169, 0 ]
[ 171, 53 ]
python
en
['en', 'lt', 'en']
True
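Both helpers live in GeoDjango's `django.contrib.gis.geos.libgeos` module; a quick check, assuming the GEOS C library is installed alongside Django:

from django.contrib.gis.geos.libgeos import geos_version, geos_version_tuple

print(geos_version())          # raw bytes, e.g. b'3.8.0-CAPI-1.13.1'
print(geos_version_tuple())    # parsed, e.g. (3, 8, 0)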
run
(argv=None)
Build and run the pipeline.
Build and run the pipeline.
def run(argv=None):
    """Build and run the pipeline."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--project',
        help='Google Cloud Project ID',
        required=True)
    parser.add_argument(
        '--input_topic',
        help='Google Cloud PubSub topic name',
        required=True)
    known_args, pipeline_args = parser.parse_known_args(argv)

    # list.append() returns None, so append first and then hand the mutated
    # list itself to PipelineOptions.
    pipeline_args.append('--project={}'.format(known_args.project))
    pipeline_options = PipelineOptions(pipeline_args)
    pipeline_options.view_as(SetupOptions).save_main_session = True
    pipeline_options.view_as(StandardOptions).streaming = True
    p = beam.Pipeline(options=pipeline_options)

    TOPIC = 'projects/{}/topics/{}'.format(known_args.project, known_args.input_topic)
    table_spec = '{}:taxifare.traffic_realtime'.format(known_args.project)  # table needs to exist

    def to_bq_format(count):
        """BigQuery writer requires rows to be stored as python dictionary"""
        return {'trips_last_5min': count,
                'time': datetime.now().strftime("%Y-%m-%d %H:%M:%S")}

    pipeline = (p
                | 'read_from_pubsub' >> beam.io.ReadFromPubSub(topic=TOPIC).with_output_types(bytes)
                | 'window' >> beam.WindowInto(window.SlidingWindows(size=300, period=15))
                | 'count' >> beam.CombineGlobally(CountFn()).without_defaults()
                | 'format_for_bq' >> beam.Map(to_bq_format)
                | 'write_to_bq' >> beam.io.WriteToBigQuery(
                    table_spec,
                    # WRITE_TRUNCATE is not supported for streaming pipelines.
                    write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND,
                    create_disposition=beam.io.BigQueryDisposition.CREATE_NEVER))

    result = p.run()
[ "def", "run", "(", "argv", "=", "None", ")", ":", "parser", "=", "argparse", ".", "ArgumentParser", "(", ")", "parser", ".", "add_argument", "(", "'--project'", ",", "help", "=", "(", "'Google Cloud Project ID'", ")", ",", "required", "=", "True", ")", "parser", ".", "add_argument", "(", "'--input_topic'", ",", "help", "=", "(", "'Google Cloud PubSub topic name '", ")", ",", "required", "=", "True", ")", "known_args", ",", "pipeline_args", "=", "parser", ".", "parse_known_args", "(", "argv", ")", "pipeline_options", "=", "PipelineOptions", "(", "pipeline_args", ".", "append", "(", "'--project={}'", ".", "format", "(", "known_args", ".", "project", ")", ")", ")", "pipeline_options", ".", "view_as", "(", "SetupOptions", ")", ".", "save_main_session", "=", "True", "pipeline_options", ".", "view_as", "(", "StandardOptions", ")", ".", "streaming", "=", "True", "p", "=", "beam", ".", "Pipeline", "(", "options", "=", "pipeline_options", ")", "TOPIC", "=", "'projects/{}/topics/{}'", ".", "format", "(", "known_args", ".", "project", ",", "known_args", ".", "input_topic", ")", "table_spec", "=", "'{}:taxifare.traffic_realtime'", ".", "format", "(", "known_args", ".", "project", ")", "# table needs to exist", "def", "to_bq_format", "(", "count", ")", ":", "\"\"\"BigQuery writer requires rows to be stored as python dictionary\"\"\"", "return", "{", "'trips_last_5min'", ":", "count", ",", "'time'", ":", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%Y-%m-%d %H:%M:%S\"", ")", "}", "pipeline", "=", "(", "p", "|", "'read_from_pubusub'", ">>", "beam", ".", "io", ".", "ReadFromPubSub", "(", "topic", "=", "TOPIC", ")", ".", "with_output_types", "(", "bytes", ")", "|", "'window'", ">>", "beam", ".", "WindowInto", "(", "window", ".", "SlidingWindows", "(", "size", "=", "300", ",", "period", "=", "15", ")", ")", "|", "'count'", ">>", "beam", ".", "CombineGlobally", "(", "CountFn", "(", ")", ")", ".", "without_defaults", "(", ")", "|", "'format_for_bq'", ">>", "beam", ".", "Map", "(", "to_bq_format", ")", "|", "'write_to_bq'", ">>", "beam", ".", "io", ".", "WriteToBigQuery", "(", "table_spec", ",", "write_disposition", "=", "beam", ".", "io", ".", "BigQueryDisposition", ".", "WRITE_APPEND", ",", "#WRITE_TRUNCATE not supported for streaming", "create_disposition", "=", "beam", ".", "io", ".", "BigQueryDisposition", ".", "CREATE_NEVER", ")", ")", "result", "=", "p", ".", "run", "(", ")" ]
[ 32, 0 ]
[ 71, 18 ]
python
en
['en', 'en', 'en']
True
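The pipeline references a `CountFn` combiner defined elsewhere in the source file. A minimal sketch of what such a streaming counter can look like with Beam's `CombineFn` interface — an assumption about its shape, not the original class:

import apache_beam as beam

class CountFn(beam.CombineFn):
    """Count the elements that fall into each (sliding) window."""

    def create_accumulator(self):
        return 0

    def add_input(self, count, element):
        return count + 1

    def merge_accumulators(self, counts):
        return sum(counts)

    def extract_output(self, count):
        return count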
get_app_template_dirs
(dirname)
Return an iterable of paths of directories to load app templates from. dirname is the name of the subdirectory containing templates inside installed applications.
Return an iterable of paths of directories to load app templates from.
def get_app_template_dirs(dirname):
    """
    Return an iterable of paths of directories to load app templates from.

    dirname is the name of the subdirectory containing templates inside
    installed applications.
    """
    template_dirs = [
        Path(app_config.path) / dirname
        for app_config in apps.get_app_configs()
        if app_config.path and (Path(app_config.path) / dirname).is_dir()
    ]
    # Immutable return value because it will be cached and shared by callers.
    return tuple(template_dirs)
[ "def", "get_app_template_dirs", "(", "dirname", ")", ":", "template_dirs", "=", "[", "Path", "(", "app_config", ".", "path", ")", "/", "dirname", "for", "app_config", "in", "apps", ".", "get_app_configs", "(", ")", "if", "app_config", ".", "path", "and", "(", "Path", "(", "app_config", ".", "path", ")", "/", "dirname", ")", ".", "is_dir", "(", ")", "]", "# Immutable return value because it will be cached and shared by callers.", "return", "tuple", "(", "template_dirs", ")" ]
[ 93, 0 ]
[ 106, 31 ]
python
en
['en', 'error', 'th']
False
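A hedged standalone sketch; `get_app_template_dirs` lives in `django.template.utils`, and the contrib apps below were chosen only because they ship template directories:

import django
from django.conf import settings

settings.configure(INSTALLED_APPS=[
    'django.contrib.contenttypes',
    'django.contrib.auth',
])
django.setup()

from django.template.utils import get_app_template_dirs

# Tuple of Path objects, one per installed app with a templates/ subdirectory.
print(get_app_template_dirs('templates'))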
EngineHandler.__init__
(self, templates=None)
templates is an optional list of template engine definitions (structured like settings.TEMPLATES).
templates is an optional list of template engine definitions (structured like settings.TEMPLATES).
def __init__(self, templates=None):
    """
    templates is an optional list of template engine definitions
    (structured like settings.TEMPLATES).
    """
    self._templates = templates
    self._engines = {}
[ "def", "__init__", "(", "self", ",", "templates", "=", "None", ")", ":", "self", ".", "_templates", "=", "templates", "self", ".", "_engines", "=", "{", "}" ]
[ 16, 4 ]
[ 22, 26 ]
python
en
['en', 'error', 'th']
False
conditional_content_removal
(request, response)
Simulate the behavior of most Web servers by removing the content of responses for HEAD requests, 1xx, 204, and 304 responses. Ensure compliance with RFC 7230, section 3.3.3.
Simulate the behavior of most Web servers by removing the content of responses for HEAD requests, 1xx, 204, and 304 responses. Ensure compliance with RFC 7230, section 3.3.3.
def conditional_content_removal(request, response):
    """
    Simulate the behavior of most Web servers by removing the content of
    responses for HEAD requests, 1xx, 204, and 304 responses. Ensure
    compliance with RFC 7230, section 3.3.3.
    """
    if 100 <= response.status_code < 200 or response.status_code in (204, 304):
        if response.streaming:
            response.streaming_content = []
        else:
            response.content = b''
    if request.method == 'HEAD':
        if response.streaming:
            response.streaming_content = []
        else:
            response.content = b''
    return response
[ "def", "conditional_content_removal", "(", "request", ",", "response", ")", ":", "if", "100", "<=", "response", ".", "status_code", "<", "200", "or", "response", ".", "status_code", "in", "(", "204", ",", "304", ")", ":", "if", "response", ".", "streaming", ":", "response", ".", "streaming_content", "=", "[", "]", "else", ":", "response", ".", "content", "=", "b''", "if", "request", ".", "method", "==", "'HEAD'", ":", "if", "response", ".", "streaming", ":", "response", ".", "streaming_content", "=", "[", "]", "else", ":", "response", ".", "content", "=", "b''", "return", "response" ]
[ 98, 0 ]
[ 114, 19 ]
python
en
['en', 'error', 'th']
False
store_rendered_templates
(store, signal, sender, template, context, **kwargs)
Store templates and contexts that are rendered. The context is copied so that it is an accurate representation at the time of rendering.
Store templates and contexts that are rendered.
def store_rendered_templates(store, signal, sender, template, context, **kwargs):
    """
    Store templates and contexts that are rendered.

    The context is copied so that it is an accurate representation at the time
    of rendering.
    """
    store.setdefault('templates', []).append(template)
    if 'context' not in store:
        store['context'] = ContextList()
    store['context'].append(copy(context))
[ "def", "store_rendered_templates", "(", "store", ",", "signal", ",", "sender", ",", "template", ",", "context", ",", "*", "*", "kwargs", ")", ":", "store", ".", "setdefault", "(", "'templates'", ",", "[", "]", ")", ".", "append", "(", "template", ")", "if", "'context'", "not", "in", "store", ":", "store", "[", "'context'", "]", "=", "ContextList", "(", ")", "store", "[", "'context'", "]", ".", "append", "(", "copy", "(", "context", ")", ")" ]
[ 211, 0 ]
[ 221, 42 ]
python
en
['en', 'error', 'th']
False
encode_multipart
(boundary, data)
Encode multipart POST data from a dictionary of form values. The key will be used as the form data name; the value will be transmitted as content. If the value is a file, the contents of the file will be sent as an application/octet-stream; otherwise, str(value) will be sent.
Encode multipart POST data from a dictionary of form values.
def encode_multipart(boundary, data):
    """
    Encode multipart POST data from a dictionary of form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    lines = []

    def to_bytes(s):
        return force_bytes(s, settings.DEFAULT_CHARSET)

    # Not by any means perfect, but good enough for our purposes.
    def is_file(thing):
        return hasattr(thing, "read") and callable(thing.read)

    # Each bit of the multipart form data could be either a form value or a
    # file, or a *list* of form values and/or files. Remember that HTTP field
    # names can be duplicated!
    for (key, value) in data.items():
        if value is None:
            raise TypeError(
                "Cannot encode None for key '%s' as POST data. Did you mean "
                "to pass an empty string or omit the value?" % key
            )
        elif is_file(value):
            lines.extend(encode_file(boundary, key, value))
        elif not isinstance(value, str) and is_iterable(value):
            for item in value:
                if is_file(item):
                    lines.extend(encode_file(boundary, key, item))
                else:
                    lines.extend(to_bytes(val) for val in [
                        '--%s' % boundary,
                        'Content-Disposition: form-data; name="%s"' % key,
                        '',
                        item
                    ])
        else:
            lines.extend(to_bytes(val) for val in [
                '--%s' % boundary,
                'Content-Disposition: form-data; name="%s"' % key,
                '',
                value
            ])

    lines.extend([
        to_bytes('--%s--' % boundary),
        b'',
    ])
    return b'\r\n'.join(lines)
[ "def", "encode_multipart", "(", "boundary", ",", "data", ")", ":", "lines", "=", "[", "]", "def", "to_bytes", "(", "s", ")", ":", "return", "force_bytes", "(", "s", ",", "settings", ".", "DEFAULT_CHARSET", ")", "# Not by any means perfect, but good enough for our purposes.", "def", "is_file", "(", "thing", ")", ":", "return", "hasattr", "(", "thing", ",", "\"read\"", ")", "and", "callable", "(", "thing", ".", "read", ")", "# Each bit of the multipart form data could be either a form value or a", "# file, or a *list* of form values and/or files. Remember that HTTP field", "# names can be duplicated!", "for", "(", "key", ",", "value", ")", "in", "data", ".", "items", "(", ")", ":", "if", "value", "is", "None", ":", "raise", "TypeError", "(", "\"Cannot encode None for key '%s' as POST data. Did you mean \"", "\"to pass an empty string or omit the value?\"", "%", "key", ")", "elif", "is_file", "(", "value", ")", ":", "lines", ".", "extend", "(", "encode_file", "(", "boundary", ",", "key", ",", "value", ")", ")", "elif", "not", "isinstance", "(", "value", ",", "str", ")", "and", "is_iterable", "(", "value", ")", ":", "for", "item", "in", "value", ":", "if", "is_file", "(", "item", ")", ":", "lines", ".", "extend", "(", "encode_file", "(", "boundary", ",", "key", ",", "item", ")", ")", "else", ":", "lines", ".", "extend", "(", "to_bytes", "(", "val", ")", "for", "val", "in", "[", "'--%s'", "%", "boundary", ",", "'Content-Disposition: form-data; name=\"%s\"'", "%", "key", ",", "''", ",", "item", "]", ")", "else", ":", "lines", ".", "extend", "(", "to_bytes", "(", "val", ")", "for", "val", "in", "[", "'--%s'", "%", "boundary", ",", "'Content-Disposition: form-data; name=\"%s\"'", "%", "key", ",", "''", ",", "value", "]", ")", "lines", ".", "extend", "(", "[", "to_bytes", "(", "'--%s--'", "%", "boundary", ")", ",", "b''", ",", "]", ")", "return", "b'\\r\\n'", ".", "join", "(", "lines", ")" ]
[ 224, 0 ]
[ 275, 30 ]
python
en
['en', 'error', 'th']
False
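A hedged usage sketch: `encode_multipart`, the test `BOUNDARY`, and `MULTIPART_CONTENT` are all importable from `django.test.client`; `settings.configure()` is only needed so `DEFAULT_CHARSET` resolves:

from django.conf import settings

settings.configure()  # minimal settings; DEFAULT_CHARSET defaults to utf-8

from django.test.client import BOUNDARY, MULTIPART_CONTENT, encode_multipart

body = encode_multipart(BOUNDARY, {'title': 'hello', 'tags': ['a', 'b']})
print(MULTIPART_CONTENT)  # the Content-Type header that matches `body`
print(body.decode())      # each value (and each list item) is its own part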
RequestFactory._base_environ
(self, **request)
The base environment for a request.
The base environment for a request.
def _base_environ(self, **request):
    """
    The base environment for a request.
    """
    # This is a minimal valid WSGI environ dictionary, plus:
    # - HTTP_COOKIE: for cookie support,
    # - REMOTE_ADDR: often useful, see #8551.
    # See https://www.python.org/dev/peps/pep-3333/#environ-variables
    return {
        'HTTP_COOKIE': '; '.join(sorted(
            '%s=%s' % (morsel.key, morsel.coded_value)
            for morsel in self.cookies.values()
        )),
        'PATH_INFO': '/',
        'REMOTE_ADDR': '127.0.0.1',
        'REQUEST_METHOD': 'GET',
        'SCRIPT_NAME': '',
        'SERVER_NAME': 'testserver',
        'SERVER_PORT': '80',
        'SERVER_PROTOCOL': 'HTTP/1.1',
        'wsgi.version': (1, 0),
        'wsgi.url_scheme': 'http',
        'wsgi.input': FakePayload(b''),
        'wsgi.errors': self.errors,
        'wsgi.multiprocess': True,
        'wsgi.multithread': False,
        'wsgi.run_once': False,
        **self.defaults,
        **request,
    }
[ "def", "_base_environ", "(", "self", ",", "*", "*", "request", ")", ":", "# This is a minimal valid WSGI environ dictionary, plus:", "# - HTTP_COOKIE: for cookie support,", "# - REMOTE_ADDR: often useful, see #8551.", "# See https://www.python.org/dev/peps/pep-3333/#environ-variables", "return", "{", "'HTTP_COOKIE'", ":", "'; '", ".", "join", "(", "sorted", "(", "'%s=%s'", "%", "(", "morsel", ".", "key", ",", "morsel", ".", "coded_value", ")", "for", "morsel", "in", "self", ".", "cookies", ".", "values", "(", ")", ")", ")", ",", "'PATH_INFO'", ":", "'/'", ",", "'REMOTE_ADDR'", ":", "'127.0.0.1'", ",", "'REQUEST_METHOD'", ":", "'GET'", ",", "'SCRIPT_NAME'", ":", "''", ",", "'SERVER_NAME'", ":", "'testserver'", ",", "'SERVER_PORT'", ":", "'80'", ",", "'SERVER_PROTOCOL'", ":", "'HTTP/1.1'", ",", "'wsgi.version'", ":", "(", "1", ",", "0", ")", ",", "'wsgi.url_scheme'", ":", "'http'", ",", "'wsgi.input'", ":", "FakePayload", "(", "b''", ")", ",", "'wsgi.errors'", ":", "self", ".", "errors", ",", "'wsgi.multiprocess'", ":", "True", ",", "'wsgi.multithread'", ":", "False", ",", "'wsgi.run_once'", ":", "False", ",", "*", "*", "self", ".", "defaults", ",", "*", "*", "request", ",", "}" ]
[ 326, 4 ]
[ 355, 9 ]
python
en
['en', 'error', 'th']
False
RequestFactory.request
(self, **request)
Construct a generic request object.
Construct a generic request object.
def request(self, **request):
    "Construct a generic request object."
    return WSGIRequest(self._base_environ(**request))
[ "def", "request", "(", "self", ",", "*", "*", "request", ")", ":", "return", "WSGIRequest", "(", "self", ".", "_base_environ", "(", "*", "*", "request", ")", ")" ]
[ 357, 4 ]
[ 359, 57 ]
python
en
['en', 'en', 'en']
True
RequestFactory._encode_json
(self, data, content_type)
Return encoded JSON if data is a dict, list, or tuple and content_type is application/json.
Return encoded JSON if data is a dict, list, or tuple and content_type is application/json.
def _encode_json(self, data, content_type):
    """
    Return encoded JSON if data is a dict, list, or tuple and content_type
    is application/json.
    """
    should_encode = JSON_CONTENT_TYPE_RE.match(content_type) and isinstance(data, (dict, list, tuple))
    return json.dumps(data, cls=self.json_encoder) if should_encode else data
[ "def", "_encode_json", "(", "self", ",", "data", ",", "content_type", ")", ":", "should_encode", "=", "JSON_CONTENT_TYPE_RE", ".", "match", "(", "content_type", ")", "and", "isinstance", "(", "data", ",", "(", "dict", ",", "list", ",", "tuple", ")", ")", "return", "json", ".", "dumps", "(", "data", ",", "cls", "=", "self", ".", "json_encoder", ")", "if", "should_encode", "else", "data" ]
[ 373, 4 ]
[ 379, 81 ]
python
en
['en', 'error', 'th']
False
RequestFactory.get
(self, path, data=None, secure=False, **extra)
Construct a GET request.
Construct a GET request.
def get(self, path, data=None, secure=False, **extra):
    """Construct a GET request."""
    data = {} if data is None else data
    return self.generic('GET', path, secure=secure, **{
        'QUERY_STRING': urlencode(data, doseq=True),
        **extra,
    })
[ "def", "get", "(", "self", ",", "path", ",", "data", "=", "None", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "data", "=", "{", "}", "if", "data", "is", "None", "else", "data", "return", "self", ".", "generic", "(", "'GET'", ",", "path", ",", "secure", "=", "secure", ",", "*", "*", "{", "'QUERY_STRING'", ":", "urlencode", "(", "data", ",", "doseq", "=", "True", ")", ",", "*", "*", "extra", ",", "}", ")" ]
[ 392, 4 ]
[ 398, 10 ]
python
en
['en', 'en', 'en']
True
RequestFactory.post
(self, path, data=None, content_type=MULTIPART_CONTENT, secure=False, **extra)
Construct a POST request.
Construct a POST request.
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
         secure=False, **extra):
    """Construct a POST request."""
    data = self._encode_json({} if data is None else data, content_type)
    post_data = self._encode_data(data, content_type)
    return self.generic('POST', path, post_data, content_type,
                        secure=secure, **extra)
[ "def", "post", "(", "self", ",", "path", ",", "data", "=", "None", ",", "content_type", "=", "MULTIPART_CONTENT", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "data", "=", "self", ".", "_encode_json", "(", "{", "}", "if", "data", "is", "None", "else", "data", ",", "content_type", ")", "post_data", "=", "self", ".", "_encode_data", "(", "data", ",", "content_type", ")", "return", "self", ".", "generic", "(", "'POST'", ",", "path", ",", "post_data", ",", "content_type", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")" ]
[ 400, 4 ]
[ 407, 51 ]
python
en
['en', 'en', 'en']
True
RequestFactory.head
(self, path, data=None, secure=False, **extra)
Construct a HEAD request.
Construct a HEAD request.
def head(self, path, data=None, secure=False, **extra):
    """Construct a HEAD request."""
    data = {} if data is None else data
    return self.generic('HEAD', path, secure=secure, **{
        'QUERY_STRING': urlencode(data, doseq=True),
        **extra,
    })
[ "def", "head", "(", "self", ",", "path", ",", "data", "=", "None", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "data", "=", "{", "}", "if", "data", "is", "None", "else", "data", "return", "self", ".", "generic", "(", "'HEAD'", ",", "path", ",", "secure", "=", "secure", ",", "*", "*", "{", "'QUERY_STRING'", ":", "urlencode", "(", "data", ",", "doseq", "=", "True", ")", ",", "*", "*", "extra", ",", "}", ")" ]
[ 409, 4 ]
[ 415, 10 ]
python
en
['en', 'en', 'en']
True
RequestFactory.trace
(self, path, secure=False, **extra)
Construct a TRACE request.
Construct a TRACE request.
def trace(self, path, secure=False, **extra):
    """Construct a TRACE request."""
    return self.generic('TRACE', path, secure=secure, **extra)
[ "def", "trace", "(", "self", ",", "path", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "return", "self", ".", "generic", "(", "'TRACE'", ",", "path", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")" ]
[ 417, 4 ]
[ 419, 66 ]
python
en
['en', 'en', 'en']
True
RequestFactory.options
(self, path, data='', content_type='application/octet-stream', secure=False, **extra)
Construct an OPTIONS request.
Construct an OPTIONS request.
def options(self, path, data='', content_type='application/octet-stream',
            secure=False, **extra):
    "Construct an OPTIONS request."
    return self.generic('OPTIONS', path, data, content_type,
                        secure=secure, **extra)
[ "def", "options", "(", "self", ",", "path", ",", "data", "=", "''", ",", "content_type", "=", "'application/octet-stream'", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "return", "self", ".", "generic", "(", "'OPTIONS'", ",", "path", ",", "data", ",", "content_type", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")" ]
[ 421, 4 ]
[ 425, 51 ]
python
en
['en', 'en', 'en']
True
RequestFactory.put
(self, path, data='', content_type='application/octet-stream', secure=False, **extra)
Construct a PUT request.
Construct a PUT request.
def put(self, path, data='', content_type='application/octet-stream',
        secure=False, **extra):
    """Construct a PUT request."""
    data = self._encode_json(data, content_type)
    return self.generic('PUT', path, data, content_type,
                        secure=secure, **extra)
[ "def", "put", "(", "self", ",", "path", ",", "data", "=", "''", ",", "content_type", "=", "'application/octet-stream'", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "data", "=", "self", ".", "_encode_json", "(", "data", ",", "content_type", ")", "return", "self", ".", "generic", "(", "'PUT'", ",", "path", ",", "data", ",", "content_type", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")" ]
[ 427, 4 ]
[ 432, 51 ]
python
en
['en', 'en', 'en']
True
RequestFactory.patch
(self, path, data='', content_type='application/octet-stream', secure=False, **extra)
Construct a PATCH request.
Construct a PATCH request.
def patch(self, path, data='', content_type='application/octet-stream',
          secure=False, **extra):
    """Construct a PATCH request."""
    data = self._encode_json(data, content_type)
    return self.generic('PATCH', path, data, content_type,
                        secure=secure, **extra)
[ "def", "patch", "(", "self", ",", "path", ",", "data", "=", "''", ",", "content_type", "=", "'application/octet-stream'", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "data", "=", "self", ".", "_encode_json", "(", "data", ",", "content_type", ")", "return", "self", ".", "generic", "(", "'PATCH'", ",", "path", ",", "data", ",", "content_type", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")" ]
[ 434, 4 ]
[ 439, 51 ]
python
en
['en', 'en', 'en']
True
RequestFactory.delete
(self, path, data='', content_type='application/octet-stream', secure=False, **extra)
Construct a DELETE request.
Construct a DELETE request.
def delete(self, path, data='', content_type='application/octet-stream',
           secure=False, **extra):
    """Construct a DELETE request."""
    data = self._encode_json(data, content_type)
    return self.generic('DELETE', path, data, content_type,
                        secure=secure, **extra)
[ "def", "delete", "(", "self", ",", "path", ",", "data", "=", "''", ",", "content_type", "=", "'application/octet-stream'", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "data", "=", "self", ".", "_encode_json", "(", "data", ",", "content_type", ")", "return", "self", ".", "generic", "(", "'DELETE'", ",", "path", ",", "data", ",", "content_type", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")" ]
[ 441, 4 ]
[ 446, 51 ]
python
en
['en', 'it', 'en']
True
RequestFactory.generic
(self, method, path, data='', content_type='application/octet-stream', secure=False, **extra)
Construct an arbitrary HTTP request.
Construct an arbitrary HTTP request.
def generic(self, method, path, data='',
            content_type='application/octet-stream', secure=False,
            **extra):
    """Construct an arbitrary HTTP request."""
    parsed = urlparse(str(path))  # path can be lazy
    data = force_bytes(data, settings.DEFAULT_CHARSET)
    r = {
        'PATH_INFO': self._get_path(parsed),
        'REQUEST_METHOD': method,
        'SERVER_PORT': '443' if secure else '80',
        'wsgi.url_scheme': 'https' if secure else 'http',
    }
    if data:
        r.update({
            'CONTENT_LENGTH': str(len(data)),
            'CONTENT_TYPE': content_type,
            'wsgi.input': FakePayload(data),
        })
    r.update(extra)
    # If QUERY_STRING is absent or empty, we want to extract it from the URL.
    if not r.get('QUERY_STRING'):
        # WSGI requires latin-1 encoded strings. See get_path_info().
        query_string = parsed[4].encode().decode('iso-8859-1')
        r['QUERY_STRING'] = query_string
    return self.request(**r)
[ "def", "generic", "(", "self", ",", "method", ",", "path", ",", "data", "=", "''", ",", "content_type", "=", "'application/octet-stream'", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "parsed", "=", "urlparse", "(", "str", "(", "path", ")", ")", "# path can be lazy", "data", "=", "force_bytes", "(", "data", ",", "settings", ".", "DEFAULT_CHARSET", ")", "r", "=", "{", "'PATH_INFO'", ":", "self", ".", "_get_path", "(", "parsed", ")", ",", "'REQUEST_METHOD'", ":", "method", ",", "'SERVER_PORT'", ":", "'443'", "if", "secure", "else", "'80'", ",", "'wsgi.url_scheme'", ":", "'https'", "if", "secure", "else", "'http'", ",", "}", "if", "data", ":", "r", ".", "update", "(", "{", "'CONTENT_LENGTH'", ":", "str", "(", "len", "(", "data", ")", ")", ",", "'CONTENT_TYPE'", ":", "content_type", ",", "'wsgi.input'", ":", "FakePayload", "(", "data", ")", ",", "}", ")", "r", ".", "update", "(", "extra", ")", "# If QUERY_STRING is absent or empty, we want to extract it from the URL.", "if", "not", "r", ".", "get", "(", "'QUERY_STRING'", ")", ":", "# WSGI requires latin-1 encoded strings. See get_path_info().", "query_string", "=", "parsed", "[", "4", "]", ".", "encode", "(", ")", ".", "decode", "(", "'iso-8859-1'", ")", "r", "[", "'QUERY_STRING'", "]", "=", "query_string", "return", "self", ".", "request", "(", "*", "*", "r", ")" ]
[ 448, 4 ]
[ 472, 32 ]
python
en
['en', 'en', 'en']
True
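Taken together, these methods form Django's documented `RequestFactory` API. A hedged end-to-end sketch (`settings.configure()` stands in for a real project; the paths are illustrative):

import django
from django.conf import settings

settings.configure()
django.setup()

from django.test import RequestFactory

rf = RequestFactory()

get_request = rf.get('/search/', {'q': 'django'})
print(get_request.method, get_request.path, get_request.GET['q'])

# Dict payloads with an application/json content type are serialized for us.
post_request = rf.post('/api/items/', {'name': 'widget'},
                       content_type='application/json')
print(post_request.body)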
AsyncRequestFactory._base_scope
(self, **request)
The base scope for a request.
The base scope for a request.
def _base_scope(self, **request):
    """The base scope for a request."""
    # This is a minimal valid ASGI scope, plus:
    # - headers['cookie'] for cookie support,
    # - 'client' often useful, see #8551.
    scope = {
        'asgi': {'version': '3.0'},
        'type': 'http',
        'http_version': '1.1',
        'client': ['127.0.0.1', 0],
        'server': ('testserver', '80'),
        'scheme': 'http',
        'method': 'GET',
        'headers': [],
        **self.defaults,
        **request,
    }
    scope['headers'].append((
        b'cookie',
        b'; '.join(sorted(
            ('%s=%s' % (morsel.key, morsel.coded_value)).encode('ascii')
            for morsel in self.cookies.values()
        )),
    ))
    return scope
[ "def", "_base_scope", "(", "self", ",", "*", "*", "request", ")", ":", "# This is a minimal valid ASGI scope, plus:", "# - headers['cookie'] for cookie support,", "# - 'client' often useful, see #8551.", "scope", "=", "{", "'asgi'", ":", "{", "'version'", ":", "'3.0'", "}", ",", "'type'", ":", "'http'", ",", "'http_version'", ":", "'1.1'", ",", "'client'", ":", "[", "'127.0.0.1'", ",", "0", "]", ",", "'server'", ":", "(", "'testserver'", ",", "'80'", ")", ",", "'scheme'", ":", "'http'", ",", "'method'", ":", "'GET'", ",", "'headers'", ":", "[", "]", ",", "*", "*", "self", ".", "defaults", ",", "*", "*", "request", ",", "}", "scope", "[", "'headers'", "]", ".", "append", "(", "(", "b'cookie'", ",", "b'; '", ".", "join", "(", "sorted", "(", "(", "'%s=%s'", "%", "(", "morsel", ".", "key", ",", "morsel", ".", "coded_value", ")", ")", ".", "encode", "(", "'ascii'", ")", "for", "morsel", "in", "self", ".", "cookies", ".", "values", "(", ")", ")", ")", ",", ")", ")", "return", "scope" ]
[ 489, 4 ]
[ 513, 20 ]
python
en
['en', 'en', 'en']
True
AsyncRequestFactory.request
(self, **request)
Construct a generic request object.
Construct a generic request object.
def request(self, **request):
    """Construct a generic request object."""
    # This is synchronous, which means all methods on this class are.
    # AsyncClient, however, has an async request function, which makes all
    # its methods async.
    if '_body_file' in request:
        body_file = request.pop('_body_file')
    else:
        body_file = FakePayload('')
    return ASGIRequest(self._base_scope(**request), body_file)
[ "def", "request", "(", "self", ",", "*", "*", "request", ")", ":", "# This is synchronous, which means all methods on this class are.", "# AsyncClient, however, has an async request function, which makes all", "# its methods async.", "if", "'_body_file'", "in", "request", ":", "body_file", "=", "request", ".", "pop", "(", "'_body_file'", ")", "else", ":", "body_file", "=", "FakePayload", "(", "''", ")", "return", "ASGIRequest", "(", "self", ".", "_base_scope", "(", "*", "*", "request", ")", ",", "body_file", ")" ]
[ 515, 4 ]
[ 524, 66 ]
python
en
['en', 'en', 'en']
True
AsyncRequestFactory.generic
( self, method, path, data='', content_type='application/octet-stream', secure=False, **extra, )
Construct an arbitrary HTTP request.
Construct an arbitrary HTTP request.
def generic(
    self, method, path, data='', content_type='application/octet-stream',
    secure=False, **extra,
):
    """Construct an arbitrary HTTP request."""
    parsed = urlparse(str(path))  # path can be lazy.
    data = force_bytes(data, settings.DEFAULT_CHARSET)
    s = {
        'method': method,
        'path': self._get_path(parsed),
        'server': ('127.0.0.1', '443' if secure else '80'),
        'scheme': 'https' if secure else 'http',
        'headers': [(b'host', b'testserver')],
    }
    if data:
        s['headers'].extend([
            (b'content-length', str(len(data)).encode('ascii')),
            (b'content-type', content_type.encode('ascii')),
        ])
        s['_body_file'] = FakePayload(data)
    follow = extra.pop('follow', None)
    if follow is not None:
        s['follow'] = follow
    s['headers'] += [
        (key.lower().encode('ascii'), value.encode('latin1'))
        for key, value in extra.items()
    ]
    # If QUERY_STRING is absent or empty, we want to extract it from the
    # URL.
    if not s.get('query_string'):
        s['query_string'] = parsed[4]
    return self.request(**s)
[ "def", "generic", "(", "self", ",", "method", ",", "path", ",", "data", "=", "''", ",", "content_type", "=", "'application/octet-stream'", ",", "secure", "=", "False", ",", "*", "*", "extra", ",", ")", ":", "parsed", "=", "urlparse", "(", "str", "(", "path", ")", ")", "# path can be lazy.", "data", "=", "force_bytes", "(", "data", ",", "settings", ".", "DEFAULT_CHARSET", ")", "s", "=", "{", "'method'", ":", "method", ",", "'path'", ":", "self", ".", "_get_path", "(", "parsed", ")", ",", "'server'", ":", "(", "'127.0.0.1'", ",", "'443'", "if", "secure", "else", "'80'", ")", ",", "'scheme'", ":", "'https'", "if", "secure", "else", "'http'", ",", "'headers'", ":", "[", "(", "b'host'", ",", "b'testserver'", ")", "]", ",", "}", "if", "data", ":", "s", "[", "'headers'", "]", ".", "extend", "(", "[", "(", "b'content-length'", ",", "str", "(", "len", "(", "data", ")", ")", ".", "encode", "(", "'ascii'", ")", ")", ",", "(", "b'content-type'", ",", "content_type", ".", "encode", "(", "'ascii'", ")", ")", ",", "]", ")", "s", "[", "'_body_file'", "]", "=", "FakePayload", "(", "data", ")", "follow", "=", "extra", ".", "pop", "(", "'follow'", ",", "None", ")", "if", "follow", "is", "not", "None", ":", "s", "[", "'follow'", "]", "=", "follow", "s", "[", "'headers'", "]", "+=", "[", "(", "key", ".", "lower", "(", ")", ".", "encode", "(", "'ascii'", ")", ",", "value", ".", "encode", "(", "'latin1'", ")", ")", "for", "key", ",", "value", "in", "extra", ".", "items", "(", ")", "]", "# If QUERY_STRING is absent or empty, we want to extract it from the", "# URL.", "if", "not", "s", ".", "get", "(", "'query_string'", ")", ":", "s", "[", "'query_string'", "]", "=", "parsed", "[", "4", "]", "return", "self", ".", "request", "(", "*", "*", "s", ")" ]
[ 526, 4 ]
[ 557, 32 ]
python
en
['en', 'en', 'en']
True
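The ASGI counterpart mirrors the WSGI factory; a small sketch assuming Django ≥ 3.1, where `AsyncRequestFactory` is exported from `django.test`:

from django.conf import settings

settings.configure()

from django.test import AsyncRequestFactory

arf = AsyncRequestFactory()
request = arf.get('/ping/')  # synchronous call that returns an ASGIRequest
print(request.scope['method'], request.scope['path'])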
ClientMixin.store_exc_info
(self, **kwargs)
Store exceptions when they are generated by a view.
Store exceptions when they are generated by a view.
def store_exc_info(self, **kwargs):
    """Store exceptions when they are generated by a view."""
    self.exc_info = sys.exc_info()
[ "def", "store_exc_info", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "exc_info", "=", "sys", ".", "exc_info", "(", ")" ]
[ 564, 4 ]
[ 566, 38 ]
python
en
['en', 'en', 'en']
True
ClientMixin.check_exception
(self, response)
Look for a signaled exception, clear the current context exception data, re-raise the signaled exception, and clear the signaled exception from the local cache.
Look for a signaled exception, clear the current context exception data, re-raise the signaled exception, and clear the signaled exception from the local cache.
def check_exception(self, response):
    """
    Look for a signaled exception, clear the current context exception
    data, re-raise the signaled exception, and clear the signaled exception
    from the local cache.
    """
    response.exc_info = self.exc_info
    if self.exc_info:
        _, exc_value, _ = self.exc_info
        self.exc_info = None
        if self.raise_request_exception:
            raise exc_value
[ "def", "check_exception", "(", "self", ",", "response", ")", ":", "response", ".", "exc_info", "=", "self", ".", "exc_info", "if", "self", ".", "exc_info", ":", "_", ",", "exc_value", ",", "_", "=", "self", ".", "exc_info", "self", ".", "exc_info", "=", "None", "if", "self", ".", "raise_request_exception", ":", "raise", "exc_value" ]
[ 568, 4 ]
[ 579, 31 ]
python
en
['en', 'error', 'th']
False
ClientMixin.session
(self)
Return the current session variables.
Return the current session variables.
def session(self):
    """Return the current session variables."""
    engine = import_module(settings.SESSION_ENGINE)
    cookie = self.cookies.get(settings.SESSION_COOKIE_NAME)
    if cookie:
        return engine.SessionStore(cookie.value)
    session = engine.SessionStore()
    session.save()
    self.cookies[settings.SESSION_COOKIE_NAME] = session.session_key
    return session
[ "def", "session", "(", "self", ")", ":", "engine", "=", "import_module", "(", "settings", ".", "SESSION_ENGINE", ")", "cookie", "=", "self", ".", "cookies", ".", "get", "(", "settings", ".", "SESSION_COOKIE_NAME", ")", "if", "cookie", ":", "return", "engine", ".", "SessionStore", "(", "cookie", ".", "value", ")", "session", "=", "engine", ".", "SessionStore", "(", ")", "session", ".", "save", "(", ")", "self", ".", "cookies", "[", "settings", ".", "SESSION_COOKIE_NAME", "]", "=", "session", ".", "session_key", "return", "session" ]
[ 582, 4 ]
[ 591, 22 ]
python
en
['en', 'en', 'en']
True
ClientMixin.login
(self, **credentials)
Set the Factory to appear as if it has successfully logged into a site. Return True if login is possible or False if the provided credentials are incorrect.
Set the Factory to appear as if it has successfully logged into a site.
def login(self, **credentials):
    """
    Set the Factory to appear as if it has successfully logged into a site.

    Return True if login is possible or False if the provided credentials
    are incorrect.
    """
    from django.contrib.auth import authenticate
    user = authenticate(**credentials)
    if user:
        self._login(user)
        return True
    return False
[ "def", "login", "(", "self", ",", "*", "*", "credentials", ")", ":", "from", "django", ".", "contrib", ".", "auth", "import", "authenticate", "user", "=", "authenticate", "(", "*", "*", "credentials", ")", "if", "user", ":", "self", ".", "_login", "(", "user", ")", "return", "True", "return", "False" ]
[ 593, 4 ]
[ 605, 20 ]
python
en
['en', 'error', 'th']
False
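A hedged sketch of exercising `login()`/`logout()` from a test case, assuming a project with `django.contrib.auth` installed and run through Django's test runner (which provides the database):

from django.contrib.auth.models import User
from django.test import Client, TestCase

class LoginSmokeTest(TestCase):
    def test_login_roundtrip(self):
        User.objects.create_user('alice', password='s3cret')
        c = Client()
        self.assertTrue(c.login(username='alice', password='s3cret'))
        self.assertIn('_auth_user_id', c.session)  # session cookie now set
        c.logout()
        self.assertNotIn('_auth_user_id', c.session)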
ClientMixin.logout
(self)
Log out the user by removing the cookies and session object.
Log out the user by removing the cookies and session object.
def logout(self):
    """Log out the user by removing the cookies and session object."""
    from django.contrib.auth import get_user, logout
    request = HttpRequest()
    if self.session:
        request.session = self.session
        request.user = get_user(request)
    else:
        engine = import_module(settings.SESSION_ENGINE)
        request.session = engine.SessionStore()
    logout(request)
    self.cookies = SimpleCookie()
[ "def", "logout", "(", "self", ")", ":", "from", "django", ".", "contrib", ".", "auth", "import", "get_user", ",", "logout", "request", "=", "HttpRequest", "(", ")", "if", "self", ".", "session", ":", "request", ".", "session", "=", "self", ".", "session", "request", ".", "user", "=", "get_user", "(", "request", ")", "else", ":", "engine", "=", "import_module", "(", "settings", ".", "SESSION_ENGINE", ")", "request", ".", "session", "=", "engine", ".", "SessionStore", "(", ")", "logout", "(", "request", ")", "self", ".", "cookies", "=", "SimpleCookie", "(", ")" ]
[ 645, 4 ]
[ 656, 37 ]
python
en
['en', 'en', 'en']
True
Client.request
(self, **request)
The master request method. Compose the environment dictionary and pass to the handler, return the result of the handler. Assume defaults for the query environment, which can be overridden using the arguments to the request.
The master request method. Compose the environment dictionary and pass to the handler, return the result of the handler. Assume defaults for the query environment, which can be overridden using the arguments to the request.
def request(self, **request):
    """
    The master request method. Compose the environment dictionary and pass
    to the handler, return the result of the handler. Assume defaults for
    the query environment, which can be overridden using the arguments to
    the request.
    """
    environ = self._base_environ(**request)

    # Curry a data dictionary into an instance of the template renderer
    # callback function.
    data = {}
    on_template_render = partial(store_rendered_templates, data)
    signal_uid = "template-render-%s" % id(request)
    signals.template_rendered.connect(on_template_render, dispatch_uid=signal_uid)
    # Capture exceptions created by the handler.
    exception_uid = "request-exception-%s" % id(request)
    got_request_exception.connect(self.store_exc_info, dispatch_uid=exception_uid)
    try:
        response = self.handler(environ)
    finally:
        signals.template_rendered.disconnect(dispatch_uid=signal_uid)
        got_request_exception.disconnect(dispatch_uid=exception_uid)
    # Check for signaled exceptions.
    self.check_exception(response)
    # Save the client and request that stimulated the response.
    response.client = self
    response.request = request
    # Add any rendered template detail to the response.
    response.templates = data.get('templates', [])
    response.context = data.get('context')
    response.json = partial(self._parse_json, response)
    # Attach the ResolverMatch instance to the response.
    response.resolver_match = SimpleLazyObject(lambda: resolve(request['PATH_INFO']))
    # Flatten a single context. Not really necessary anymore thanks to the
    # __getattr__ flattening in ContextList, but has some edge case
    # backwards compatibility implications.
    if response.context and len(response.context) == 1:
        response.context = response.context[0]
    # Update persistent cookie data.
    if response.cookies:
        self.cookies.update(response.cookies)
    return response
[ "def", "request", "(", "self", ",", "*", "*", "request", ")", ":", "environ", "=", "self", ".", "_base_environ", "(", "*", "*", "request", ")", "# Curry a data dictionary into an instance of the template renderer", "# callback function.", "data", "=", "{", "}", "on_template_render", "=", "partial", "(", "store_rendered_templates", ",", "data", ")", "signal_uid", "=", "\"template-render-%s\"", "%", "id", "(", "request", ")", "signals", ".", "template_rendered", ".", "connect", "(", "on_template_render", ",", "dispatch_uid", "=", "signal_uid", ")", "# Capture exceptions created by the handler.", "exception_uid", "=", "\"request-exception-%s\"", "%", "id", "(", "request", ")", "got_request_exception", ".", "connect", "(", "self", ".", "store_exc_info", ",", "dispatch_uid", "=", "exception_uid", ")", "try", ":", "response", "=", "self", ".", "handler", "(", "environ", ")", "finally", ":", "signals", ".", "template_rendered", ".", "disconnect", "(", "dispatch_uid", "=", "signal_uid", ")", "got_request_exception", ".", "disconnect", "(", "dispatch_uid", "=", "exception_uid", ")", "# Check for signaled exceptions.", "self", ".", "check_exception", "(", "response", ")", "# Save the client and request that stimulated the response.", "response", ".", "client", "=", "self", "response", ".", "request", "=", "request", "# Add any rendered template detail to the response.", "response", ".", "templates", "=", "data", ".", "get", "(", "'templates'", ",", "[", "]", ")", "response", ".", "context", "=", "data", ".", "get", "(", "'context'", ")", "response", ".", "json", "=", "partial", "(", "self", ".", "_parse_json", ",", "response", ")", "# Attach the ResolverMatch instance to the response.", "response", ".", "resolver_match", "=", "SimpleLazyObject", "(", "lambda", ":", "resolve", "(", "request", "[", "'PATH_INFO'", "]", ")", ")", "# Flatten a single context. Not really necessary anymore thanks to the", "# __getattr__ flattening in ContextList, but has some edge case", "# backwards compatibility implications.", "if", "response", ".", "context", "and", "len", "(", "response", ".", "context", ")", "==", "1", ":", "response", ".", "context", "=", "response", ".", "context", "[", "0", "]", "# Update persistent cookie data.", "if", "response", ".", "cookies", ":", "self", ".", "cookies", ".", "update", "(", "response", ".", "cookies", ")", "return", "response" ]
[ 694, 4 ]
[ 736, 23 ]
python
en
['en', 'error', 'th']
False
Client.get
(self, path, data=None, follow=False, secure=False, **extra)
Request a response from the server using GET.
Request a response from the server using GET.
def get(self, path, data=None, follow=False, secure=False, **extra):
    """Request a response from the server using GET."""
    self.extra = extra
    response = super().get(path, data=data, secure=secure, **extra)
    if follow:
        response = self._handle_redirects(response, data=data, **extra)
    return response
[ "def", "get", "(", "self", ",", "path", ",", "data", "=", "None", ",", "follow", "=", "False", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "self", ".", "extra", "=", "extra", "response", "=", "super", "(", ")", ".", "get", "(", "path", ",", "data", "=", "data", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")", "if", "follow", ":", "response", "=", "self", ".", "_handle_redirects", "(", "response", ",", "data", "=", "data", ",", "*", "*", "extra", ")", "return", "response" ]
[ 738, 4 ]
[ 744, 23 ]
python
en
['en', 'en', 'en']
True
Client.post
(self, path, data=None, content_type=MULTIPART_CONTENT, follow=False, secure=False, **extra)
Request a response from the server using POST.
Request a response from the server using POST.
def post(self, path, data=None, content_type=MULTIPART_CONTENT,
         follow=False, secure=False, **extra):
    """Request a response from the server using POST."""
    self.extra = extra
    response = super().post(path, data=data, content_type=content_type, secure=secure, **extra)
    if follow:
        response = self._handle_redirects(response, data=data, content_type=content_type, **extra)
    return response
[ "def", "post", "(", "self", ",", "path", ",", "data", "=", "None", ",", "content_type", "=", "MULTIPART_CONTENT", ",", "follow", "=", "False", ",", "secure", "=", "False", ",", "*", "*", "extra", ")", ":", "self", ".", "extra", "=", "extra", "response", "=", "super", "(", ")", ".", "post", "(", "path", ",", "data", "=", "data", ",", "content_type", "=", "content_type", ",", "secure", "=", "secure", ",", "*", "*", "extra", ")", "if", "follow", ":", "response", "=", "self", ".", "_handle_redirects", "(", "response", ",", "data", "=", "data", ",", "content_type", "=", "content_type", ",", "*", "*", "extra", ")", "return", "response" ]
[ 746, 4 ]
[ 753, 23 ]
python
en
['en', 'en', 'en']
True
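Finally, a sketch of the redirect-following behavior these wrappers add on top of `RequestFactory`, again assuming a configured project; the URLs are illustrative:

from django.test import Client

c = Client()

# follow=True chases the redirect chain and records each intermediate hop.
response = c.get('/accounts/profile/', follow=True)
print(response.redirect_chain)  # e.g. [('/accounts/login/?next=/accounts/profile/', 302)]
print(response.status_code)     # status code of the final response

# POST follows the same pattern; multipart encoding is the default.
response = c.post('/posts/new/', {'title': 'hello'}, follow=True)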