identifier (string, 1-155 chars) | parameters (string, 2-6.09k chars) | docstring (string, 11-63.4k chars) | docstring_summary (string, 0-63.4k chars) | function (string, 29-99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (string, 1 class) | docstring_language (string, 2-7 chars) | docstring_language_predictions (string, 18-23 chars) | is_langid_reliable (string, 2 values)
---|---|---|---|---|---|---|---|---|---|---|---|
options | (url, **kwargs) | r"""Sends an OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
| r"""Sends an OPTIONS request. | def options(url, **kwargs):
r"""Sends an OPTIONS request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('options', url, **kwargs) | [
"def",
"options",
"(",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"request",
"(",
"'options'",
",",
"url",
",",
"*",
"*",
"kwargs",
")"
] | [
77,
0
] | [
86,
44
] | python | en | ['en', 'en', 'en'] | True |
head | (url, **kwargs) | r"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes. If
`allow_redirects` is not provided, it will be set to `False` (as
opposed to the default :meth:`request` behavior).
:return: :class:`Response <Response>` object
:rtype: requests.Response
| r"""Sends a HEAD request. | def head(url, **kwargs):
r"""Sends a HEAD request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes. If
`allow_redirects` is not provided, it will be set to `False` (as
opposed to the default :meth:`request` behavior).
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs) | [
"def",
"head",
"(",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"setdefault",
"(",
"'allow_redirects'",
",",
"False",
")",
"return",
"request",
"(",
"'head'",
",",
"url",
",",
"*",
"*",
"kwargs",
")"
] | [
89,
0
] | [
101,
41
] | python | en | ['en', 'co', 'en'] | True |
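A usage sketch of head() showing the allow_redirects default described in the row above; it assumes the requests package is installed, and the URL is a placeholder that may or may not redirect.

```python
import requests

# HEAD does not follow redirects here, because head() defaults
# allow_redirects to False (unlike the plain request() behavior).
resp = requests.head("https://example.com/old-path")
print(resp.status_code, resp.headers.get("Location"))

# Passing allow_redirects=True restores the usual redirect-following behavior.
resp = requests.head("https://example.com/old-path", allow_redirects=True)
print(resp.status_code, resp.url)
```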
post | (url, data=None, json=None, **kwargs) | r"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
| r"""Sends a POST request. | def post(url, data=None, json=None, **kwargs):
r"""Sends a POST request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('post', url, data=data, json=json, **kwargs) | [
"def",
"post",
"(",
"url",
",",
"data",
"=",
"None",
",",
"json",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"request",
"(",
"'post'",
",",
"url",
",",
"data",
"=",
"data",
",",
"json",
"=",
"json",
",",
"*",
"*",
"kwargs",
")"
] | [
104,
0
] | [
116,
63
] | python | en | ['en', 'en', 'en'] | True |
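A usage sketch of post() contrasting the `data` and `json` bodies described in the docstring above; it assumes network access to the public httpbin.org echo service, which is only a convenient placeholder endpoint.

```python
import requests

# Form-encoded body via `data` (application/x-www-form-urlencoded).
resp = requests.post("https://httpbin.org/post", data={"user": "alice", "score": "10"})
print(resp.status_code)

# JSON body via `json`; requests serializes the dict and sets the Content-Type header.
resp = requests.post("https://httpbin.org/post", json={"user": "alice", "score": 10})
print(resp.json()["json"])
```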
put | (url, data=None, **kwargs) | r"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
| r"""Sends a PUT request. | def put(url, data=None, **kwargs):
r"""Sends a PUT request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('put', url, data=data, **kwargs) | [
"def",
"put",
"(",
"url",
",",
"data",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"request",
"(",
"'put'",
",",
"url",
",",
"data",
"=",
"data",
",",
"*",
"*",
"kwargs",
")"
] | [
119,
0
] | [
131,
51
] | python | en | ['en', 'co', 'en'] | True |
patch | (url, data=None, **kwargs) | r"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
| r"""Sends a PATCH request. | def patch(url, data=None, **kwargs):
r"""Sends a PATCH request.
:param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like
object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('patch', url, data=data, **kwargs) | [
"def",
"patch",
"(",
"url",
",",
"data",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"request",
"(",
"'patch'",
",",
"url",
",",
"data",
"=",
"data",
",",
"*",
"*",
"kwargs",
")"
] | [
134,
0
] | [
146,
53
] | python | en | ['en', 'co', 'en'] | True |
delete | (url, **kwargs) | r"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
| r"""Sends a DELETE request. | def delete(url, **kwargs):
r"""Sends a DELETE request.
:param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
"""
return request('delete', url, **kwargs) | [
"def",
"delete",
"(",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"request",
"(",
"'delete'",
",",
"url",
",",
"*",
"*",
"kwargs",
")"
] | [
149,
0
] | [
158,
43
] | python | en | ['en', 'it', 'en'] | True |
split_and_convert_string | (string_tensor) | Splits and converts string tensor into dense float tensor.
Given string tensor, splits string by delimiter, converts to and returns
dense float tensor.
Args:
string_tensor: tf.string tensor.
Returns:
tf.float64 tensor split along delimiter.
| Splits and converts string tensor into dense float tensor. | def split_and_convert_string(string_tensor):
"""Splits and converts string tensor into dense float tensor.
Given string tensor, splits string by delimiter, converts to and returns
dense float tensor.
Args:
string_tensor: tf.string tensor.
Returns:
tf.float64 tensor split along delimiter.
"""
# Split string tensor into a sparse tensor based on delimiter
split_string = tf.string_split(source=tf.expand_dims(
input=string_tensor, axis=0), delimiter=";")
# Converts the values of the sparse tensor to floats
converted_tensor = tf.string_to_number(
string_tensor=split_string.values,
out_type=tf.float64)
# Create a new sparse tensor with the new converted values,
# because the original sparse tensor values are immutable
new_sparse_tensor = tf.SparseTensor(
indices=split_string.indices,
values=converted_tensor,
dense_shape=split_string.dense_shape)
# Create a dense tensor of the float values that were converted from text csv
dense_floats = tf.sparse_tensor_to_dense(
sp_input=new_sparse_tensor, default_value=0.0)
dense_floats_vector = tf.squeeze(input=dense_floats, axis=0)
return dense_floats_vector | [
"def",
"split_and_convert_string",
"(",
"string_tensor",
")",
":",
"# Split string tensor into a sparse tensor based on delimiter",
"split_string",
"=",
"tf",
".",
"string_split",
"(",
"source",
"=",
"tf",
".",
"expand_dims",
"(",
"input",
"=",
"string_tensor",
",",
"axis",
"=",
"0",
")",
",",
"delimiter",
"=",
"\";\"",
")",
"# Converts the values of the sparse tensor to floats",
"converted_tensor",
"=",
"tf",
".",
"string_to_number",
"(",
"string_tensor",
"=",
"split_string",
".",
"values",
",",
"out_type",
"=",
"tf",
".",
"float64",
")",
"# Create a new sparse tensor with the new converted values,",
"# because the original sparse tensor values are immutable",
"new_sparse_tensor",
"=",
"tf",
".",
"SparseTensor",
"(",
"indices",
"=",
"split_string",
".",
"indices",
",",
"values",
"=",
"converted_tensor",
",",
"dense_shape",
"=",
"split_string",
".",
"dense_shape",
")",
"# Create a dense tensor of the float values that were converted from text csv",
"dense_floats",
"=",
"tf",
".",
"sparse_tensor_to_dense",
"(",
"sp_input",
"=",
"new_sparse_tensor",
",",
"default_value",
"=",
"0.0",
")",
"dense_floats_vector",
"=",
"tf",
".",
"squeeze",
"(",
"input",
"=",
"dense_floats",
",",
"axis",
"=",
"0",
")",
"return",
"dense_floats_vector"
] | [
4,
0
] | [
38,
28
] | python | en | ['en', 'en', 'en'] | True |
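A minimal usage sketch for the helper above, assuming TensorFlow 1.x graph mode, since it relies on tf.string_split, tf.sparse_tensor_to_dense, and Session, which were removed or renamed in TF 2.x.

```python
import tensorflow as tf  # assumes TensorFlow 1.x graph-mode APIs, as used above

raw = tf.constant("1.5;2.0;3.25")        # one CSV cell holding a ';'-delimited sequence
floats = split_and_convert_string(raw)   # helper defined in the row above

with tf.Session() as sess:
    print(sess.run(floats))              # -> [1.5  2.   3.25]
```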
convert_sequences_from_strings_to_floats | (features, column_list, seq_len) | Converts sequences from single strings to a sequence of floats.
Given features dictionary and feature column names list, convert features
from strings to a sequence of floats.
Args:
features: Dictionary of tensors of our features as tf.strings.
column_list: List of column names of our features.
seq_len: Number of timesteps in sequence.
Returns:
Dictionary of tensors of our features as tf.float64s.
| Converts sequences from single strings to a sequence of floats. | def convert_sequences_from_strings_to_floats(features, column_list, seq_len):
"""Converts sequences from single strings to a sequence of floats.
Given features dictionary and feature column names list, convert features
from strings to a sequence of floats.
Args:
features: Dictionary of tensors of our features as tf.strings.
column_list: List of column names of our features.
seq_len: Number of timesteps in sequence.
Returns:
Dictionary of tensors of our features as tf.float64s.
"""
for column in column_list:
features[column] = split_and_convert_string(features[column])
# Since we know the sequence length, set the shape to remove the ambiguity
features[column].set_shape([seq_len])
return features | [
"def",
"convert_sequences_from_strings_to_floats",
"(",
"features",
",",
"column_list",
",",
"seq_len",
")",
":",
"for",
"column",
"in",
"column_list",
":",
"features",
"[",
"column",
"]",
"=",
"split_and_convert_string",
"(",
"features",
"[",
"column",
"]",
")",
"# Since we know the sequence length, set the shape to remove the ambiguity",
"features",
"[",
"column",
"]",
".",
"set_shape",
"(",
"[",
"seq_len",
"]",
")",
"return",
"features"
] | [
41,
0
] | [
60,
17
] | python | en | ['en', 'en', 'en'] | True |
decode_csv | (value_column, mode, batch_size, params) | Decodes CSV file into tensors.
Given single string tensor, sequence length, and number of features,
returns features dictionary of tensors and labels tensor.
Args:
value_column: tf.string tensor of shape () comprising an entire line of
a CSV file.
mode: The estimator ModeKeys. Can be TRAIN or EVAL.
batch_size: Number of examples per batch.
params: Dictionary of user passed parameters.
Returns:
Features dictionary of tensors and labels tensor.
| Decodes CSV file into tensors. | def decode_csv(value_column, mode, batch_size, params):
"""Decodes CSV file into tensors.
Given single string tensor, sequence length, and number of features,
returns features dictionary of tensors and labels tensor.
Args:
value_column: tf.string tensor of shape () comprising an entire line of
a CSV file.
mode: The estimator ModeKeys. Can be TRAIN or EVAL.
batch_size: Number of examples per batch.
params: Dictionary of user passed parameters.
Returns:
Features dictionary of tensors and labels tensor.
"""
if (mode == tf.estimator.ModeKeys.TRAIN or
(mode == tf.estimator.ModeKeys.EVAL and
(params["training_mode"] != "tune_anomaly_thresholds" or
(params["training_mode"] == "tune_anomaly_thresholds" and
not params["labeled_tune_thresh"])))):
# For subset of CSV files that do NOT have labels
columns = tf.decode_csv(
records=value_column,
record_defaults=params["feat_defaults"],
field_delim=",")
features = dict(zip(params["feat_names"], columns))
features = convert_sequences_from_strings_to_floats(
features=features,
column_list=params["feat_names"],
seq_len=params["seq_len"])
return features
else:
# For subset of CSV files that DO have labels
columns = tf.decode_csv(
records=value_column,
record_defaults=params["feat_defaults"] + [[0.0]], # add label default
field_delim=",")
features = dict(zip(params["feat_names"] + ["anomalous_sequence_flag"], columns))
labels = tf.cast(x=features.pop("anomalous_sequence_flag"), dtype=tf.float64)
features = convert_sequences_from_strings_to_floats(
features=features,
column_list=params["feat_names"],
seq_len=params["seq_len"])
return features, labels | [
"def",
"decode_csv",
"(",
"value_column",
",",
"mode",
",",
"batch_size",
",",
"params",
")",
":",
"if",
"(",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"TRAIN",
"or",
"(",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"EVAL",
"and",
"(",
"params",
"[",
"\"training_mode\"",
"]",
"!=",
"\"tune_anomaly_thresholds\"",
"or",
"(",
"params",
"[",
"\"training_mode\"",
"]",
"==",
"\"tune_anomaly_thresholds\"",
"and",
"not",
"params",
"[",
"\"labeled_tune_thresh\"",
"]",
")",
")",
")",
")",
":",
"# For subset of CSV files that do NOT have labels",
"columns",
"=",
"tf",
".",
"decode_csv",
"(",
"records",
"=",
"value_column",
",",
"record_defaults",
"=",
"params",
"[",
"\"feat_defaults\"",
"]",
",",
"field_delim",
"=",
"\",\"",
")",
"features",
"=",
"dict",
"(",
"zip",
"(",
"params",
"[",
"\"feat_names\"",
"]",
",",
"columns",
")",
")",
"features",
"=",
"convert_sequences_from_strings_to_floats",
"(",
"features",
"=",
"features",
",",
"column_list",
"=",
"params",
"[",
"\"feat_names\"",
"]",
",",
"seq_len",
"=",
"params",
"[",
"\"seq_len\"",
"]",
")",
"return",
"features",
"else",
":",
"# For subset of CSV files that DO have labels",
"columns",
"=",
"tf",
".",
"decode_csv",
"(",
"records",
"=",
"value_column",
",",
"record_defaults",
"=",
"params",
"[",
"\"feat_defaults\"",
"]",
"+",
"[",
"[",
"0.0",
"]",
"]",
",",
"# add label default",
"field_delim",
"=",
"\",\"",
")",
"features",
"=",
"dict",
"(",
"zip",
"(",
"params",
"[",
"\"feat_names\"",
"]",
"+",
"[",
"\"anomalous_sequence_flag\"",
"]",
",",
"columns",
")",
")",
"labels",
"=",
"tf",
".",
"cast",
"(",
"x",
"=",
"features",
".",
"pop",
"(",
"\"anomalous_sequence_flag\"",
")",
",",
"dtype",
"=",
"tf",
".",
"float64",
")",
"features",
"=",
"convert_sequences_from_strings_to_floats",
"(",
"features",
"=",
"features",
",",
"column_list",
"=",
"params",
"[",
"\"feat_names\"",
"]",
",",
"seq_len",
"=",
"params",
"[",
"\"seq_len\"",
"]",
")",
"return",
"features",
",",
"labels"
] | [
63,
0
] | [
113,
27
] | python | en | ['en', 'en', 'pt'] | True |
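decode_csv() is driven entirely by its `params` dictionary. The sketch below is a hypothetical example of the keys it reads; the feature names and values are illustrative only and are not taken from the source.

```python
# Hypothetical params dict illustrating the keys decode_csv() reads.
params = {
    "feat_names": ["sensor_1", "sensor_2", "sensor_3"],
    "feat_defaults": [[""], [""], [""]],  # one default per feature column
    "seq_len": 30,                        # timesteps packed into each ';'-delimited cell
    "training_mode": "tune_anomaly_thresholds",
    "labeled_tune_thresh": True,          # True -> rows carry a trailing label column
}
```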
read_dataset | (filename, mode, batch_size, params) | Reads CSV time series dataset using tf.data, doing necessary preprocessing.
Given filename, mode, batch size and other parameters, read CSV dataset using
Dataset API, apply necessary preprocessing, and return an input function to
the Estimator API.
Args:
filename: The file pattern that we want to read into our tf.data dataset.
mode: The estimator ModeKeys. Can be TRAIN or EVAL.
batch_size: Number of examples per batch.
params: Dictionary of user passed parameters.
Returns:
An input function.
| Reads CSV time series dataset using tf.data, doing necessary preprocessing. | def read_dataset(filename, mode, batch_size, params):
"""Reads CSV time series dataset using tf.data, doing necessary preprocessing.
Given filename, mode, batch size and other parameters, read CSV dataset using
Dataset API, apply necessary preprocessing, and return an input function to
the Estimator API.
Args:
filename: The file pattern that we want to read into our tf.data dataset.
mode: The estimator ModeKeys. Can be TRAIN or EVAL.
batch_size: Number of examples per batch.
params: Dictionary of user passed parameters.
Returns:
An input function.
"""
def _input_fn():
"""Wrapper input function to be used by Estimator API to get data tensors.
Returns:
Batched dataset object of dictionary of feature tensors and label tensor.
"""
# Create list of files that match pattern
file_list = tf.gfile.Glob(filename=filename)
# Create dataset from file list
dataset = tf.data.TextLineDataset(filenames=file_list) # Read text file
# Decode the CSV file into a features dictionary of tensors
dataset = dataset.map(
map_func=lambda x: decode_csv(
value_column=x, mode=mode, batch_size=batch_size, params=params))
# Determine amount of times to repeat file if we are training or evaluating
if mode == tf.estimator.ModeKeys.TRAIN:
num_epochs = None # indefinitely
else:
num_epochs = 1 # end-of-input after this
# Repeat files num_epoch times
dataset = dataset.repeat(count=num_epochs)
# Group the data into batches
dataset = dataset.batch(batch_size=batch_size)
# Determine if we should shuffle based on if we are training or evaluating
if mode == tf.estimator.ModeKeys.TRAIN:
dataset = dataset.shuffle(buffer_size=10 * batch_size)
# Create a iterator, then pull batch of features from the example queue
batched_dataset = dataset.make_one_shot_iterator().get_next()
return batched_dataset
return _input_fn | [
"def",
"read_dataset",
"(",
"filename",
",",
"mode",
",",
"batch_size",
",",
"params",
")",
":",
"def",
"_input_fn",
"(",
")",
":",
"\"\"\"Wrapper input function to be used by Estimator API to get data tensors.\n\n Returns:\n Batched dataset object of dictionary of feature tensors and label tensor.\n \"\"\"",
"# Create list of files that match pattern",
"file_list",
"=",
"tf",
".",
"gfile",
".",
"Glob",
"(",
"filename",
"=",
"filename",
")",
"# Create dataset from file list",
"dataset",
"=",
"tf",
".",
"data",
".",
"TextLineDataset",
"(",
"filenames",
"=",
"file_list",
")",
"# Read text file",
"# Decode the CSV file into a features dictionary of tensors",
"dataset",
"=",
"dataset",
".",
"map",
"(",
"map_func",
"=",
"lambda",
"x",
":",
"decode_csv",
"(",
"value_column",
"=",
"x",
",",
"mode",
"=",
"mode",
",",
"batch_size",
"=",
"batch_size",
",",
"params",
"=",
"params",
")",
")",
"# Determine amount of times to repeat file if we are training or evaluating",
"if",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"TRAIN",
":",
"num_epochs",
"=",
"None",
"# indefinitely",
"else",
":",
"num_epochs",
"=",
"1",
"# end-of-input after this",
"# Repeat files num_epoch times",
"dataset",
"=",
"dataset",
".",
"repeat",
"(",
"count",
"=",
"num_epochs",
")",
"# Group the data into batches",
"dataset",
"=",
"dataset",
".",
"batch",
"(",
"batch_size",
"=",
"batch_size",
")",
"# Determine if we should shuffle based on if we are training or evaluating",
"if",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"TRAIN",
":",
"dataset",
"=",
"dataset",
".",
"shuffle",
"(",
"buffer_size",
"=",
"10",
"*",
"batch_size",
")",
"# Create a iterator, then pull batch of features from the example queue",
"batched_dataset",
"=",
"dataset",
".",
"make_one_shot_iterator",
"(",
")",
".",
"get_next",
"(",
")",
"return",
"batched_dataset",
"return",
"_input_fn"
] | [
116,
0
] | [
171,
18
] | python | en | ['en', 'en', 'en'] | True |
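A sketch of how the closure returned by read_dataset() might be wired into a TF 1.x Estimator training call; the file pattern, batch size, `params` dict, and `estimator` object are placeholders rather than values from the source.

```python
import tensorflow as tf  # TensorFlow 1.x, matching the APIs used above

# `params` and `estimator` are assumed to be defined elsewhere (placeholders).
train_input_fn = read_dataset(
    filename="gs://my-bucket/train-*.csv",
    mode=tf.estimator.ModeKeys.TRAIN,
    batch_size=32,
    params=params,
)
estimator.train(input_fn=train_input_fn, max_steps=1000)
```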
Node.iter_fields | (self, exclude=None, only=None) | This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
| This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
| def iter_fields(self, exclude=None, only=None):
"""This method iterates over all fields that are defined and yields
``(key, value)`` tuples. Per default all fields are returned, but
it's possible to limit that to some fields by providing the `only`
parameter or to exclude some using the `exclude` parameter. Both
should be sets or tuples of field names.
"""
for name in self.fields:
if (exclude is only is None) or \
(exclude is not None and name not in exclude) or \
(only is not None and name in only):
try:
yield name, getattr(self, name)
except AttributeError:
pass | [
"def",
"iter_fields",
"(",
"self",
",",
"exclude",
"=",
"None",
",",
"only",
"=",
"None",
")",
":",
"for",
"name",
"in",
"self",
".",
"fields",
":",
"if",
"(",
"exclude",
"is",
"only",
"is",
"None",
")",
"or",
"(",
"exclude",
"is",
"not",
"None",
"and",
"name",
"not",
"in",
"exclude",
")",
"or",
"(",
"only",
"is",
"not",
"None",
"and",
"name",
"in",
"only",
")",
":",
"try",
":",
"yield",
"name",
",",
"getattr",
"(",
"self",
",",
"name",
")",
"except",
"AttributeError",
":",
"pass"
] | [
147,
4
] | [
161,
24
] | python | en | ['en', 'en', 'en'] | True |
Node.iter_child_nodes | (self, exclude=None, only=None) | Iterates over all direct child nodes of the node. This iterates
over all fields and yields the values if they are nodes. If the value
of a field is a list all the nodes in that list are returned.
| Iterates over all direct child nodes of the node. This iterates
over all fields and yields the values if they are nodes. If the value
of a field is a list all the nodes in that list are returned.
| def iter_child_nodes(self, exclude=None, only=None):
"""Iterates over all direct child nodes of the node. This iterates
over all fields and yields the values if they are nodes. If the value
of a field is a list all the nodes in that list are returned.
"""
for field, item in self.iter_fields(exclude, only):
if isinstance(item, list):
for n in item:
if isinstance(n, Node):
yield n
elif isinstance(item, Node):
yield item | [
"def",
"iter_child_nodes",
"(",
"self",
",",
"exclude",
"=",
"None",
",",
"only",
"=",
"None",
")",
":",
"for",
"field",
",",
"item",
"in",
"self",
".",
"iter_fields",
"(",
"exclude",
",",
"only",
")",
":",
"if",
"isinstance",
"(",
"item",
",",
"list",
")",
":",
"for",
"n",
"in",
"item",
":",
"if",
"isinstance",
"(",
"n",
",",
"Node",
")",
":",
"yield",
"n",
"elif",
"isinstance",
"(",
"item",
",",
"Node",
")",
":",
"yield",
"item"
] | [
163,
4
] | [
174,
26
] | python | en | ['en', 'en', 'en'] | True |
Node.find | (self, node_type) | Find the first node of a given type. If no such node exists the
return value is `None`.
| Find the first node of a given type. If no such node exists the
return value is `None`.
| def find(self, node_type):
"""Find the first node of a given type. If no such node exists the
return value is `None`.
"""
for result in self.find_all(node_type):
return result | [
"def",
"find",
"(",
"self",
",",
"node_type",
")",
":",
"for",
"result",
"in",
"self",
".",
"find_all",
"(",
"node_type",
")",
":",
"return",
"result"
] | [
176,
4
] | [
181,
25
] | python | en | ['en', 'en', 'en'] | True |
Node.find_all | (self, node_type) | Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
| Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
| def find_all(self, node_type):
"""Find all the nodes of a given type. If the type is a tuple,
the check is performed for any of the tuple items.
"""
for child in self.iter_child_nodes():
if isinstance(child, node_type):
yield child
for result in child.find_all(node_type):
yield result | [
"def",
"find_all",
"(",
"self",
",",
"node_type",
")",
":",
"for",
"child",
"in",
"self",
".",
"iter_child_nodes",
"(",
")",
":",
"if",
"isinstance",
"(",
"child",
",",
"node_type",
")",
":",
"yield",
"child",
"for",
"result",
"in",
"child",
".",
"find_all",
"(",
"node_type",
")",
":",
"yield",
"result"
] | [
183,
4
] | [
191,
28
] | python | en | ['en', 'en', 'en'] | True |
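A usage sketch of find_all() through Jinja2's public API: parse a template and collect every variable Name node in the tree.

```python
from jinja2 import Environment, nodes

env = Environment()
ast = env.parse("Hello {{ user.name }}! You have {{ count }} new messages.")

# find_all() recursively yields every node of the requested type.
print([n.name for n in ast.find_all(nodes.Name)])   # ['user', 'count']
```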
Node.set_ctx | (self, ctx) | Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
| Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
| def set_ctx(self, ctx):
"""Reset the context of a node and all child nodes. Per default the
parser will all generate nodes that have a 'load' context as it's the
most common one. This method is used in the parser to set assignment
targets and other nodes to a store context.
"""
todo = deque([self])
while todo:
node = todo.popleft()
if 'ctx' in node.fields:
node.ctx = ctx
todo.extend(node.iter_child_nodes())
return self | [
"def",
"set_ctx",
"(",
"self",
",",
"ctx",
")",
":",
"todo",
"=",
"deque",
"(",
"[",
"self",
"]",
")",
"while",
"todo",
":",
"node",
"=",
"todo",
".",
"popleft",
"(",
")",
"if",
"'ctx'",
"in",
"node",
".",
"fields",
":",
"node",
".",
"ctx",
"=",
"ctx",
"todo",
".",
"extend",
"(",
"node",
".",
"iter_child_nodes",
"(",
")",
")",
"return",
"self"
] | [
193,
4
] | [
205,
19
] | python | en | ['en', 'en', 'en'] | True |
Node.set_lineno | (self, lineno, override=False) | Set the line numbers of the node and children. | Set the line numbers of the node and children. | def set_lineno(self, lineno, override=False):
"""Set the line numbers of the node and children."""
todo = deque([self])
while todo:
node = todo.popleft()
if 'lineno' in node.attributes:
if node.lineno is None or override:
node.lineno = lineno
todo.extend(node.iter_child_nodes())
return self | [
"def",
"set_lineno",
"(",
"self",
",",
"lineno",
",",
"override",
"=",
"False",
")",
":",
"todo",
"=",
"deque",
"(",
"[",
"self",
"]",
")",
"while",
"todo",
":",
"node",
"=",
"todo",
".",
"popleft",
"(",
")",
"if",
"'lineno'",
"in",
"node",
".",
"attributes",
":",
"if",
"node",
".",
"lineno",
"is",
"None",
"or",
"override",
":",
"node",
".",
"lineno",
"=",
"lineno",
"todo",
".",
"extend",
"(",
"node",
".",
"iter_child_nodes",
"(",
")",
")",
"return",
"self"
] | [
207,
4
] | [
216,
19
] | python | en | ['en', 'en', 'en'] | True |
Node.set_environment | (self, environment) | Set the environment for all nodes. | Set the environment for all nodes. | def set_environment(self, environment):
"""Set the environment for all nodes."""
todo = deque([self])
while todo:
node = todo.popleft()
node.environment = environment
todo.extend(node.iter_child_nodes())
return self | [
"def",
"set_environment",
"(",
"self",
",",
"environment",
")",
":",
"todo",
"=",
"deque",
"(",
"[",
"self",
"]",
")",
"while",
"todo",
":",
"node",
"=",
"todo",
".",
"popleft",
"(",
")",
"node",
".",
"environment",
"=",
"environment",
"todo",
".",
"extend",
"(",
"node",
".",
"iter_child_nodes",
"(",
")",
")",
"return",
"self"
] | [
218,
4
] | [
225,
19
] | python | en | ['en', 'en', 'en'] | True |
Expr.as_const | (self, eval_ctx=None) | Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
An :class:`EvalContext` can be provided, if none is given
a default context is created which requires the nodes to have
an attached environment.
.. versionchanged:: 2.4
the `eval_ctx` parameter was added.
| Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible. | def as_const(self, eval_ctx=None):
"""Return the value of the expression as constant or raise
:exc:`Impossible` if this was not possible.
An :class:`EvalContext` can be provided, if none is given
a default context is created which requires the nodes to have
an attached environment.
.. versionchanged:: 2.4
the `eval_ctx` parameter was added.
"""
raise Impossible() | [
"def",
"as_const",
"(",
"self",
",",
"eval_ctx",
"=",
"None",
")",
":",
"raise",
"Impossible",
"(",
")"
] | [
396,
4
] | [
407,
26
] | python | en | ['en', 'en', 'en'] | True |
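A small sketch of constant evaluation via as_const(); it assumes the parse-tree layout noted in the comment, which is how Jinja2 represents a simple arithmetic output expression before the compiler folds it.

```python
from jinja2 import Environment

env = Environment()
# "{{ 6 * 7 }}" parses to Template(body=[Output(nodes=[Mul(Const(6), Const(7))])]).
mul_node = env.parse("{{ 6 * 7 }}").body[0].nodes[0]
print(mul_node.as_const())   # 42
```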
Expr.can_assign | (self) | Check if it's possible to assign something to this node. | Check if it's possible to assign something to this node. | def can_assign(self):
"""Check if it's possible to assign something to this node."""
return False | [
"def",
"can_assign",
"(",
"self",
")",
":",
"return",
"False"
] | [
409,
4
] | [
411,
20
] | python | en | ['en', 'en', 'en'] | True |
Const.from_untrusted | (cls, value, lineno=None, environment=None) | Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
| Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
| def from_untrusted(cls, value, lineno=None, environment=None):
"""Return a const object if the value is representable as
constant value in the generated code, otherwise it will raise
an `Impossible` exception.
"""
from .compiler import has_safe_repr
if not has_safe_repr(value):
raise Impossible()
return cls(value, lineno=lineno, environment=environment) | [
"def",
"from_untrusted",
"(",
"cls",
",",
"value",
",",
"lineno",
"=",
"None",
",",
"environment",
"=",
"None",
")",
":",
"from",
".",
"compiler",
"import",
"has_safe_repr",
"if",
"not",
"has_safe_repr",
"(",
"value",
")",
":",
"raise",
"Impossible",
"(",
")",
"return",
"cls",
"(",
"value",
",",
"lineno",
"=",
"lineno",
",",
"environment",
"=",
"environment",
")"
] | [
503,
4
] | [
511,
65
] | python | en | ['en', 'en', 'en'] | True |
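A usage sketch of from_untrusted(): values with a safe repr become Const nodes, anything else raises Impossible.

```python
from jinja2 import nodes
from jinja2.nodes import Impossible

const = nodes.Const.from_untrusted(42)     # ints have a safe repr
print(const.value)                         # 42

try:
    nodes.Const.from_untrusted(object())   # arbitrary objects do not
except Impossible:
    print("not representable as a template constant")
```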
with_metaclass | (meta, *bases) | Create a base class with a metaclass. | Create a base class with a metaclass. | def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a
# dummy metaclass for one level of class instantiation that replaces
# itself with the actual metaclass.
class metaclass(type):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, 'temporary_class', (), {}) | [
"def",
"with_metaclass",
"(",
"meta",
",",
"*",
"bases",
")",
":",
"# This requires a bit of explanation: the basic idea is to make a",
"# dummy metaclass for one level of class instantiation that replaces",
"# itself with the actual metaclass.",
"class",
"metaclass",
"(",
"type",
")",
":",
"def",
"__new__",
"(",
"cls",
",",
"name",
",",
"this_bases",
",",
"d",
")",
":",
"return",
"meta",
"(",
"name",
",",
"bases",
",",
"d",
")",
"return",
"type",
".",
"__new__",
"(",
"metaclass",
",",
"'temporary_class'",
",",
"(",
")",
",",
"{",
"}",
")"
] | [
84,
0
] | [
92,
61
] | python | en | ['en', 'en', 'en'] | True |
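A usage sketch of the helper above (the classic six-style compatibility idiom), assuming it is in scope and using a toy metaclass defined only for illustration.

```python
class RegistryMeta(type):
    """Toy metaclass that records every class it creates."""
    registry = []

    def __new__(mcls, name, bases, namespace):
        cls = super().__new__(mcls, name, bases, namespace)
        mcls.registry.append(cls)
        return cls


class Plugin(with_metaclass(RegistryMeta)):
    pass


print(RegistryMeta.registry)   # [<class '...Plugin'>]
```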
action | (function=None, *, permissions=None, description=None) |
Conveniently add attributes to an action function::
@admin.action(
permissions=['publish'],
description='Mark selected stories as published',
)
def make_published(self, request, queryset):
queryset.update(status='p')
This is equivalent to setting some attributes (with the original, longer
names) on the function directly::
def make_published(self, request, queryset):
queryset.update(status='p')
make_published.allowed_permissions = ['publish']
make_published.short_description = 'Mark selected stories as published'
|
Conveniently add attributes to an action function:: | def action(function=None, *, permissions=None, description=None):
"""
Conveniently add attributes to an action function::
@admin.action(
permissions=['publish'],
description='Mark selected stories as published',
)
def make_published(self, request, queryset):
queryset.update(status='p')
This is equivalent to setting some attributes (with the original, longer
names) on the function directly::
def make_published(self, request, queryset):
queryset.update(status='p')
make_published.allowed_permissions = ['publish']
make_published.short_description = 'Mark selected stories as published'
"""
def decorator(func):
if permissions is not None:
func.allowed_permissions = permissions
if description is not None:
func.short_description = description
return func
if function is None:
return decorator
else:
return decorator(function) | [
"def",
"action",
"(",
"function",
"=",
"None",
",",
"*",
",",
"permissions",
"=",
"None",
",",
"description",
"=",
"None",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"if",
"permissions",
"is",
"not",
"None",
":",
"func",
".",
"allowed_permissions",
"=",
"permissions",
"if",
"description",
"is",
"not",
"None",
":",
"func",
".",
"short_description",
"=",
"description",
"return",
"func",
"if",
"function",
"is",
"None",
":",
"return",
"decorator",
"else",
":",
"return",
"decorator",
"(",
"function",
")"
] | [
0,
0
] | [
28,
34
] | python | en | ['en', 'error', 'th'] | False |
display | (function=None, *, boolean=None, ordering=None, description=None, empty_value=None) |
Conveniently add attributes to a display function::
@admin.display(
boolean=True,
ordering='-publish_date',
description='Is Published?',
)
def is_published(self, obj):
return obj.publish_date is not None
This is equivalent to setting some attributes (with the original, longer
names) on the function directly::
def is_published(self, obj):
return obj.publish_date is not None
is_published.boolean = True
is_published.admin_order_field = '-publish_date'
is_published.short_description = 'Is Published?'
|
Conveniently add attributes to a display function:: | def display(function=None, *, boolean=None, ordering=None, description=None, empty_value=None):
"""
Conveniently add attributes to a display function::
@admin.display(
boolean=True,
ordering='-publish_date',
description='Is Published?',
)
def is_published(self, obj):
return obj.publish_date is not None
This is equivalent to setting some attributes (with the original, longer
names) on the function directly::
def is_published(self, obj):
return obj.publish_date is not None
is_published.boolean = True
is_published.admin_order_field = '-publish_date'
is_published.short_description = 'Is Published?'
"""
def decorator(func):
if boolean is not None and empty_value is not None:
raise ValueError(
'The boolean and empty_value arguments to the @display '
'decorator are mutually exclusive.'
)
if boolean is not None:
func.boolean = boolean
if ordering is not None:
func.admin_order_field = ordering
if description is not None:
func.short_description = description
if empty_value is not None:
func.empty_value_display = empty_value
return func
if function is None:
return decorator
else:
return decorator(function) | [
"def",
"display",
"(",
"function",
"=",
"None",
",",
"*",
",",
"boolean",
"=",
"None",
",",
"ordering",
"=",
"None",
",",
"description",
"=",
"None",
",",
"empty_value",
"=",
"None",
")",
":",
"def",
"decorator",
"(",
"func",
")",
":",
"if",
"boolean",
"is",
"not",
"None",
"and",
"empty_value",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"'The boolean and empty_value arguments to the @display '",
"'decorator are mutually exclusive.'",
")",
"if",
"boolean",
"is",
"not",
"None",
":",
"func",
".",
"boolean",
"=",
"boolean",
"if",
"ordering",
"is",
"not",
"None",
":",
"func",
".",
"admin_order_field",
"=",
"ordering",
"if",
"description",
"is",
"not",
"None",
":",
"func",
".",
"short_description",
"=",
"description",
"if",
"empty_value",
"is",
"not",
"None",
":",
"func",
".",
"empty_value_display",
"=",
"empty_value",
"return",
"func",
"if",
"function",
"is",
"None",
":",
"return",
"decorator",
"else",
":",
"return",
"decorator",
"(",
"function",
")"
] | [
31,
0
] | [
70,
34
] | python | en | ['en', 'error', 'th'] | False |
register | (*models, site=None) |
Register the given model(s) classes and wrapped ModelAdmin class with
admin site:
@register(Author)
class AuthorAdmin(admin.ModelAdmin):
pass
The `site` kwarg is an admin site to use instead of the default admin site.
|
Register the given model(s) classes and wrapped ModelAdmin class with
admin site: | def register(*models, site=None):
"""
Register the given model(s) classes and wrapped ModelAdmin class with
admin site:
@register(Author)
class AuthorAdmin(admin.ModelAdmin):
pass
The `site` kwarg is an admin site to use instead of the default admin site.
"""
from django.contrib.admin import ModelAdmin
from django.contrib.admin.sites import AdminSite, site as default_site
def _model_admin_wrapper(admin_class):
if not models:
raise ValueError('At least one model must be passed to register.')
admin_site = site or default_site
if not isinstance(admin_site, AdminSite):
raise ValueError('site must subclass AdminSite')
if not issubclass(admin_class, ModelAdmin):
raise ValueError('Wrapped class must subclass ModelAdmin.')
admin_site.register(models, admin_class=admin_class)
return admin_class
return _model_admin_wrapper | [
"def",
"register",
"(",
"*",
"models",
",",
"site",
"=",
"None",
")",
":",
"from",
"django",
".",
"contrib",
".",
"admin",
"import",
"ModelAdmin",
"from",
"django",
".",
"contrib",
".",
"admin",
".",
"sites",
"import",
"AdminSite",
",",
"site",
"as",
"default_site",
"def",
"_model_admin_wrapper",
"(",
"admin_class",
")",
":",
"if",
"not",
"models",
":",
"raise",
"ValueError",
"(",
"'At least one model must be passed to register.'",
")",
"admin_site",
"=",
"site",
"or",
"default_site",
"if",
"not",
"isinstance",
"(",
"admin_site",
",",
"AdminSite",
")",
":",
"raise",
"ValueError",
"(",
"'site must subclass AdminSite'",
")",
"if",
"not",
"issubclass",
"(",
"admin_class",
",",
"ModelAdmin",
")",
":",
"raise",
"ValueError",
"(",
"'Wrapped class must subclass ModelAdmin.'",
")",
"admin_site",
".",
"register",
"(",
"models",
",",
"admin_class",
"=",
"admin_class",
")",
"return",
"admin_class",
"return",
"_model_admin_wrapper"
] | [
73,
0
] | [
102,
31
] | python | en | ['en', 'error', 'th'] | False |
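A usage sketch of the decorator with the `site` keyword mentioned in the docstring; the app, models, and custom AdminSite name below are hypothetical.

```python
from django.contrib import admin
from django.contrib.admin import AdminSite

from myapp.models import Author, Book          # hypothetical app and models

events_admin = AdminSite(name="events_admin")   # hypothetical custom site


@admin.register(Author, Book, site=events_admin)
class PublishingAdmin(admin.ModelAdmin):
    pass
```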
_check_dist_requires_python | (
dist: Distribution,
version_info: Tuple[int, int, int],
ignore_requires_python: bool = False,
) |
Check whether the given Python version is compatible with a distribution's
"Requires-Python" value.
:param version_info: A 3-tuple of ints representing the Python
major-minor-micro version to check.
:param ignore_requires_python: Whether to ignore the "Requires-Python"
value if the given Python version isn't compatible.
:raises UnsupportedPythonVersion: When the given Python version isn't
compatible.
|
Check whether the given Python version is compatible with a distribution's
"Requires-Python" value. | def _check_dist_requires_python(
dist: Distribution,
version_info: Tuple[int, int, int],
ignore_requires_python: bool = False,
) -> None:
"""
Check whether the given Python version is compatible with a distribution's
"Requires-Python" value.
:param version_info: A 3-tuple of ints representing the Python
major-minor-micro version to check.
:param ignore_requires_python: Whether to ignore the "Requires-Python"
value if the given Python version isn't compatible.
:raises UnsupportedPythonVersion: When the given Python version isn't
compatible.
"""
requires_python = get_requires_python(dist)
try:
is_compatible = check_requires_python(
requires_python, version_info=version_info
)
except specifiers.InvalidSpecifier as exc:
logger.warning(
"Package %r has an invalid Requires-Python: %s", dist.project_name, exc
)
return
if is_compatible:
return
version = ".".join(map(str, version_info))
if ignore_requires_python:
logger.debug(
"Ignoring failed Requires-Python check for package %r: %s not in %r",
dist.project_name,
version,
requires_python,
)
return
raise UnsupportedPythonVersion(
"Package {!r} requires a different Python: {} not in {!r}".format(
dist.project_name, version, requires_python
)
) | [
"def",
"_check_dist_requires_python",
"(",
"dist",
":",
"Distribution",
",",
"version_info",
":",
"Tuple",
"[",
"int",
",",
"int",
",",
"int",
"]",
",",
"ignore_requires_python",
":",
"bool",
"=",
"False",
",",
")",
"->",
"None",
":",
"requires_python",
"=",
"get_requires_python",
"(",
"dist",
")",
"try",
":",
"is_compatible",
"=",
"check_requires_python",
"(",
"requires_python",
",",
"version_info",
"=",
"version_info",
")",
"except",
"specifiers",
".",
"InvalidSpecifier",
"as",
"exc",
":",
"logger",
".",
"warning",
"(",
"\"Package %r has an invalid Requires-Python: %s\"",
",",
"dist",
".",
"project_name",
",",
"exc",
")",
"return",
"if",
"is_compatible",
":",
"return",
"version",
"=",
"\".\"",
".",
"join",
"(",
"map",
"(",
"str",
",",
"version_info",
")",
")",
"if",
"ignore_requires_python",
":",
"logger",
".",
"debug",
"(",
"\"Ignoring failed Requires-Python check for package %r: %s not in %r\"",
",",
"dist",
".",
"project_name",
",",
"version",
",",
"requires_python",
",",
")",
"return",
"raise",
"UnsupportedPythonVersion",
"(",
"\"Package {!r} requires a different Python: {} not in {!r}\"",
".",
"format",
"(",
"dist",
".",
"project_name",
",",
"version",
",",
"requires_python",
")",
")"
] | [
51,
0
] | [
96,
5
] | python | en | ['en', 'error', 'th'] | False |
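The Requires-Python comparison that this helper delegates to check_requires_python() can be illustrated with the packaging library that pip vendors; this is a standalone sketch of the specifier check, not pip's internal call path.

```python
from packaging.specifiers import SpecifierSet

requires_python = SpecifierSet(">=3.7,<4")   # e.g. a package's Requires-Python value

print("3.6.8" in requires_python)    # False -> pip would raise UnsupportedPythonVersion
print("3.10.2" in requires_python)   # True  -> the check passes
```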
Resolver.resolve | (
self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
) | Resolve what operations need to be done
As a side-effect of this method, the packages (and their dependencies)
are downloaded, unpacked and prepared for installation. This
preparation is done by ``pip.operations.prepare``.
Once PyPI has static dependency metadata available, it would be
possible to move the preparation to become a step separated from
dependency resolution.
| Resolve what operations need to be done | def resolve(
self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
) -> RequirementSet:
"""Resolve what operations need to be done
As a side-effect of this method, the packages (and their dependencies)
are downloaded, unpacked and prepared for installation. This
preparation is done by ``pip.operations.prepare``.
Once PyPI has static dependency metadata available, it would be
possible to move the preparation to become a step separated from
dependency resolution.
"""
requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
for req in root_reqs:
if req.constraint:
check_invalid_constraint_type(req)
requirement_set.add_requirement(req)
# Actually prepare the files, and collect any exceptions. Most hash
# exceptions cannot be checked ahead of time, because
# _populate_link() needs to be called before we can make decisions
# based on link type.
discovered_reqs: List[InstallRequirement] = []
hash_errors = HashErrors()
for req in chain(requirement_set.all_requirements, discovered_reqs):
try:
discovered_reqs.extend(self._resolve_one(requirement_set, req))
except HashError as exc:
exc.req = req
hash_errors.append(exc)
if hash_errors:
raise hash_errors
return requirement_set | [
"def",
"resolve",
"(",
"self",
",",
"root_reqs",
":",
"List",
"[",
"InstallRequirement",
"]",
",",
"check_supported_wheels",
":",
"bool",
")",
"->",
"RequirementSet",
":",
"requirement_set",
"=",
"RequirementSet",
"(",
"check_supported_wheels",
"=",
"check_supported_wheels",
")",
"for",
"req",
"in",
"root_reqs",
":",
"if",
"req",
".",
"constraint",
":",
"check_invalid_constraint_type",
"(",
"req",
")",
"requirement_set",
".",
"add_requirement",
"(",
"req",
")",
"# Actually prepare the files, and collect any exceptions. Most hash",
"# exceptions cannot be checked ahead of time, because",
"# _populate_link() needs to be called before we can make decisions",
"# based on link type.",
"discovered_reqs",
":",
"List",
"[",
"InstallRequirement",
"]",
"=",
"[",
"]",
"hash_errors",
"=",
"HashErrors",
"(",
")",
"for",
"req",
"in",
"chain",
"(",
"requirement_set",
".",
"all_requirements",
",",
"discovered_reqs",
")",
":",
"try",
":",
"discovered_reqs",
".",
"extend",
"(",
"self",
".",
"_resolve_one",
"(",
"requirement_set",
",",
"req",
")",
")",
"except",
"HashError",
"as",
"exc",
":",
"exc",
".",
"req",
"=",
"req",
"hash_errors",
".",
"append",
"(",
"exc",
")",
"if",
"hash_errors",
":",
"raise",
"hash_errors",
"return",
"requirement_set"
] | [
144,
4
] | [
179,
30
] | python | en | ['en', 'en', 'en'] | True |
Resolver._set_req_to_reinstall | (self, req: InstallRequirement) |
Set a requirement to be installed.
|
Set a requirement to be installed.
| def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
"""
Set a requirement to be installed.
"""
# Don't uninstall the conflict if doing a user install and the
# conflict is not a user install.
if not self.use_user_site or dist_in_usersite(req.satisfied_by):
req.should_reinstall = True
req.satisfied_by = None | [
"def",
"_set_req_to_reinstall",
"(",
"self",
",",
"req",
":",
"InstallRequirement",
")",
"->",
"None",
":",
"# Don't uninstall the conflict if doing a user install and the",
"# conflict is not a user install.",
"if",
"not",
"self",
".",
"use_user_site",
"or",
"dist_in_usersite",
"(",
"req",
".",
"satisfied_by",
")",
":",
"req",
".",
"should_reinstall",
"=",
"True",
"req",
".",
"satisfied_by",
"=",
"None"
] | [
190,
4
] | [
198,
31
] | python | en | ['en', 'error', 'th'] | False |
Resolver._check_skip_installed | (
self, req_to_install: InstallRequirement
) | Check if req_to_install should be skipped.
This will check if the req is installed, and whether we should upgrade
or reinstall it, taking into account all the relevant user options.
After calling this req_to_install will only have satisfied_by set to
None if the req_to_install is to be upgraded/reinstalled etc. Any
other value will be a dist recording the current thing installed that
satisfies the requirement.
Note that for vcs urls and the like we can't assess skipping in this
routine - we simply identify that we need to pull the thing down,
then later on it is pulled down and introspected to assess upgrade/
reinstalls etc.
:return: A text reason for why it was skipped, or None.
| Check if req_to_install should be skipped. | def _check_skip_installed(
self, req_to_install: InstallRequirement
) -> Optional[str]:
"""Check if req_to_install should be skipped.
This will check if the req is installed, and whether we should upgrade
or reinstall it, taking into account all the relevant user options.
After calling this req_to_install will only have satisfied_by set to
None if the req_to_install is to be upgraded/reinstalled etc. Any
other value will be a dist recording the current thing installed that
satisfies the requirement.
Note that for vcs urls and the like we can't assess skipping in this
routine - we simply identify that we need to pull the thing down,
then later on it is pulled down and introspected to assess upgrade/
reinstalls etc.
:return: A text reason for why it was skipped, or None.
"""
if self.ignore_installed:
return None
req_to_install.check_if_exists(self.use_user_site)
if not req_to_install.satisfied_by:
return None
if self.force_reinstall:
self._set_req_to_reinstall(req_to_install)
return None
if not self._is_upgrade_allowed(req_to_install):
if self.upgrade_strategy == "only-if-needed":
return "already satisfied, skipping upgrade"
return "already satisfied"
# Check for the possibility of an upgrade. For link-based
# requirements we have to pull the tree down and inspect to assess
# the version #, so it's handled way down.
if not req_to_install.link:
try:
self.finder.find_requirement(req_to_install, upgrade=True)
except BestVersionAlreadyInstalled:
# Then the best version is installed.
return "already up-to-date"
except DistributionNotFound:
# No distribution found, so we squash the error. It will
# be raised later when we re-try later to do the install.
# Why don't we just raise here?
pass
self._set_req_to_reinstall(req_to_install)
return None | [
"def",
"_check_skip_installed",
"(",
"self",
",",
"req_to_install",
":",
"InstallRequirement",
")",
"->",
"Optional",
"[",
"str",
"]",
":",
"if",
"self",
".",
"ignore_installed",
":",
"return",
"None",
"req_to_install",
".",
"check_if_exists",
"(",
"self",
".",
"use_user_site",
")",
"if",
"not",
"req_to_install",
".",
"satisfied_by",
":",
"return",
"None",
"if",
"self",
".",
"force_reinstall",
":",
"self",
".",
"_set_req_to_reinstall",
"(",
"req_to_install",
")",
"return",
"None",
"if",
"not",
"self",
".",
"_is_upgrade_allowed",
"(",
"req_to_install",
")",
":",
"if",
"self",
".",
"upgrade_strategy",
"==",
"\"only-if-needed\"",
":",
"return",
"\"already satisfied, skipping upgrade\"",
"return",
"\"already satisfied\"",
"# Check for the possibility of an upgrade. For link-based",
"# requirements we have to pull the tree down and inspect to assess",
"# the version #, so it's handled way down.",
"if",
"not",
"req_to_install",
".",
"link",
":",
"try",
":",
"self",
".",
"finder",
".",
"find_requirement",
"(",
"req_to_install",
",",
"upgrade",
"=",
"True",
")",
"except",
"BestVersionAlreadyInstalled",
":",
"# Then the best version is installed.",
"return",
"\"already up-to-date\"",
"except",
"DistributionNotFound",
":",
"# No distribution found, so we squash the error. It will",
"# be raised later when we re-try later to do the install.",
"# Why don't we just raise here?",
"pass",
"self",
".",
"_set_req_to_reinstall",
"(",
"req_to_install",
")",
"return",
"None"
] | [
200,
4
] | [
252,
19
] | python | en | ['en', 'en', 'en'] | True |
Resolver._populate_link | (self, req: InstallRequirement) | Ensure that if a link can be found for this, that it is found.
Note that req.link may still be None - if the requirement is already
installed and not needed to be upgraded based on the return value of
_is_upgrade_allowed().
If preparer.require_hashes is True, don't use the wheel cache, because
cached wheels, always built locally, have different hashes than the
files downloaded from the index server and thus throw false hash
mismatches. Furthermore, cached wheels at present have undeterministic
contents due to file modification times.
| Ensure that if a link can be found for this, that it is found. | def _populate_link(self, req: InstallRequirement) -> None:
"""Ensure that if a link can be found for this, that it is found.
Note that req.link may still be None - if the requirement is already
installed and not needed to be upgraded based on the return value of
_is_upgrade_allowed().
If preparer.require_hashes is True, don't use the wheel cache, because
cached wheels, always built locally, have different hashes than the
files downloaded from the index server and thus throw false hash
mismatches. Furthermore, cached wheels at present have undeterministic
contents due to file modification times.
"""
if req.link is None:
req.link = self._find_requirement_link(req)
if self.wheel_cache is None or self.preparer.require_hashes:
return
cache_entry = self.wheel_cache.get_cache_entry(
link=req.link,
package_name=req.name,
supported_tags=get_supported(),
)
if cache_entry is not None:
logger.debug("Using cached wheel link: %s", cache_entry.link)
if req.link is req.original_link and cache_entry.persistent:
req.original_link_is_in_wheel_cache = True
req.link = cache_entry.link | [
"def",
"_populate_link",
"(",
"self",
",",
"req",
":",
"InstallRequirement",
")",
"->",
"None",
":",
"if",
"req",
".",
"link",
"is",
"None",
":",
"req",
".",
"link",
"=",
"self",
".",
"_find_requirement_link",
"(",
"req",
")",
"if",
"self",
".",
"wheel_cache",
"is",
"None",
"or",
"self",
".",
"preparer",
".",
"require_hashes",
":",
"return",
"cache_entry",
"=",
"self",
".",
"wheel_cache",
".",
"get_cache_entry",
"(",
"link",
"=",
"req",
".",
"link",
",",
"package_name",
"=",
"req",
".",
"name",
",",
"supported_tags",
"=",
"get_supported",
"(",
")",
",",
")",
"if",
"cache_entry",
"is",
"not",
"None",
":",
"logger",
".",
"debug",
"(",
"\"Using cached wheel link: %s\"",
",",
"cache_entry",
".",
"link",
")",
"if",
"req",
".",
"link",
"is",
"req",
".",
"original_link",
"and",
"cache_entry",
".",
"persistent",
":",
"req",
".",
"original_link_is_in_wheel_cache",
"=",
"True",
"req",
".",
"link",
"=",
"cache_entry",
".",
"link"
] | [
276,
4
] | [
303,
39
] | python | en | ['en', 'en', 'en'] | True |
Resolver._get_dist_for | (self, req: InstallRequirement) | Takes an InstallRequirement and returns a single AbstractDist \
representing a prepared variant of the same.
| Takes an InstallRequirement and returns a single AbstractDist \
representing a prepared variant of the same.
| def _get_dist_for(self, req: InstallRequirement) -> Distribution:
"""Takes a InstallRequirement and returns a single AbstractDist \
representing a prepared variant of the same.
"""
if req.editable:
return self.preparer.prepare_editable_requirement(req)
# satisfied_by is only evaluated by calling _check_skip_installed,
# so it must be None here.
assert req.satisfied_by is None
skip_reason = self._check_skip_installed(req)
if req.satisfied_by:
return self.preparer.prepare_installed_requirement(req, skip_reason)
# We eagerly populate the link, since that's our "legacy" behavior.
self._populate_link(req)
dist = self.preparer.prepare_linked_requirement(req)
# NOTE
# The following portion is for determining if a certain package is
# going to be re-installed/upgraded or not and reporting to the user.
# This should probably get cleaned up in a future refactor.
# req.req is only avail after unpack for URL
# pkgs repeat check_if_exists to uninstall-on-upgrade
# (#14)
if not self.ignore_installed:
req.check_if_exists(self.use_user_site)
if req.satisfied_by:
should_modify = (
self.upgrade_strategy != "to-satisfy-only"
or self.force_reinstall
or self.ignore_installed
or req.link.scheme == "file"
)
if should_modify:
self._set_req_to_reinstall(req)
else:
logger.info(
"Requirement already satisfied (use --upgrade to upgrade): %s",
req,
)
return dist | [
"def",
"_get_dist_for",
"(",
"self",
",",
"req",
":",
"InstallRequirement",
")",
"->",
"Distribution",
":",
"if",
"req",
".",
"editable",
":",
"return",
"self",
".",
"preparer",
".",
"prepare_editable_requirement",
"(",
"req",
")",
"# satisfied_by is only evaluated by calling _check_skip_installed,",
"# so it must be None here.",
"assert",
"req",
".",
"satisfied_by",
"is",
"None",
"skip_reason",
"=",
"self",
".",
"_check_skip_installed",
"(",
"req",
")",
"if",
"req",
".",
"satisfied_by",
":",
"return",
"self",
".",
"preparer",
".",
"prepare_installed_requirement",
"(",
"req",
",",
"skip_reason",
")",
"# We eagerly populate the link, since that's our \"legacy\" behavior.",
"self",
".",
"_populate_link",
"(",
"req",
")",
"dist",
"=",
"self",
".",
"preparer",
".",
"prepare_linked_requirement",
"(",
"req",
")",
"# NOTE",
"# The following portion is for determining if a certain package is",
"# going to be re-installed/upgraded or not and reporting to the user.",
"# This should probably get cleaned up in a future refactor.",
"# req.req is only avail after unpack for URL",
"# pkgs repeat check_if_exists to uninstall-on-upgrade",
"# (#14)",
"if",
"not",
"self",
".",
"ignore_installed",
":",
"req",
".",
"check_if_exists",
"(",
"self",
".",
"use_user_site",
")",
"if",
"req",
".",
"satisfied_by",
":",
"should_modify",
"=",
"(",
"self",
".",
"upgrade_strategy",
"!=",
"\"to-satisfy-only\"",
"or",
"self",
".",
"force_reinstall",
"or",
"self",
".",
"ignore_installed",
"or",
"req",
".",
"link",
".",
"scheme",
"==",
"\"file\"",
")",
"if",
"should_modify",
":",
"self",
".",
"_set_req_to_reinstall",
"(",
"req",
")",
"else",
":",
"logger",
".",
"info",
"(",
"\"Requirement already satisfied (use --upgrade to upgrade): %s\"",
",",
"req",
",",
")",
"return",
"dist"
] | [
305,
4
] | [
349,
19
] | python | en | ['en', 'en', 'en'] | True |
Resolver._resolve_one | (
self,
requirement_set: RequirementSet,
req_to_install: InstallRequirement,
) | Prepare a single requirements file.
:return: A list of additional InstallRequirements to also install.
| Prepare a single requirements file. | def _resolve_one(
self,
requirement_set: RequirementSet,
req_to_install: InstallRequirement,
) -> List[InstallRequirement]:
"""Prepare a single requirements file.
:return: A list of additional InstallRequirements to also install.
"""
# Tell user what we are doing for this requirement:
# obtain (editable), skipping, processing (local url), collecting
# (remote url or package name)
if req_to_install.constraint or req_to_install.prepared:
return []
req_to_install.prepared = True
# Parse and return dependencies
dist = self._get_dist_for(req_to_install)
# This will raise UnsupportedPythonVersion if the given Python
# version isn't compatible with the distribution's Requires-Python.
_check_dist_requires_python(
dist,
version_info=self._py_version_info,
ignore_requires_python=self.ignore_requires_python,
)
more_reqs: List[InstallRequirement] = []
def add_req(subreq: Distribution, extras_requested: Iterable[str]) -> None:
sub_install_req = self._make_install_req(
str(subreq),
req_to_install,
)
parent_req_name = req_to_install.name
to_scan_again, add_to_parent = requirement_set.add_requirement(
sub_install_req,
parent_req_name=parent_req_name,
extras_requested=extras_requested,
)
if parent_req_name and add_to_parent:
self._discovered_dependencies[parent_req_name].append(add_to_parent)
more_reqs.extend(to_scan_again)
with indent_log():
# We add req_to_install before its dependencies, so that we
# can refer to it when adding dependencies.
if not requirement_set.has_requirement(req_to_install.name):
# 'unnamed' requirements will get added here
# 'unnamed' requirements can only come from being directly
# provided by the user.
assert req_to_install.user_supplied
requirement_set.add_requirement(req_to_install, parent_req_name=None)
if not self.ignore_dependencies:
if req_to_install.extras:
logger.debug(
"Installing extra requirements: %r",
",".join(req_to_install.extras),
)
missing_requested = sorted(
set(req_to_install.extras) - set(dist.extras)
)
for missing in missing_requested:
logger.warning("%s does not provide the extra '%s'", dist, missing)
available_requested = sorted(
set(dist.extras) & set(req_to_install.extras)
)
for subreq in dist.requires(available_requested):
add_req(subreq, extras_requested=available_requested)
return more_reqs | [
"def",
"_resolve_one",
"(",
"self",
",",
"requirement_set",
":",
"RequirementSet",
",",
"req_to_install",
":",
"InstallRequirement",
",",
")",
"->",
"List",
"[",
"InstallRequirement",
"]",
":",
"# Tell user what we are doing for this requirement:",
"# obtain (editable), skipping, processing (local url), collecting",
"# (remote url or package name)",
"if",
"req_to_install",
".",
"constraint",
"or",
"req_to_install",
".",
"prepared",
":",
"return",
"[",
"]",
"req_to_install",
".",
"prepared",
"=",
"True",
"# Parse and return dependencies",
"dist",
"=",
"self",
".",
"_get_dist_for",
"(",
"req_to_install",
")",
"# This will raise UnsupportedPythonVersion if the given Python",
"# version isn't compatible with the distribution's Requires-Python.",
"_check_dist_requires_python",
"(",
"dist",
",",
"version_info",
"=",
"self",
".",
"_py_version_info",
",",
"ignore_requires_python",
"=",
"self",
".",
"ignore_requires_python",
",",
")",
"more_reqs",
":",
"List",
"[",
"InstallRequirement",
"]",
"=",
"[",
"]",
"def",
"add_req",
"(",
"subreq",
":",
"Distribution",
",",
"extras_requested",
":",
"Iterable",
"[",
"str",
"]",
")",
"->",
"None",
":",
"sub_install_req",
"=",
"self",
".",
"_make_install_req",
"(",
"str",
"(",
"subreq",
")",
",",
"req_to_install",
",",
")",
"parent_req_name",
"=",
"req_to_install",
".",
"name",
"to_scan_again",
",",
"add_to_parent",
"=",
"requirement_set",
".",
"add_requirement",
"(",
"sub_install_req",
",",
"parent_req_name",
"=",
"parent_req_name",
",",
"extras_requested",
"=",
"extras_requested",
",",
")",
"if",
"parent_req_name",
"and",
"add_to_parent",
":",
"self",
".",
"_discovered_dependencies",
"[",
"parent_req_name",
"]",
".",
"append",
"(",
"add_to_parent",
")",
"more_reqs",
".",
"extend",
"(",
"to_scan_again",
")",
"with",
"indent_log",
"(",
")",
":",
"# We add req_to_install before its dependencies, so that we",
"# can refer to it when adding dependencies.",
"if",
"not",
"requirement_set",
".",
"has_requirement",
"(",
"req_to_install",
".",
"name",
")",
":",
"# 'unnamed' requirements will get added here",
"# 'unnamed' requirements can only come from being directly",
"# provided by the user.",
"assert",
"req_to_install",
".",
"user_supplied",
"requirement_set",
".",
"add_requirement",
"(",
"req_to_install",
",",
"parent_req_name",
"=",
"None",
")",
"if",
"not",
"self",
".",
"ignore_dependencies",
":",
"if",
"req_to_install",
".",
"extras",
":",
"logger",
".",
"debug",
"(",
"\"Installing extra requirements: %r\"",
",",
"\",\"",
".",
"join",
"(",
"req_to_install",
".",
"extras",
")",
",",
")",
"missing_requested",
"=",
"sorted",
"(",
"set",
"(",
"req_to_install",
".",
"extras",
")",
"-",
"set",
"(",
"dist",
".",
"extras",
")",
")",
"for",
"missing",
"in",
"missing_requested",
":",
"logger",
".",
"warning",
"(",
"\"%s does not provide the extra '%s'\"",
",",
"dist",
",",
"missing",
")",
"available_requested",
"=",
"sorted",
"(",
"set",
"(",
"dist",
".",
"extras",
")",
"&",
"set",
"(",
"req_to_install",
".",
"extras",
")",
")",
"for",
"subreq",
"in",
"dist",
".",
"requires",
"(",
"available_requested",
")",
":",
"add_req",
"(",
"subreq",
",",
"extras_requested",
"=",
"available_requested",
")",
"return",
"more_reqs"
] | [
351,
4
] | [
423,
24
] | python | en | ['en', 'it', 'en'] | True |
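Illustrative sketch (not a dataset row): the extras handling in _resolve_one above boils down to simple set arithmetic between the extras the user requested and the extras the distribution actually declares. The package and extra names below are invented for the example.

# Hypothetical extras, mirroring the missing/available split in _resolve_one.
requested_extras = {"security", "socks", "speedups"}
provided_extras = {"security", "socks"}

missing_requested = sorted(requested_extras - provided_extras)    # ['speedups']
available_requested = sorted(provided_extras & requested_extras)  # ['security', 'socks']

for missing in missing_requested:
    print("distribution does not provide the extra %r" % missing)
print("extras whose dependencies will be resolved:", available_requested)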
Resolver.get_installation_order | (
self, req_set: RequirementSet
) | Create the installation order.
The installation order is topological - requirements are installed
before the requiring thing. We break cycles at an arbitrary point,
and make no other guarantees.
| Create the installation order. | def get_installation_order(
self, req_set: RequirementSet
) -> List[InstallRequirement]:
"""Create the installation order.
The installation order is topological - requirements are installed
before the requiring thing. We break cycles at an arbitrary point,
and make no other guarantees.
"""
# The current implementation, which we may change at any point
# installs the user specified things in the order given, except when
# dependencies must come earlier to achieve topological order.
order = []
ordered_reqs: Set[InstallRequirement] = set()
def schedule(req: InstallRequirement) -> None:
if req.satisfied_by or req in ordered_reqs:
return
if req.constraint:
return
ordered_reqs.add(req)
for dep in self._discovered_dependencies[req.name]:
schedule(dep)
order.append(req)
for install_req in req_set.requirements.values():
schedule(install_req)
return order | [
"def",
"get_installation_order",
"(",
"self",
",",
"req_set",
":",
"RequirementSet",
")",
"->",
"List",
"[",
"InstallRequirement",
"]",
":",
"# The current implementation, which we may change at any point",
"# installs the user specified things in the order given, except when",
"# dependencies must come earlier to achieve topological order.",
"order",
"=",
"[",
"]",
"ordered_reqs",
":",
"Set",
"[",
"InstallRequirement",
"]",
"=",
"set",
"(",
")",
"def",
"schedule",
"(",
"req",
":",
"InstallRequirement",
")",
"->",
"None",
":",
"if",
"req",
".",
"satisfied_by",
"or",
"req",
"in",
"ordered_reqs",
":",
"return",
"if",
"req",
".",
"constraint",
":",
"return",
"ordered_reqs",
".",
"add",
"(",
"req",
")",
"for",
"dep",
"in",
"self",
".",
"_discovered_dependencies",
"[",
"req",
".",
"name",
"]",
":",
"schedule",
"(",
"dep",
")",
"order",
".",
"append",
"(",
"req",
")",
"for",
"install_req",
"in",
"req_set",
".",
"requirements",
".",
"values",
"(",
")",
":",
"schedule",
"(",
"install_req",
")",
"return",
"order"
] | [
425,
4
] | [
452,
20
] | python | en | ['en', 'en', 'en'] | True |
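Illustrative sketch (not a dataset row): get_installation_order above is a depth-first, post-order walk over the discovered dependencies, so each requirement is appended only after everything it depends on. The toy mapping below stands in for _discovered_dependencies; the package names are made up.

# Toy dependency graph: package -> packages it depends on.
discovered_dependencies = {
    "flask": ["werkzeug", "jinja2"],
    "jinja2": ["markupsafe"],
    "werkzeug": [],
    "markupsafe": [],
}

order = []
seen = set()

def schedule(name):
    # Dependencies are appended before the package that requires them,
    # mirroring the recursion in get_installation_order.
    if name in seen:
        return
    seen.add(name)
    for dep in discovered_dependencies.get(name, []):
        schedule(dep)
    order.append(name)

for requested in ["flask"]:
    schedule(requested)

print(order)  # ['werkzeug', 'markupsafe', 'jinja2', 'flask']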
_add_doc | (func, doc) | Add documentation to a function. | Add documentation to a function. | def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc | [
"def",
"_add_doc",
"(",
"func",
",",
"doc",
")",
":",
"func",
".",
"__doc__",
"=",
"doc"
] | [
79,
0
] | [
81,
22
] | python | en | ['en', 'en', 'en'] | True |
_import_module | (name) | Import module, returning the module after the last dot. | Import module, returning the module after the last dot. | def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name] | [
"def",
"_import_module",
"(",
"name",
")",
":",
"__import__",
"(",
"name",
")",
"return",
"sys",
".",
"modules",
"[",
"name",
"]"
] | [
84,
0
] | [
87,
28
] | python | en | ['en', 'en', 'en'] | True |
add_move | (move) | Add an item to six.moves. | Add an item to six.moves. | def add_move(move):
"""Add an item to six.moves."""
setattr(_MovedItems, move.name, move) | [
"def",
"add_move",
"(",
"move",
")",
":",
"setattr",
"(",
"_MovedItems",
",",
"move",
".",
"name",
",",
"move",
")"
] | [
509,
0
] | [
511,
41
] | python | en | ['en', 'en', 'en'] | True |
remove_move | (name) | Remove item from six.moves. | Remove item from six.moves. | def remove_move(name):
"""Remove item from six.moves."""
try:
delattr(_MovedItems, name)
except AttributeError:
try:
del moves.__dict__[name]
except KeyError:
raise AttributeError("no such move, %r" % (name,)) | [
"def",
"remove_move",
"(",
"name",
")",
":",
"try",
":",
"delattr",
"(",
"_MovedItems",
",",
"name",
")",
"except",
"AttributeError",
":",
"try",
":",
"del",
"moves",
".",
"__dict__",
"[",
"name",
"]",
"except",
"KeyError",
":",
"raise",
"AttributeError",
"(",
"\"no such move, %r\"",
"%",
"(",
"name",
",",
")",
")"
] | [
514,
0
] | [
522,
62
] | python | en | ['en', 'en', 'en'] | True |
with_metaclass | (meta, *bases) | Create a base class with a metaclass. | Create a base class with a metaclass. | def with_metaclass(meta, *bases):
"""Create a base class with a metaclass."""
# This requires a bit of explanation: the basic idea is to make a dummy
# metaclass for one level of class instantiation that replaces itself with
# the actual metaclass.
class metaclass(type):
def __new__(cls, name, this_bases, d):
if sys.version_info[:2] >= (3, 7):
# This version introduced PEP 560 that requires a bit
# of extra care (we mimic what is done by __build_class__).
resolved_bases = types.resolve_bases(bases)
if resolved_bases is not bases:
d['__orig_bases__'] = bases
else:
resolved_bases = bases
return meta(name, resolved_bases, d)
@classmethod
def __prepare__(cls, name, this_bases):
return meta.__prepare__(name, bases)
return type.__new__(metaclass, 'temporary_class', (), {}) | [
"def",
"with_metaclass",
"(",
"meta",
",",
"*",
"bases",
")",
":",
"# This requires a bit of explanation: the basic idea is to make a dummy",
"# metaclass for one level of class instantiation that replaces itself with",
"# the actual metaclass.",
"class",
"metaclass",
"(",
"type",
")",
":",
"def",
"__new__",
"(",
"cls",
",",
"name",
",",
"this_bases",
",",
"d",
")",
":",
"if",
"sys",
".",
"version_info",
"[",
":",
"2",
"]",
">=",
"(",
"3",
",",
"7",
")",
":",
"# This version introduced PEP 560 that requires a bit",
"# of extra care (we mimic what is done by __build_class__).",
"resolved_bases",
"=",
"types",
".",
"resolve_bases",
"(",
"bases",
")",
"if",
"resolved_bases",
"is",
"not",
"bases",
":",
"d",
"[",
"'__orig_bases__'",
"]",
"=",
"bases",
"else",
":",
"resolved_bases",
"=",
"bases",
"return",
"meta",
"(",
"name",
",",
"resolved_bases",
",",
"d",
")",
"@",
"classmethod",
"def",
"__prepare__",
"(",
"cls",
",",
"name",
",",
"this_bases",
")",
":",
"return",
"meta",
".",
"__prepare__",
"(",
"name",
",",
"bases",
")",
"return",
"type",
".",
"__new__",
"(",
"metaclass",
",",
"'temporary_class'",
",",
"(",
")",
",",
"{",
"}",
")"
] | [
855,
0
] | [
876,
61
] | python | en | ['en', 'en', 'en'] | True |
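Illustrative usage of with_metaclass as defined above; Meta, Base, and MyClass are invented names, and the snippet assumes the six package is installed.

import six

class Meta(type):
    def __new__(mcs, name, bases, namespace):
        namespace.setdefault("created_by_meta", True)
        return super(Meta, mcs).__new__(mcs, name, bases, namespace)

class Base(object):
    pass

# The temporary class returned by with_metaclass replaces itself, so MyClass
# ends up with Meta as its metaclass and Base as its only base class.
class MyClass(six.with_metaclass(Meta, Base)):
    pass

print(type(MyClass) is Meta)    # True
print(MyClass.created_by_meta)  # True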
add_metaclass | (metaclass) | Class decorator for creating a class with a metaclass. | Class decorator for creating a class with a metaclass. | def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
slots = orig_vars.get('__slots__')
if slots is not None:
if isinstance(slots, str):
slots = [slots]
for slots_var in slots:
orig_vars.pop(slots_var)
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
if hasattr(cls, '__qualname__'):
orig_vars['__qualname__'] = cls.__qualname__
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper | [
"def",
"add_metaclass",
"(",
"metaclass",
")",
":",
"def",
"wrapper",
"(",
"cls",
")",
":",
"orig_vars",
"=",
"cls",
".",
"__dict__",
".",
"copy",
"(",
")",
"slots",
"=",
"orig_vars",
".",
"get",
"(",
"'__slots__'",
")",
"if",
"slots",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"slots",
",",
"str",
")",
":",
"slots",
"=",
"[",
"slots",
"]",
"for",
"slots_var",
"in",
"slots",
":",
"orig_vars",
".",
"pop",
"(",
"slots_var",
")",
"orig_vars",
".",
"pop",
"(",
"'__dict__'",
",",
"None",
")",
"orig_vars",
".",
"pop",
"(",
"'__weakref__'",
",",
"None",
")",
"if",
"hasattr",
"(",
"cls",
",",
"'__qualname__'",
")",
":",
"orig_vars",
"[",
"'__qualname__'",
"]",
"=",
"cls",
".",
"__qualname__",
"return",
"metaclass",
"(",
"cls",
".",
"__name__",
",",
"cls",
".",
"__bases__",
",",
"orig_vars",
")",
"return",
"wrapper"
] | [
879,
0
] | [
894,
18
] | python | en | ['en', 'en', 'en'] | True |
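Illustrative usage of the add_metaclass decorator above; RegistryMeta and Plugin are invented names, and the snippet assumes six is installed.

import six

class RegistryMeta(type):
    registry = {}

    def __new__(mcs, name, bases, namespace):
        cls = super(RegistryMeta, mcs).__new__(mcs, name, bases, namespace)
        mcs.registry[name] = cls
        return cls

# The decorator re-creates Plugin through RegistryMeta, so the class gets
# registered even though its body never mentions the metaclass.
@six.add_metaclass(RegistryMeta)
class Plugin(object):
    pass

print(type(Plugin) is RegistryMeta)       # True
print("Plugin" in RegistryMeta.registry)  # True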
ensure_binary | (s, encoding='utf-8', errors='strict') | Coerce **s** to six.binary_type.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> encoded to `bytes`
- `bytes` -> `bytes`
| Coerce **s** to six.binary_type. | def ensure_binary(s, encoding='utf-8', errors='strict'):
"""Coerce **s** to six.binary_type.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> encoded to `bytes`
- `bytes` -> `bytes`
"""
if isinstance(s, binary_type):
return s
if isinstance(s, text_type):
return s.encode(encoding, errors)
raise TypeError("not expecting type '%s'" % type(s)) | [
"def",
"ensure_binary",
"(",
"s",
",",
"encoding",
"=",
"'utf-8'",
",",
"errors",
"=",
"'strict'",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"binary_type",
")",
":",
"return",
"s",
"if",
"isinstance",
"(",
"s",
",",
"text_type",
")",
":",
"return",
"s",
".",
"encode",
"(",
"encoding",
",",
"errors",
")",
"raise",
"TypeError",
"(",
"\"not expecting type '%s'\"",
"%",
"type",
"(",
"s",
")",
")"
] | [
897,
0
] | [
912,
56
] | python | en | ['en', 'sn', 'en'] | True |
ensure_str | (s, encoding='utf-8', errors='strict') | Coerce *s* to `str`.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
| Coerce *s* to `str`. | def ensure_str(s, encoding='utf-8', errors='strict'):
"""Coerce *s* to `str`.
For Python 2:
- `unicode` -> encoded to `str`
- `str` -> `str`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
"""
# Optimization: Fast return for the common case.
if type(s) is str:
return s
if PY2 and isinstance(s, text_type):
return s.encode(encoding, errors)
elif PY3 and isinstance(s, binary_type):
return s.decode(encoding, errors)
elif not isinstance(s, (text_type, binary_type)):
raise TypeError("not expecting type '%s'" % type(s))
return s | [
"def",
"ensure_str",
"(",
"s",
",",
"encoding",
"=",
"'utf-8'",
",",
"errors",
"=",
"'strict'",
")",
":",
"# Optimization: Fast return for the common case.",
"if",
"type",
"(",
"s",
")",
"is",
"str",
":",
"return",
"s",
"if",
"PY2",
"and",
"isinstance",
"(",
"s",
",",
"text_type",
")",
":",
"return",
"s",
".",
"encode",
"(",
"encoding",
",",
"errors",
")",
"elif",
"PY3",
"and",
"isinstance",
"(",
"s",
",",
"binary_type",
")",
":",
"return",
"s",
".",
"decode",
"(",
"encoding",
",",
"errors",
")",
"elif",
"not",
"isinstance",
"(",
"s",
",",
"(",
"text_type",
",",
"binary_type",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"not expecting type '%s'\"",
"%",
"type",
"(",
"s",
")",
")",
"return",
"s"
] | [
915,
0
] | [
935,
12
] | python | en | ['en', 'sl', 'en'] | True |
ensure_text | (s, encoding='utf-8', errors='strict') | Coerce *s* to six.text_type.
For Python 2:
- `unicode` -> `unicode`
- `str` -> `unicode`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
| Coerce *s* to six.text_type. | def ensure_text(s, encoding='utf-8', errors='strict'):
"""Coerce *s* to six.text_type.
For Python 2:
- `unicode` -> `unicode`
- `str` -> `unicode`
For Python 3:
- `str` -> `str`
- `bytes` -> decoded to `str`
"""
if isinstance(s, binary_type):
return s.decode(encoding, errors)
elif isinstance(s, text_type):
return s
else:
raise TypeError("not expecting type '%s'" % type(s)) | [
"def",
"ensure_text",
"(",
"s",
",",
"encoding",
"=",
"'utf-8'",
",",
"errors",
"=",
"'strict'",
")",
":",
"if",
"isinstance",
"(",
"s",
",",
"binary_type",
")",
":",
"return",
"s",
".",
"decode",
"(",
"encoding",
",",
"errors",
")",
"elif",
"isinstance",
"(",
"s",
",",
"text_type",
")",
":",
"return",
"s",
"else",
":",
"raise",
"TypeError",
"(",
"\"not expecting type '%s'\"",
"%",
"type",
"(",
"s",
")",
")"
] | [
938,
0
] | [
954,
60
] | python | en | ['en', 'sr', 'en'] | True |
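Quick round-trip for the three ensure_* helpers shown above, run under Python 3 and assuming six is installed.

import six

raw = "héllo"

as_bytes = six.ensure_binary(raw)    # b'h\xc3\xa9llo' -- UTF-8 encoded
as_text = six.ensure_text(as_bytes)  # 'héllo' -- decoded back to text
as_str = six.ensure_str(as_bytes)    # native str: decoded on Python 3, kept as bytes on Python 2

print(type(as_bytes), type(as_text), type(as_str))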
python_2_unicode_compatible | (klass) |
A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
|
A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing. | def python_2_unicode_compatible(klass):
"""
A class decorator that defines __unicode__ and __str__ methods under Python 2.
Under Python 3 it does nothing.
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
if PY2:
if '__str__' not in klass.__dict__:
raise ValueError("@python_2_unicode_compatible cannot be applied "
"to %s because it doesn't define __str__()." %
klass.__name__)
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass | [
"def",
"python_2_unicode_compatible",
"(",
"klass",
")",
":",
"if",
"PY2",
":",
"if",
"'__str__'",
"not",
"in",
"klass",
".",
"__dict__",
":",
"raise",
"ValueError",
"(",
"\"@python_2_unicode_compatible cannot be applied \"",
"\"to %s because it doesn't define __str__().\"",
"%",
"klass",
".",
"__name__",
")",
"klass",
".",
"__unicode__",
"=",
"klass",
".",
"__str__",
"klass",
".",
"__str__",
"=",
"lambda",
"self",
":",
"self",
".",
"__unicode__",
"(",
")",
".",
"encode",
"(",
"'utf-8'",
")",
"return",
"klass"
] | [
957,
0
] | [
972,
16
] | python | en | ['en', 'error', 'th'] | False |
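Illustrative usage of python_2_unicode_compatible above; the Article class is invented for the example and six is assumed to be installed.

import six

@six.python_2_unicode_compatible
class Article(object):
    def __init__(self, title):
        self.title = title

    def __str__(self):
        # Return text; on Python 2 the decorator moves this to __unicode__
        # and installs a UTF-8-encoding __str__ automatically.
        return self.title

print(str(Article("Schrödinger")))  # Schrödinger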
_SixMetaPathImporter.is_package | (self, fullname) |
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
|
Return true, if the named module is a package. | def is_package(self, fullname):
"""
Return true, if the named module is a package.
We need this method to get correct spec objects with
Python 3.4 (see PEP451)
"""
return hasattr(self.__get_module(fullname), "__path__") | [
"def",
"is_package",
"(",
"self",
",",
"fullname",
")",
":",
"return",
"hasattr",
"(",
"self",
".",
"__get_module",
"(",
"fullname",
")",
",",
"\"__path__\"",
")"
] | [
218,
4
] | [
225,
63
] | python | en | ['en', 'error', 'th'] | False |
_SixMetaPathImporter.get_code | (self, fullname) | Return None
Required, if is_package is implemented | Return None | def get_code(self, fullname):
"""Return None
Required, if is_package is implemented"""
self.__get_module(fullname) # eventually raises ImportError
return None | [
"def",
"get_code",
"(",
"self",
",",
"fullname",
")",
":",
"self",
".",
"__get_module",
"(",
"fullname",
")",
"# eventually raises ImportError",
"return",
"None"
] | [
227,
4
] | [
232,
19
] | python | en | ['en', 'co', 'en'] | False |
find_commands | (management_dir) |
Given a path to a management directory, return a list of all the command
names that are available.
|
Given a path to a management directory, return a list of all the command
names that are available.
| def find_commands(management_dir):
"""
Given a path to a management directory, return a list of all the command
names that are available.
"""
command_dir = os.path.join(management_dir, 'commands')
return [name for _, name, is_pkg in pkgutil.iter_modules([command_dir])
if not is_pkg and not name.startswith('_')] | [
"def",
"find_commands",
"(",
"management_dir",
")",
":",
"command_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"management_dir",
",",
"'commands'",
")",
"return",
"[",
"name",
"for",
"_",
",",
"name",
",",
"is_pkg",
"in",
"pkgutil",
".",
"iter_modules",
"(",
"[",
"command_dir",
"]",
")",
"if",
"not",
"is_pkg",
"and",
"not",
"name",
".",
"startswith",
"(",
"'_'",
")",
"]"
] | [
22,
0
] | [
29,
55
] | python | en | ['en', 'error', 'th'] | False |
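A minimal sketch of what find_commands does, pointed at Django's own core management package; it assumes Django is importable, and the exact command list depends on the Django version.

import os
import pkgutil

import django.core.management as management

command_dir = os.path.join(os.path.dirname(management.__file__), "commands")
names = [
    name
    for _, name, is_pkg in pkgutil.iter_modules([command_dir])
    if not is_pkg and not name.startswith("_")
]
print(names)  # e.g. ['check', 'migrate', 'runserver', ...]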
load_command_class | (app_name, name) |
Given a command name and an application name, return the Command
class instance. Allow all errors raised by the import process
(ImportError, AttributeError) to propagate.
|
Given a command name and an application name, return the Command
class instance. Allow all errors raised by the import process
(ImportError, AttributeError) to propagate.
| def load_command_class(app_name, name):
"""
Given a command name and an application name, return the Command
class instance. Allow all errors raised by the import process
(ImportError, AttributeError) to propagate.
"""
module = import_module('%s.management.commands.%s' % (app_name, name))
return module.Command() | [
"def",
"load_command_class",
"(",
"app_name",
",",
"name",
")",
":",
"module",
"=",
"import_module",
"(",
"'%s.management.commands.%s'",
"%",
"(",
"app_name",
",",
"name",
")",
")",
"return",
"module",
".",
"Command",
"(",
")"
] | [
32,
0
] | [
39,
27
] | python | en | ['en', 'error', 'th'] | False |
get_commands | () |
Return a dictionary mapping command names to their callback applications.
Look for a management.commands package in django.core, and in each
installed application -- if a commands package exists, register all
commands in that package.
Core commands are always included. If a settings module has been
specified, also include user-defined commands.
The dictionary is in the format {command_name: app_name}. Key-value
pairs from this dictionary can then be used in calls to
load_command_class(app_name, command_name)
If a specific version of a command must be loaded (e.g., with the
startapp command), the instantiated module can be placed in the
dictionary in place of the application name.
The dictionary is cached on the first call and reused on subsequent
calls.
|
Return a dictionary mapping command names to their callback applications. | def get_commands():
"""
Return a dictionary mapping command names to their callback applications.
Look for a management.commands package in django.core, and in each
installed application -- if a commands package exists, register all
commands in that package.
Core commands are always included. If a settings module has been
specified, also include user-defined commands.
The dictionary is in the format {command_name: app_name}. Key-value
pairs from this dictionary can then be used in calls to
load_command_class(app_name, command_name)
If a specific version of a command must be loaded (e.g., with the
startapp command), the instantiated module can be placed in the
dictionary in place of the application name.
The dictionary is cached on the first call and reused on subsequent
calls.
"""
commands = {name: 'django.core' for name in find_commands(__path__[0])}
if not settings.configured:
return commands
for app_config in reversed(list(apps.get_app_configs())):
path = os.path.join(app_config.path, 'management')
commands.update({name: app_config.name for name in find_commands(path)})
return commands | [
"def",
"get_commands",
"(",
")",
":",
"commands",
"=",
"{",
"name",
":",
"'django.core'",
"for",
"name",
"in",
"find_commands",
"(",
"__path__",
"[",
"0",
"]",
")",
"}",
"if",
"not",
"settings",
".",
"configured",
":",
"return",
"commands",
"for",
"app_config",
"in",
"reversed",
"(",
"list",
"(",
"apps",
".",
"get_app_configs",
"(",
")",
")",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app_config",
".",
"path",
",",
"'management'",
")",
"commands",
".",
"update",
"(",
"{",
"name",
":",
"app_config",
".",
"name",
"for",
"name",
"in",
"find_commands",
"(",
"path",
")",
"}",
")",
"return",
"commands"
] | [
43,
0
] | [
74,
19
] | python | en | ['en', 'error', 'th'] | False |
call_command | (command_name, *args, **options) |
Call the given command, with the given options and args/kwargs.
This is the primary API you should use for calling specific commands.
`command_name` may be a string or a command object. Using a string is
preferred unless the command object is required for further processing or
testing.
Some examples:
call_command('migrate')
call_command('shell', plain=True)
call_command('sqlmigrate', 'myapp')
from django.core.management.commands import flush
cmd = flush.Command()
call_command(cmd, verbosity=0, interactive=False)
# Do something with cmd ...
|
Call the given command, with the given options and args/kwargs. | def call_command(command_name, *args, **options):
"""
Call the given command, with the given options and args/kwargs.
This is the primary API you should use for calling specific commands.
`command_name` may be a string or a command object. Using a string is
preferred unless the command object is required for further processing or
testing.
Some examples:
call_command('migrate')
call_command('shell', plain=True)
call_command('sqlmigrate', 'myapp')
from django.core.management.commands import flush
cmd = flush.Command()
call_command(cmd, verbosity=0, interactive=False)
# Do something with cmd ...
"""
if isinstance(command_name, BaseCommand):
# Command object passed in.
command = command_name
command_name = command.__class__.__module__.split('.')[-1]
else:
# Load the command object by name.
try:
app_name = get_commands()[command_name]
except KeyError:
raise CommandError("Unknown command: %r" % command_name)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
command = app_name
else:
command = load_command_class(app_name, command_name)
# Simulate argument parsing to get the option defaults (see #10080 for details).
parser = command.create_parser('', command_name)
# Use the `dest` option name from the parser option
opt_mapping = {
min(s_opt.option_strings).lstrip('-').replace('-', '_'): s_opt.dest
for s_opt in parser._actions if s_opt.option_strings
}
arg_options = {opt_mapping.get(key, key): value for key, value in options.items()}
parse_args = []
for arg in args:
if isinstance(arg, (list, tuple)):
parse_args += map(str, arg)
else:
parse_args.append(str(arg))
def get_actions(parser):
# Parser actions and actions from sub-parser choices.
for opt in parser._actions:
if isinstance(opt, _SubParsersAction):
for sub_opt in opt.choices.values():
yield from get_actions(sub_opt)
else:
yield opt
parser_actions = list(get_actions(parser))
mutually_exclusive_required_options = {
opt
for group in parser._mutually_exclusive_groups
for opt in group._group_actions if group.required
}
# Any required arguments which are passed in via **options must be passed
# to parse_args().
for opt in parser_actions:
if (
opt.dest in options and
(opt.required or opt in mutually_exclusive_required_options)
):
parse_args.append(min(opt.option_strings))
if isinstance(opt, (_AppendConstAction, _CountAction, _StoreConstAction)):
continue
value = arg_options[opt.dest]
if isinstance(value, (list, tuple)):
parse_args += map(str, value)
else:
parse_args.append(str(value))
defaults = parser.parse_args(args=parse_args)
defaults = dict(defaults._get_kwargs(), **arg_options)
# Raise an error if any unknown options were passed.
stealth_options = set(command.base_stealth_options + command.stealth_options)
dest_parameters = {action.dest for action in parser_actions}
valid_options = (dest_parameters | stealth_options).union(opt_mapping)
unknown_options = set(options) - valid_options
if unknown_options:
raise TypeError(
"Unknown option(s) for %s command: %s. "
"Valid options are: %s." % (
command_name,
', '.join(sorted(unknown_options)),
', '.join(sorted(valid_options)),
)
)
# Move positional args out of options to mimic legacy optparse
args = defaults.pop('args', ())
if 'skip_checks' not in options:
defaults['skip_checks'] = True
return command.execute(*args, **defaults) | [
"def",
"call_command",
"(",
"command_name",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"if",
"isinstance",
"(",
"command_name",
",",
"BaseCommand",
")",
":",
"# Command object passed in.",
"command",
"=",
"command_name",
"command_name",
"=",
"command",
".",
"__class__",
".",
"__module__",
".",
"split",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
"else",
":",
"# Load the command object by name.",
"try",
":",
"app_name",
"=",
"get_commands",
"(",
")",
"[",
"command_name",
"]",
"except",
"KeyError",
":",
"raise",
"CommandError",
"(",
"\"Unknown command: %r\"",
"%",
"command_name",
")",
"if",
"isinstance",
"(",
"app_name",
",",
"BaseCommand",
")",
":",
"# If the command is already loaded, use it directly.",
"command",
"=",
"app_name",
"else",
":",
"command",
"=",
"load_command_class",
"(",
"app_name",
",",
"command_name",
")",
"# Simulate argument parsing to get the option defaults (see #10080 for details).",
"parser",
"=",
"command",
".",
"create_parser",
"(",
"''",
",",
"command_name",
")",
"# Use the `dest` option name from the parser option",
"opt_mapping",
"=",
"{",
"min",
"(",
"s_opt",
".",
"option_strings",
")",
".",
"lstrip",
"(",
"'-'",
")",
".",
"replace",
"(",
"'-'",
",",
"'_'",
")",
":",
"s_opt",
".",
"dest",
"for",
"s_opt",
"in",
"parser",
".",
"_actions",
"if",
"s_opt",
".",
"option_strings",
"}",
"arg_options",
"=",
"{",
"opt_mapping",
".",
"get",
"(",
"key",
",",
"key",
")",
":",
"value",
"for",
"key",
",",
"value",
"in",
"options",
".",
"items",
"(",
")",
"}",
"parse_args",
"=",
"[",
"]",
"for",
"arg",
"in",
"args",
":",
"if",
"isinstance",
"(",
"arg",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"parse_args",
"+=",
"map",
"(",
"str",
",",
"arg",
")",
"else",
":",
"parse_args",
".",
"append",
"(",
"str",
"(",
"arg",
")",
")",
"def",
"get_actions",
"(",
"parser",
")",
":",
"# Parser actions and actions from sub-parser choices.",
"for",
"opt",
"in",
"parser",
".",
"_actions",
":",
"if",
"isinstance",
"(",
"opt",
",",
"_SubParsersAction",
")",
":",
"for",
"sub_opt",
"in",
"opt",
".",
"choices",
".",
"values",
"(",
")",
":",
"yield",
"from",
"get_actions",
"(",
"sub_opt",
")",
"else",
":",
"yield",
"opt",
"parser_actions",
"=",
"list",
"(",
"get_actions",
"(",
"parser",
")",
")",
"mutually_exclusive_required_options",
"=",
"{",
"opt",
"for",
"group",
"in",
"parser",
".",
"_mutually_exclusive_groups",
"for",
"opt",
"in",
"group",
".",
"_group_actions",
"if",
"group",
".",
"required",
"}",
"# Any required arguments which are passed in via **options must be passed",
"# to parse_args().",
"for",
"opt",
"in",
"parser_actions",
":",
"if",
"(",
"opt",
".",
"dest",
"in",
"options",
"and",
"(",
"opt",
".",
"required",
"or",
"opt",
"in",
"mutually_exclusive_required_options",
")",
")",
":",
"parse_args",
".",
"append",
"(",
"min",
"(",
"opt",
".",
"option_strings",
")",
")",
"if",
"isinstance",
"(",
"opt",
",",
"(",
"_AppendConstAction",
",",
"_CountAction",
",",
"_StoreConstAction",
")",
")",
":",
"continue",
"value",
"=",
"arg_options",
"[",
"opt",
".",
"dest",
"]",
"if",
"isinstance",
"(",
"value",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"parse_args",
"+=",
"map",
"(",
"str",
",",
"value",
")",
"else",
":",
"parse_args",
".",
"append",
"(",
"str",
"(",
"value",
")",
")",
"defaults",
"=",
"parser",
".",
"parse_args",
"(",
"args",
"=",
"parse_args",
")",
"defaults",
"=",
"dict",
"(",
"defaults",
".",
"_get_kwargs",
"(",
")",
",",
"*",
"*",
"arg_options",
")",
"# Raise an error if any unknown options were passed.",
"stealth_options",
"=",
"set",
"(",
"command",
".",
"base_stealth_options",
"+",
"command",
".",
"stealth_options",
")",
"dest_parameters",
"=",
"{",
"action",
".",
"dest",
"for",
"action",
"in",
"parser_actions",
"}",
"valid_options",
"=",
"(",
"dest_parameters",
"|",
"stealth_options",
")",
".",
"union",
"(",
"opt_mapping",
")",
"unknown_options",
"=",
"set",
"(",
"options",
")",
"-",
"valid_options",
"if",
"unknown_options",
":",
"raise",
"TypeError",
"(",
"\"Unknown option(s) for %s command: %s. \"",
"\"Valid options are: %s.\"",
"%",
"(",
"command_name",
",",
"', '",
".",
"join",
"(",
"sorted",
"(",
"unknown_options",
")",
")",
",",
"', '",
".",
"join",
"(",
"sorted",
"(",
"valid_options",
")",
")",
",",
")",
")",
"# Move positional args out of options to mimic legacy optparse",
"args",
"=",
"defaults",
".",
"pop",
"(",
"'args'",
",",
"(",
")",
")",
"if",
"'skip_checks'",
"not",
"in",
"options",
":",
"defaults",
"[",
"'skip_checks'",
"]",
"=",
"True",
"return",
"command",
".",
"execute",
"(",
"*",
"args",
",",
"*",
"*",
"defaults",
")"
] | [
77,
0
] | [
180,
45
] | python | en | ['en', 'error', 'th'] | False |
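A short usage sketch for call_command as documented above; it assumes a configured Django project and uses the common pattern of capturing command output in a StringIO (for example in tests).

from io import StringIO

from django.core.management import call_command

out = StringIO()
# Roughly equivalent to `manage.py check --deploy`, with stdout captured.
call_command("check", deploy=True, stdout=out)
print(out.getvalue())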
execute_from_command_line | (argv=None) | Run a ManagementUtility. | Run a ManagementUtility. | def execute_from_command_line(argv=None):
"""Run a ManagementUtility."""
utility = ManagementUtility(argv)
utility.execute() | [
"def",
"execute_from_command_line",
"(",
"argv",
"=",
"None",
")",
":",
"utility",
"=",
"ManagementUtility",
"(",
"argv",
")",
"utility",
".",
"execute",
"(",
")"
] | [
415,
0
] | [
418,
21
] | python | en | ['es', 'lb', 'en'] | False |
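For context, this is the conventional manage.py entry point that ends up in execute_from_command_line; "myproject.settings" is a placeholder for the real settings module.

#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "myproject.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)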
ManagementUtility.main_help_text | (self, commands_only=False) | Return the script's main help text, as a string. | Return the script's main help text, as a string. | def main_help_text(self, commands_only=False):
"""Return the script's main help text, as a string."""
if commands_only:
usage = sorted(get_commands())
else:
usage = [
"",
"Type '%s help <subcommand>' for help on a specific subcommand." % self.prog_name,
"",
"Available subcommands:",
]
commands_dict = defaultdict(lambda: [])
for name, app in get_commands().items():
if app == 'django.core':
app = 'django'
else:
app = app.rpartition('.')[-1]
commands_dict[app].append(name)
style = color_style()
for app in sorted(commands_dict):
usage.append("")
usage.append(style.NOTICE("[%s]" % app))
for name in sorted(commands_dict[app]):
usage.append(" %s" % name)
# Output an extra note if settings are not properly configured
if self.settings_exception is not None:
usage.append(style.NOTICE(
"Note that only Django core commands are listed "
"as settings are not properly configured (error: %s)."
% self.settings_exception))
return '\n'.join(usage) | [
"def",
"main_help_text",
"(",
"self",
",",
"commands_only",
"=",
"False",
")",
":",
"if",
"commands_only",
":",
"usage",
"=",
"sorted",
"(",
"get_commands",
"(",
")",
")",
"else",
":",
"usage",
"=",
"[",
"\"\"",
",",
"\"Type '%s help <subcommand>' for help on a specific subcommand.\"",
"%",
"self",
".",
"prog_name",
",",
"\"\"",
",",
"\"Available subcommands:\"",
",",
"]",
"commands_dict",
"=",
"defaultdict",
"(",
"lambda",
":",
"[",
"]",
")",
"for",
"name",
",",
"app",
"in",
"get_commands",
"(",
")",
".",
"items",
"(",
")",
":",
"if",
"app",
"==",
"'django.core'",
":",
"app",
"=",
"'django'",
"else",
":",
"app",
"=",
"app",
".",
"rpartition",
"(",
"'.'",
")",
"[",
"-",
"1",
"]",
"commands_dict",
"[",
"app",
"]",
".",
"append",
"(",
"name",
")",
"style",
"=",
"color_style",
"(",
")",
"for",
"app",
"in",
"sorted",
"(",
"commands_dict",
")",
":",
"usage",
".",
"append",
"(",
"\"\"",
")",
"usage",
".",
"append",
"(",
"style",
".",
"NOTICE",
"(",
"\"[%s]\"",
"%",
"app",
")",
")",
"for",
"name",
"in",
"sorted",
"(",
"commands_dict",
"[",
"app",
"]",
")",
":",
"usage",
".",
"append",
"(",
"\" %s\"",
"%",
"name",
")",
"# Output an extra note if settings are not properly configured",
"if",
"self",
".",
"settings_exception",
"is",
"not",
"None",
":",
"usage",
".",
"append",
"(",
"style",
".",
"NOTICE",
"(",
"\"Note that only Django core commands are listed \"",
"\"as settings are not properly configured (error: %s).\"",
"%",
"self",
".",
"settings_exception",
")",
")",
"return",
"'\\n'",
".",
"join",
"(",
"usage",
")"
] | [
194,
4
] | [
225,
31
] | python | en | ['en', 'gd', 'en'] | True |
ManagementUtility.fetch_command | (self, subcommand) |
Try to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"django-admin" or "manage.py") if it can't be found.
|
Try to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"django-admin" or "manage.py") if it can't be found.
| def fetch_command(self, subcommand):
"""
Try to fetch the given subcommand, printing a message with the
appropriate command called from the command line (usually
"django-admin" or "manage.py") if it can't be found.
"""
# Get commands outside of try block to prevent swallowing exceptions
commands = get_commands()
try:
app_name = commands[subcommand]
except KeyError:
if os.environ.get('DJANGO_SETTINGS_MODULE'):
# If `subcommand` is missing due to misconfigured settings, the
# following line will retrigger an ImproperlyConfigured exception
# (get_commands() swallows the original one) so the user is
# informed about it.
settings.INSTALLED_APPS
elif not settings.configured:
sys.stderr.write("No Django settings specified.\n")
possible_matches = get_close_matches(subcommand, commands)
sys.stderr.write('Unknown command: %r' % subcommand)
if possible_matches:
sys.stderr.write('. Did you mean %s?' % possible_matches[0])
sys.stderr.write("\nType '%s help' for usage.\n" % self.prog_name)
sys.exit(1)
if isinstance(app_name, BaseCommand):
# If the command is already loaded, use it directly.
klass = app_name
else:
klass = load_command_class(app_name, subcommand)
return klass | [
"def",
"fetch_command",
"(",
"self",
",",
"subcommand",
")",
":",
"# Get commands outside of try block to prevent swallowing exceptions",
"commands",
"=",
"get_commands",
"(",
")",
"try",
":",
"app_name",
"=",
"commands",
"[",
"subcommand",
"]",
"except",
"KeyError",
":",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"'DJANGO_SETTINGS_MODULE'",
")",
":",
"# If `subcommand` is missing due to misconfigured settings, the",
"# following line will retrigger an ImproperlyConfigured exception",
"# (get_commands() swallows the original one) so the user is",
"# informed about it.",
"settings",
".",
"INSTALLED_APPS",
"elif",
"not",
"settings",
".",
"configured",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"No Django settings specified.\\n\"",
")",
"possible_matches",
"=",
"get_close_matches",
"(",
"subcommand",
",",
"commands",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Unknown command: %r'",
"%",
"subcommand",
")",
"if",
"possible_matches",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'. Did you mean %s?'",
"%",
"possible_matches",
"[",
"0",
"]",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"\"\\nType '%s help' for usage.\\n\"",
"%",
"self",
".",
"prog_name",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"if",
"isinstance",
"(",
"app_name",
",",
"BaseCommand",
")",
":",
"# If the command is already loaded, use it directly.",
"klass",
"=",
"app_name",
"else",
":",
"klass",
"=",
"load_command_class",
"(",
"app_name",
",",
"subcommand",
")",
"return",
"klass"
] | [
227,
4
] | [
257,
20
] | python | en | ['en', 'error', 'th'] | False |
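A tiny illustration of the difflib.get_close_matches call that fetch_command uses for its "Did you mean ...?" hint; the command list here is abbreviated.

from difflib import get_close_matches

commands = ["makemigrations", "migrate", "runserver", "shell", "test"]
print(get_close_matches("migrat", commands))  # ['migrate'] (plus any other near matches)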
ManagementUtility.autocomplete | (self) |
Output completion suggestions for BASH.
The output of this function is passed to BASH's `COMREPLY` variable and
treated as completion suggestions. `COMREPLY` expects a space
separated string as the result.
The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
to get information about the cli input. Please refer to the BASH
man-page for more information about these variables.
Subcommand options are saved as pairs. A pair consists of
the long option string (e.g. '--exclude') and a boolean
value indicating if the option requires arguments. When printing to
stdout, an equal sign is appended to options which require arguments.
Note: If debugging this function, it is recommended to write the debug
output in a separate file. Otherwise the debug output will be treated
and formatted as potential completion suggestions.
|
Output completion suggestions for BASH. | def autocomplete(self):
"""
Output completion suggestions for BASH.
The output of this function is passed to BASH's `COMREPLY` variable and
treated as completion suggestions. `COMREPLY` expects a space
separated string as the result.
The `COMP_WORDS` and `COMP_CWORD` BASH environment variables are used
to get information about the cli input. Please refer to the BASH
man-page for more information about these variables.
Subcommand options are saved as pairs. A pair consists of
the long option string (e.g. '--exclude') and a boolean
value indicating if the option requires arguments. When printing to
stdout, an equal sign is appended to options which require arguments.
Note: If debugging this function, it is recommended to write the debug
output in a separate file. Otherwise the debug output will be treated
and formatted as potential completion suggestions.
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'DJANGO_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
curr = cwords[cword - 1]
except IndexError:
curr = ''
subcommands = [*get_commands(), 'help']
options = [('--help', False)]
# subcommand
if cword == 1:
print(' '.join(sorted(filter(lambda x: x.startswith(curr), subcommands))))
# subcommand options
# special case: the 'help' subcommand has no options
elif cwords[0] in subcommands and cwords[0] != 'help':
subcommand_cls = self.fetch_command(cwords[0])
# special case: add the names of installed apps to options
if cwords[0] in ('dumpdata', 'sqlmigrate', 'sqlsequencereset', 'test'):
try:
app_configs = apps.get_app_configs()
# Get the last part of the dotted path as the app name.
options.extend((app_config.label, 0) for app_config in app_configs)
except ImportError:
# Fail silently if DJANGO_SETTINGS_MODULE isn't set. The
# user will find out once they execute the command.
pass
parser = subcommand_cls.create_parser('', cwords[0])
options.extend(
(min(s_opt.option_strings), s_opt.nargs != 0)
for s_opt in parser._actions if s_opt.option_strings
)
# filter out previously specified options from available options
prev_opts = {x.split('=')[0] for x in cwords[1:cword - 1]}
options = (opt for opt in options if opt[0] not in prev_opts)
# filter options by current input
options = sorted((k, v) for k, v in options if k.startswith(curr))
for opt_label, require_arg in options:
# append '=' to options which require args
if require_arg:
opt_label += '='
print(opt_label)
# Exit code of the bash completion function is never passed back to
# the user, so it's safe to always exit with 0.
# For more details see #25420.
sys.exit(0) | [
"def",
"autocomplete",
"(",
"self",
")",
":",
"# Don't complete if user hasn't sourced bash_completion file.",
"if",
"'DJANGO_AUTO_COMPLETE'",
"not",
"in",
"os",
".",
"environ",
":",
"return",
"cwords",
"=",
"os",
".",
"environ",
"[",
"'COMP_WORDS'",
"]",
".",
"split",
"(",
")",
"[",
"1",
":",
"]",
"cword",
"=",
"int",
"(",
"os",
".",
"environ",
"[",
"'COMP_CWORD'",
"]",
")",
"try",
":",
"curr",
"=",
"cwords",
"[",
"cword",
"-",
"1",
"]",
"except",
"IndexError",
":",
"curr",
"=",
"''",
"subcommands",
"=",
"[",
"*",
"get_commands",
"(",
")",
",",
"'help'",
"]",
"options",
"=",
"[",
"(",
"'--help'",
",",
"False",
")",
"]",
"# subcommand",
"if",
"cword",
"==",
"1",
":",
"print",
"(",
"' '",
".",
"join",
"(",
"sorted",
"(",
"filter",
"(",
"lambda",
"x",
":",
"x",
".",
"startswith",
"(",
"curr",
")",
",",
"subcommands",
")",
")",
")",
")",
"# subcommand options",
"# special case: the 'help' subcommand has no options",
"elif",
"cwords",
"[",
"0",
"]",
"in",
"subcommands",
"and",
"cwords",
"[",
"0",
"]",
"!=",
"'help'",
":",
"subcommand_cls",
"=",
"self",
".",
"fetch_command",
"(",
"cwords",
"[",
"0",
"]",
")",
"# special case: add the names of installed apps to options",
"if",
"cwords",
"[",
"0",
"]",
"in",
"(",
"'dumpdata'",
",",
"'sqlmigrate'",
",",
"'sqlsequencereset'",
",",
"'test'",
")",
":",
"try",
":",
"app_configs",
"=",
"apps",
".",
"get_app_configs",
"(",
")",
"# Get the last part of the dotted path as the app name.",
"options",
".",
"extend",
"(",
"(",
"app_config",
".",
"label",
",",
"0",
")",
"for",
"app_config",
"in",
"app_configs",
")",
"except",
"ImportError",
":",
"# Fail silently if DJANGO_SETTINGS_MODULE isn't set. The",
"# user will find out once they execute the command.",
"pass",
"parser",
"=",
"subcommand_cls",
".",
"create_parser",
"(",
"''",
",",
"cwords",
"[",
"0",
"]",
")",
"options",
".",
"extend",
"(",
"(",
"min",
"(",
"s_opt",
".",
"option_strings",
")",
",",
"s_opt",
".",
"nargs",
"!=",
"0",
")",
"for",
"s_opt",
"in",
"parser",
".",
"_actions",
"if",
"s_opt",
".",
"option_strings",
")",
"# filter out previously specified options from available options",
"prev_opts",
"=",
"{",
"x",
".",
"split",
"(",
"'='",
")",
"[",
"0",
"]",
"for",
"x",
"in",
"cwords",
"[",
"1",
":",
"cword",
"-",
"1",
"]",
"}",
"options",
"=",
"(",
"opt",
"for",
"opt",
"in",
"options",
"if",
"opt",
"[",
"0",
"]",
"not",
"in",
"prev_opts",
")",
"# filter options by current input",
"options",
"=",
"sorted",
"(",
"(",
"k",
",",
"v",
")",
"for",
"k",
",",
"v",
"in",
"options",
"if",
"k",
".",
"startswith",
"(",
"curr",
")",
")",
"for",
"opt_label",
",",
"require_arg",
"in",
"options",
":",
"# append '=' to options which require args",
"if",
"require_arg",
":",
"opt_label",
"+=",
"'='",
"print",
"(",
"opt_label",
")",
"# Exit code of the bash completion function is never passed back to",
"# the user, so it's safe to always exit with 0.",
"# For more details see #25420.",
"sys",
".",
"exit",
"(",
"0",
")"
] | [
259,
4
] | [
331,
19
] | python | en | ['en', 'error', 'th'] | False |
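A rough way to exercise autocomplete from Python, mirroring what the bash completion helper does: re-run the program with DJANGO_AUTO_COMPLETE and the COMP_* variables set, then read the suggestions from stdout. This assumes Django is installed and runnable via `python -m django`.

import os
import subprocess
import sys

env = dict(
    os.environ,
    DJANGO_AUTO_COMPLETE="1",
    COMP_WORDS="django-admin mig",  # the partial command line
    COMP_CWORD="1",                 # index of the word being completed
)
result = subprocess.run(
    [sys.executable, "-m", "django"],
    env=env,
    capture_output=True,
    text=True,
)
print(result.stdout.strip())  # e.g. 'migrate' -- subcommands starting with 'mig'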
ManagementUtility.execute | (self) |
Given the command-line arguments, figure out which subcommand is being
run, create a parser appropriate to that command, and run it.
|
Given the command-line arguments, figure out which subcommand is being
run, create a parser appropriate to that command, and run it.
| def execute(self):
"""
Given the command-line arguments, figure out which subcommand is being
run, create a parser appropriate to that command, and run it.
"""
try:
subcommand = self.argv[1]
except IndexError:
subcommand = 'help' # Display help if no arguments were given.
# Preprocess options to extract --settings and --pythonpath.
# These options could affect the commands that are available, so they
# must be processed early.
parser = CommandParser(
prog=self.prog_name,
usage='%(prog)s subcommand [options] [args]',
add_help=False,
allow_abbrev=False,
)
parser.add_argument('--settings')
parser.add_argument('--pythonpath')
parser.add_argument('args', nargs='*') # catch-all
try:
options, args = parser.parse_known_args(self.argv[2:])
handle_default_options(options)
except CommandError:
pass # Ignore any option errors at this point.
try:
settings.INSTALLED_APPS
except ImproperlyConfigured as exc:
self.settings_exception = exc
except ImportError as exc:
self.settings_exception = exc
if settings.configured:
# Start the auto-reloading dev server even if the code is broken.
# The hardcoded condition is a code smell but we can't rely on a
# flag on the command class because we haven't located it yet.
if subcommand == 'runserver' and '--noreload' not in self.argv:
try:
autoreload.check_errors(django.setup)()
except Exception:
# The exception will be raised later in the child process
# started by the autoreloader. Pretend it didn't happen by
# loading an empty list of applications.
apps.all_models = defaultdict(dict)
apps.app_configs = {}
apps.apps_ready = apps.models_ready = apps.ready = True
# Remove options not compatible with the built-in runserver
# (e.g. options for the contrib.staticfiles' runserver).
# Changes here require manually testing as described in
# #27522.
_parser = self.fetch_command('runserver').create_parser('django', 'runserver')
_options, _args = _parser.parse_known_args(self.argv[2:])
for _arg in _args:
self.argv.remove(_arg)
# In all other cases, django.setup() is required to succeed.
else:
django.setup()
self.autocomplete()
if subcommand == 'help':
if '--commands' in args:
sys.stdout.write(self.main_help_text(commands_only=True) + '\n')
elif not options.args:
sys.stdout.write(self.main_help_text() + '\n')
else:
self.fetch_command(options.args[0]).print_help(self.prog_name, options.args[0])
# Special-cases: We want 'django-admin --version' and
# 'django-admin --help' to work, for backwards compatibility.
elif subcommand == 'version' or self.argv[1:] == ['--version']:
sys.stdout.write(django.get_version() + '\n')
elif self.argv[1:] in (['--help'], ['-h']):
sys.stdout.write(self.main_help_text() + '\n')
else:
self.fetch_command(subcommand).run_from_argv(self.argv) | [
"def",
"execute",
"(",
"self",
")",
":",
"try",
":",
"subcommand",
"=",
"self",
".",
"argv",
"[",
"1",
"]",
"except",
"IndexError",
":",
"subcommand",
"=",
"'help'",
"# Display help if no arguments were given.",
"# Preprocess options to extract --settings and --pythonpath.",
"# These options could affect the commands that are available, so they",
"# must be processed early.",
"parser",
"=",
"CommandParser",
"(",
"prog",
"=",
"self",
".",
"prog_name",
",",
"usage",
"=",
"'%(prog)s subcommand [options] [args]'",
",",
"add_help",
"=",
"False",
",",
"allow_abbrev",
"=",
"False",
",",
")",
"parser",
".",
"add_argument",
"(",
"'--settings'",
")",
"parser",
".",
"add_argument",
"(",
"'--pythonpath'",
")",
"parser",
".",
"add_argument",
"(",
"'args'",
",",
"nargs",
"=",
"'*'",
")",
"# catch-all",
"try",
":",
"options",
",",
"args",
"=",
"parser",
".",
"parse_known_args",
"(",
"self",
".",
"argv",
"[",
"2",
":",
"]",
")",
"handle_default_options",
"(",
"options",
")",
"except",
"CommandError",
":",
"pass",
"# Ignore any option errors at this point.",
"try",
":",
"settings",
".",
"INSTALLED_APPS",
"except",
"ImproperlyConfigured",
"as",
"exc",
":",
"self",
".",
"settings_exception",
"=",
"exc",
"except",
"ImportError",
"as",
"exc",
":",
"self",
".",
"settings_exception",
"=",
"exc",
"if",
"settings",
".",
"configured",
":",
"# Start the auto-reloading dev server even if the code is broken.",
"# The hardcoded condition is a code smell but we can't rely on a",
"# flag on the command class because we haven't located it yet.",
"if",
"subcommand",
"==",
"'runserver'",
"and",
"'--noreload'",
"not",
"in",
"self",
".",
"argv",
":",
"try",
":",
"autoreload",
".",
"check_errors",
"(",
"django",
".",
"setup",
")",
"(",
")",
"except",
"Exception",
":",
"# The exception will be raised later in the child process",
"# started by the autoreloader. Pretend it didn't happen by",
"# loading an empty list of applications.",
"apps",
".",
"all_models",
"=",
"defaultdict",
"(",
"dict",
")",
"apps",
".",
"app_configs",
"=",
"{",
"}",
"apps",
".",
"apps_ready",
"=",
"apps",
".",
"models_ready",
"=",
"apps",
".",
"ready",
"=",
"True",
"# Remove options not compatible with the built-in runserver",
"# (e.g. options for the contrib.staticfiles' runserver).",
"# Changes here require manually testing as described in",
"# #27522.",
"_parser",
"=",
"self",
".",
"fetch_command",
"(",
"'runserver'",
")",
".",
"create_parser",
"(",
"'django'",
",",
"'runserver'",
")",
"_options",
",",
"_args",
"=",
"_parser",
".",
"parse_known_args",
"(",
"self",
".",
"argv",
"[",
"2",
":",
"]",
")",
"for",
"_arg",
"in",
"_args",
":",
"self",
".",
"argv",
".",
"remove",
"(",
"_arg",
")",
"# In all other cases, django.setup() is required to succeed.",
"else",
":",
"django",
".",
"setup",
"(",
")",
"self",
".",
"autocomplete",
"(",
")",
"if",
"subcommand",
"==",
"'help'",
":",
"if",
"'--commands'",
"in",
"args",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"main_help_text",
"(",
"commands_only",
"=",
"True",
")",
"+",
"'\\n'",
")",
"elif",
"not",
"options",
".",
"args",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"main_help_text",
"(",
")",
"+",
"'\\n'",
")",
"else",
":",
"self",
".",
"fetch_command",
"(",
"options",
".",
"args",
"[",
"0",
"]",
")",
".",
"print_help",
"(",
"self",
".",
"prog_name",
",",
"options",
".",
"args",
"[",
"0",
"]",
")",
"# Special-cases: We want 'django-admin --version' and",
"# 'django-admin --help' to work, for backwards compatibility.",
"elif",
"subcommand",
"==",
"'version'",
"or",
"self",
".",
"argv",
"[",
"1",
":",
"]",
"==",
"[",
"'--version'",
"]",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"django",
".",
"get_version",
"(",
")",
"+",
"'\\n'",
")",
"elif",
"self",
".",
"argv",
"[",
"1",
":",
"]",
"in",
"(",
"[",
"'--help'",
"]",
",",
"[",
"'-h'",
"]",
")",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"main_help_text",
"(",
")",
"+",
"'\\n'",
")",
"else",
":",
"self",
".",
"fetch_command",
"(",
"subcommand",
")",
".",
"run_from_argv",
"(",
"self",
".",
"argv",
")"
] | [
333,
4
] | [
412,
67
] | python | en | ['en', 'error', 'th'] | False |
command_translate | (bot, user, channel, args) | Translates text with Google Translate to English.
Usage: translate <text>. | Translates text with Google Translate to English.
Usage: translate <text>. | def command_translate(bot, user, channel, args):
"""Translates text with Google Translate to English.
Usage: translate <text>."""
gtrans = requests.post(gturl, data=gtbody % args, headers=gtheaders)
json = gtrans.json()
translated = json["sentences"][0]["trans"]
bot.say(channel, "From " + json["src"] + ": " + translated) | [
"def",
"command_translate",
"(",
"bot",
",",
"user",
",",
"channel",
",",
"args",
")",
":",
"gtrans",
"=",
"requests",
".",
"post",
"(",
"gturl",
",",
"data",
"=",
"gtbody",
"%",
"args",
",",
"headers",
"=",
"gtheaders",
")",
"json",
"=",
"gtrans",
".",
"json",
"(",
")",
"translated",
"=",
"json",
"[",
"\"sentences\"",
"]",
"[",
"0",
"]",
"[",
"\"trans\"",
"]",
"bot",
".",
"say",
"(",
"channel",
",",
"\"From \"",
"+",
"json",
"[",
"\"src\"",
"]",
"+",
"\": \"",
"+",
"translated",
")"
] | [
24,
0
] | [
30,
63
] | python | en | ['en', 'en', 'en'] | True |
command_transliterate | (bot, user, channel, args) | Transliterates text with Google Translate to English.
Usage: transliterate <text>. | Transliterates text with Google Translate to English.
Usage: transliterate <text>. | def command_transliterate(bot, user, channel, args):
"""Transliterates text with Google Translate to English.
Usage: transliterate <text>."""
gtrans = requests.post(gturl, data=gtbody % args, headers=gtheaders)
json = gtrans.json()
transliterated = json["sentences"][0]["src_translit"]
if transliterated == "":
bot.say(channel, "No transliteration available.")
else:
bot.say(channel, "From " + json["src"] + ": " + transliterated) | [
"def",
"command_transliterate",
"(",
"bot",
",",
"user",
",",
"channel",
",",
"args",
")",
":",
"gtrans",
"=",
"requests",
".",
"post",
"(",
"gturl",
",",
"data",
"=",
"gtbody",
"%",
"args",
",",
"headers",
"=",
"gtheaders",
")",
"json",
"=",
"gtrans",
".",
"json",
"(",
")",
"transliterated",
"=",
"json",
"[",
"\"sentences\"",
"]",
"[",
"0",
"]",
"[",
"\"src_translit\"",
"]",
"if",
"transliterated",
"==",
"\"\"",
":",
"bot",
".",
"say",
"(",
"channel",
",",
"\"No transliteration available.\"",
")",
"else",
":",
"bot",
".",
"say",
"(",
"channel",
",",
"\"From \"",
"+",
"json",
"[",
"\"src\"",
"]",
"+",
"\": \"",
"+",
"transliterated",
")"
] | [
33,
0
] | [
42,
71
] | python | en | ['en', 'en', 'en'] | True |
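The two bot commands above depend on module-level gturl, gtbody and gtheaders that are not shown in this excerpt, so their request cannot be reproduced here; the fabricated payload below only illustrates how the fields they read ('src', 'sentences', 'trans', 'src_translit') are picked out of the parsed JSON.

# Fabricated example of the JSON shape the commands expect; not a real API response.
sample = {
    "src": "ru",
    "sentences": [
        {"trans": "hello world", "src_translit": "privet mir"},
    ],
}

translated = sample["sentences"][0]["trans"]
transliterated = sample["sentences"][0]["src_translit"]
print("From " + sample["src"] + ": " + translated)
if transliterated:
    print("From " + sample["src"] + ": " + transliterated)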
Timestamp.__init__ | (self, seconds, nanoseconds=0) | Initialize a Timestamp object.
:param int seconds:
Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds).
May be negative.
:param int nanoseconds:
Number of nanoseconds to add to `seconds` to get fractional time.
Maximum is 999_999_999. Default is 0.
Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns.
| Initialize a Timestamp object. | def __init__(self, seconds, nanoseconds=0):
"""Initialize a Timestamp object.
:param int seconds:
Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970, minus leap seconds).
May be negative.
:param int nanoseconds:
Number of nanoseconds to add to `seconds` to get fractional time.
Maximum is 999_999_999. Default is 0.
Note: Negative times (before the UNIX epoch) are represented as negative seconds + positive ns.
"""
if not isinstance(seconds, int_types):
raise TypeError("seconds must be an interger")
if not isinstance(nanoseconds, int_types):
raise TypeError("nanoseconds must be an integer")
if not (0 <= nanoseconds < 10 ** 9):
raise ValueError(
"nanoseconds must be a non-negative integer less than 999999999."
)
self.seconds = seconds
self.nanoseconds = nanoseconds | [
"def",
"__init__",
"(",
"self",
",",
"seconds",
",",
"nanoseconds",
"=",
"0",
")",
":",
"if",
"not",
"isinstance",
"(",
"seconds",
",",
"int_types",
")",
":",
"raise",
"TypeError",
"(",
"\"seconds must be an interger\"",
")",
"if",
"not",
"isinstance",
"(",
"nanoseconds",
",",
"int_types",
")",
":",
"raise",
"TypeError",
"(",
"\"nanoseconds must be an integer\"",
")",
"if",
"not",
"(",
"0",
"<=",
"nanoseconds",
"<",
"10",
"**",
"9",
")",
":",
"raise",
"ValueError",
"(",
"\"nanoseconds must be a non-negative integer less than 999999999.\"",
")",
"self",
".",
"seconds",
"=",
"seconds",
"self",
".",
"nanoseconds",
"=",
"nanoseconds"
] | [
44,
4
] | [
66,
38
] | python | en | ['en', 'en', 'en'] | True |
Timestamp.__repr__ | (self) | String representation of Timestamp. | String representation of Timestamp. | def __repr__(self):
"""String representation of Timestamp."""
return "Timestamp(seconds={0}, nanoseconds={1})".format(
self.seconds, self.nanoseconds
) | [
"def",
"__repr__",
"(",
"self",
")",
":",
"return",
"\"Timestamp(seconds={0}, nanoseconds={1})\"",
".",
"format",
"(",
"self",
".",
"seconds",
",",
"self",
".",
"nanoseconds",
")"
] | [
68,
4
] | [
72,
9
] | python | en | ['en', 'kk', 'en'] | True |
Timestamp.__eq__ | (self, other) | Check for equality with another Timestamp object | Check for equality with another Timestamp object | def __eq__(self, other):
"""Check for equality with another Timestamp object"""
if type(other) is self.__class__:
return (
self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
)
return False | [
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"type",
"(",
"other",
")",
"is",
"self",
".",
"__class__",
":",
"return",
"(",
"self",
".",
"seconds",
"==",
"other",
".",
"seconds",
"and",
"self",
".",
"nanoseconds",
"==",
"other",
".",
"nanoseconds",
")",
"return",
"False"
] | [
74,
4
] | [
80,
20
] | python | en | ['en', 'en', 'en'] | True |
Timestamp.__ne__ | (self, other) | not-equals method (see :func:`__eq__()`) | not-equals method (see :func:`__eq__()`) | def __ne__(self, other):
"""not-equals method (see :func:`__eq__()`)"""
return not self.__eq__(other) | [
"def",
"__ne__",
"(",
"self",
",",
"other",
")",
":",
"return",
"not",
"self",
".",
"__eq__",
"(",
"other",
")"
] | [
82,
4
] | [
84,
37
] | python | en | ['en', 'en', 'en'] | True |
Timestamp.from_bytes | (b) | Unpack bytes into a `Timestamp` object.
Used for pure-Python msgpack unpacking.
:param b: Payload from msgpack ext message with code -1
:type b: bytes
:returns: Timestamp object unpacked from msgpack ext payload
:rtype: Timestamp
| Unpack bytes into a `Timestamp` object. | def from_bytes(b):
"""Unpack bytes into a `Timestamp` object.
Used for pure-Python msgpack unpacking.
:param b: Payload from msgpack ext message with code -1
:type b: bytes
:returns: Timestamp object unpacked from msgpack ext payload
:rtype: Timestamp
"""
if len(b) == 4:
seconds = struct.unpack("!L", b)[0]
nanoseconds = 0
elif len(b) == 8:
data64 = struct.unpack("!Q", b)[0]
seconds = data64 & 0x00000003FFFFFFFF
nanoseconds = data64 >> 34
elif len(b) == 12:
nanoseconds, seconds = struct.unpack("!Iq", b)
else:
raise ValueError(
"Timestamp type can only be created from 32, 64, or 96-bit byte objects"
)
return Timestamp(seconds, nanoseconds) | [
"def",
"from_bytes",
"(",
"b",
")",
":",
"if",
"len",
"(",
"b",
")",
"==",
"4",
":",
"seconds",
"=",
"struct",
".",
"unpack",
"(",
"\"!L\"",
",",
"b",
")",
"[",
"0",
"]",
"nanoseconds",
"=",
"0",
"elif",
"len",
"(",
"b",
")",
"==",
"8",
":",
"data64",
"=",
"struct",
".",
"unpack",
"(",
"\"!Q\"",
",",
"b",
")",
"[",
"0",
"]",
"seconds",
"=",
"data64",
"&",
"0x00000003FFFFFFFF",
"nanoseconds",
"=",
"data64",
">>",
"34",
"elif",
"len",
"(",
"b",
")",
"==",
"12",
":",
"nanoseconds",
",",
"seconds",
"=",
"struct",
".",
"unpack",
"(",
"\"!Iq\"",
",",
"b",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Timestamp type can only be created from 32, 64, or 96-bit byte objects\"",
")",
"return",
"Timestamp",
"(",
"seconds",
",",
"nanoseconds",
")"
] | [
90,
4
] | [
114,
46
] | python | en | ['pt', 'en', 'en'] | True |
Timestamp.to_bytes | (self) | Pack this Timestamp object into bytes.
Used for pure-Python msgpack packing.
:returns data: Payload for EXT message with code -1 (timestamp type)
:rtype: bytes
| Pack this Timestamp object into bytes. | def to_bytes(self):
"""Pack this Timestamp object into bytes.
Used for pure-Python msgpack packing.
:returns data: Payload for EXT message with code -1 (timestamp type)
:rtype: bytes
"""
if (self.seconds >> 34) == 0: # seconds is non-negative and fits in 34 bits
data64 = self.nanoseconds << 34 | self.seconds
if data64 & 0xFFFFFFFF00000000 == 0:
# nanoseconds is zero and seconds < 2**32, so timestamp 32
data = struct.pack("!L", data64)
else:
# timestamp 64
data = struct.pack("!Q", data64)
else:
# timestamp 96
data = struct.pack("!Iq", self.nanoseconds, self.seconds)
return data | [
"def",
"to_bytes",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"seconds",
">>",
"34",
")",
"==",
"0",
":",
"# seconds is non-negative and fits in 34 bits",
"data64",
"=",
"self",
".",
"nanoseconds",
"<<",
"34",
"|",
"self",
".",
"seconds",
"if",
"data64",
"&",
"0xFFFFFFFF00000000",
"==",
"0",
":",
"# nanoseconds is zero and seconds < 2**32, so timestamp 32",
"data",
"=",
"struct",
".",
"pack",
"(",
"\"!L\"",
",",
"data64",
")",
"else",
":",
"# timestamp 64",
"data",
"=",
"struct",
".",
"pack",
"(",
"\"!Q\"",
",",
"data64",
")",
"else",
":",
"# timestamp 96",
"data",
"=",
"struct",
".",
"pack",
"(",
"\"!Iq\"",
",",
"self",
".",
"nanoseconds",
",",
"self",
".",
"seconds",
")",
"return",
"data"
] | [
116,
4
] | [
135,
19
] | python | en | ['en', 'en', 'en'] | True |
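As a quick illustration of the 32/64/96-bit packing rules described above, the following sketch uses the Timestamp class as exported by msgpack (this assumes msgpack >= 1.0 is installed; only the methods shown in these rows are used):

from msgpack import Timestamp

small = Timestamp(seconds=1)                   # non-negative, fits in 34 bits, zero nanoseconds
frac = Timestamp(seconds=1, nanoseconds=500)   # non-negative seconds plus a nanosecond part
old = Timestamp(seconds=-1)                    # negative seconds force the widest layout

print(len(small.to_bytes()))   # 4  -> "timestamp 32"
print(len(frac.to_bytes()))    # 8  -> "timestamp 64"
print(len(old.to_bytes()))     # 12 -> "timestamp 96"

# from_bytes() reverses the packing exactly:
assert Timestamp.from_bytes(frac.to_bytes()) == frac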
Timestamp.from_unix | (unix_sec) | Create a Timestamp from posix timestamp in seconds.
:param unix_sec: Posix timestamp in seconds.
:type unix_sec: int or float.
| Create a Timestamp from posix timestamp in seconds. | def from_unix(unix_sec):
"""Create a Timestamp from posix timestamp in seconds.
:param unix_sec: Posix timestamp in seconds.
:type unix_sec: int or float.
"""
seconds = int(unix_sec // 1)
nanoseconds = int((unix_sec % 1) * 10 ** 9)
return Timestamp(seconds, nanoseconds) | [
"def",
"from_unix",
"(",
"unix_sec",
")",
":",
"seconds",
"=",
"int",
"(",
"unix_sec",
"//",
"1",
")",
"nanoseconds",
"=",
"int",
"(",
"(",
"unix_sec",
"%",
"1",
")",
"*",
"10",
"**",
"9",
")",
"return",
"Timestamp",
"(",
"seconds",
",",
"nanoseconds",
")"
] | [
138,
4
] | [
146,
46
] | python | en | ['en', 'en', 'en'] | True |
Timestamp.to_unix | (self) | Get the timestamp as a floating-point value.
:returns: posix timestamp
:rtype: float
| Get the timestamp as a floating-point value. | def to_unix(self):
"""Get the timestamp as a floating-point value.
:returns: posix timestamp
:rtype: float
"""
return self.seconds + self.nanoseconds / 1e9 | [
"def",
"to_unix",
"(",
"self",
")",
":",
"return",
"self",
".",
"seconds",
"+",
"self",
".",
"nanoseconds",
"/",
"1e9"
] | [
148,
4
] | [
154,
52
] | python | en | ['en', 'en', 'en'] | True |
Timestamp.from_unix_nano | (unix_ns) | Create a Timestamp from posix timestamp in nanoseconds.
:param int unix_ns: Posix timestamp in nanoseconds.
:rtype: Timestamp
| Create a Timestamp from posix timestamp in nanoseconds. | def from_unix_nano(unix_ns):
"""Create a Timestamp from posix timestamp in nanoseconds.
:param int unix_ns: Posix timestamp in nanoseconds.
:rtype: Timestamp
"""
return Timestamp(*divmod(unix_ns, 10 ** 9)) | [
"def",
"from_unix_nano",
"(",
"unix_ns",
")",
":",
"return",
"Timestamp",
"(",
"*",
"divmod",
"(",
"unix_ns",
",",
"10",
"**",
"9",
")",
")"
] | [
157,
4
] | [
163,
51
] | python | en | ['en', 'en', 'en'] | True |
Timestamp.to_unix_nano | (self) | Get the timestamp as a unixtime in nanoseconds.
:returns: posix timestamp in nanoseconds
:rtype: int
| Get the timestamp as a unixtime in nanoseconds. | def to_unix_nano(self):
"""Get the timestamp as a unixtime in nanoseconds.
:returns: posix timestamp in nanoseconds
:rtype: int
"""
return self.seconds * 10 ** 9 + self.nanoseconds | [
"def",
"to_unix_nano",
"(",
"self",
")",
":",
"return",
"self",
".",
"seconds",
"*",
"10",
"**",
"9",
"+",
"self",
".",
"nanoseconds"
] | [
165,
4
] | [
171,
56
] | python | en | ['en', 'en', 'en'] | True |
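A short example of the integer versus float conversions above (again assuming the msgpack Timestamp class is importable):

from msgpack import Timestamp

ts = Timestamp.from_unix_nano(1_600_000_000_123_456_789)
print(ts.seconds, ts.nanoseconds)   # 1600000000 123456789
print(ts.to_unix_nano())            # exact integer round-trip: 1600000000123456789
print(ts.to_unix())                 # float form; a 64-bit float cannot represent every nanosecond exactly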
Timestamp.to_datetime | (self) | Get the timestamp as a UTC datetime.
Python 2 is not supported.
:rtype: datetime.
| Get the timestamp as a UTC datetime. | def to_datetime(self):
"""Get the timestamp as a UTC datetime.
Python 2 is not supported.
:rtype: datetime.
"""
return datetime.datetime.fromtimestamp(0, _utc) + datetime.timedelta(
seconds=self.to_unix()
) | [
"def",
"to_datetime",
"(",
"self",
")",
":",
"return",
"datetime",
".",
"datetime",
".",
"fromtimestamp",
"(",
"0",
",",
"_utc",
")",
"+",
"datetime",
".",
"timedelta",
"(",
"seconds",
"=",
"self",
".",
"to_unix",
"(",
")",
")"
] | [
173,
4
] | [
182,
9
] | python | en | ['en', 'en', 'en'] | True |
Timestamp.from_datetime | (dt) | Create a Timestamp from datetime with tzinfo.
Python 2 is not supported.
:rtype: Timestamp
| Create a Timestamp from datetime with tzinfo. | def from_datetime(dt):
"""Create a Timestamp from datetime with tzinfo.
Python 2 is not supported.
:rtype: Timestamp
"""
return Timestamp.from_unix(dt.timestamp()) | [
"def",
"from_datetime",
"(",
"dt",
")",
":",
"return",
"Timestamp",
".",
"from_unix",
"(",
"dt",
".",
"timestamp",
"(",
")",
")"
] | [
185,
4
] | [
192,
50
] | python | en | ['en', 'en', 'en'] | True |
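The datetime helpers round-trip an aware UTC datetime; a minimal sketch (assuming msgpack >= 1.0 and Python 3):

import datetime
from msgpack import Timestamp

dt = datetime.datetime(2021, 1, 1, tzinfo=datetime.timezone.utc)  # must be timezone-aware
ts = Timestamp.from_datetime(dt)
assert ts.to_datetime() == dt       # to_datetime() returns an aware UTC datetime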
_make_catapult | (C, x, y) | Builds a catapult. | Builds a catapult. | def _make_catapult(C, x, y):
"""Builds a catapult."""
# Base of the catapult.
base = C.add('static standingsticks ', scale=0.1) \
.set_bottom(y * C.scene.height) \
.set_center_x(x * C.scene.width)
# Hinge and top line.
bar_center_x = base.left + (base.right - base.left) / 2.
ball = C.add('static ball', scale=0.05) \
.set_bottom(base.top) \
.set_center_x(bar_center_x)
line = C.add_box(height=3, width=120) \
.set_center_x(bar_center_x) \
.set_bottom(ball.top)
# Ball that needs to move.
top_ball = C.add('dynamic ball', scale=0.04) \
.set_bottom(line.top)
top_ball.set_left(line.left)
return top_ball, line | [
"def",
"_make_catapult",
"(",
"C",
",",
"x",
",",
"y",
")",
":",
"# Base of the catapult.",
"base",
"=",
"C",
".",
"add",
"(",
"'static standingsticks '",
",",
"scale",
"=",
"0.1",
")",
".",
"set_bottom",
"(",
"y",
"*",
"C",
".",
"scene",
".",
"height",
")",
".",
"set_center_x",
"(",
"x",
"*",
"C",
".",
"scene",
".",
"width",
")",
"# Hinge and top line.",
"bar_center_x",
"=",
"base",
".",
"left",
"+",
"(",
"base",
".",
"right",
"-",
"base",
".",
"left",
")",
"/",
"2.",
"ball",
"=",
"C",
".",
"add",
"(",
"'static ball'",
",",
"scale",
"=",
"0.05",
")",
".",
"set_bottom",
"(",
"base",
".",
"top",
")",
".",
"set_center_x",
"(",
"bar_center_x",
")",
"line",
"=",
"C",
".",
"add_box",
"(",
"height",
"=",
"3",
",",
"width",
"=",
"120",
")",
".",
"set_center_x",
"(",
"bar_center_x",
")",
".",
"set_bottom",
"(",
"ball",
".",
"top",
")",
"# Ball that needs to move.",
"top_ball",
"=",
"C",
".",
"add",
"(",
"'dynamic ball'",
",",
"scale",
"=",
"0.04",
")",
".",
"set_bottom",
"(",
"line",
".",
"top",
")",
"top_ball",
".",
"set_left",
"(",
"line",
".",
"left",
")",
"return",
"top_ball",
",",
"line"
] | [
45,
0
] | [
67,
25
] | python | en | ['en', 'en', 'en'] | True |
salted_hmac | (key_salt, value, secret=None, *, algorithm='sha1') |
Return the HMAC of 'value', using a key generated from key_salt and a
secret (which defaults to settings.SECRET_KEY). Default algorithm is SHA1,
but any algorithm name supported by hashlib can be passed.
A different key_salt should be passed in for every application of HMAC.
|
Return the HMAC of 'value', using a key generated from key_salt and a
secret (which defaults to settings.SECRET_KEY). Default algorithm is SHA1,
but any algorithm name supported by hashlib can be passed. | def salted_hmac(key_salt, value, secret=None, *, algorithm='sha1'):
"""
Return the HMAC of 'value', using a key generated from key_salt and a
secret (which defaults to settings.SECRET_KEY). Default algorithm is SHA1,
but any algorithm name supported by hashlib can be passed.
A different key_salt should be passed in for every application of HMAC.
"""
if secret is None:
secret = settings.SECRET_KEY
key_salt = force_bytes(key_salt)
secret = force_bytes(secret)
try:
hasher = getattr(hashlib, algorithm)
except AttributeError as e:
raise InvalidAlgorithm(
'%r is not an algorithm accepted by the hashlib module.'
% algorithm
) from e
# We need to generate a derived key from our base key. We can do this by
# passing the key_salt and our base key through a pseudo-random function.
key = hasher(key_salt + secret).digest()
# If len(key_salt + secret) > block size of the hash algorithm, the above
# line is redundant and could be replaced by key = key_salt + secret, since
# the hmac module does the same thing for keys longer than the block size.
# However, we need to ensure that we *always* do this.
return hmac.new(key, msg=force_bytes(value), digestmod=hasher) | [
"def",
"salted_hmac",
"(",
"key_salt",
",",
"value",
",",
"secret",
"=",
"None",
",",
"*",
",",
"algorithm",
"=",
"'sha1'",
")",
":",
"if",
"secret",
"is",
"None",
":",
"secret",
"=",
"settings",
".",
"SECRET_KEY",
"key_salt",
"=",
"force_bytes",
"(",
"key_salt",
")",
"secret",
"=",
"force_bytes",
"(",
"secret",
")",
"try",
":",
"hasher",
"=",
"getattr",
"(",
"hashlib",
",",
"algorithm",
")",
"except",
"AttributeError",
"as",
"e",
":",
"raise",
"InvalidAlgorithm",
"(",
"'%r is not an algorithm accepted by the hashlib module.'",
"%",
"algorithm",
")",
"from",
"e",
"# We need to generate a derived key from our base key. We can do this by",
"# passing the key_salt and our base key through a pseudo-random function.",
"key",
"=",
"hasher",
"(",
"key_salt",
"+",
"secret",
")",
".",
"digest",
"(",
")",
"# If len(key_salt + secret) > block size of the hash algorithm, the above",
"# line is redundant and could be replaced by key = key_salt + secret, since",
"# the hmac module does the same thing for keys longer than the block size.",
"# However, we need to ensure that we *always* do this.",
"return",
"hmac",
".",
"new",
"(",
"key",
",",
"msg",
"=",
"force_bytes",
"(",
"value",
")",
",",
"digestmod",
"=",
"hasher",
")"
] | [
18,
0
] | [
45,
66
] | python | en | ['en', 'error', 'th'] | False |
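The derived-key idea in salted_hmac can be sketched with the standard library alone; the key_salt, secret, and value below are made-up placeholders, not Django's API:

import hashlib
import hmac

key_salt = b"myapp.views.password_reset"      # hypothetical salt, one per application of HMAC
secret = b"not-a-real-SECRET_KEY"              # stands in for settings.SECRET_KEY
value = b"value to authenticate"

key = hashlib.sha1(key_salt + secret).digest()            # derive the HMAC key, as in the function above
mac = hmac.new(key, msg=value, digestmod=hashlib.sha1)
print(mac.hexdigest())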
get_random_string | (length=NOT_PROVIDED, allowed_chars=RANDOM_STRING_CHARS) |
Return a securely generated random string.
The bit length of the returned value can be calculated with the formula:
log_2(len(allowed_chars)^length)
For example, with default `allowed_chars` (26+26+10), this gives:
* length: 12, bit length =~ 71 bits
* length: 22, bit length =~ 131 bits
|
Return a securely generated random string. | def get_random_string(length=NOT_PROVIDED, allowed_chars=RANDOM_STRING_CHARS):
"""
Return a securely generated random string.
The bit length of the returned value can be calculated with the formula:
log_2(len(allowed_chars)^length)
For example, with default `allowed_chars` (26+26+10), this gives:
* length: 12, bit length =~ 71 bits
* length: 22, bit length =~ 131 bits
"""
if length is NOT_PROVIDED:
warnings.warn(
'Not providing a length argument is deprecated.',
RemovedInDjango40Warning,
)
length = 12
return ''.join(secrets.choice(allowed_chars) for i in range(length)) | [
"def",
"get_random_string",
"(",
"length",
"=",
"NOT_PROVIDED",
",",
"allowed_chars",
"=",
"RANDOM_STRING_CHARS",
")",
":",
"if",
"length",
"is",
"NOT_PROVIDED",
":",
"warnings",
".",
"warn",
"(",
"'Not providing a length argument is deprecated.'",
",",
"RemovedInDjango40Warning",
",",
")",
"length",
"=",
"12",
"return",
"''",
".",
"join",
"(",
"secrets",
".",
"choice",
"(",
"allowed_chars",
")",
"for",
"i",
"in",
"range",
"(",
"length",
")",
")"
] | [
54,
0
] | [
71,
72
] | python | en | ['en', 'error', 'th'] | False |
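The body above reduces to a one-liner over the secrets module; this stdlib-only sketch uses the same 62-character alphabet described in the docstring (26+26+10):

import secrets
import string

allowed_chars = string.ascii_letters + string.digits
token = ''.join(secrets.choice(allowed_chars) for _ in range(12))
print(token)   # 12 characters drawn from allowed_chars, different each run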
constant_time_compare | (val1, val2) | Return True if the two strings are equal, False otherwise. | Return True if the two strings are equal, False otherwise. | def constant_time_compare(val1, val2):
"""Return True if the two strings are equal, False otherwise."""
return secrets.compare_digest(force_bytes(val1), force_bytes(val2)) | [
"def",
"constant_time_compare",
"(",
"val1",
",",
"val2",
")",
":",
"return",
"secrets",
".",
"compare_digest",
"(",
"force_bytes",
"(",
"val1",
")",
",",
"force_bytes",
"(",
"val2",
")",
")"
] | [
74,
0
] | [
76,
71
] | python | en | ['en', 'en', 'en'] | True |
pbkdf2 | (password, salt, iterations, dklen=0, digest=None) | Return the hash of password using pbkdf2. | Return the hash of password using pbkdf2. | def pbkdf2(password, salt, iterations, dklen=0, digest=None):
"""Return the hash of password using pbkdf2."""
if digest is None:
digest = hashlib.sha256
dklen = dklen or None
password = force_bytes(password)
salt = force_bytes(salt)
return hashlib.pbkdf2_hmac(digest().name, password, salt, iterations, dklen) | [
"def",
"pbkdf2",
"(",
"password",
",",
"salt",
",",
"iterations",
",",
"dklen",
"=",
"0",
",",
"digest",
"=",
"None",
")",
":",
"if",
"digest",
"is",
"None",
":",
"digest",
"=",
"hashlib",
".",
"sha256",
"dklen",
"=",
"dklen",
"or",
"None",
"password",
"=",
"force_bytes",
"(",
"password",
")",
"salt",
"=",
"force_bytes",
"(",
"salt",
")",
"return",
"hashlib",
".",
"pbkdf2_hmac",
"(",
"digest",
"(",
")",
".",
"name",
",",
"password",
",",
"salt",
",",
"iterations",
",",
"dklen",
")"
] | [
79,
0
] | [
86,
80
] | python | en | ['en', 'la', 'en'] | True |
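The wrapper above delegates to hashlib.pbkdf2_hmac; a direct stdlib call looks like this (the password, salt, and iteration count are illustrative only — real code should use a random salt and a cost tuned to current hardware):

import hashlib

dk = hashlib.pbkdf2_hmac('sha256', b'password', b'salt', 100_000)  # dklen defaults to the digest size (32 bytes)
print(dk.hex())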
ConditionalGetMiddleware.needs_etag | (self, response) | Return True if an ETag header should be added to response. | Return True if an ETag header should be added to response. | def needs_etag(self, response):
"""Return True if an ETag header should be added to response."""
cache_control_headers = cc_delim_re.split(response.get('Cache-Control', ''))
return all(header.lower() != 'no-store' for header in cache_control_headers) | [
"def",
"needs_etag",
"(",
"self",
",",
"response",
")",
":",
"cache_control_headers",
"=",
"cc_delim_re",
".",
"split",
"(",
"response",
".",
"get",
"(",
"'Cache-Control'",
",",
"''",
")",
")",
"return",
"all",
"(",
"header",
".",
"lower",
"(",
")",
"!=",
"'no-store'",
"for",
"header",
"in",
"cache_control_headers",
")"
] | [
37,
4
] | [
40,
84
] | python | en | ['en', 'en', 'en'] | True |
pretty_name | (name) | Convert 'first_name' to 'First name'. | Convert 'first_name' to 'First name'. | def pretty_name(name):
"""Convert 'first_name' to 'First name'."""
if not name:
return ''
return name.replace('_', ' ').capitalize() | [
"def",
"pretty_name",
"(",
"name",
")",
":",
"if",
"not",
"name",
":",
"return",
"''",
"return",
"name",
".",
"replace",
"(",
"'_'",
",",
"' '",
")",
".",
"capitalize",
"(",
")"
] | [
10,
0
] | [
14,
46
] | python | en | ['en', 'en', 'en'] | True |
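A tiny usage check, assuming a Django installation where pretty_name lives in django.forms.utils:

from django.forms.utils import pretty_name

print(pretty_name('first_name'))   # 'First name'
print(pretty_name(''))             # ''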
flatatt | (attrs) |
Convert a dictionary of attributes to a single string.
The returned string will contain a leading space followed by key="value",
XML-style pairs. In the case of a boolean value, the key will appear
without a value. It is assumed that the keys do not need to be
XML-escaped. If the passed dictionary is empty, then return an empty
string.
The result is passed through 'mark_safe' (by way of 'format_html_join').
|
Convert a dictionary of attributes to a single string.
The returned string will contain a leading space followed by key="value",
XML-style pairs. In the case of a boolean value, the key will appear
without a value. It is assumed that the keys do not need to be
XML-escaped. If the passed dictionary is empty, then return an empty
string. | def flatatt(attrs):
"""
Convert a dictionary of attributes to a single string.
The returned string will contain a leading space followed by key="value",
XML-style pairs. In the case of a boolean value, the key will appear
without a value. It is assumed that the keys do not need to be
XML-escaped. If the passed dictionary is empty, then return an empty
string.
The result is passed through 'mark_safe' (by way of 'format_html_join').
"""
key_value_attrs = []
boolean_attrs = []
for attr, value in attrs.items():
if isinstance(value, bool):
if value:
boolean_attrs.append((attr,))
elif value is not None:
key_value_attrs.append((attr, value))
return (
format_html_join('', ' {}="{}"', sorted(key_value_attrs)) +
format_html_join('', ' {}', sorted(boolean_attrs))
) | [
"def",
"flatatt",
"(",
"attrs",
")",
":",
"key_value_attrs",
"=",
"[",
"]",
"boolean_attrs",
"=",
"[",
"]",
"for",
"attr",
",",
"value",
"in",
"attrs",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"bool",
")",
":",
"if",
"value",
":",
"boolean_attrs",
".",
"append",
"(",
"(",
"attr",
",",
")",
")",
"elif",
"value",
"is",
"not",
"None",
":",
"key_value_attrs",
".",
"append",
"(",
"(",
"attr",
",",
"value",
")",
")",
"return",
"(",
"format_html_join",
"(",
"''",
",",
"' {}=\"{}\"'",
",",
"sorted",
"(",
"key_value_attrs",
")",
")",
"+",
"format_html_join",
"(",
"''",
",",
"' {}'",
",",
"sorted",
"(",
"boolean_attrs",
")",
")",
")"
] | [
17,
0
] | [
40,
5
] | python | en | ['en', 'error', 'th'] | False |
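A hedged usage sketch for flatatt (assuming a Django installation; the attribute dict is made up). Boolean True values render as bare attributes, False and None values are dropped, and pairs are sorted:

from django.forms.utils import flatatt

attrs = {'id': 'header', 'class': 'news', 'required': True, 'hidden': False, 'title': None}
print(flatatt(attrs))   # ' class="news" id="header" required'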
from_current_timezone | (value) |
When time zone support is enabled, convert naive datetimes
entered in the current time zone to aware datetimes.
|
When time zone support is enabled, convert naive datetimes
entered in the current time zone to aware datetimes.
| def from_current_timezone(value):
"""
When time zone support is enabled, convert naive datetimes
entered in the current time zone to aware datetimes.
"""
if settings.USE_TZ and value is not None and timezone.is_naive(value):
current_timezone = timezone.get_current_timezone()
try:
if (
not timezone._is_pytz_zone(current_timezone) and
timezone._datetime_ambiguous_or_imaginary(value, current_timezone)
):
raise ValueError('Ambiguous or non-existent time.')
return timezone.make_aware(value, current_timezone)
except Exception as exc:
raise ValidationError(
_('%(datetime)s couldn’t be interpreted '
'in time zone %(current_timezone)s; it '
'may be ambiguous or it may not exist.'),
code='ambiguous_timezone',
params={'datetime': value, 'current_timezone': current_timezone}
) from exc
return value | [
"def",
"from_current_timezone",
"(",
"value",
")",
":",
"if",
"settings",
".",
"USE_TZ",
"and",
"value",
"is",
"not",
"None",
"and",
"timezone",
".",
"is_naive",
"(",
"value",
")",
":",
"current_timezone",
"=",
"timezone",
".",
"get_current_timezone",
"(",
")",
"try",
":",
"if",
"(",
"not",
"timezone",
".",
"_is_pytz_zone",
"(",
"current_timezone",
")",
"and",
"timezone",
".",
"_datetime_ambiguous_or_imaginary",
"(",
"value",
",",
"current_timezone",
")",
")",
":",
"raise",
"ValueError",
"(",
"'Ambiguous or non-existent time.'",
")",
"return",
"timezone",
".",
"make_aware",
"(",
"value",
",",
"current_timezone",
")",
"except",
"Exception",
"as",
"exc",
":",
"raise",
"ValidationError",
"(",
"_",
"(",
"'%(datetime)s couldn’t be interpreted '",
"'in time zone %(current_timezone)s; it '",
"'may be ambiguous or it may not exist.'",
")",
",",
"code",
"=",
"'ambiguous_timezone'",
",",
"params",
"=",
"{",
"'datetime'",
":",
"value",
",",
"'current_timezone'",
":",
"current_timezone",
"}",
")",
"from",
"exc",
"return",
"value"
] | [
155,
0
] | [
177,
16
] | python | en | ['en', 'error', 'th'] | False |
to_current_timezone | (value) |
When time zone support is enabled, convert aware datetimes
to naive datetimes in the current time zone for display.
|
When time zone support is enabled, convert aware datetimes
to naive datetimes in the current time zone for display.
| def to_current_timezone(value):
"""
When time zone support is enabled, convert aware datetimes
to naive datetimes in the current time zone for display.
"""
if settings.USE_TZ and value is not None and timezone.is_aware(value):
return timezone.make_naive(value)
return value | [
"def",
"to_current_timezone",
"(",
"value",
")",
":",
"if",
"settings",
".",
"USE_TZ",
"and",
"value",
"is",
"not",
"None",
"and",
"timezone",
".",
"is_aware",
"(",
"value",
")",
":",
"return",
"timezone",
".",
"make_naive",
"(",
"value",
")",
"return",
"value"
] | [
180,
0
] | [
187,
16
] | python | en | ['en', 'error', 'th'] | False |
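The two helpers above convert between naive and aware datetimes around Django's current time zone; this stdlib-only sketch shows the underlying idea for a UTC current time zone (it is not Django's implementation):

import datetime

naive = datetime.datetime(2021, 6, 1, 12, 30)
aware = naive.replace(tzinfo=datetime.timezone.utc)                          # roughly what make_aware does for UTC
naive_again = aware.astimezone(datetime.timezone.utc).replace(tzinfo=None)   # roughly what make_naive does
assert naive_again == naive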
SessionMiddleware.process_response | (self, request, response) |
If request.session was modified, or if the configuration is to save the
session every time, save the changes and set a session cookie or delete
the session cookie if the session has been emptied.
|
If request.session was modified, or if the configuration is to save the
session every time, save the changes and set a session cookie or delete
the session cookie if the session has been emptied.
| def process_response(self, request, response):
"""
If request.session was modified, or if the configuration is to save the
session every time, save the changes and set a session cookie or delete
the session cookie if the session has been emptied.
"""
try:
accessed = request.session.accessed
modified = request.session.modified
empty = request.session.is_empty()
except AttributeError:
return response
# First check if we need to delete this cookie.
# The session should be deleted only if the session is entirely empty.
if settings.SESSION_COOKIE_NAME in request.COOKIES and empty:
response.delete_cookie(
settings.SESSION_COOKIE_NAME,
path=settings.SESSION_COOKIE_PATH,
domain=settings.SESSION_COOKIE_DOMAIN,
samesite=settings.SESSION_COOKIE_SAMESITE,
)
patch_vary_headers(response, ('Cookie',))
else:
if accessed:
patch_vary_headers(response, ('Cookie',))
if (modified or settings.SESSION_SAVE_EVERY_REQUEST) and not empty:
if request.session.get_expire_at_browser_close():
max_age = None
expires = None
else:
max_age = request.session.get_expiry_age()
expires_time = time.time() + max_age
expires = http_date(expires_time)
# Save the session data and refresh the client cookie.
# Skip session save for 500 responses, refs #3881.
if response.status_code != 500:
try:
request.session.save()
except UpdateError:
raise SessionInterrupted(
"The request's session was deleted before the "
"request completed. The user may have logged "
"out in a concurrent request, for example."
)
response.set_cookie(
settings.SESSION_COOKIE_NAME,
request.session.session_key, max_age=max_age,
expires=expires, domain=settings.SESSION_COOKIE_DOMAIN,
path=settings.SESSION_COOKIE_PATH,
secure=settings.SESSION_COOKIE_SECURE or None,
httponly=settings.SESSION_COOKIE_HTTPONLY or None,
samesite=settings.SESSION_COOKIE_SAMESITE,
)
return response | [
"def",
"process_response",
"(",
"self",
",",
"request",
",",
"response",
")",
":",
"try",
":",
"accessed",
"=",
"request",
".",
"session",
".",
"accessed",
"modified",
"=",
"request",
".",
"session",
".",
"modified",
"empty",
"=",
"request",
".",
"session",
".",
"is_empty",
"(",
")",
"except",
"AttributeError",
":",
"return",
"response",
"# First check if we need to delete this cookie.",
"# The session should be deleted only if the session is entirely empty.",
"if",
"settings",
".",
"SESSION_COOKIE_NAME",
"in",
"request",
".",
"COOKIES",
"and",
"empty",
":",
"response",
".",
"delete_cookie",
"(",
"settings",
".",
"SESSION_COOKIE_NAME",
",",
"path",
"=",
"settings",
".",
"SESSION_COOKIE_PATH",
",",
"domain",
"=",
"settings",
".",
"SESSION_COOKIE_DOMAIN",
",",
"samesite",
"=",
"settings",
".",
"SESSION_COOKIE_SAMESITE",
",",
")",
"patch_vary_headers",
"(",
"response",
",",
"(",
"'Cookie'",
",",
")",
")",
"else",
":",
"if",
"accessed",
":",
"patch_vary_headers",
"(",
"response",
",",
"(",
"'Cookie'",
",",
")",
")",
"if",
"(",
"modified",
"or",
"settings",
".",
"SESSION_SAVE_EVERY_REQUEST",
")",
"and",
"not",
"empty",
":",
"if",
"request",
".",
"session",
".",
"get_expire_at_browser_close",
"(",
")",
":",
"max_age",
"=",
"None",
"expires",
"=",
"None",
"else",
":",
"max_age",
"=",
"request",
".",
"session",
".",
"get_expiry_age",
"(",
")",
"expires_time",
"=",
"time",
".",
"time",
"(",
")",
"+",
"max_age",
"expires",
"=",
"http_date",
"(",
"expires_time",
")",
"# Save the session data and refresh the client cookie.",
"# Skip session save for 500 responses, refs #3881.",
"if",
"response",
".",
"status_code",
"!=",
"500",
":",
"try",
":",
"request",
".",
"session",
".",
"save",
"(",
")",
"except",
"UpdateError",
":",
"raise",
"SessionInterrupted",
"(",
"\"The request's session was deleted before the \"",
"\"request completed. The user may have logged \"",
"\"out in a concurrent request, for example.\"",
")",
"response",
".",
"set_cookie",
"(",
"settings",
".",
"SESSION_COOKIE_NAME",
",",
"request",
".",
"session",
".",
"session_key",
",",
"max_age",
"=",
"max_age",
",",
"expires",
"=",
"expires",
",",
"domain",
"=",
"settings",
".",
"SESSION_COOKIE_DOMAIN",
",",
"path",
"=",
"settings",
".",
"SESSION_COOKIE_PATH",
",",
"secure",
"=",
"settings",
".",
"SESSION_COOKIE_SECURE",
"or",
"None",
",",
"httponly",
"=",
"settings",
".",
"SESSION_COOKIE_HTTPONLY",
"or",
"None",
",",
"samesite",
"=",
"settings",
".",
"SESSION_COOKIE_SAMESITE",
",",
")",
"return",
"response"
] | [
23,
4
] | [
76,
23
] | python | en | ['en', 'error', 'th'] | False |
Bucket.reset | (self) | Resets the bucket (unloads the bytecode). | Resets the bucket (unloads the bytecode). | def reset(self):
"""Resets the bucket (unloads the bytecode)."""
self.code = None | [
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"code",
"=",
"None"
] | [
74,
4
] | [
76,
24
] | python | en | ['en', 'en', 'en'] | True |
Bucket.load_bytecode | (self, f) | Loads bytecode from a file or file like object. | Loads bytecode from a file or file like object. | def load_bytecode(self, f):
"""Loads bytecode from a file or file like object."""
# make sure the magic header is correct
magic = f.read(len(bc_magic))
if magic != bc_magic:
self.reset()
return
# the source code of the file changed, we need to reload
checksum = pickle.load(f)
if self.checksum != checksum:
self.reset()
return
# if marshal_load fails then we need to reload
try:
self.code = marshal_load(f)
except (EOFError, ValueError, TypeError):
self.reset()
return | [
"def",
"load_bytecode",
"(",
"self",
",",
"f",
")",
":",
"# make sure the magic header is correct",
"magic",
"=",
"f",
".",
"read",
"(",
"len",
"(",
"bc_magic",
")",
")",
"if",
"magic",
"!=",
"bc_magic",
":",
"self",
".",
"reset",
"(",
")",
"return",
"# the source code of the file changed, we need to reload",
"checksum",
"=",
"pickle",
".",
"load",
"(",
"f",
")",
"if",
"self",
".",
"checksum",
"!=",
"checksum",
":",
"self",
".",
"reset",
"(",
")",
"return",
"# if marshal_load fails then we need to reload",
"try",
":",
"self",
".",
"code",
"=",
"marshal_load",
"(",
"f",
")",
"except",
"(",
"EOFError",
",",
"ValueError",
",",
"TypeError",
")",
":",
"self",
".",
"reset",
"(",
")",
"return"
] | [
78,
4
] | [
95,
18
] | python | en | ['en', 'en', 'en'] | True |
Bucket.write_bytecode | (self, f) | Dump the bytecode into the file or file like object passed. | Dump the bytecode into the file or file like object passed. | def write_bytecode(self, f):
"""Dump the bytecode into the file or file like object passed."""
if self.code is None:
raise TypeError('can\'t write empty bucket')
f.write(bc_magic)
pickle.dump(self.checksum, f, 2)
marshal_dump(self.code, f) | [
"def",
"write_bytecode",
"(",
"self",
",",
"f",
")",
":",
"if",
"self",
".",
"code",
"is",
"None",
":",
"raise",
"TypeError",
"(",
"'can\\'t write empty bucket'",
")",
"f",
".",
"write",
"(",
"bc_magic",
")",
"pickle",
".",
"dump",
"(",
"self",
".",
"checksum",
",",
"f",
",",
"2",
")",
"marshal_dump",
"(",
"self",
".",
"code",
",",
"f",
")"
] | [
97,
4
] | [
103,
34
] | python | en | ['en', 'en', 'en'] | True |
Bucket.bytecode_from_string | (self, string) | Load bytecode from a string. | Load bytecode from a string. | def bytecode_from_string(self, string):
"""Load bytecode from a string."""
self.load_bytecode(BytesIO(string)) | [
"def",
"bytecode_from_string",
"(",
"self",
",",
"string",
")",
":",
"self",
".",
"load_bytecode",
"(",
"BytesIO",
"(",
"string",
")",
")"
] | [
105,
4
] | [
107,
43
] | python | en | ['en', 'en', 'en'] | True |
Bucket.bytecode_to_string | (self) | Return the bytecode as string. | Return the bytecode as string. | def bytecode_to_string(self):
"""Return the bytecode as string."""
out = BytesIO()
self.write_bytecode(out)
return out.getvalue() | [
"def",
"bytecode_to_string",
"(",
"self",
")",
":",
"out",
"=",
"BytesIO",
"(",
")",
"self",
".",
"write_bytecode",
"(",
"out",
")",
"return",
"out",
".",
"getvalue",
"(",
")"
] | [
109,
4
] | [
113,
29
] | python | en | ['en', 'no', 'en'] | True |
BytecodeCache.load_bytecode | (self, bucket) | Subclasses have to override this method to load bytecode into a
bucket. If they are not able to find code in the cache for the
bucket, it must not do anything.
| Subclasses have to override this method to load bytecode into a
bucket. If they are not able to find code in the cache for the
bucket, it must not do anything.
| def load_bytecode(self, bucket):
"""Subclasses have to override this method to load bytecode into a
bucket. If they are not able to find code in the cache for the
bucket, it must not do anything.
"""
raise NotImplementedError() | [
"def",
"load_bytecode",
"(",
"self",
",",
"bucket",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | [
145,
4
] | [
150,
35
] | python | en | ['en', 'en', 'en'] | True |
BytecodeCache.dump_bytecode | (self, bucket) | Subclasses have to override this method to write the bytecode
from a bucket back to the cache. If it is unable to do so it must not
fail silently but raise an exception.
| Subclasses have to override this method to write the bytecode
from a bucket back to the cache. If it is unable to do so it must not
fail silently but raise an exception.
| def dump_bytecode(self, bucket):
"""Subclasses have to override this method to write the bytecode
from a bucket back to the cache. If it is unable to do so it must not
fail silently but raise an exception.
"""
raise NotImplementedError() | [
"def",
"dump_bytecode",
"(",
"self",
",",
"bucket",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | [
152,
4
] | [
157,
35
] | python | en | ['en', 'en', 'en'] | True |
BytecodeCache.clear | (self) | Clears the cache. This method is not used by Jinja2 but should be
implemented to allow applications to clear the bytecode cache used
by a particular environment.
| Clears the cache. This method is not used by Jinja2 but should be
implemented to allow applications to clear the bytecode cache used
by a particular environment.
| def clear(self):
"""Clears the cache. This method is not used by Jinja2 but should be
implemented to allow applications to clear the bytecode cache used
by a particular environment.
""" | [
"def",
"clear",
"(",
"self",
")",
":"
] | [
159,
4
] | [
163,
11
] | python | en | ['en', 'en', 'en'] | True |
BytecodeCache.get_cache_key | (self, name, filename=None) | Returns the unique hash key for this template name. | Returns the unique hash key for this template name. | def get_cache_key(self, name, filename=None):
"""Returns the unique hash key for this template name."""
hash = sha1(name.encode('utf-8'))
if filename is not None:
filename = '|' + filename
if isinstance(filename, text_type):
filename = filename.encode('utf-8')
hash.update(filename)
return hash.hexdigest() | [
"def",
"get_cache_key",
"(",
"self",
",",
"name",
",",
"filename",
"=",
"None",
")",
":",
"hash",
"=",
"sha1",
"(",
"name",
".",
"encode",
"(",
"'utf-8'",
")",
")",
"if",
"filename",
"is",
"not",
"None",
":",
"filename",
"=",
"'|'",
"+",
"filename",
"if",
"isinstance",
"(",
"filename",
",",
"text_type",
")",
":",
"filename",
"=",
"filename",
".",
"encode",
"(",
"'utf-8'",
")",
"hash",
".",
"update",
"(",
"filename",
")",
"return",
"hash",
".",
"hexdigest",
"(",
")"
] | [
165,
4
] | [
173,
31
] | python | en | ['en', 'en', 'en'] | True |
BytecodeCache.get_source_checksum | (self, source) | Returns a checksum for the source. | Returns a checksum for the source. | def get_source_checksum(self, source):
"""Returns a checksum for the source."""
return sha1(source.encode('utf-8')).hexdigest() | [
"def",
"get_source_checksum",
"(",
"self",
",",
"source",
")",
":",
"return",
"sha1",
"(",
"source",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"hexdigest",
"(",
")"
] | [
175,
4
] | [
177,
55
] | python | en | ['en', 'en', 'en'] | True |
BytecodeCache.get_bucket | (self, environment, name, filename, source) | Return a cache bucket for the given template. All arguments are
mandatory but filename may be `None`.
| Return a cache bucket for the given template. All arguments are
mandatory but filename may be `None`.
| def get_bucket(self, environment, name, filename, source):
"""Return a cache bucket for the given template. All arguments are
mandatory but filename may be `None`.
"""
key = self.get_cache_key(name, filename)
checksum = self.get_source_checksum(source)
bucket = Bucket(environment, key, checksum)
self.load_bytecode(bucket)
return bucket | [
"def",
"get_bucket",
"(",
"self",
",",
"environment",
",",
"name",
",",
"filename",
",",
"source",
")",
":",
"key",
"=",
"self",
".",
"get_cache_key",
"(",
"name",
",",
"filename",
")",
"checksum",
"=",
"self",
".",
"get_source_checksum",
"(",
"source",
")",
"bucket",
"=",
"Bucket",
"(",
"environment",
",",
"key",
",",
"checksum",
")",
"self",
".",
"load_bytecode",
"(",
"bucket",
")",
"return",
"bucket"
] | [
179,
4
] | [
187,
21
] | python | en | ['en', 'en', 'en'] | True |
BytecodeCache.set_bucket | (self, bucket) | Put the bucket into the cache. | Put the bucket into the cache. | def set_bucket(self, bucket):
"""Put the bucket into the cache."""
self.dump_bytecode(bucket) | [
"def",
"set_bucket",
"(",
"self",
",",
"bucket",
")",
":",
"self",
".",
"dump_bytecode",
"(",
"bucket",
")"
] | [
189,
4
] | [
191,
34
] | python | en | ['en', 'en', 'en'] | True |
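In practice the bucket/cache API above is driven by a Jinja2 Environment; a minimal sketch using the built-in FileSystemBytecodeCache (assuming Jinja2 is installed; the template name and content are made up):

from jinja2 import DictLoader, Environment, FileSystemBytecodeCache

env = Environment(
    loader=DictLoader({'hello.txt': 'Hello {{ name }}!'}),
    bytecode_cache=FileSystemBytecodeCache(),   # stores compiled bytecode in a temp directory by default
)
print(env.get_template('hello.txt').render(name='world'))   # Hello world!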
Towers.__init__ | (self, height=1, rods=None, moves=0, verbose=False) |
:param int height:
The height of the towers (ie: max number of disks each one rod can hold).
:param Rods rods:
An existing :class:`Rods` instance to use with this :class:`Towers` (the heights must
match).
:param int moves:
The number of moves already taken.
:param verbose:
True=enable verbose logging mode.
|
:param int height:
The height of the towers (ie: max number of disks each one rod can hold).
:param Rods rods:
An existing :class:`Rods` instance to use with this :class:`Towers` (the heights must
match).
:param int moves:
The number of moves already taken.
:param verbose:
True=enable verbose logging mode.
| def __init__(self, height=1, rods=None, moves=0, verbose=False):
"""
:param int height:
The height of the towers (ie: max number of disks each one rod can hold).
:param Rods rods:
An existing :class:`Rods` instance to use with this :class:`Towers` (the heights must
match).
:param int moves:
The number of moves already taken.
:param verbose:
True=enable verbose logging mode.
"""
validate_height(height)
validate_rods(rods)
validate_moves(moves)
self._rods = rods if rods is not None else Rods(height)
self._moves = moves
self._verbose = bool(verbose) | [
"def",
"__init__",
"(",
"self",
",",
"height",
"=",
"1",
",",
"rods",
"=",
"None",
",",
"moves",
"=",
"0",
",",
"verbose",
"=",
"False",
")",
":",
"validate_height",
"(",
"height",
")",
"validate_rods",
"(",
"rods",
")",
"validate_moves",
"(",
"moves",
")",
"self",
".",
"_rods",
"=",
"rods",
"if",
"rods",
"is",
"not",
"None",
"else",
"Rods",
"(",
"height",
")",
"self",
".",
"_moves",
"=",
"moves",
"self",
".",
"_verbose",
"=",
"bool",
"(",
"verbose",
")"
] | [
43,
4
] | [
60,
37
] | python | en | ['en', 'error', 'th'] | False |
Towers.to_json | (self) |
Return a json serializable representation of this instance.
:rtype: object
|
Return a json serializable representation of this instance. | def to_json(self):
"""
Return a json serializable representation of this instance.
:rtype: object
"""
return {
'height': self.height,
'verbose': self.verbose,
'moves': self.moves,
'rods': self._rods.to_json(),
} | [
"def",
"to_json",
"(",
"self",
")",
":",
"return",
"{",
"'height'",
":",
"self",
".",
"height",
",",
"'verbose'",
":",
"self",
".",
"verbose",
",",
"'moves'",
":",
"self",
".",
"moves",
",",
"'rods'",
":",
"self",
".",
"_rods",
".",
"to_json",
"(",
")",
",",
"}"
] | [
62,
4
] | [
73,
9
] | python | en | ['en', 'error', 'th'] | False |
Towers.from_json | (cls, d) |
Return a class instance from a json serializable representation.
:param str|dict d:
The json or decoded-json from which to create a new instance.
:rtype:
Towers
:raises:
See :class:`Towers`.__new__.
|
Return a class instance from a json serializable representation. | def from_json(cls, d):
"""
Return a class instance from a json serializable representation.
:param str|dict d:
The json or decoded-json from which to create a new instance.
:rtype:
Towers
:raises:
See :class:`Towers`.__new__.
"""
if isinstance(d, six.string_types):
d = json.loads(d)
return cls(
height=d.pop('height'),
verbose=d.pop('verbose'),
moves=d.pop('moves'),
rods=Rods.from_json(d.pop('rods')),
) | [
"def",
"from_json",
"(",
"cls",
",",
"d",
")",
":",
"if",
"isinstance",
"(",
"d",
",",
"six",
".",
"string_types",
")",
":",
"d",
"=",
"json",
".",
"loads",
"(",
"d",
")",
"return",
"cls",
"(",
"height",
"=",
"d",
".",
"pop",
"(",
"'height'",
")",
",",
"verbose",
"=",
"d",
".",
"pop",
"(",
"'verbose'",
")",
",",
"moves",
"=",
"d",
".",
"pop",
"(",
"'moves'",
")",
",",
"rods",
"=",
"Rods",
".",
"from_json",
"(",
"d",
".",
"pop",
"(",
"'rods'",
")",
")",
",",
")"
] | [
76,
4
] | [
94,
9
] | python | en | ['en', 'error', 'th'] | False |
Towers.context | (self, reset_on_success=True, reset_on_error=False) |
Create a temp context for performing moves.
The state of this instance will be reset at context exit.
:param bool reset_on_success:
Reset this instance's state on exit from the context if no error occurred.
Default = True.
:param bool reset_on_error:
Reset this instance's state on exit from the context if an error occurred.
Default = False.
|
Create a temp context for performing moves.
The state of this instance will be reset at context exit. | def context(self, reset_on_success=True, reset_on_error=False):
"""
Create a temp context for performing moves.
The state of this instance will be reset at context exit.
:param bool reset_on_success:
Reset this instance's state on exit from the context if no error occurred.
Default = True.
:param bool reset_on_error:
Reset this instance's state on exit from the context if an error occurred.
Default = False.
"""
self.validate_start()
verbose = self.verbose
moves = self.moves
rods = self._rods.to_json()
try:
yield self
self.validate_end()
except Exception:
# Error inside context or validation:
if reset_on_error:
self._verbose = verbose
self._moves = moves
self._rods = Rods.from_json(rods)
else:
if reset_on_success:
self._verbose = verbose
self._moves = moves
self._rods = Rods.from_json(rods) | [
"def",
"context",
"(",
"self",
",",
"reset_on_success",
"=",
"True",
",",
"reset_on_error",
"=",
"False",
")",
":",
"self",
".",
"validate_start",
"(",
")",
"verbose",
"=",
"self",
".",
"verbose",
"moves",
"=",
"self",
".",
"moves",
"rods",
"=",
"self",
".",
"_rods",
".",
"to_json",
"(",
")",
"try",
":",
"yield",
"self",
"self",
".",
"validate_end",
"(",
")",
"except",
"Exception",
":",
"# Error inside context or validation:",
"if",
"reset_on_error",
":",
"self",
".",
"_verbose",
"=",
"verbose",
"self",
".",
"_moves",
"=",
"moves",
"self",
".",
"_rods",
"=",
"Rods",
".",
"from_json",
"(",
"rods",
")",
"else",
":",
"if",
"reset_on_success",
":",
"self",
".",
"_verbose",
"=",
"verbose",
"self",
".",
"_moves",
"=",
"moves",
"self",
".",
"_rods",
"=",
"Rods",
".",
"from_json",
"(",
"rods",
")"
] | [
97,
4
] | [
128,
49
] | python | en | ['en', 'error', 'th'] | False |
Towers.__bool__ | (self) |
A Towers is considered True if its state is completed.
:rtype:
bool
|
A Towers is considered True if its state is completed. | def __bool__(self):
"""
A Towers is considered True if its state is completed.
:rtype:
bool
"""
return self.__nonzero__() | [
"def",
"__bool__",
"(",
"self",
")",
":",
"return",
"self",
".",
"__nonzero__",
"(",
")"
] | [
130,
4
] | [
137,
33
] | python | en | ['en', 'error', 'th'] | False |
Towers.__nonzero__ | (self) |
A Towers is considered non-zero if its state is completed.
:rtype:
bool
|
A Towers is considered non-zero if its state is completed. | def __nonzero__(self):
"""
A Towers is considered non-zero if its state is completed.
:rtype:
bool
"""
try:
self.validate_end()
return True
except Exception:
return False | [
"def",
"__nonzero__",
"(",
"self",
")",
":",
"try",
":",
"self",
".",
"validate_end",
"(",
")",
"return",
"True",
"except",
"Exception",
":",
"return",
"False"
] | [
139,
4
] | [
150,
24
] | python | en | ['en', 'error', 'th'] | False |
Towers.__copy__ | (self) |
Return a shallow copy of this instance.
:rtype:
:class:`Towers`
|
Return a shallow copy of this instance. | def __copy__(self):
"""
Return a shallow copy of this instance.
:rtype:
:class:`Towers`
"""
return Towers(
height=self.height,
rods=self._rods,
moves=self.moves,
verbose=self.verbose,
) | [
"def",
"__copy__",
"(",
"self",
")",
":",
"return",
"Towers",
"(",
"height",
"=",
"self",
".",
"height",
",",
"rods",
"=",
"self",
".",
"_rods",
",",
"moves",
"=",
"self",
".",
"moves",
",",
"verbose",
"=",
"self",
".",
"verbose",
",",
")"
] | [
152,
4
] | [
164,
9
] | python | en | ['en', 'error', 'th'] | False |
Towers.__deepcopy__ | (self, *d) |
Return a deep copy of this instance.
:param dict d:
Memoisation dict.
:rtype:
:class:`Towers`
|
Return a deep copy of this instance. | def __deepcopy__(self, *d):
"""
Return a deep copy of this instance.
:param dict d:
Memoisation dict.
:rtype:
:class:`Towers`
"""
return Towers.from_json(self.to_json()) | [
"def",
"__deepcopy__",
"(",
"self",
",",
"*",
"d",
")",
":",
"return",
"Towers",
".",
"from_json",
"(",
"self",
".",
"to_json",
"(",
")",
")"
] | [
166,
4
] | [
175,
47
] | python | en | ['en', 'error', 'th'] | False |
Towers.__eq__ | (self, other) |
Compare Towers instances for equivalence.
:param Towers other:
The other :class:`Towers` to compare.
:rtype:
bool
|
Compare Towers instances for equivalence. | def __eq__(self, other):
"""
Compare Towers instances for equivalence.
:param Towers other:
The other :class:`Towers` to compare.
:rtype:
bool
"""
if isinstance(other, Towers):
if other.height == self.height:
if other._rods == self._rods:
return True | [
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"Towers",
")",
":",
"if",
"other",
".",
"height",
"==",
"self",
".",
"height",
":",
"if",
"other",
".",
"_rods",
"==",
"self",
".",
"_rods",
":",
"return",
"True"
] | [
177,
4
] | [
189,
31
] | python | en | ['en', 'error', 'th'] | False |
Towers.__getitem__ | (self, index) |
Get the :class:`Rod` at the given index.
:param int index:
The index to get the :class:`Rod` at.
:rtype:
Rod
|
Get the :class:`Rod` at the given index. | def __getitem__(self, index):
"""
Get the :class:`Rod` at the given index.
:param int index:
The index to get the :class:`Rod` at.
:rtype:
Rod
"""
return self._rods[index] | [
"def",
"__getitem__",
"(",
"self",
",",
"index",
")",
":",
"return",
"self",
".",
"_rods",
"[",
"index",
"]"
] | [
191,
4
] | [
200,
32
] | python | en | ['en', 'error', 'th'] | False |