**Dataset schema** (one row per extracted function):

| column | type |
|---|---|
| `identifier` | string (length 1–155) |
| `parameters` | string (length 2–6.09k) |
| `docstring` | string (length 11–63.4k) |
| `docstring_summary` | string (length 0–63.4k) |
| `function` | string (length 29–99.8k) |
| `function_tokens` | sequence |
| `start_point` | sequence |
| `end_point` | sequence |
| `language` | string (1 class) |
| `docstring_language` | string (length 2–7) |
| `docstring_language_predictions` | string (length 18–23) |
| `is_langid_reliable` | string (2 classes) |

Each entry below gives the identifier with its parameters, the docstring summary, the function source, and the row metadata (source span, language, and docstring-language identification).
**`DatabaseOperations.format_for_duration_arithmetic(self, sql)`**

Do nothing since formatting is handled in the custom function.

```python
def format_for_duration_arithmetic(self, sql):
    """Do nothing since formatting is handled in the custom function."""
    return sql
```

*Span (75, 4)–(77, 18) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
**`DatabaseOperations._quote_params_for_last_executed_query(self, params)`**

Only for last_executed_query! Don't use this to execute SQL queries!

```python
def _quote_params_for_last_executed_query(self, params):
    """
    Only for last_executed_query! Don't use this to execute SQL queries!
    """
    # This function is limited both by SQLITE_LIMIT_VARIABLE_NUMBER (the
    # number of parameters, default = 999) and SQLITE_MAX_COLUMN (the
    # number of return values, default = 2000). Since Python's sqlite3
    # module doesn't expose the get_limit() C API, assume the default
    # limits are in effect and split the work in batches if needed.
    BATCH_SIZE = 999
    if len(params) > BATCH_SIZE:
        results = ()
        for index in range(0, len(params), BATCH_SIZE):
            chunk = params[index:index + BATCH_SIZE]
            results += self._quote_params_for_last_executed_query(chunk)
        return results

    sql = 'SELECT ' + ', '.join(['QUOTE(?)'] * len(params))
    # Bypass Django's wrappers and use the underlying sqlite3 connection
    # to avoid logging this query - it would trigger infinite recursion.
    cursor = self.connection.connection.cursor()
    # Native sqlite3 cursors cannot be used as context managers.
    try:
        return cursor.execute(sql, params).fetchone()
    finally:
        cursor.close()
```

*Span (124, 4)–(149, 26) · language: python · docstring language: en · langid predictions: ['en', 'error', 'th'] (unreliable)*
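The batching above exists because SQLite caps both bound variables (`SQLITE_LIMIT_VARIABLE_NUMBER`, 999 by default) and result columns. The same `QUOTE(?)` trick works outside Django with nothing but the standard library; a minimal sketch, with an illustrative helper name:

```python
import sqlite3

def quote_values(conn, params, batch_size=999):
    # Let SQLite itself quote each value; batch to stay under the
    # default SQLITE_LIMIT_VARIABLE_NUMBER of 999 bound parameters.
    quoted = ()
    for start in range(0, len(params), batch_size):
        chunk = params[start:start + batch_size]
        sql = 'SELECT ' + ', '.join(['QUOTE(?)'] * len(chunk))
        cursor = conn.cursor()
        try:
            quoted += cursor.execute(sql, chunk).fetchone()
        finally:
            cursor.close()
    return quoted

conn = sqlite3.connect(':memory:')
print(quote_values(conn, ["O'Brien", 42, None]))
# ("'O''Brien'", '42', 'NULL')
```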
**`get_prediction(image, server_host='127.0.0.1', server_port=9000, server_name="server", timeout=10.0)`**

Retrieve a prediction from a TensorFlow model server.

```python
def get_prediction(image, server_host='127.0.0.1', server_port=9000,
                   server_name="server", timeout=10.0):
    """
    Retrieve a prediction from a TensorFlow model server
    :param image: a MNIST image represented as a 1x784 array
    :param server_host: the address of the TensorFlow server
    :param server_port: the port used by the server
    :param server_name: the name of the server
    :param timeout: the amount of time to wait for a prediction to complete
    :return 0: the integer predicted in the MNIST image
    :return 1: the confidence scores for all classes
    :return 2: the version number of the model handling the request
    """
    print("connecting to:%s:%i" % (server_host, server_port))
    # initialize to server connection
    channel = implementations.insecure_channel(server_host, server_port)
    stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)
    # build request
    request = predict_pb2.PredictRequest()
    request.model_spec.name = server_name
    request.model_spec.signature_name = 'serving_default'
    request.inputs['x'].CopyFrom(
        tf.contrib.util.make_tensor_proto(image, shape=image.shape))
    # retrieve results
    result = stub.Predict(request, timeout)
    resultVal = result.outputs["classes"].int_val[0]
    scores = result.outputs['predictions'].float_val
    version = result.outputs["classes"].int_val[0]
    return resultVal, scores, version
```

*Span (32, 0)–(64, 35) · language: python · docstring language: en · langid predictions: ['en', 'error', 'th'] (unreliable)*
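The row above targets TF 1.x era APIs (the `grpc.beta` `implementations` module and `tf.contrib`), both long removed. For readers adapting it, a roughly equivalent call against the stable gRPC bindings could look like the sketch below; it is untested against a live server, and the `tensorflow_serving` module paths are assumptions based on the standard serving API package:

```python
import grpc
import tensorflow as tf
from tensorflow_serving.apis import predict_pb2, prediction_service_pb2_grpc

def get_prediction_v2(image, host='127.0.0.1', port=9000,
                      model_name='server', timeout=10.0):
    # Stable grpc channel instead of the removed beta API.
    channel = grpc.insecure_channel('%s:%d' % (host, port))
    stub = prediction_service_pb2_grpc.PredictionServiceStub(channel)
    request = predict_pb2.PredictRequest()
    request.model_spec.name = model_name
    request.model_spec.signature_name = 'serving_default'
    request.inputs['x'].CopyFrom(
        tf.make_tensor_proto(image, shape=image.shape))
    result = stub.Predict(request, timeout)
    prediction = result.outputs['classes'].int_val[0]
    scores = result.outputs['predictions'].float_val
    return prediction, scores
```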
**`random_mnist(save_path=None)`**

Pull a random image out of the MNIST test dataset; optionally save the selected image as a file to disk.

```python
def random_mnist(save_path=None):
    """
    Pull a random image out of the MNIST test dataset
    Optionally save the selected image as a file to disk
    :param save_path: the path to save the file to. If None, file is not saved
    :return 0: a 1x784 representation of the MNIST image
    :return 1: the ground truth label associated with the image
    :return 2: a bool representing whether the image file was saved to disk
    """
    mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
    batch_size = 1
    batch_x, batch_y = mnist.test.next_batch(batch_size)
    saved = False
    if save_path is not None:
        # save image file to disk
        try:
            data = (batch_x * 255).astype(np.uint8).reshape(28, 28)
            img = Image.fromarray(data, 'L')
            img.save(save_path)
            saved = True
        except Exception as e:  # pylint: disable=broad-except
            logging.error("There was a problem saving the image; %s", e)
    return batch_x, np.argmax(batch_y), saved
```

*Span (67, 0)–(91, 43) · language: python · docstring language: en · langid predictions: ['en', 'error', 'th'] (unreliable)*
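Together, `random_mnist` and `get_prediction` support a one-shot smoke test of a served model; a hedged sketch (the host, port, and file name are placeholders):

```python
# Pull one test digit, optionally save it, then query the model server.
image, truth, saved = random_mnist(save_path='digit.png')
predicted, scores, version = get_prediction(
    image, server_host='127.0.0.1', server_port=9000)
print('truth=%d predicted=%d (model version %s)' % (truth, predicted, version))
```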
**`MpoImageFile.adopt(jpeg_instance, mpheader=None)`**

Transform an instance of JpegImageFile into an instance of MpoImageFile.

```python
def adopt(jpeg_instance, mpheader=None):
    """
    Transform the instance of JpegImageFile into
    an instance of MpoImageFile.
    After the call, the JpegImageFile is extended
    to be an MpoImageFile.
    This is essentially useful when opening a JPEG
    file that reveals itself as an MPO, to avoid
    a double call to _open.
    """
    jpeg_instance.__class__ = MpoImageFile
    jpeg_instance._after_jpeg_open(mpheader)
    return jpeg_instance
```

*Span (108, 4)–(121, 28) · language: python · docstring language: en · langid predictions: ['en', 'error', 'th'] (unreliable)*
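For context, `adopt` is called from Pillow's JPEG factory once the MP header reveals more than one embedded picture; a condensed paraphrase (the tag constant and error handling are simplified relative to the real `jpeg_factory`):

```python
from PIL.JpegImagePlugin import JpegImageFile
from PIL.MpoImagePlugin import MpoImageFile

def jpeg_factory(fp=None, filename=None):
    # Parse as a plain JPEG first; upgrade the instance in place if the
    # MP header says the file actually holds multiple pictures.
    im = JpegImageFile(fp, filename)
    try:
        mpheader = im._getmp()
        if mpheader[45057] > 1:  # NumberOfImages tag (0xB001)
            im = MpoImageFile.adopt(im, mpheader)
    except (TypeError, IndexError, SyntaxError):
        pass  # not an MPO after all
    return im
```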
**`memorized_timedelta(seconds)`**

Create only one instance of each distinct timedelta.

```python
def memorized_timedelta(seconds):
    '''Create only one instance of each distinct timedelta'''
    try:
        return _timedelta_cache[seconds]
    except KeyError:
        delta = timedelta(seconds=seconds)
        _timedelta_cache[seconds] = delta
        return delta
```

*Span (17, 0)–(24, 20) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
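The cache buys identity, not just equality: repeated lookups return the very same object, so later code can compare offsets with `is` and pickled zones stay small. Using the function defined above:

```python
from datetime import timedelta

a = memorized_timedelta(3600)
b = memorized_timedelta(3600)
assert a is b                    # same object, not merely equal
assert a == timedelta(hours=1)
```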
**`memorized_datetime(seconds)`**

Create only one instance of each distinct datetime.

```python
def memorized_datetime(seconds):
    '''Create only one instance of each distinct datetime'''
    try:
        return _datetime_cache[seconds]
    except KeyError:
        # NB. We can't just do datetime.utcfromtimestamp(seconds) as this
        # fails with negative values under Windows (Bug #90096)
        dt = _epoch + timedelta(seconds=seconds)
        _datetime_cache[seconds] = dt
        return dt
```

*Span (30, 0)–(39, 17) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
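The `_epoch + timedelta` construction matters because `datetime.utcfromtimestamp` historically failed for negative timestamps on Windows. A sketch of the equivalence, with `_epoch` spelled out here as an assumption (pytz defines it elsewhere in the module):

```python
from datetime import datetime, timedelta

_epoch = datetime(1970, 1, 1)  # naive UTC epoch, as pytz defines it

# Pre-1970 instants work too: -86400 s is 1969-12-31 00:00:00 UTC.
dt = _epoch + timedelta(seconds=-86400)
assert dt == datetime(1969, 12, 31)
```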
**`memorized_ttinfo(*args)`**

Create only one instance of each distinct tuple.

```python
def memorized_ttinfo(*args):
    '''Create only one instance of each distinct tuple'''
    try:
        return _ttinfo_cache[args]
    except KeyError:
        ttinfo = (
            memorized_timedelta(args[0]),
            memorized_timedelta(args[1]),
            args[2]
        )
        _ttinfo_cache[args] = ttinfo
        return ttinfo
```

*Span (44, 0)–(55, 21) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
**`_to_seconds(td)`**

Convert a timedelta to seconds.

```python
def _to_seconds(td):
    '''Convert a timedelta to seconds'''
    return td.seconds + td.days * 24 * 60 * 60
```

*Span (60, 0)–(62, 46) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
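Note that `td.seconds` is always non-negative while `td.days` carries the sign, so the formula round-trips with the memoized constructor above:

```python
from datetime import timedelta

td = timedelta(days=-1, seconds=3 * 3600)   # a -21:00 offset
assert _to_seconds(td) == -75600            # -86400 + 10800
assert memorized_timedelta(_to_seconds(td)) == td
```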
**`unpickler(zone, utcoffset=None, dstoffset=None, tzname=None)`**

Factory function for unpickling pytz tzinfo instances.

```python
def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
    """Factory function for unpickling pytz tzinfo instances.

    This is shared for both StaticTzInfo and DstTzInfo instances, because
    database changes could cause a zones implementation to switch between
    these two base classes and we can't break pickles on a pytz version
    upgrade.
    """
    # Raises a KeyError if zone no longer exists, which should never happen
    # and would be a bug.
    tz = pytz.timezone(zone)

    # A StaticTzInfo - just return it
    if utcoffset is None:
        return tz

    # This pickle was created from a DstTzInfo. We need to
    # determine which of the list of tzinfo instances for this zone
    # to use in order to restore the state of any datetime instances using
    # it correctly.
    utcoffset = memorized_timedelta(utcoffset)
    dstoffset = memorized_timedelta(dstoffset)
    try:
        return tz._tzinfos[(utcoffset, dstoffset, tzname)]
    except KeyError:
        # The particular state requested in this timezone no longer exists.
        # This indicates a corrupt pickle, or the timezone database has been
        # corrected violently enough to make this particular
        # (utcoffset,dstoffset) no longer exist in the zone, or the
        # abbreviation has been changed.
        pass

    # See if we can find an entry differing only by tzname. Abbreviations
    # get changed from the initial guess by the database maintainers to
    # match reality when this information is discovered.
    for localized_tz in tz._tzinfos.values():
        if (localized_tz._utcoffset == utcoffset and
                localized_tz._dst == dstoffset):
            return localized_tz

    # This (utcoffset, dstoffset) information has been removed from the
    # zone. Add it back. This might occur when the database maintainers have
    # corrected incorrect information. datetime instances using this
    # incorrect information will continue to do so, exactly as they were
    # before being pickled. This is purely an overly paranoid safety net - I
    # doubt this will ever be needed in real life.
    inf = (utcoffset, dstoffset, tzname)
    tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos)
    return tz._tzinfos[inf]
```

*Span (528, 0)–(576, 27) · language: python · docstring language: en · langid predictions: ['en', 'fr', 'en'] (reliable)*
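In practice `unpickler` is reached through `__reduce__` on the tzinfo classes, so an ordinary pickle round trip exercises the whole path:

```python
import pickle
from datetime import datetime
import pytz

tz = pytz.timezone('US/Eastern')
dt = tz.localize(datetime(2020, 7, 1, 12, 0))

# Unpickling calls unpickler(zone, utcoffset, dstoffset, tzname) and
# resolves to the tzinfo instance matching that zone state.
restored = pickle.loads(pickle.dumps(dt))
assert restored == dt and restored.tzname() == 'EDT'
```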
**`StaticTzInfo.fromutc(self, dt)`**

See datetime.tzinfo.fromutc.

```python
def fromutc(self, dt):
    '''See datetime.tzinfo.fromutc'''
    if dt.tzinfo is not None and dt.tzinfo is not self:
        raise ValueError('fromutc: dt.tzinfo is not self')
    return (dt + self._utcoffset).replace(tzinfo=self)
```

*Span (81, 4)–(85, 58) · language: python · docstring language: en · langid predictions: ['en', 'en', 'de'] (unreliable)*
**`StaticTzInfo.utcoffset(self, dt, is_dst=None)`**

See datetime.tzinfo.utcoffset.

```python
def utcoffset(self, dt, is_dst=None):
    '''See datetime.tzinfo.utcoffset

    is_dst is ignored for StaticTzInfo, and exists only to
    retain compatibility with DstTzInfo.
    '''
    return self._utcoffset
```

*Span (87, 4)–(93, 30) · language: python · docstring language: de · langid predictions: ['en', 'ny', 'de'] (unreliable)*
**`StaticTzInfo.dst(self, dt, is_dst=None)`**

See datetime.tzinfo.dst.

```python
def dst(self, dt, is_dst=None):
    '''See datetime.tzinfo.dst

    is_dst is ignored for StaticTzInfo, and exists only to
    retain compatibility with DstTzInfo.
    '''
    return _notime
```

*Span (95, 4)–(101, 22) · language: python · docstring language: en · langid predictions: ['en', 'en', 'de'] (unreliable)*
**`StaticTzInfo.tzname(self, dt, is_dst=None)`**

See datetime.tzinfo.tzname.

```python
def tzname(self, dt, is_dst=None):
    '''See datetime.tzinfo.tzname

    is_dst is ignored for StaticTzInfo, and exists only to
    retain compatibility with DstTzInfo.
    '''
    return self._tzname
```

*Span (103, 4)–(109, 27) · language: python · docstring language: de · langid predictions: ['de', 'en', 'de'] (unreliable)*
**`StaticTzInfo.localize(self, dt, is_dst=False)`**

Convert naive time to local time.

```python
def localize(self, dt, is_dst=False):
    '''Convert naive time to local time'''
    if dt.tzinfo is not None:
        raise ValueError('Not naive datetime (tzinfo is already set)')
    return dt.replace(tzinfo=self)
```

*Span (111, 4)–(115, 38) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
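Because a static zone has one fixed offset, `localize` needs no `is_dst` hint and simply refuses datetimes that already carry a tzinfo:

```python
from datetime import datetime
import pytz

gmt = pytz.timezone('GMT')                  # a StaticTzInfo zone
dt = gmt.localize(datetime(2021, 1, 1, 9, 30))
assert dt.utcoffset().total_seconds() == 0

try:
    gmt.localize(dt)                        # tzinfo already attached
except ValueError as exc:
    print(exc)  # Not naive datetime (tzinfo is already set)
```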
**`StaticTzInfo.normalize(self, dt, is_dst=False)`**

Correct the timezone information on the given datetime.

```python
def normalize(self, dt, is_dst=False):
    '''Correct the timezone information on the given datetime.

    This is normally a no-op, as StaticTzInfo timezones never have
    ambiguous cases to correct:

    >>> from pytz import timezone
    >>> gmt = timezone('GMT')
    >>> isinstance(gmt, StaticTzInfo)
    True
    >>> dt = datetime(2011, 5, 8, 1, 2, 3, tzinfo=gmt)
    >>> gmt.normalize(dt) is dt
    True

    The supported method of converting between timezones is to use
    datetime.astimezone(). Currently normalize() also works:

    >>> la = timezone('America/Los_Angeles')
    >>> dt = la.localize(datetime(2011, 5, 7, 1, 2, 3))
    >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
    >>> gmt.normalize(dt).strftime(fmt)
    '2011-05-07 08:02:03 GMT (+0000)'
    '''
    if dt.tzinfo is self:
        return dt
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')
    return dt.astimezone(self)
```

*Span (117, 4)–(144, 34) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
**`DstTzInfo.fromutc(self, dt)`**

See datetime.tzinfo.fromutc.

```python
def fromutc(self, dt):
    '''See datetime.tzinfo.fromutc'''
    if (dt.tzinfo is not None and
            getattr(dt.tzinfo, '_tzinfos', None) is not self._tzinfos):
        raise ValueError('fromutc: dt.tzinfo is not self')
    dt = dt.replace(tzinfo=None)
    idx = max(0, bisect_right(self._utc_transition_times, dt) - 1)
    inf = self._transition_info[idx]
    return (dt + inf[0]).replace(tzinfo=self._tzinfos[inf])
```

*Span (192, 4)–(200, 63) · language: python · docstring language: en · langid predictions: ['en', 'en', 'de'] (unreliable)*
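`fromutc` is effectively a binary search over the zone's transition table: `bisect_right` finds the last UTC transition at or before `dt`, and the paired transition record supplies the offset. The same lookup, reduced to a toy table (the 2020 Central European transitions, hand-coded here for illustration):

```python
from bisect import bisect_right
from datetime import datetime, timedelta

transitions = [datetime(2020, 3, 29, 1), datetime(2020, 10, 25, 1)]
offsets = [timedelta(hours=2), timedelta(hours=1)]  # CEST, then CET

def to_local(utc_dt):
    # Index of the last transition <= utc_dt, clamped to the first entry.
    idx = max(0, bisect_right(transitions, utc_dt) - 1)
    return utc_dt + offsets[idx]

assert to_local(datetime(2020, 6, 1, 12)) == datetime(2020, 6, 1, 14)
assert to_local(datetime(2020, 12, 1, 12)) == datetime(2020, 12, 1, 13)
```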
**`DstTzInfo.normalize(self, dt)`**

Correct the timezone information on the given datetime.

```python
def normalize(self, dt):
    '''Correct the timezone information on the given datetime

    If date arithmetic crosses DST boundaries, the tzinfo
    is not magically adjusted. This method normalizes the
    tzinfo to the correct one.

    To test, first we need to do some setup

    >>> from pytz import timezone
    >>> utc = timezone('UTC')
    >>> eastern = timezone('US/Eastern')
    >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'

    We next create a datetime right on an end-of-DST transition point,
    the instant when the wallclocks are wound back one hour.

    >>> utc_dt = datetime(2002, 10, 27, 6, 0, 0, tzinfo=utc)
    >>> loc_dt = utc_dt.astimezone(eastern)
    >>> loc_dt.strftime(fmt)
    '2002-10-27 01:00:00 EST (-0500)'

    Now, if we subtract a few minutes from it, note that the timezone
    information has not changed.

    >>> before = loc_dt - timedelta(minutes=10)
    >>> before.strftime(fmt)
    '2002-10-27 00:50:00 EST (-0500)'

    But we can fix that by calling the normalize method

    >>> before = eastern.normalize(before)
    >>> before.strftime(fmt)
    '2002-10-27 01:50:00 EDT (-0400)'

    The supported method of converting between timezones is to use
    datetime.astimezone(). Currently, normalize() also works:

    >>> th = timezone('Asia/Bangkok')
    >>> am = timezone('Europe/Amsterdam')
    >>> dt = th.localize(datetime(2011, 5, 7, 1, 2, 3))
    >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
    >>> am.normalize(dt).strftime(fmt)
    '2011-05-06 20:02:03 CEST (+0200)'
    '''
    if dt.tzinfo is None:
        raise ValueError('Naive time - no tzinfo set')

    # Convert dt in localtime to UTC
    offset = dt.tzinfo._utcoffset
    dt = dt.replace(tzinfo=None)
    dt = dt - offset
    # convert it back, and return it
    return self.fromutc(dt)
```

*Span (202, 4)–(255, 31) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
**`DstTzInfo.localize(self, dt, is_dst=False)`**

Convert naive time to local time.

```python
def localize(self, dt, is_dst=False):
    '''Convert naive time to local time.

    This method should be used to construct localtimes, rather
    than passing a tzinfo argument to a datetime constructor.

    is_dst is used to determine the correct timezone in the ambiguous
    period at the end of daylight saving time.

    >>> from pytz import timezone
    >>> fmt = '%Y-%m-%d %H:%M:%S %Z (%z)'
    >>> amdam = timezone('Europe/Amsterdam')
    >>> dt = datetime(2004, 10, 31, 2, 0, 0)
    >>> loc_dt1 = amdam.localize(dt, is_dst=True)
    >>> loc_dt2 = amdam.localize(dt, is_dst=False)
    >>> loc_dt1.strftime(fmt)
    '2004-10-31 02:00:00 CEST (+0200)'
    >>> loc_dt2.strftime(fmt)
    '2004-10-31 02:00:00 CET (+0100)'
    >>> str(loc_dt2 - loc_dt1)
    '1:00:00'

    Use is_dst=None to raise an AmbiguousTimeError for ambiguous
    times at the end of daylight saving time

    >>> try:
    ...     loc_dt1 = amdam.localize(dt, is_dst=None)
    ... except AmbiguousTimeError:
    ...     print('Ambiguous')
    Ambiguous

    is_dst defaults to False

    >>> amdam.localize(dt) == amdam.localize(dt, False)
    True

    is_dst is also used to determine the correct timezone in the
    wallclock times jumped over at the start of daylight saving time.

    >>> pacific = timezone('US/Pacific')
    >>> dt = datetime(2008, 3, 9, 2, 0, 0)
    >>> ploc_dt1 = pacific.localize(dt, is_dst=True)
    >>> ploc_dt2 = pacific.localize(dt, is_dst=False)
    >>> ploc_dt1.strftime(fmt)
    '2008-03-09 02:00:00 PDT (-0700)'
    >>> ploc_dt2.strftime(fmt)
    '2008-03-09 02:00:00 PST (-0800)'
    >>> str(ploc_dt2 - ploc_dt1)
    '1:00:00'

    Use is_dst=None to raise a NonExistentTimeError for these skipped
    times.

    >>> try:
    ...     loc_dt1 = pacific.localize(dt, is_dst=None)
    ... except NonExistentTimeError:
    ...     print('Non-existent')
    Non-existent
    '''
    if dt.tzinfo is not None:
        raise ValueError('Not naive datetime (tzinfo is already set)')

    # Find the two best possibilities.
    possible_loc_dt = set()
    for delta in [timedelta(days=-1), timedelta(days=1)]:
        loc_dt = dt + delta
        idx = max(0, bisect_right(
            self._utc_transition_times, loc_dt) - 1)
        inf = self._transition_info[idx]
        tzinfo = self._tzinfos[inf]
        loc_dt = tzinfo.normalize(dt.replace(tzinfo=tzinfo))
        if loc_dt.replace(tzinfo=None) == dt:
            possible_loc_dt.add(loc_dt)

    if len(possible_loc_dt) == 1:
        return possible_loc_dt.pop()

    # If there are no possibly correct timezones, we are attempting
    # to convert a time that never happened - the time period jumped
    # during the start-of-DST transition period.
    if len(possible_loc_dt) == 0:
        # If we refuse to guess, raise an exception.
        if is_dst is None:
            raise NonExistentTimeError(dt)

        # If we are forcing the pre-DST side of the DST transition, we
        # obtain the correct timezone by winding the clock forward a few
        # hours.
        elif is_dst:
            return self.localize(
                dt + timedelta(hours=6), is_dst=True) - timedelta(hours=6)

        # If we are forcing the post-DST side of the DST transition, we
        # obtain the correct timezone by winding the clock back.
        else:
            return self.localize(
                dt - timedelta(hours=6),
                is_dst=False) + timedelta(hours=6)

    # If we get this far, we have multiple possible timezones - this
    # is an ambiguous case occurring during the end-of-DST transition.

    # If told to be strict, raise an exception since we have an
    # ambiguous case
    if is_dst is None:
        raise AmbiguousTimeError(dt)

    # Filter out the possibilities that don't match the requested
    # is_dst
    filtered_possible_loc_dt = [
        p for p in possible_loc_dt if bool(p.tzinfo._dst) == is_dst
    ]

    # Hopefully we only have one possibility left. Return it.
    if len(filtered_possible_loc_dt) == 1:
        return filtered_possible_loc_dt[0]

    if len(filtered_possible_loc_dt) == 0:
        filtered_possible_loc_dt = list(possible_loc_dt)

    # If we get this far, we are in a weird timezone transition
    # where the clocks have been wound back but is_dst is the same
    # in both (eg. Europe/Warsaw 1915 when they switched to CET).
    # At this point, we just have to guess unless we allow more
    # hints to be passed in (such as the UTC offset or abbreviation),
    # but that is just getting silly.
    #
    # Choose the earliest (by UTC) applicable timezone if is_dst=True
    # Choose the latest (by UTC) applicable timezone if is_dst=False
    # i.e., behave like end-of-DST transition
    dates = {}  # utc -> local
    for local_dt in filtered_possible_loc_dt:
        utc_time = (
            local_dt.replace(tzinfo=None) - local_dt.tzinfo._utcoffset)
        assert utc_time not in dates
        dates[utc_time] = local_dt
    return dates[[min, max][not is_dst](dates)]
```

*Span (257, 4)–(393, 51) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
**`DstTzInfo.utcoffset(self, dt, is_dst=None)`**

See datetime.tzinfo.utcoffset.

```python
def utcoffset(self, dt, is_dst=None):
    '''See datetime.tzinfo.utcoffset

    The is_dst parameter may be used to remove ambiguity during DST
    transitions.

    >>> from pytz import timezone
    >>> tz = timezone('America/St_Johns')
    >>> ambiguous = datetime(2009, 10, 31, 23, 30)

    >>> str(tz.utcoffset(ambiguous, is_dst=False))
    '-1 day, 20:30:00'

    >>> str(tz.utcoffset(ambiguous, is_dst=True))
    '-1 day, 21:30:00'

    >>> try:
    ...     tz.utcoffset(ambiguous)
    ... except AmbiguousTimeError:
    ...     print('Ambiguous')
    Ambiguous
    '''
    if dt is None:
        return None
    elif dt.tzinfo is not self:
        dt = self.localize(dt, is_dst)
        return dt.tzinfo._utcoffset
    else:
        return self._utcoffset
```

*Span (395, 4)–(424, 34) · language: python · docstring language: de · langid predictions: ['en', 'ny', 'de'] (unreliable)*
**`DstTzInfo.dst(self, dt, is_dst=None)`**

See datetime.tzinfo.dst.

```python
def dst(self, dt, is_dst=None):
    '''See datetime.tzinfo.dst

    The is_dst parameter may be used to remove ambiguity during DST
    transitions.

    >>> from pytz import timezone
    >>> tz = timezone('America/St_Johns')

    >>> normal = datetime(2009, 9, 1)

    >>> str(tz.dst(normal))
    '1:00:00'
    >>> str(tz.dst(normal, is_dst=False))
    '1:00:00'
    >>> str(tz.dst(normal, is_dst=True))
    '1:00:00'

    >>> ambiguous = datetime(2009, 10, 31, 23, 30)

    >>> str(tz.dst(ambiguous, is_dst=False))
    '0:00:00'
    >>> str(tz.dst(ambiguous, is_dst=True))
    '1:00:00'
    >>> try:
    ...     tz.dst(ambiguous)
    ... except AmbiguousTimeError:
    ...     print('Ambiguous')
    Ambiguous
    '''
    if dt is None:
        return None
    elif dt.tzinfo is not self:
        dt = self.localize(dt, is_dst)
        return dt.tzinfo._dst
    else:
        return self._dst
```

*Span (426, 4)–(463, 28) · language: python · docstring language: en · langid predictions: ['en', 'en', 'de'] (unreliable)*
**`DstTzInfo.tzname(self, dt, is_dst=None)`**

See datetime.tzinfo.tzname.

```python
def tzname(self, dt, is_dst=None):
    '''See datetime.tzinfo.tzname

    The is_dst parameter may be used to remove ambiguity during DST
    transitions.

    >>> from pytz import timezone
    >>> tz = timezone('America/St_Johns')

    >>> normal = datetime(2009, 9, 1)

    >>> tz.tzname(normal)
    'NDT'
    >>> tz.tzname(normal, is_dst=False)
    'NDT'
    >>> tz.tzname(normal, is_dst=True)
    'NDT'

    >>> ambiguous = datetime(2009, 10, 31, 23, 30)

    >>> tz.tzname(ambiguous, is_dst=False)
    'NST'
    >>> tz.tzname(ambiguous, is_dst=True)
    'NDT'
    >>> try:
    ...     tz.tzname(ambiguous)
    ... except AmbiguousTimeError:
    ...     print('Ambiguous')
    Ambiguous
    '''
    if dt is None:
        return self.zone
    elif dt.tzinfo is not self:
        dt = self.localize(dt, is_dst)
        return dt.tzinfo._tzname
    else:
        return self._tzname
```

*Span (465, 4)–(501, 31) · language: python · docstring language: de · langid predictions: ['de', 'en', 'de'] (unreliable)*
**`reset_urlconf(sender, **kwargs)`**

Reset the URLconf after each request is finished.

```python
def reset_urlconf(sender, **kwargs):
    """Reset the URLconf after each request is finished."""
    set_urlconf(None)
```

*Span (344, 0)–(346, 21) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
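The receiver is only useful once wired to Django's `request_finished` signal, which is (roughly) how the handlers module uses it, so that a per-request `request.urlconf` override cannot leak into the next request handled by the same thread:

```python
from django.core.signals import request_finished

# After every response is delivered, drop any thread-local URLconf
# override back to the default.
request_finished.connect(reset_urlconf)
```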
**`BaseHandler.load_middleware(self, is_async=False)`**

Populate middleware lists from settings.MIDDLEWARE.

```python
def load_middleware(self, is_async=False):
    """
    Populate middleware lists from settings.MIDDLEWARE.

    Must be called after the environment is fixed (see __call__ in subclasses).
    """
    self._view_middleware = []
    self._template_response_middleware = []
    self._exception_middleware = []

    get_response = self._get_response_async if is_async else self._get_response
    handler = convert_exception_to_response(get_response)
    handler_is_async = is_async
    for middleware_path in reversed(settings.MIDDLEWARE):
        middleware = import_string(middleware_path)
        middleware_can_sync = getattr(middleware, 'sync_capable', True)
        middleware_can_async = getattr(middleware, 'async_capable', False)
        if not middleware_can_sync and not middleware_can_async:
            raise RuntimeError(
                'Middleware %s must have at least one of '
                'sync_capable/async_capable set to True.' % middleware_path
            )
        elif not handler_is_async and middleware_can_sync:
            middleware_is_async = False
        else:
            middleware_is_async = middleware_can_async
        try:
            # Adapt handler, if needed.
            adapted_handler = self.adapt_method_mode(
                middleware_is_async, handler, handler_is_async,
                debug=settings.DEBUG, name='middleware %s' % middleware_path,
            )
            mw_instance = middleware(adapted_handler)
        except MiddlewareNotUsed as exc:
            if settings.DEBUG:
                if str(exc):
                    logger.debug('MiddlewareNotUsed(%r): %s', middleware_path, exc)
                else:
                    logger.debug('MiddlewareNotUsed: %r', middleware_path)
            continue
        else:
            handler = adapted_handler

        if mw_instance is None:
            raise ImproperlyConfigured(
                'Middleware factory %s returned None.' % middleware_path
            )

        if hasattr(mw_instance, 'process_view'):
            self._view_middleware.insert(
                0,
                self.adapt_method_mode(is_async, mw_instance.process_view),
            )
        if hasattr(mw_instance, 'process_template_response'):
            self._template_response_middleware.append(
                self.adapt_method_mode(is_async, mw_instance.process_template_response),
            )
        if hasattr(mw_instance, 'process_exception'):
            # The exception-handling stack is still always synchronous for
            # now, so adapt that way.
            self._exception_middleware.append(
                self.adapt_method_mode(False, mw_instance.process_exception),
            )

        handler = convert_exception_to_response(mw_instance)
        handler_is_async = middleware_is_async

    # Adapt the top of the stack, if needed.
    handler = self.adapt_method_mode(is_async, handler, handler_is_async)
    # We only assign to this when initialization is complete as it is used
    # as a flag for initialization being complete.
    self._middleware_chain = handler
```

*Span (25, 4)–(96, 40) · language: python · docstring language: en · langid predictions: ['en', 'error', 'th'] (unreliable)*
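The `sync_capable`/`async_capable` flags read via `getattr` above are the contract a middleware factory opts into. A minimal factory that `load_middleware` would accept: a sketch, not a Django built-in, and the `COUNT_REQUESTS` setting is hypothetical:

```python
from django.conf import settings
from django.core.exceptions import MiddlewareNotUsed

class RequestCounterMiddleware:
    sync_capable = True     # runs as-is in a synchronous stack
    async_capable = False   # async stacks adapt around it

    def __init__(self, get_response):
        if not getattr(settings, 'COUNT_REQUESTS', True):
            raise MiddlewareNotUsed  # logged and skipped by load_middleware
        self.get_response = get_response
        self.count = 0  # illustrative only; not thread-safe

    def __call__(self, request):
        self.count += 1
        response = self.get_response(request)
        response['X-Request-Count'] = str(self.count)
        return response
```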
**`BaseHandler.adapt_method_mode(self, is_async, method, method_is_async=None, debug=False, name=None)`**

Adapt a method to be in the correct "mode".

```python
def adapt_method_mode(
    self, is_async, method, method_is_async=None, debug=False, name=None,
):
    """
    Adapt a method to be in the correct "mode":
    - If is_async is False:
      - Synchronous methods are left alone
      - Asynchronous methods are wrapped with async_to_sync
    - If is_async is True:
      - Synchronous methods are wrapped with sync_to_async()
      - Asynchronous methods are left alone
    """
    if method_is_async is None:
        method_is_async = asyncio.iscoroutinefunction(method)
    if debug and not name:
        name = name or 'method %s()' % method.__qualname__
    if is_async:
        if not method_is_async:
            if debug:
                logger.debug('Synchronous %s adapted.', name)
            return sync_to_async(method, thread_sensitive=True)
    elif method_is_async:
        if debug:
            logger.debug('Asynchronous %s adapted.', name)
        return async_to_sync(method)
    return method
```

*Span (98, 4)–(123, 21) · language: python · docstring language: en · langid predictions: ['en', 'error', 'th'] (unreliable)*
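The method is a thin dispatcher over asgiref's two adapters: wrap only when the stack mode and the method mode disagree. A standalone illustration of the two wrapped cases (asgiref marks `sync_to_async` wrappers so `asyncio.iscoroutinefunction` detects them):

```python
import asyncio
from asgiref.sync import async_to_sync, sync_to_async

def sync_view(request):
    return 'sync result'

async def async_view(request):
    return 'async result'

# Async stack + sync method -> sync_to_async (thread_sensitive mirrors
# the branch above, keeping ORM-style code on a shared thread).
wrapped = sync_to_async(sync_view, thread_sensitive=True)
assert asyncio.iscoroutinefunction(wrapped)

# Sync stack + async method -> async_to_sync, callable without a loop.
unwrapped = async_to_sync(async_view)
assert unwrapped(None) == 'async result'
```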
**`BaseHandler.get_response(self, request)`**

Return an HttpResponse object for the given HttpRequest.

```python
def get_response(self, request):
    """Return an HttpResponse object for the given HttpRequest."""
    # Setup default url resolver for this thread
    set_urlconf(settings.ROOT_URLCONF)
    response = self._middleware_chain(request)
    response._resource_closers.append(request.close)
    if response.status_code >= 400:
        log_response(
            '%s: %s', response.reason_phrase, request.path,
            response=response,
            request=request,
        )
    return response
```

*Span (125, 4)–(137, 23) · language: python · docstring language: en · langid predictions: ['en', 'en', 'en'] (reliable)*
**`BaseHandler.get_response_async(self, request)`**

Asynchronous version of get_response.

```python
async def get_response_async(self, request):
    """
    Asynchronous version of get_response.

    Funneling everything, including WSGI, into a single async
    get_response() is too slow. Avoid the context switch by using
    a separate async response path.
    """
    # Setup default url resolver for this thread.
    set_urlconf(settings.ROOT_URLCONF)
    response = await self._middleware_chain(request)
    response._resource_closers.append(request.close)
    if response.status_code >= 400:
        await sync_to_async(log_response, thread_sensitive=False)(
            '%s: %s', response.reason_phrase, request.path,
            response=response,
            request=request,
        )
    return response
```

*Span (139, 4)–(157, 23) · language: python · docstring language: en · langid predictions: ['en', 'error', 'th'] (unreliable)*
BaseHandler._get_response | (self, request) |
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
|
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
| def _get_response(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
# If it is an asynchronous view, run it in a subthread.
if asyncio.iscoroutinefunction(wrapped_callback):
wrapped_callback = async_to_sync(wrapped_callback)
try:
response = wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = self.process_exception_by_middleware(e, request)
if response is None:
raise
# Complain if the view returned None (a common error).
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
# Complain if the template response middleware returned None (a common error).
self.check_response(
response,
middleware_method,
name='%s.process_template_response' % (
middleware_method.__self__.__class__.__name__,
)
)
try:
response = response.render()
except Exception as e:
response = self.process_exception_by_middleware(e, request)
if response is None:
raise
return response | [
"def",
"_get_response",
"(",
"self",
",",
"request",
")",
":",
"response",
"=",
"None",
"callback",
",",
"callback_args",
",",
"callback_kwargs",
"=",
"self",
".",
"resolve_request",
"(",
"request",
")",
"# Apply view middleware",
"for",
"middleware_method",
"in",
"self",
".",
"_view_middleware",
":",
"response",
"=",
"middleware_method",
"(",
"request",
",",
"callback",
",",
"callback_args",
",",
"callback_kwargs",
")",
"if",
"response",
":",
"break",
"if",
"response",
"is",
"None",
":",
"wrapped_callback",
"=",
"self",
".",
"make_view_atomic",
"(",
"callback",
")",
"# If it is an asynchronous view, run it in a subthread.",
"if",
"asyncio",
".",
"iscoroutinefunction",
"(",
"wrapped_callback",
")",
":",
"wrapped_callback",
"=",
"async_to_sync",
"(",
"wrapped_callback",
")",
"try",
":",
"response",
"=",
"wrapped_callback",
"(",
"request",
",",
"*",
"callback_args",
",",
"*",
"*",
"callback_kwargs",
")",
"except",
"Exception",
"as",
"e",
":",
"response",
"=",
"self",
".",
"process_exception_by_middleware",
"(",
"e",
",",
"request",
")",
"if",
"response",
"is",
"None",
":",
"raise",
"# Complain if the view returned None (a common error).",
"self",
".",
"check_response",
"(",
"response",
",",
"callback",
")",
"# If the response supports deferred rendering, apply template",
"# response middleware and then render the response",
"if",
"hasattr",
"(",
"response",
",",
"'render'",
")",
"and",
"callable",
"(",
"response",
".",
"render",
")",
":",
"for",
"middleware_method",
"in",
"self",
".",
"_template_response_middleware",
":",
"response",
"=",
"middleware_method",
"(",
"request",
",",
"response",
")",
"# Complain if the template response middleware returned None (a common error).",
"self",
".",
"check_response",
"(",
"response",
",",
"middleware_method",
",",
"name",
"=",
"'%s.process_template_response'",
"%",
"(",
"middleware_method",
".",
"__self__",
".",
"__class__",
".",
"__name__",
",",
")",
")",
"try",
":",
"response",
"=",
"response",
".",
"render",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"response",
"=",
"self",
".",
"process_exception_by_middleware",
"(",
"e",
",",
"request",
")",
"if",
"response",
"is",
"None",
":",
"raise",
"return",
"response"
] | [
159,
4
] | [
209,
23
] | python | en | ['en', 'error', 'th'] | False |
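
To make the short-circuit loop over self._view_middleware concrete, here is a hedged sketch of a middleware whose process_view() either returns a response, stopping dispatch before the view runs, or None, letting it continue; the path prefix is an assumption.

    from django.http import HttpResponse


    class BlocklistMiddleware:  # hypothetical middleware
        def __init__(self, get_response):
            self.get_response = get_response

        def __call__(self, request):
            return self.get_response(request)

        def process_view(self, request, view_func, view_args, view_kwargs):
            # A non-None return ends the loop in _get_response() and the
            # view is never called.
            if request.path.startswith('/blocked/'):
                return HttpResponse(status=403)
            return None
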
BaseHandler._get_response_async | (self, request) |
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
|
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
| async def _get_response_async(self, request):
"""
Resolve and call the view, then apply view, exception, and
template_response middleware. This method is everything that happens
inside the request/response middleware.
"""
response = None
callback, callback_args, callback_kwargs = self.resolve_request(request)
# Apply view middleware.
for middleware_method in self._view_middleware:
response = await middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
wrapped_callback = self.make_view_atomic(callback)
# If it is a synchronous view, run it in a subthread
if not asyncio.iscoroutinefunction(wrapped_callback):
wrapped_callback = sync_to_async(wrapped_callback, thread_sensitive=True)
try:
response = await wrapped_callback(request, *callback_args, **callback_kwargs)
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
if response is None:
raise
# Complain if the view returned None or an uncalled coroutine.
self.check_response(response, callback)
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = await middleware_method(request, response)
# Complain if the template response middleware returned None or
# an uncalled coroutine.
self.check_response(
response,
middleware_method,
name='%s.process_template_response' % (
middleware_method.__self__.__class__.__name__,
)
)
try:
if asyncio.iscoroutinefunction(response.render):
response = await response.render()
else:
response = await sync_to_async(response.render, thread_sensitive=True)()
except Exception as e:
response = await sync_to_async(
self.process_exception_by_middleware,
thread_sensitive=True,
)(e, request)
if response is None:
raise
# Make sure the response is not a coroutine
if asyncio.iscoroutine(response):
raise RuntimeError('Response is still a coroutine.')
return response | [
"async",
"def",
"_get_response_async",
"(",
"self",
",",
"request",
")",
":",
"response",
"=",
"None",
"callback",
",",
"callback_args",
",",
"callback_kwargs",
"=",
"self",
".",
"resolve_request",
"(",
"request",
")",
"# Apply view middleware.",
"for",
"middleware_method",
"in",
"self",
".",
"_view_middleware",
":",
"response",
"=",
"await",
"middleware_method",
"(",
"request",
",",
"callback",
",",
"callback_args",
",",
"callback_kwargs",
")",
"if",
"response",
":",
"break",
"if",
"response",
"is",
"None",
":",
"wrapped_callback",
"=",
"self",
".",
"make_view_atomic",
"(",
"callback",
")",
"# If it is a synchronous view, run it in a subthread",
"if",
"not",
"asyncio",
".",
"iscoroutinefunction",
"(",
"wrapped_callback",
")",
":",
"wrapped_callback",
"=",
"sync_to_async",
"(",
"wrapped_callback",
",",
"thread_sensitive",
"=",
"True",
")",
"try",
":",
"response",
"=",
"await",
"wrapped_callback",
"(",
"request",
",",
"*",
"callback_args",
",",
"*",
"*",
"callback_kwargs",
")",
"except",
"Exception",
"as",
"e",
":",
"response",
"=",
"await",
"sync_to_async",
"(",
"self",
".",
"process_exception_by_middleware",
",",
"thread_sensitive",
"=",
"True",
",",
")",
"(",
"e",
",",
"request",
")",
"if",
"response",
"is",
"None",
":",
"raise",
"# Complain if the view returned None or an uncalled coroutine.",
"self",
".",
"check_response",
"(",
"response",
",",
"callback",
")",
"# If the response supports deferred rendering, apply template",
"# response middleware and then render the response",
"if",
"hasattr",
"(",
"response",
",",
"'render'",
")",
"and",
"callable",
"(",
"response",
".",
"render",
")",
":",
"for",
"middleware_method",
"in",
"self",
".",
"_template_response_middleware",
":",
"response",
"=",
"await",
"middleware_method",
"(",
"request",
",",
"response",
")",
"# Complain if the template response middleware returned None or",
"# an uncalled coroutine.",
"self",
".",
"check_response",
"(",
"response",
",",
"middleware_method",
",",
"name",
"=",
"'%s.process_template_response'",
"%",
"(",
"middleware_method",
".",
"__self__",
".",
"__class__",
".",
"__name__",
",",
")",
")",
"try",
":",
"if",
"asyncio",
".",
"iscoroutinefunction",
"(",
"response",
".",
"render",
")",
":",
"response",
"=",
"await",
"response",
".",
"render",
"(",
")",
"else",
":",
"response",
"=",
"await",
"sync_to_async",
"(",
"response",
".",
"render",
",",
"thread_sensitive",
"=",
"True",
")",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"response",
"=",
"await",
"sync_to_async",
"(",
"self",
".",
"process_exception_by_middleware",
",",
"thread_sensitive",
"=",
"True",
",",
")",
"(",
"e",
",",
"request",
")",
"if",
"response",
"is",
"None",
":",
"raise",
"# Make sure the response is not a coroutine",
"if",
"asyncio",
".",
"iscoroutine",
"(",
"response",
")",
":",
"raise",
"RuntimeError",
"(",
"'Response is still a coroutine.'",
")",
"return",
"response"
] | [
211,
4
] | [
274,
23
] | python | en | ['en', 'error', 'th'] | False |
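
The sync-view wrapping above leans on asgiref's sync_to_async with thread_sensitive=True. A self-contained sketch of that adapter, independent of Django; the callable is a placeholder.

    import asyncio

    from asgiref.sync import sync_to_async


    def load_report():  # placeholder for a blocking, thread-sensitive callable
        return {'rows': 3}


    async def main():
        # thread_sensitive=True keeps such calls on one shared thread,
        # mirroring how _get_response_async() wraps synchronous views.
        result = await sync_to_async(load_report, thread_sensitive=True)()
        print(result)


    asyncio.run(main())
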
BaseHandler.resolve_request | (self, request) |
Retrieve/set the urlconf for the request. Return the view resolved,
with its args and kwargs.
|
Retrieve/set the urlconf for the request. Return the view resolved,
with its args and kwargs.
| def resolve_request(self, request):
"""
Retrieve/set the urlconf for the request. Return the view resolved,
with its args and kwargs.
"""
# Work out the resolver.
if hasattr(request, 'urlconf'):
urlconf = request.urlconf
set_urlconf(urlconf)
resolver = get_resolver(urlconf)
else:
resolver = get_resolver()
# Resolve the view, and assign the match object back to the request.
resolver_match = resolver.resolve(request.path_info)
request.resolver_match = resolver_match
return resolver_match | [
"def",
"resolve_request",
"(",
"self",
",",
"request",
")",
":",
"# Work out the resolver.",
"if",
"hasattr",
"(",
"request",
",",
"'urlconf'",
")",
":",
"urlconf",
"=",
"request",
".",
"urlconf",
"set_urlconf",
"(",
"urlconf",
")",
"resolver",
"=",
"get_resolver",
"(",
"urlconf",
")",
"else",
":",
"resolver",
"=",
"get_resolver",
"(",
")",
"# Resolve the view, and assign the match object back to the request.",
"resolver_match",
"=",
"resolver",
".",
"resolve",
"(",
"request",
".",
"path_info",
")",
"request",
".",
"resolver_match",
"=",
"resolver_match",
"return",
"resolver_match"
] | [
276,
4
] | [
291,
29
] | python | en | ['en', 'error', 'th'] | False |
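
Because resolve_request() prefers request.urlconf when present, a middleware can swap URL configurations per request. A hedged sketch; the host check and module path are assumptions.

    class TenantURLConfMiddleware:  # hypothetical middleware
        def __init__(self, get_response):
            self.get_response = get_response

        def __call__(self, request):
            if request.get_host().startswith('api.'):
                # resolve_request() builds its resolver from this
                # attribute instead of settings.ROOT_URLCONF.
                request.urlconf = 'myproject.api_urls'  # hypothetical module
            return self.get_response(request)
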
BaseHandler.check_response | (self, response, callback, name=None) |
Raise an error if the view returned None or an uncalled coroutine.
|
Raise an error if the view returned None or an uncalled coroutine.
| def check_response(self, response, callback, name=None):
"""
Raise an error if the view returned None or an uncalled coroutine.
"""
if not(response is None or asyncio.iscoroutine(response)):
return
if not name:
if isinstance(callback, types.FunctionType): # FBV
name = 'The view %s.%s' % (callback.__module__, callback.__name__)
else: # CBV
name = 'The view %s.%s.__call__' % (
callback.__module__,
callback.__class__.__name__,
)
if response is None:
raise ValueError(
"%s didn't return an HttpResponse object. It returned None "
"instead." % name
)
elif asyncio.iscoroutine(response):
raise ValueError(
"%s didn't return an HttpResponse object. It returned an "
"unawaited coroutine instead. You may need to add an 'await' "
"into your view." % name
) | [
"def",
"check_response",
"(",
"self",
",",
"response",
",",
"callback",
",",
"name",
"=",
"None",
")",
":",
"if",
"not",
"(",
"response",
"is",
"None",
"or",
"asyncio",
".",
"iscoroutine",
"(",
"response",
")",
")",
":",
"return",
"if",
"not",
"name",
":",
"if",
"isinstance",
"(",
"callback",
",",
"types",
".",
"FunctionType",
")",
":",
"# FBV",
"name",
"=",
"'The view %s.%s'",
"%",
"(",
"callback",
".",
"__module__",
",",
"callback",
".",
"__name__",
")",
"else",
":",
"# CBV",
"name",
"=",
"'The view %s.%s.__call__'",
"%",
"(",
"callback",
".",
"__module__",
",",
"callback",
".",
"__class__",
".",
"__name__",
",",
")",
"if",
"response",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"%s didn't return an HttpResponse object. It returned None \"",
"\"instead.\"",
"%",
"name",
")",
"elif",
"asyncio",
".",
"iscoroutine",
"(",
"response",
")",
":",
"raise",
"ValueError",
"(",
"\"%s didn't return an HttpResponse object. It returned an \"",
"\"unawaited coroutine instead. You may need to add an 'await' \"",
"\"into your view.\"",
"%",
"name",
")"
] | [
293,
4
] | [
317,
13
] | python | en | ['en', 'error', 'th'] | False |
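
The unawaited-coroutine branch above guards against a plain Python mistake that reproduces without Django; a minimal demonstration.

    import asyncio


    async def view():
        return 'response'


    result = view()  # called without `await`: result is a coroutine object
    assert asyncio.iscoroutine(result)  # the condition check_response tests
    result.close()  # silence the "never awaited" RuntimeWarning
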
BaseHandler.process_exception_by_middleware | (self, exception, request) |
Pass the exception to the exception middleware. If no middleware
return a response for this exception, return None.
|
Pass the exception to the exception middleware. If no middleware
return a response for this exception, return None.
| def process_exception_by_middleware(self, exception, request):
"""
Pass the exception to the exception middleware. If no middleware
return a response for this exception, return None.
"""
for middleware_method in self._exception_middleware:
response = middleware_method(request, exception)
if response:
return response
return None | [
"def",
"process_exception_by_middleware",
"(",
"self",
",",
"exception",
",",
"request",
")",
":",
"for",
"middleware_method",
"in",
"self",
".",
"_exception_middleware",
":",
"response",
"=",
"middleware_method",
"(",
"request",
",",
"exception",
")",
"if",
"response",
":",
"return",
"response",
"return",
"None"
] | [
332,
4
] | [
341,
19
] | python | en | ['en', 'error', 'th'] | False |
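
A hedged sketch of an exception middleware this loop would consult: the first hook to return a response wins, and returning None passes the exception along. The KeyError mapping is an assumption.

    from django.http import JsonResponse


    class ErrorToJsonMiddleware:  # hypothetical middleware
        def __init__(self, get_response):
            self.get_response = get_response

        def __call__(self, request):
            return self.get_response(request)

        def process_exception(self, request, exception):
            if isinstance(exception, KeyError):
                return JsonResponse({'error': str(exception)}, status=400)
            return None  # let later middleware, or the 500 handler, act
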
sleep | (seconds: float) |
Sleep strategy that delays execution for a given number of seconds.
This is the default strategy, and may be mocked out for unit testing.
|
Sleep strategy that delays execution for a given number of seconds. | def sleep(seconds: float) -> None:
"""
Sleep strategy that delays execution for a given number of seconds.
This is the default strategy, and may be mocked out for unit testing.
"""
time.sleep(seconds) | [
"def",
"sleep",
"(",
"seconds",
":",
"float",
")",
"->",
"None",
":",
"time",
".",
"sleep",
"(",
"seconds",
")"
] | [
24,
0
] | [
30,
23
] | python | en | ['en', 'error', 'th'] | False |
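
The docstring promises the strategy 'may be mocked out for unit testing'; a self-contained sketch of doing exactly that, assuming the strategy lives at module level as shown.

    import time
    from unittest import mock


    def sleep(seconds: float) -> None:
        time.sleep(seconds)


    def wait_twice():
        sleep(1.0)
        sleep(1.0)


    # Patch the module-level name so the test finishes instantly.
    with mock.patch(f'{__name__}.sleep') as fake_sleep:
        wait_twice()
        assert fake_sleep.call_count == 2
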
csrf_failure | (request, reason="", template_name=CSRF_FAILURE_TEMPLATE_NAME) |
Default view used when request fails CSRF protection
|
Default view used when request fails CSRF protection
| def csrf_failure(request, reason="", template_name=CSRF_FAILURE_TEMPLATE_NAME):
"""
Default view used when request fails CSRF protection
"""
from django.middleware.csrf import REASON_NO_CSRF_COOKIE, REASON_NO_REFERER
c = {
'title': _("Forbidden"),
'main': _("CSRF verification failed. Request aborted."),
'reason': reason,
'no_referer': reason == REASON_NO_REFERER,
'no_referer1': _(
'You are seeing this message because this HTTPS site requires a '
'“Referer header” to be sent by your Web browser, but none was '
'sent. This header is required for security reasons, to ensure '
'that your browser is not being hijacked by third parties.'),
'no_referer2': _(
'If you have configured your browser to disable “Referer” headers, '
'please re-enable them, at least for this site, or for HTTPS '
'connections, or for “same-origin” requests.'),
'no_referer3': _(
'If you are using the <meta name="referrer" '
'content=\"no-referrer\"> tag or including the “Referrer-Policy: '
'no-referrer” header, please remove them. The CSRF protection '
'requires the “Referer” header to do strict referer checking. If '
'you’re concerned about privacy, use alternatives like '
'<a rel=\"noreferrer\" …> for links to third-party sites.'),
'no_cookie': reason == REASON_NO_CSRF_COOKIE,
'no_cookie1': _(
"You are seeing this message because this site requires a CSRF "
"cookie when submitting forms. This cookie is required for "
"security reasons, to ensure that your browser is not being "
"hijacked by third parties."),
'no_cookie2': _(
'If you have configured your browser to disable cookies, please '
're-enable them, at least for this site, or for “same-origin” '
'requests.'),
'DEBUG': settings.DEBUG,
'docs_version': get_docs_version(),
'more': _("More information is available with DEBUG=True."),
}
try:
t = loader.get_template(template_name)
except TemplateDoesNotExist:
if template_name == CSRF_FAILURE_TEMPLATE_NAME:
# If the default template doesn't exist, use the string template.
t = Engine().from_string(CSRF_FAILURE_TEMPLATE)
c = Context(c)
else:
# Raise if a developer-specified template doesn't exist.
raise
return HttpResponseForbidden(t.render(c), content_type='text/html') | [
"def",
"csrf_failure",
"(",
"request",
",",
"reason",
"=",
"\"\"",
",",
"template_name",
"=",
"CSRF_FAILURE_TEMPLATE_NAME",
")",
":",
"from",
"django",
".",
"middleware",
".",
"csrf",
"import",
"REASON_NO_CSRF_COOKIE",
",",
"REASON_NO_REFERER",
"c",
"=",
"{",
"'title'",
":",
"_",
"(",
"\"Forbidden\"",
")",
",",
"'main'",
":",
"_",
"(",
"\"CSRF verification failed. Request aborted.\"",
")",
",",
"'reason'",
":",
"reason",
",",
"'no_referer'",
":",
"reason",
"==",
"REASON_NO_REFERER",
",",
"'no_referer1'",
":",
"_",
"(",
"'You are seeing this message because this HTTPS site requires a '",
"'“Referer header” to be sent by your Web browser, but none was '",
"'sent. This header is required for security reasons, to ensure '",
"'that your browser is not being hijacked by third parties.'",
")",
",",
"'no_referer2'",
":",
"_",
"(",
"'If you have configured your browser to disable “Referer” headers, '",
"'please re-enable them, at least for this site, or for HTTPS '",
"'connections, or for “same-origin” requests.'),",
"",
"",
"'no_referer3'",
":",
"_",
"(",
"'If you are using the <meta name=\"referrer\" '",
"'content=\\\"no-referrer\\\"> tag or including the “Referrer-Policy: '",
"'no-referrer” header, please remove them. The CSRF protection '",
"'requires the “Referer” header to do strict referer checking. If '",
"'you’re concerned about privacy, use alternatives like '",
"'<a rel=\\\"noreferrer\\\" …> for links to third-party sites.'),",
"",
"",
"'no_cookie'",
":",
"reason",
"==",
"REASON_NO_CSRF_COOKIE",
",",
"'no_cookie1'",
":",
"_",
"(",
"\"You are seeing this message because this site requires a CSRF \"",
"\"cookie when submitting forms. This cookie is required for \"",
"\"security reasons, to ensure that your browser is not being \"",
"\"hijacked by third parties.\"",
")",
",",
"'no_cookie2'",
":",
"_",
"(",
"'If you have configured your browser to disable cookies, please '",
"'re-enable them, at least for this site, or for “same-origin” '",
"'requests.'",
")",
",",
"'DEBUG'",
":",
"settings",
".",
"DEBUG",
",",
"'docs_version'",
":",
"get_docs_version",
"(",
")",
",",
"'more'",
":",
"_",
"(",
"\"More information is available with DEBUG=True.\"",
")",
",",
"}",
"try",
":",
"t",
"=",
"loader",
".",
"get_template",
"(",
"template_name",
")",
"except",
"TemplateDoesNotExist",
":",
"if",
"template_name",
"==",
"CSRF_FAILURE_TEMPLATE_NAME",
":",
"# If the default template doesn't exist, use the string template.",
"t",
"=",
"Engine",
"(",
")",
".",
"from_string",
"(",
"CSRF_FAILURE_TEMPLATE",
")",
"c",
"=",
"Context",
"(",
"c",
")",
"else",
":",
"# Raise if a developer-specified template doesn't exist.",
"raise",
"return",
"HttpResponseForbidden",
"(",
"t",
".",
"render",
"(",
"c",
")",
",",
"content_type",
"=",
"'text/html'",
")"
] | [
103,
0
] | [
153,
71
] | python | en | ['en', 'error', 'th'] | False |
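
Projects replace this default through Django's CSRF_FAILURE_VIEW setting; a hedged sketch of a minimal substitute with the same signature (the dotted path is an assumption).

    # settings.py
    CSRF_FAILURE_VIEW = 'myproject.views.csrf_failure'  # hypothetical path

    # myproject/views.py
    from django.http import HttpResponseForbidden


    def csrf_failure(request, reason='', template_name=''):
        # Same signature as the default view; plain text instead of the
        # template machinery above.
        return HttpResponseForbidden(
            'CSRF verification failed: %s' % reason,
            content_type='text/plain',
        )
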
Photo.validate_image_size | (f: File) | validate uploaded image size is within range | validate uploaded image size is within range | def validate_image_size(f: File): # noqa
""" validate uploaded image size is within range """
filesize = f.size
size_limit = int(settings.MAX_IMAGE_SIZE) * 1024 * 1024
if filesize > size_limit:
raise ValidationError(f"Max file size is {settings.MAX_IMAGE_SIZE}MB") | [
"def",
"validate_image_size",
"(",
"f",
":",
"File",
")",
":",
"# noqa",
"filesize",
"=",
"f",
".",
"size",
"size_limit",
"=",
"int",
"(",
"settings",
".",
"MAX_IMAGE_SIZE",
")",
"*",
"1024",
"*",
"1024",
"if",
"filesize",
">",
"size_limit",
":",
"raise",
"ValidationError",
"(",
"f\"Max file size is {settings.MAX_IMAGE_SIZE}MB\"",
")"
] | [
18,
4
] | [
23,
82
] | python | en | ['en', 'zu', 'en'] | True |
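
A hedged sketch of wiring the validator above onto a model field, assuming it is importable as a module-level callable; the field name and upload path are assumptions, and MAX_IMAGE_SIZE is read from settings at validation time.

    from django.db import models


    class Photo(models.Model):  # hypothetical wiring
        image = models.ImageField(
            upload_to='photos/',  # hypothetical path
            validators=[validate_image_size],  # the validator shown above
        )
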
load | (fp: TextIO, *, parse_float: ParseFloat = float) | Parse TOML from a file object. | Parse TOML from a file object. | def load(fp: TextIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]:
"""Parse TOML from a file object."""
s = fp.read()
return loads(s, parse_float=parse_float) | [
"def",
"load",
"(",
"fp",
":",
"TextIO",
",",
"*",
",",
"parse_float",
":",
"ParseFloat",
"=",
"float",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"s",
"=",
"fp",
".",
"read",
"(",
")",
"return",
"loads",
"(",
"s",
",",
"parse_float",
"=",
"parse_float",
")"
] | [
69,
0
] | [
72,
44
] | python | en | ['en', 'en', 'en'] | True |
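
Since load() only reads the stream and delegates to loads(), any text file object works, and parse_float lets callers keep exact decimals. A quick usage sketch against the API as defined above.

    import io
    from decimal import Decimal

    doc = io.StringIO('precision = 0.1\n')
    data = load(doc, parse_float=Decimal)  # load() as defined above
    assert data == {'precision': Decimal('0.1')}
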
loads | (s: str, *, parse_float: ParseFloat = float) | Parse TOML from a string. | Parse TOML from a string. | def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901
"""Parse TOML from a string."""
# The spec allows converting "\r\n" to "\n", even in string
# literals. Let's do so to simplify parsing.
src = s.replace("\r\n", "\n")
pos = 0
state = State()
# Parse one statement at a time
# (typically means one line in TOML source)
while True:
# 1. Skip line leading whitespace
pos = skip_chars(src, pos, TOML_WS)
# 2. Parse rules. Expect one of the following:
# - end of file
# - end of line
# - comment
# - key/value pair
# - append dict to list (and move to its namespace)
# - create dict (and move to its namespace)
# Skip trailing whitespace when applicable.
try:
char = src[pos]
except IndexError:
break
if char == "\n":
pos += 1
continue
if char in KEY_INITIAL_CHARS:
pos = key_value_rule(src, pos, state, parse_float)
pos = skip_chars(src, pos, TOML_WS)
elif char == "[":
try:
second_char: Optional[str] = src[pos + 1]
except IndexError:
second_char = None
if second_char == "[":
pos = create_list_rule(src, pos, state)
else:
pos = create_dict_rule(src, pos, state)
pos = skip_chars(src, pos, TOML_WS)
elif char != "#":
raise suffixed_err(src, pos, "Invalid statement")
# 3. Skip comment
pos = skip_comment(src, pos)
# 4. Expect end of line or end of file
try:
char = src[pos]
except IndexError:
break
if char != "\n":
raise suffixed_err(
src, pos, "Expected newline or end of document after a statement"
)
pos += 1
return state.out.dict | [
"def",
"loads",
"(",
"s",
":",
"str",
",",
"*",
",",
"parse_float",
":",
"ParseFloat",
"=",
"float",
")",
"->",
"Dict",
"[",
"str",
",",
"Any",
"]",
":",
"# noqa: C901",
"# The spec allows converting \"\\r\\n\" to \"\\n\", even in string",
"# literals. Let's do so to simplify parsing.",
"src",
"=",
"s",
".",
"replace",
"(",
"\"\\r\\n\"",
",",
"\"\\n\"",
")",
"pos",
"=",
"0",
"state",
"=",
"State",
"(",
")",
"# Parse one statement at a time",
"# (typically means one line in TOML source)",
"while",
"True",
":",
"# 1. Skip line leading whitespace",
"pos",
"=",
"skip_chars",
"(",
"src",
",",
"pos",
",",
"TOML_WS",
")",
"# 2. Parse rules. Expect one of the following:",
"# - end of file",
"# - end of line",
"# - comment",
"# - key/value pair",
"# - append dict to list (and move to its namespace)",
"# - create dict (and move to its namespace)",
"# Skip trailing whitespace when applicable.",
"try",
":",
"char",
"=",
"src",
"[",
"pos",
"]",
"except",
"IndexError",
":",
"break",
"if",
"char",
"==",
"\"\\n\"",
":",
"pos",
"+=",
"1",
"continue",
"if",
"char",
"in",
"KEY_INITIAL_CHARS",
":",
"pos",
"=",
"key_value_rule",
"(",
"src",
",",
"pos",
",",
"state",
",",
"parse_float",
")",
"pos",
"=",
"skip_chars",
"(",
"src",
",",
"pos",
",",
"TOML_WS",
")",
"elif",
"char",
"==",
"\"[\"",
":",
"try",
":",
"second_char",
":",
"Optional",
"[",
"str",
"]",
"=",
"src",
"[",
"pos",
"+",
"1",
"]",
"except",
"IndexError",
":",
"second_char",
"=",
"None",
"if",
"second_char",
"==",
"\"[\"",
":",
"pos",
"=",
"create_list_rule",
"(",
"src",
",",
"pos",
",",
"state",
")",
"else",
":",
"pos",
"=",
"create_dict_rule",
"(",
"src",
",",
"pos",
",",
"state",
")",
"pos",
"=",
"skip_chars",
"(",
"src",
",",
"pos",
",",
"TOML_WS",
")",
"elif",
"char",
"!=",
"\"#\"",
":",
"raise",
"suffixed_err",
"(",
"src",
",",
"pos",
",",
"\"Invalid statement\"",
")",
"# 3. Skip comment",
"pos",
"=",
"skip_comment",
"(",
"src",
",",
"pos",
")",
"# 4. Expect end of line or end of file",
"try",
":",
"char",
"=",
"src",
"[",
"pos",
"]",
"except",
"IndexError",
":",
"break",
"if",
"char",
"!=",
"\"\\n\"",
":",
"raise",
"suffixed_err",
"(",
"src",
",",
"pos",
",",
"\"Expected newline or end of document after a statement\"",
")",
"pos",
"+=",
"1",
"return",
"state",
".",
"out",
".",
"dict"
] | [
75,
0
] | [
135,
25
] | python | en | ['en', 'en', 'en'] | True |
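
The `[` versus `[[` branch above is what separates tables from arrays of tables; a short input exercising both against loads() as defined above.

    doc = '''
    [server]
    host = "127.0.0.1"

    [[server.endpoints]]
    path = "/health"
    '''
    data = loads(doc)
    assert data == {
        'server': {
            'host': '127.0.0.1',
            'endpoints': [{'path': '/health'}],
        },
    }
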
suffixed_err | (src: str, pos: Pos, msg: str) | Return a `TOMLDecodeError` where error message is suffixed with
coordinates in source. | Return a `TOMLDecodeError` where error message is suffixed with
coordinates in source. | def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError:
"""Return a `TOMLDecodeError` where error message is suffixed with
coordinates in source."""
def coord_repr(src: str, pos: Pos) -> str:
if pos >= len(src):
return "end of document"
line = src.count("\n", 0, pos) + 1
if line == 1:
column = pos + 1
else:
column = pos - src.rindex("\n", 0, pos)
return f"line {line}, column {column}"
return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") | [
"def",
"suffixed_err",
"(",
"src",
":",
"str",
",",
"pos",
":",
"Pos",
",",
"msg",
":",
"str",
")",
"->",
"TOMLDecodeError",
":",
"def",
"coord_repr",
"(",
"src",
":",
"str",
",",
"pos",
":",
"Pos",
")",
"->",
"str",
":",
"if",
"pos",
">=",
"len",
"(",
"src",
")",
":",
"return",
"\"end of document\"",
"line",
"=",
"src",
".",
"count",
"(",
"\"\\n\"",
",",
"0",
",",
"pos",
")",
"+",
"1",
"if",
"line",
"==",
"1",
":",
"column",
"=",
"pos",
"+",
"1",
"else",
":",
"column",
"=",
"pos",
"-",
"src",
".",
"rindex",
"(",
"\"\\n\"",
",",
"0",
",",
"pos",
")",
"return",
"f\"line {line}, column {column}\"",
"return",
"TOMLDecodeError",
"(",
"f\"{msg} (at {coord_repr(src, pos)})\"",
")"
] | [
684,
0
] | [
698,
64
] | python | en | ['en', 'en', 'en'] | True |
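
The line/column arithmetic inside coord_repr() can be checked by hand on a two-line source.

    src = 'a = 1\nb =\n'
    pos = 8  # index of the '=' on the second line
    line = src.count('\n', 0, pos) + 1       # one newline before pos -> line 2
    column = pos - src.rindex('\n', 0, pos)  # 8 - 5 -> column 3
    assert (line, column) == (2, 3)
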
ip_address_validators | (protocol, unpack_ipv4) |
Depending on the given parameters, return the appropriate validators for
the GenericIPAddressField.
|
Depending on the given parameters, return the appropriate validators for
the GenericIPAddressField.
| def ip_address_validators(protocol, unpack_ipv4):
"""
Depending on the given parameters, return the appropriate validators for
the GenericIPAddressField.
"""
if protocol != 'both' and unpack_ipv4:
raise ValueError(
"You can only use `unpack_ipv4` if `protocol` is set to 'both'")
try:
return ip_address_validator_map[protocol.lower()]
except KeyError:
raise ValueError("The protocol '%s' is unknown. Supported: %s"
% (protocol, list(ip_address_validator_map))) | [
"def",
"ip_address_validators",
"(",
"protocol",
",",
"unpack_ipv4",
")",
":",
"if",
"protocol",
"!=",
"'both'",
"and",
"unpack_ipv4",
":",
"raise",
"ValueError",
"(",
"\"You can only use `unpack_ipv4` if `protocol` is set to 'both'\"",
")",
"try",
":",
"return",
"ip_address_validator_map",
"[",
"protocol",
".",
"lower",
"(",
")",
"]",
"except",
"KeyError",
":",
"raise",
"ValueError",
"(",
"\"The protocol '%s' is unknown. Supported: %s\"",
"%",
"(",
"protocol",
",",
"list",
"(",
"ip_address_validator_map",
")",
")",
")"
] | [
315,
0
] | [
327,
70
] | python | en | ['en', 'error', 'th'] | False |
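
A usage sketch against Django's public validators; the final line assumes the map stores (validators, error message) pairs, as it does in the Django source this record comes from.

    from django.core.exceptions import ValidationError
    from django.core.validators import validate_ipv4_address

    try:
        validate_ipv4_address('999.0.0.1')
    except ValidationError as exc:
        print(exc.messages)  # e.g. ['Enter a valid IPv4 address.']

    validators, message = ip_address_validators('ipv4', unpack_ipv4=False)
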
publish_messages | (project, topic_name) | Publishes multiple messages to a Pub/Sub topic. | Publishes multiple messages to a Pub/Sub topic. | def publish_messages(project, topic_name):
"""Publishes multiple messages to a Pub/Sub topic."""
# [START pubsub_quickstart_publisher]
# [START pubsub_publish]
publisher = pubsub_v1.PublisherClient()
topic_path = publisher.topic_path(project, topic_name)
f = open("sensorData.txt", "r")
#for n in range(1, 10):
while True:
#data = u'Message number {}'.format(n)
data = f.readline()
if data == '':
break
else:
# Data must be a bytestring
data = data.encode('utf-8')
publisher.publish(topic_path, data=data)
print(data)
time.sleep(30)
print('Published messages.') | [
"def",
"publish_messages",
"(",
"project",
",",
"topic_name",
")",
":",
"# [START pubsub_quickstart_publisher]",
"# [START pubsub_publish]",
"publisher",
"=",
"pubsub_v1",
".",
"PublisherClient",
"(",
")",
"topic_path",
"=",
"publisher",
".",
"topic_path",
"(",
"project",
",",
"topic_name",
")",
"f",
"=",
"open",
"(",
"\"sensorData.txt\"",
",",
"\"r\"",
")",
"#for n in range(1, 10):",
"while",
"True",
":",
"#data = u'Message number {}'.format(n)",
"data",
"=",
"f",
".",
"readline",
"(",
")",
"if",
"data",
"==",
"''",
":",
"break",
"else",
":",
"# Data must be a bytestring",
"data",
"=",
"data",
".",
"encode",
"(",
"'utf-8'",
")",
"publisher",
".",
"publish",
"(",
"topic_path",
",",
"data",
"=",
"data",
")",
"print",
"(",
"data",
")",
"time",
".",
"sleep",
"(",
"30",
")",
"print",
"(",
"'Published messages.'",
")"
] | [
6,
0
] | [
26,
32
] | python | en | ['en', 'en', 'en'] | True |
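
A hedged rewrite of the sample above that closes the file and waits for broker acknowledgements; the 30-second cadence and the filename are kept from the original sample.

    import time

    from google.cloud import pubsub_v1


    def publish_lines(project, topic_name, path='sensorData.txt'):
        publisher = pubsub_v1.PublisherClient()
        topic_path = publisher.topic_path(project, topic_name)
        with open(path, 'r') as f:
            for line in f:
                future = publisher.publish(topic_path, data=line.encode('utf-8'))
                future.result()  # block until Pub/Sub acknowledges
                print(line)
                time.sleep(30)
        print('Published messages.')
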
_is_relevant_relation | (relation, altered_field) |
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
|
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
| def _is_relevant_relation(relation, altered_field):
"""
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
"""
field = relation.field
if field.many_to_many:
# M2M reverse field
return False
if altered_field.primary_key and field.to_fields == [None]:
# Foreign key constraint on the primary key, which is being altered.
return True
# Is the constraint targeting the field being altered?
return altered_field.name in field.to_fields | [
"def",
"_is_relevant_relation",
"(",
"relation",
",",
"altered_field",
")",
":",
"field",
"=",
"relation",
".",
"field",
"if",
"field",
".",
"many_to_many",
":",
"# M2M reverse field",
"return",
"False",
"if",
"altered_field",
".",
"primary_key",
"and",
"field",
".",
"to_fields",
"==",
"[",
"None",
"]",
":",
"# Foreign key constraint on the primary key, which is being altered.",
"return",
"True",
"# Is the constraint targeting the field being altered?",
"return",
"altered_field",
".",
"name",
"in",
"field",
".",
"to_fields"
] | [
15,
0
] | [
28,
48
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor.execute | (self, sql, params=()) | Execute the given SQL statement, with optional parameters. | Execute the given SQL statement, with optional parameters. | def execute(self, sql, params=()):
"""Execute the given SQL statement, with optional parameters."""
# Don't perform the transactional DDL check if SQL is being collected
# as it's not going to be executed anyway.
if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl:
raise TransactionManagementError(
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
# Account for non-string statement objects.
sql = str(sql)
# Log the command we're running, then run it
logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql})
if self.collect_sql:
ending = "" if sql.endswith(";") else ";"
if params is not None:
self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params) | [
"def",
"execute",
"(",
"self",
",",
"sql",
",",
"params",
"=",
"(",
")",
")",
":",
"# Don't perform the transactional DDL check if SQL is being collected",
"# as it's not going to be executed anyway.",
"if",
"not",
"self",
".",
"collect_sql",
"and",
"self",
".",
"connection",
".",
"in_atomic_block",
"and",
"not",
"self",
".",
"connection",
".",
"features",
".",
"can_rollback_ddl",
":",
"raise",
"TransactionManagementError",
"(",
"\"Executing DDL statements while in a transaction on databases \"",
"\"that can't perform a rollback is prohibited.\"",
")",
"# Account for non-string statement objects.",
"sql",
"=",
"str",
"(",
"sql",
")",
"# Log the command we're running, then run it",
"logger",
".",
"debug",
"(",
"\"%s; (params %r)\"",
",",
"sql",
",",
"params",
",",
"extra",
"=",
"{",
"'params'",
":",
"params",
",",
"'sql'",
":",
"sql",
"}",
")",
"if",
"self",
".",
"collect_sql",
":",
"ending",
"=",
"\"\"",
"if",
"sql",
".",
"endswith",
"(",
"\";\"",
")",
"else",
"\";\"",
"if",
"params",
"is",
"not",
"None",
":",
"self",
".",
"collected_sql",
".",
"append",
"(",
"(",
"sql",
"%",
"tuple",
"(",
"map",
"(",
"self",
".",
"quote_value",
",",
"params",
")",
")",
")",
"+",
"ending",
")",
"else",
":",
"self",
".",
"collected_sql",
".",
"append",
"(",
"sql",
"+",
"ending",
")",
"else",
":",
"with",
"self",
".",
"connection",
".",
"cursor",
"(",
")",
"as",
"cursor",
":",
"cursor",
".",
"execute",
"(",
"sql",
",",
"params",
")"
] | [
123,
4
] | [
144,
43
] | python | en | ['en', 'en', 'en'] | True |
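
The collect_sql branch is what powers dry runs such as sqlmigrate; a hedged sketch of capturing DDL instead of executing it (table and column names are assumptions).

    from django.db import connection

    with connection.schema_editor(collect_sql=True) as editor:
        editor.execute('ALTER TABLE app_photo ADD COLUMN caption text')
    print(editor.collected_sql)  # the statement, ';'-terminated, unexecuted
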
BaseDatabaseSchemaEditor.table_sql | (self, model) | Take a model and return its table definition. | Take a model and return its table definition. | def table_sql(self, model):
"""Take a model and return its table definition."""
# Add any unique_togethers (always deferred, as some fields might be
# created afterwards, like geometry fields with some backends).
for fields in model._meta.unique_together:
columns = [model._meta.get_field(field).column for field in fields]
self.deferred_sql.append(self._create_unique_sql(model, columns))
# Create column SQL, add FK deferreds if needed.
column_sqls = []
params = []
for field in model._meta.local_fields:
# SQL.
definition, extra_params = self.column_sql(model, field)
if definition is None:
continue
# Check constraints can go on the column SQL here.
db_params = field.db_parameters(connection=self.connection)
if db_params['check']:
definition += ' ' + self.sql_check_constraint % db_params
# Autoincrement SQL (for backends with inline variant).
col_type_suffix = field.db_type_suffix(connection=self.connection)
if col_type_suffix:
definition += ' %s' % col_type_suffix
params.extend(extra_params)
# FK.
if field.remote_field and field.db_constraint:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
if self.sql_create_inline_fk:
definition += ' ' + self.sql_create_inline_fk % {
'to_table': self.quote_name(to_table),
'to_column': self.quote_name(to_column),
}
elif self.connection.features.supports_foreign_keys:
self.deferred_sql.append(self._create_fk_sql(model, field, '_fk_%(to_table)s_%(to_column)s'))
# Add the SQL to our big list.
column_sqls.append('%s %s' % (
self.quote_name(field.column),
definition,
))
# Autoincrement SQL (for backends with post table definition
# variant).
if field.get_internal_type() in ('AutoField', 'BigAutoField', 'SmallAutoField'):
autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
if autoinc_sql:
self.deferred_sql.extend(autoinc_sql)
constraints = [constraint.constraint_sql(model, self) for constraint in model._meta.constraints]
sql = self.sql_create_table % {
'table': self.quote_name(model._meta.db_table),
'definition': ', '.join(constraint for constraint in (*column_sqls, *constraints) if constraint),
}
if model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
if tablespace_sql:
sql += ' ' + tablespace_sql
return sql, params | [
"def",
"table_sql",
"(",
"self",
",",
"model",
")",
":",
"# Add any unique_togethers (always deferred, as some fields might be",
"# created afterwards, like geometry fields with some backends).",
"for",
"fields",
"in",
"model",
".",
"_meta",
".",
"unique_together",
":",
"columns",
"=",
"[",
"model",
".",
"_meta",
".",
"get_field",
"(",
"field",
")",
".",
"column",
"for",
"field",
"in",
"fields",
"]",
"self",
".",
"deferred_sql",
".",
"append",
"(",
"self",
".",
"_create_unique_sql",
"(",
"model",
",",
"columns",
")",
")",
"# Create column SQL, add FK deferreds if needed.",
"column_sqls",
"=",
"[",
"]",
"params",
"=",
"[",
"]",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"local_fields",
":",
"# SQL.",
"definition",
",",
"extra_params",
"=",
"self",
".",
"column_sql",
"(",
"model",
",",
"field",
")",
"if",
"definition",
"is",
"None",
":",
"continue",
"# Check constraints can go on the column SQL here.",
"db_params",
"=",
"field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"if",
"db_params",
"[",
"'check'",
"]",
":",
"definition",
"+=",
"' '",
"+",
"self",
".",
"sql_check_constraint",
"%",
"db_params",
"# Autoincrement SQL (for backends with inline variant).",
"col_type_suffix",
"=",
"field",
".",
"db_type_suffix",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"if",
"col_type_suffix",
":",
"definition",
"+=",
"' %s'",
"%",
"col_type_suffix",
"params",
".",
"extend",
"(",
"extra_params",
")",
"# FK.",
"if",
"field",
".",
"remote_field",
"and",
"field",
".",
"db_constraint",
":",
"to_table",
"=",
"field",
".",
"remote_field",
".",
"model",
".",
"_meta",
".",
"db_table",
"to_column",
"=",
"field",
".",
"remote_field",
".",
"model",
".",
"_meta",
".",
"get_field",
"(",
"field",
".",
"remote_field",
".",
"field_name",
")",
".",
"column",
"if",
"self",
".",
"sql_create_inline_fk",
":",
"definition",
"+=",
"' '",
"+",
"self",
".",
"sql_create_inline_fk",
"%",
"{",
"'to_table'",
":",
"self",
".",
"quote_name",
"(",
"to_table",
")",
",",
"'to_column'",
":",
"self",
".",
"quote_name",
"(",
"to_column",
")",
",",
"}",
"elif",
"self",
".",
"connection",
".",
"features",
".",
"supports_foreign_keys",
":",
"self",
".",
"deferred_sql",
".",
"append",
"(",
"self",
".",
"_create_fk_sql",
"(",
"model",
",",
"field",
",",
"'_fk_%(to_table)s_%(to_column)s'",
")",
")",
"# Add the SQL to our big list.",
"column_sqls",
".",
"append",
"(",
"'%s %s'",
"%",
"(",
"self",
".",
"quote_name",
"(",
"field",
".",
"column",
")",
",",
"definition",
",",
")",
")",
"# Autoincrement SQL (for backends with post table definition",
"# variant).",
"if",
"field",
".",
"get_internal_type",
"(",
")",
"in",
"(",
"'AutoField'",
",",
"'BigAutoField'",
",",
"'SmallAutoField'",
")",
":",
"autoinc_sql",
"=",
"self",
".",
"connection",
".",
"ops",
".",
"autoinc_sql",
"(",
"model",
".",
"_meta",
".",
"db_table",
",",
"field",
".",
"column",
")",
"if",
"autoinc_sql",
":",
"self",
".",
"deferred_sql",
".",
"extend",
"(",
"autoinc_sql",
")",
"constraints",
"=",
"[",
"constraint",
".",
"constraint_sql",
"(",
"model",
",",
"self",
")",
"for",
"constraint",
"in",
"model",
".",
"_meta",
".",
"constraints",
"]",
"sql",
"=",
"self",
".",
"sql_create_table",
"%",
"{",
"'table'",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"'definition'",
":",
"', '",
".",
"join",
"(",
"constraint",
"for",
"constraint",
"in",
"(",
"*",
"column_sqls",
",",
"*",
"constraints",
")",
"if",
"constraint",
")",
",",
"}",
"if",
"model",
".",
"_meta",
".",
"db_tablespace",
":",
"tablespace_sql",
"=",
"self",
".",
"connection",
".",
"ops",
".",
"tablespace_sql",
"(",
"model",
".",
"_meta",
".",
"db_tablespace",
")",
"if",
"tablespace_sql",
":",
"sql",
"+=",
"' '",
"+",
"tablespace_sql",
"return",
"sql",
",",
"params"
] | [
149,
4
] | [
204,
26
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseSchemaEditor.column_sql | (self, model, field, include_default=False) |
Take a field and return its column definition.
The field must already have had set_attributes_from_name() called.
|
Take a field and return its column definition.
The field must already have had set_attributes_from_name() called.
| def column_sql(self, model, field, include_default=False):
"""
Take a field and return its column definition.
The field must already have had set_attributes_from_name() called.
"""
# Get the column's type and use that as the basis of the SQL
db_params = field.db_parameters(connection=self.connection)
sql = db_params['type']
params = []
# Check for fields that aren't actually columns (e.g. M2M)
if sql is None:
return None, None
# Collation.
collation = getattr(field, 'db_collation', None)
if collation:
sql += self._collate_sql(collation)
# Work out nullability
null = field.null
# If we were told to include a default value, do so
include_default = (
include_default and
not self.skip_default(field) and
# Don't include a default value if it's a nullable field and the
# default cannot be dropped in the ALTER COLUMN statement (e.g.
# MySQL longtext and longblob).
not (null and self.skip_default_on_alter(field))
)
if include_default:
default_value = self.effective_default(field)
column_default = ' DEFAULT ' + self._column_default_sql(field)
if default_value is not None:
if self.connection.features.requires_literal_defaults:
# Some databases can't take defaults as a parameter (oracle)
# If this is the case, the individual schema backend should
# implement prepare_default
sql += column_default % self.prepare_default(default_value)
else:
sql += column_default
params += [default_value]
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (field.empty_strings_allowed and not field.primary_key and
self.connection.features.interprets_empty_strings_as_nulls):
null = True
if null and not self.connection.features.implied_column_null:
sql += " NULL"
elif not null:
sql += " NOT NULL"
# Primary key/unique outputs
if field.primary_key:
sql += " PRIMARY KEY"
elif field.unique:
sql += " UNIQUE"
# Optionally add the tablespace if it's an implicitly indexed column
tablespace = field.db_tablespace or model._meta.db_tablespace
if tablespace and self.connection.features.supports_tablespaces and field.unique:
sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True)
# Return the sql
return sql, params | [
"def",
"column_sql",
"(",
"self",
",",
"model",
",",
"field",
",",
"include_default",
"=",
"False",
")",
":",
"# Get the column's type and use that as the basis of the SQL",
"db_params",
"=",
"field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"sql",
"=",
"db_params",
"[",
"'type'",
"]",
"params",
"=",
"[",
"]",
"# Check for fields that aren't actually columns (e.g. M2M)",
"if",
"sql",
"is",
"None",
":",
"return",
"None",
",",
"None",
"# Collation.",
"collation",
"=",
"getattr",
"(",
"field",
",",
"'db_collation'",
",",
"None",
")",
"if",
"collation",
":",
"sql",
"+=",
"self",
".",
"_collate_sql",
"(",
"collation",
")",
"# Work out nullability",
"null",
"=",
"field",
".",
"null",
"# If we were told to include a default value, do so",
"include_default",
"=",
"(",
"include_default",
"and",
"not",
"self",
".",
"skip_default",
"(",
"field",
")",
"and",
"# Don't include a default value if it's a nullable field and the",
"# default cannot be dropped in the ALTER COLUMN statement (e.g.",
"# MySQL longtext and longblob).",
"not",
"(",
"null",
"and",
"self",
".",
"skip_default_on_alter",
"(",
"field",
")",
")",
")",
"if",
"include_default",
":",
"default_value",
"=",
"self",
".",
"effective_default",
"(",
"field",
")",
"column_default",
"=",
"' DEFAULT '",
"+",
"self",
".",
"_column_default_sql",
"(",
"field",
")",
"if",
"default_value",
"is",
"not",
"None",
":",
"if",
"self",
".",
"connection",
".",
"features",
".",
"requires_literal_defaults",
":",
"# Some databases can't take defaults as a parameter (oracle)",
"# If this is the case, the individual schema backend should",
"# implement prepare_default",
"sql",
"+=",
"column_default",
"%",
"self",
".",
"prepare_default",
"(",
"default_value",
")",
"else",
":",
"sql",
"+=",
"column_default",
"params",
"+=",
"[",
"default_value",
"]",
"# Oracle treats the empty string ('') as null, so coerce the null",
"# option whenever '' is a possible value.",
"if",
"(",
"field",
".",
"empty_strings_allowed",
"and",
"not",
"field",
".",
"primary_key",
"and",
"self",
".",
"connection",
".",
"features",
".",
"interprets_empty_strings_as_nulls",
")",
":",
"null",
"=",
"True",
"if",
"null",
"and",
"not",
"self",
".",
"connection",
".",
"features",
".",
"implied_column_null",
":",
"sql",
"+=",
"\" NULL\"",
"elif",
"not",
"null",
":",
"sql",
"+=",
"\" NOT NULL\"",
"# Primary key/unique outputs",
"if",
"field",
".",
"primary_key",
":",
"sql",
"+=",
"\" PRIMARY KEY\"",
"elif",
"field",
".",
"unique",
":",
"sql",
"+=",
"\" UNIQUE\"",
"# Optionally add the tablespace if it's an implicitly indexed column",
"tablespace",
"=",
"field",
".",
"db_tablespace",
"or",
"model",
".",
"_meta",
".",
"db_tablespace",
"if",
"tablespace",
"and",
"self",
".",
"connection",
".",
"features",
".",
"supports_tablespaces",
"and",
"field",
".",
"unique",
":",
"sql",
"+=",
"\" %s\"",
"%",
"self",
".",
"connection",
".",
"ops",
".",
"tablespace_sql",
"(",
"tablespace",
",",
"inline",
"=",
"True",
")",
"# Return the sql",
"return",
"sql",
",",
"params"
] | [
208,
4
] | [
266,
26
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor.skip_default | (self, field) |
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob).
|
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob).
| def skip_default(self, field):
"""
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob).
"""
return False | [
"def",
"skip_default",
"(",
"self",
",",
"field",
")",
":",
"return",
"False"
] | [
268,
4
] | [
273,
20
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor.skip_default_on_alter | (self, field) |
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob) in the ALTER COLUMN statement.
|
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob) in the ALTER COLUMN statement.
| def skip_default_on_alter(self, field):
"""
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob) in the ALTER COLUMN statement.
"""
return False | [
"def",
"skip_default_on_alter",
"(",
"self",
",",
"field",
")",
":",
"return",
"False"
] | [
275,
4
] | [
280,
20
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor.prepare_default | (self, value) |
Only used for backends which have requires_literal_defaults feature
|
Only used for backends which have requires_literal_defaults feature
| def prepare_default(self, value):
"""
Only used for backends which have requires_literal_defaults feature
"""
raise NotImplementedError(
'subclasses of BaseDatabaseSchemaEditor for backends which have '
'requires_literal_defaults must provide a prepare_default() method'
) | [
"def",
"prepare_default",
"(",
"self",
",",
"value",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseSchemaEditor for backends which have '",
"'requires_literal_defaults must provide a prepare_default() method'",
")"
] | [
282,
4
] | [
289,
9
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor._column_default_sql | (self, field) |
Return the SQL to use in a DEFAULT clause. The resulting string should
contain a '%s' placeholder for a default value.
|
Return the SQL to use in a DEFAULT clause. The resulting string should
contain a '%s' placeholder for a default value.
| def _column_default_sql(self, field):
"""
Return the SQL to use in a DEFAULT clause. The resulting string should
contain a '%s' placeholder for a default value.
"""
return '%s' | [
"def",
"_column_default_sql",
"(",
"self",
",",
"field",
")",
":",
"return",
"'%s'"
] | [
291,
4
] | [
296,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor.effective_default | (self, field) | Return a field's effective database default value. | Return a field's effective database default value. | def effective_default(self, field):
"""Return a field's effective database default value."""
return field.get_db_prep_save(self._effective_default(field), self.connection) | [
"def",
"effective_default",
"(",
"self",
",",
"field",
")",
":",
"return",
"field",
".",
"get_db_prep_save",
"(",
"self",
".",
"_effective_default",
"(",
"field",
")",
",",
"self",
".",
"connection",
")"
] | [
321,
4
] | [
323,
86
] | python | da | ['ro', 'da', 'en'] | False |
BaseDatabaseSchemaEditor.quote_value | (self, value) |
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
|
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
| def quote_value(self, value):
"""
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError() | [
"def",
"quote_value",
"(",
"self",
",",
"value",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | [
325,
4
] | [
333,
35
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor.create_model | (self, model) |
Create a table and any accompanying indexes or unique constraints for
the given `model`.
|
Create a table and any accompanying indexes or unique constraints for
the given `model`.
| def create_model(self, model):
"""
Create a table and any accompanying indexes or unique constraints for
the given `model`.
"""
sql, params = self.table_sql(model)
# Prevent using [] as params, in the case a literal '%' is used in the definition
self.execute(sql, params or None)
# Add any field index and index_together's (deferred as SQLite _remake_table needs it)
self.deferred_sql.extend(self._model_indexes_sql(model))
# Make M2M tables
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through) | [
"def",
"create_model",
"(",
"self",
",",
"model",
")",
":",
"sql",
",",
"params",
"=",
"self",
".",
"table_sql",
"(",
"model",
")",
"# Prevent using [] as params, in the case a literal '%' is used in the definition",
"self",
".",
"execute",
"(",
"sql",
",",
"params",
"or",
"None",
")",
"# Add any field index and index_together's (deferred as SQLite _remake_table needs it)",
"self",
".",
"deferred_sql",
".",
"extend",
"(",
"self",
".",
"_model_indexes_sql",
"(",
"model",
")",
")",
"# Make M2M tables",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"local_many_to_many",
":",
"if",
"field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
":",
"self",
".",
"create_model",
"(",
"field",
".",
"remote_field",
".",
"through",
")"
] | [
337,
4
] | [
352,
61
] | python | en | ['en', 'error', 'th'] | False |
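
The same editor API is commonly used in tests to materialize a table outside migrations; a hedged sketch with a hypothetical model.

    from django.db import connection, models


    class Tag(models.Model):  # hypothetical model
        name = models.CharField(max_length=50, unique=True)

        class Meta:
            app_label = 'demo'  # hypothetical app label


    with connection.schema_editor() as editor:
        # Emits CREATE TABLE, then flushes deferred index/unique SQL on exit.
        editor.create_model(Tag)
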
BaseDatabaseSchemaEditor.delete_model | (self, model) | Delete a model from the database. | Delete a model from the database. | def delete_model(self, model):
"""Delete a model from the database."""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(self.sql_delete_table % {
"table": self.quote_name(model._meta.db_table),
})
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(model._meta.db_table):
self.deferred_sql.remove(sql) | [
"def",
"delete_model",
"(",
"self",
",",
"model",
")",
":",
"# Handle auto-created intermediary models",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"local_many_to_many",
":",
"if",
"field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
":",
"self",
".",
"delete_model",
"(",
"field",
".",
"remote_field",
".",
"through",
")",
"# Delete the table",
"self",
".",
"execute",
"(",
"self",
".",
"sql_delete_table",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"}",
")",
"# Remove all deferred statements referencing the deleted table.",
"for",
"sql",
"in",
"list",
"(",
"self",
".",
"deferred_sql",
")",
":",
"if",
"isinstance",
"(",
"sql",
",",
"Statement",
")",
"and",
"sql",
".",
"references_table",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
":",
"self",
".",
"deferred_sql",
".",
"remove",
"(",
"sql",
")"
] | [
354,
4
] | [
368,
45
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseSchemaEditor.add_index | (self, model, index) | Add an index on a model. | Add an index on a model. | def add_index(self, model, index):
"""Add an index on a model."""
if (
index.contains_expressions and
not self.connection.features.supports_expression_indexes
):
return None
# Index.create_sql returns interpolated SQL which makes params=None a
# necessity to avoid escaping attempts on execution.
self.execute(index.create_sql(model, self), params=None) | [
"def",
"add_index",
"(",
"self",
",",
"model",
",",
"index",
")",
":",
"if",
"(",
"index",
".",
"contains_expressions",
"and",
"not",
"self",
".",
"connection",
".",
"features",
".",
"supports_expression_indexes",
")",
":",
"return",
"None",
"# Index.create_sql returns interpolated SQL which makes params=None a",
"# necessity to avoid escaping attempts on execution.",
"self",
".",
"execute",
"(",
"index",
".",
"create_sql",
"(",
"model",
",",
"self",
")",
",",
"params",
"=",
"None",
")"
] | [
370,
4
] | [
379,
64
] | python | en | ['en', 'en', 'en'] | True |
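
Reusing the hypothetical Tag model from the previous sketch, adding a named index goes through the same editor; params=None matters because Index.create_sql returns fully interpolated SQL.

    from django.db import connection, models

    idx = models.Index(fields=['name'], name='demo_tag_name_idx')
    with connection.schema_editor() as editor:
        editor.add_index(Tag, idx)  # Tag as sketched after create_model
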
BaseDatabaseSchemaEditor.remove_index | (self, model, index) | Remove an index from a model. | Remove an index from a model. | def remove_index(self, model, index):
"""Remove an index from a model."""
if (
index.contains_expressions and
not self.connection.features.supports_expression_indexes
):
return None
self.execute(index.remove_sql(model, self)) | [
"def",
"remove_index",
"(",
"self",
",",
"model",
",",
"index",
")",
":",
"if",
"(",
"index",
".",
"contains_expressions",
"and",
"not",
"self",
".",
"connection",
".",
"features",
".",
"supports_expression_indexes",
")",
":",
"return",
"None",
"self",
".",
"execute",
"(",
"index",
".",
"remove_sql",
"(",
"model",
",",
"self",
")",
")"
] | [
381,
4
] | [
388,
51
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseSchemaEditor.add_constraint | (self, model, constraint) | Add a constraint to a model. | Add a constraint to a model. | def add_constraint(self, model, constraint):
"""Add a constraint to a model."""
sql = constraint.create_sql(model, self)
if sql:
# Constraint.create_sql returns interpolated SQL which makes
# params=None a necessity to avoid escaping attempts on execution.
self.execute(sql, params=None) | [
"def",
"add_constraint",
"(",
"self",
",",
"model",
",",
"constraint",
")",
":",
"sql",
"=",
"constraint",
".",
"create_sql",
"(",
"model",
",",
"self",
")",
"if",
"sql",
":",
"# Constraint.create_sql returns interpolated SQL which makes",
"# params=None a necessity to avoid escaping attempts on execution.",
"self",
".",
"execute",
"(",
"sql",
",",
"params",
"=",
"None",
")"
] | [
390,
4
] | [
396,
42
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseSchemaEditor.remove_constraint | (self, model, constraint) | Remove a constraint from a model. | Remove a constraint from a model. | def remove_constraint(self, model, constraint):
"""Remove a constraint from a model."""
sql = constraint.remove_sql(model, self)
if sql:
self.execute(sql) | [
"def",
"remove_constraint",
"(",
"self",
",",
"model",
",",
"constraint",
")",
":",
"sql",
"=",
"constraint",
".",
"remove_sql",
"(",
"model",
",",
"self",
")",
"if",
"sql",
":",
"self",
".",
"execute",
"(",
"sql",
")"
] | [
398,
4
] | [
402,
29
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseSchemaEditor.alter_unique_together | (self, model, old_unique_together, new_unique_together) |
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
|
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
| def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_unique_together}
news = {tuple(fields) for fields in new_unique_together}
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_unique)
# Created uniques
for fields in news.difference(olds):
columns = [model._meta.get_field(field).column for field in fields]
self.execute(self._create_unique_sql(model, columns)) | [
"def",
"alter_unique_together",
"(",
"self",
",",
"model",
",",
"old_unique_together",
",",
"new_unique_together",
")",
":",
"olds",
"=",
"{",
"tuple",
"(",
"fields",
")",
"for",
"fields",
"in",
"old_unique_together",
"}",
"news",
"=",
"{",
"tuple",
"(",
"fields",
")",
"for",
"fields",
"in",
"new_unique_together",
"}",
"# Deleted uniques",
"for",
"fields",
"in",
"olds",
".",
"difference",
"(",
"news",
")",
":",
"self",
".",
"_delete_composed_index",
"(",
"model",
",",
"fields",
",",
"{",
"'unique'",
":",
"True",
"}",
",",
"self",
".",
"sql_delete_unique",
")",
"# Created uniques",
"for",
"fields",
"in",
"news",
".",
"difference",
"(",
"olds",
")",
":",
"columns",
"=",
"[",
"model",
".",
"_meta",
".",
"get_field",
"(",
"field",
")",
".",
"column",
"for",
"field",
"in",
"fields",
"]",
"self",
".",
"execute",
"(",
"self",
".",
"_create_unique_sql",
"(",
"model",
",",
"columns",
")",
")"
] | [
404,
4
] | [
418,
65
] | python | en | ['en', 'error', 'th'] | False |
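A sketch showing the required doubly-nested input: a collection of field-name tuples, never a flat ["foo", "bar"] list. Model and field names are hypothetical, as before.

    with connection.schema_editor() as schema_editor:
        schema_editor.alter_unique_together(
            Book,
            old_unique_together=[],
            new_unique_together=[('title', 'author')],
        )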
BaseDatabaseSchemaEditor.alter_index_together | (self, model, old_index_together, new_index_together) |
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
|
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
| def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_index_together}
news = {tuple(fields) for fields in new_index_together}
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(
model,
fields,
{'index': True, 'unique': False},
self.sql_delete_index,
)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields=fields, suffix='_idx')) | [
"def",
"alter_index_together",
"(",
"self",
",",
"model",
",",
"old_index_together",
",",
"new_index_together",
")",
":",
"olds",
"=",
"{",
"tuple",
"(",
"fields",
")",
"for",
"fields",
"in",
"old_index_together",
"}",
"news",
"=",
"{",
"tuple",
"(",
"fields",
")",
"for",
"fields",
"in",
"new_index_together",
"}",
"# Deleted indexes",
"for",
"fields",
"in",
"olds",
".",
"difference",
"(",
"news",
")",
":",
"self",
".",
"_delete_composed_index",
"(",
"model",
",",
"fields",
",",
"{",
"'index'",
":",
"True",
",",
"'unique'",
":",
"False",
"}",
",",
"self",
".",
"sql_delete_index",
",",
")",
"# Created indexes",
"for",
"field_names",
"in",
"news",
".",
"difference",
"(",
"olds",
")",
":",
"fields",
"=",
"[",
"model",
".",
"_meta",
".",
"get_field",
"(",
"field",
")",
"for",
"field",
"in",
"field_names",
"]",
"self",
".",
"execute",
"(",
"self",
".",
"_create_index_sql",
"(",
"model",
",",
"fields",
"=",
"fields",
",",
"suffix",
"=",
"'_idx'",
")",
")"
] | [
420,
4
] | [
439,
85
] | python | en | ['en', 'error', 'th'] | False |
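alter_index_together takes the same doubly-nested shape; removing an entry resolves the matching composite index via _delete_composed_index(), while adding one builds an index with the '_idx' suffix. A sketch with the same hypothetical model:

    with connection.schema_editor() as schema_editor:
        schema_editor.alter_index_together(
            Book,
            old_index_together=[('title', 'author')],
            new_index_together=[],
        )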
BaseDatabaseSchemaEditor.alter_db_table | (self, model, old_db_table, new_db_table) | Rename the table a model points to. | Rename the table a model points to. | def alter_db_table(self, model, old_db_table, new_db_table):
"""Rename the table a model points to."""
if (old_db_table == new_db_table or
(self.connection.features.ignores_table_name_case and
old_db_table.lower() == new_db_table.lower())):
return
self.execute(self.sql_rename_table % {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
})
# Rename all references to the old table name.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_table_references(old_db_table, new_db_table) | [
"def",
"alter_db_table",
"(",
"self",
",",
"model",
",",
"old_db_table",
",",
"new_db_table",
")",
":",
"if",
"(",
"old_db_table",
"==",
"new_db_table",
"or",
"(",
"self",
".",
"connection",
".",
"features",
".",
"ignores_table_name_case",
"and",
"old_db_table",
".",
"lower",
"(",
")",
"==",
"new_db_table",
".",
"lower",
"(",
")",
")",
")",
":",
"return",
"self",
".",
"execute",
"(",
"self",
".",
"sql_rename_table",
"%",
"{",
"\"old_table\"",
":",
"self",
".",
"quote_name",
"(",
"old_db_table",
")",
",",
"\"new_table\"",
":",
"self",
".",
"quote_name",
"(",
"new_db_table",
")",
",",
"}",
")",
"# Rename all references to the old table name.",
"for",
"sql",
"in",
"self",
".",
"deferred_sql",
":",
"if",
"isinstance",
"(",
"sql",
",",
"Statement",
")",
":",
"sql",
".",
"rename_table_references",
"(",
"old_db_table",
",",
"new_db_table",
")"
] | [
457,
4
] | [
470,
71
] | python | en | ['en', 'en', 'en'] | True |
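A rename sketch; note the early return when the names already match, including case-insensitive matches on backends where ignores_table_name_case is set. Table names are hypothetical.

    with connection.schema_editor() as schema_editor:
        schema_editor.alter_db_table(Book, 'myapp_book', 'myapp_book_archive')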
BaseDatabaseSchemaEditor.alter_db_tablespace | (self, model, old_db_tablespace, new_db_tablespace) | Move a model's table between tablespaces. | Move a model's table between tablespaces. | def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""Move a model's table between tablespaces."""
self.execute(self.sql_retablespace_table % {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
}) | [
"def",
"alter_db_tablespace",
"(",
"self",
",",
"model",
",",
"old_db_tablespace",
",",
"new_db_tablespace",
")",
":",
"self",
".",
"execute",
"(",
"self",
".",
"sql_retablespace_table",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"old_tablespace\"",
":",
"self",
".",
"quote_name",
"(",
"old_db_tablespace",
")",
",",
"\"new_tablespace\"",
":",
"self",
".",
"quote_name",
"(",
"new_db_tablespace",
")",
",",
"}",
")"
] | [
472,
4
] | [
478,
10
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseSchemaEditor.add_field | (self, model, field) |
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
|
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
| def add_field(self, model, field):
"""
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.create_model(field.remote_field.through)
# Get the column's definition
definition, params = self.column_sql(model, field, include_default=True)
# It might not actually have a column behind it
if definition is None:
return
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params['check']:
definition += " " + self.sql_check_constraint % db_params
if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint:
constraint_suffix = '_fk_%(to_table)s_%(to_column)s'
# Add FK constraint inline, if supported.
if self.sql_create_column_inline_fk:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
namespace, _ = split_identifier(model._meta.db_table)
definition += " " + self.sql_create_column_inline_fk % {
'name': self._fk_constraint_name(model, field, constraint_suffix),
'namespace': '%s.' % self.quote_name(namespace) if namespace else '',
'column': self.quote_name(field.column),
'to_table': self.quote_name(to_table),
'to_column': self.quote_name(to_column),
'deferrable': self.connection.ops.deferrable_sql()
}
# Otherwise, add FK constraints later.
else:
self.deferred_sql.append(self._create_fk_sql(model, field, constraint_suffix))
# Build the SQL and run it
sql = self.sql_create_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"definition": definition,
}
self.execute(sql, params)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if not self.skip_default_on_alter(field) and self.effective_default(field) is not None:
changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Add an index, if required
self.deferred_sql.extend(self._field_indexes_sql(model, field))
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close() | [
"def",
"add_field",
"(",
"self",
",",
"model",
",",
"field",
")",
":",
"# Special-case implicit M2M tables",
"if",
"field",
".",
"many_to_many",
"and",
"field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
":",
"return",
"self",
".",
"create_model",
"(",
"field",
".",
"remote_field",
".",
"through",
")",
"# Get the column's definition",
"definition",
",",
"params",
"=",
"self",
".",
"column_sql",
"(",
"model",
",",
"field",
",",
"include_default",
"=",
"True",
")",
"# It might not actually have a column behind it",
"if",
"definition",
"is",
"None",
":",
"return",
"# Check constraints can go on the column SQL here",
"db_params",
"=",
"field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"if",
"db_params",
"[",
"'check'",
"]",
":",
"definition",
"+=",
"\" \"",
"+",
"self",
".",
"sql_check_constraint",
"%",
"db_params",
"if",
"field",
".",
"remote_field",
"and",
"self",
".",
"connection",
".",
"features",
".",
"supports_foreign_keys",
"and",
"field",
".",
"db_constraint",
":",
"constraint_suffix",
"=",
"'_fk_%(to_table)s_%(to_column)s'",
"# Add FK constraint inline, if supported.",
"if",
"self",
".",
"sql_create_column_inline_fk",
":",
"to_table",
"=",
"field",
".",
"remote_field",
".",
"model",
".",
"_meta",
".",
"db_table",
"to_column",
"=",
"field",
".",
"remote_field",
".",
"model",
".",
"_meta",
".",
"get_field",
"(",
"field",
".",
"remote_field",
".",
"field_name",
")",
".",
"column",
"namespace",
",",
"_",
"=",
"split_identifier",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
"definition",
"+=",
"\" \"",
"+",
"self",
".",
"sql_create_column_inline_fk",
"%",
"{",
"'name'",
":",
"self",
".",
"_fk_constraint_name",
"(",
"model",
",",
"field",
",",
"constraint_suffix",
")",
",",
"'namespace'",
":",
"'%s.'",
"%",
"self",
".",
"quote_name",
"(",
"namespace",
")",
"if",
"namespace",
"else",
"''",
",",
"'column'",
":",
"self",
".",
"quote_name",
"(",
"field",
".",
"column",
")",
",",
"'to_table'",
":",
"self",
".",
"quote_name",
"(",
"to_table",
")",
",",
"'to_column'",
":",
"self",
".",
"quote_name",
"(",
"to_column",
")",
",",
"'deferrable'",
":",
"self",
".",
"connection",
".",
"ops",
".",
"deferrable_sql",
"(",
")",
"}",
"# Otherwise, add FK constraints later.",
"else",
":",
"self",
".",
"deferred_sql",
".",
"append",
"(",
"self",
".",
"_create_fk_sql",
"(",
"model",
",",
"field",
",",
"constraint_suffix",
")",
")",
"# Build the SQL and run it",
"sql",
"=",
"self",
".",
"sql_create_column",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"column\"",
":",
"self",
".",
"quote_name",
"(",
"field",
".",
"column",
")",
",",
"\"definition\"",
":",
"definition",
",",
"}",
"self",
".",
"execute",
"(",
"sql",
",",
"params",
")",
"# Drop the default if we need to",
"# (Django usually does not use in-database defaults)",
"if",
"not",
"self",
".",
"skip_default_on_alter",
"(",
"field",
")",
"and",
"self",
".",
"effective_default",
"(",
"field",
")",
"is",
"not",
"None",
":",
"changes_sql",
",",
"params",
"=",
"self",
".",
"_alter_column_default_sql",
"(",
"model",
",",
"None",
",",
"field",
",",
"drop",
"=",
"True",
")",
"sql",
"=",
"self",
".",
"sql_alter_column",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"changes\"",
":",
"changes_sql",
",",
"}",
"self",
".",
"execute",
"(",
"sql",
",",
"params",
")",
"# Add an index, if required",
"self",
".",
"deferred_sql",
".",
"extend",
"(",
"self",
".",
"_field_indexes_sql",
"(",
"model",
",",
"field",
")",
")",
"# Reset connection if required",
"if",
"self",
".",
"connection",
".",
"features",
".",
"connection_persists_old_columns",
":",
"self",
".",
"connection",
".",
"close",
"(",
")"
] | [
480,
4
] | [
535,
35
] | python | en | ['en', 'error', 'th'] | False |
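A sketch of add_field with a concrete column; set_attributes_from_name() binds a name and column onto an unbound Field, as Django's own schema tests do. Because the field has an effective default and the base editor does not keep in-database defaults, the generated SQL adds the default and then immediately drops it, per the comments above. Names are hypothetical.

    from django.db import connection, models
    from myapp.models import Book  # hypothetical model

    new_field = models.IntegerField(default=0)
    new_field.set_attributes_from_name('page_count')
    with connection.schema_editor() as schema_editor:
        schema_editor.add_field(Book, new_field)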
BaseDatabaseSchemaEditor.remove_field | (self, model, field) |
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
|
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
| def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)['type'] is None:
return
# Drop any FK constraints, MySQL requires explicit deletion
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_fk_sql(model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
# Remove all deferred statements referencing the deleted column.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column):
self.deferred_sql.remove(sql) | [
"def",
"remove_field",
"(",
"self",
",",
"model",
",",
"field",
")",
":",
"# Special-case implicit M2M tables",
"if",
"field",
".",
"many_to_many",
"and",
"field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
":",
"return",
"self",
".",
"delete_model",
"(",
"field",
".",
"remote_field",
".",
"through",
")",
"# It might not actually have a column behind it",
"if",
"field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"[",
"'type'",
"]",
"is",
"None",
":",
"return",
"# Drop any FK constraints, MySQL requires explicit deletion",
"if",
"field",
".",
"remote_field",
":",
"fk_names",
"=",
"self",
".",
"_constraint_names",
"(",
"model",
",",
"[",
"field",
".",
"column",
"]",
",",
"foreign_key",
"=",
"True",
")",
"for",
"fk_name",
"in",
"fk_names",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_delete_fk_sql",
"(",
"model",
",",
"fk_name",
")",
")",
"# Delete the column",
"sql",
"=",
"self",
".",
"sql_delete_column",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"column\"",
":",
"self",
".",
"quote_name",
"(",
"field",
".",
"column",
")",
",",
"}",
"self",
".",
"execute",
"(",
"sql",
")",
"# Reset connection if required",
"if",
"self",
".",
"connection",
".",
"features",
".",
"connection_persists_old_columns",
":",
"self",
".",
"connection",
".",
"close",
"(",
")",
"# Remove all deferred statements referencing the deleted column.",
"for",
"sql",
"in",
"list",
"(",
"self",
".",
"deferred_sql",
")",
":",
"if",
"isinstance",
"(",
"sql",
",",
"Statement",
")",
"and",
"sql",
".",
"references_column",
"(",
"model",
".",
"_meta",
".",
"db_table",
",",
"field",
".",
"column",
")",
":",
"self",
".",
"deferred_sql",
".",
"remove",
"(",
"sql",
")"
] | [
537,
4
] | [
565,
45
] | python | en | ['en', 'error', 'th'] | False |
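Dropping the column again is symmetric; fetching the bound field from _meta guarantees the column and type information is populated, and any deferred statements referencing the column are pruned as shown above.

    field = Book._meta.get_field('page_count')
    with connection.schema_editor() as schema_editor:
        schema_editor.remove_field(Book, field)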
BaseDatabaseSchemaEditor.alter_field | (self, model, old_field, new_field, strict=False) |
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
|
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
| def alter_field(self, model, old_field, new_field, strict=False):
"""
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
"""
if not self._field_should_be_altered(old_field, new_field):
return
# Ensure this field is even column-based
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params['type']
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params['type']
if ((old_type is None and old_field.remote_field is None) or
(new_type is None and new_field.remote_field is None)):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using a badly-written custom field?)" %
(old_field, new_field),
)
elif old_type is None and new_type is None and (
old_field.remote_field.through and new_field.remote_field.through and
old_field.remote_field.through._meta.auto_created and
new_field.remote_field.through._meta.auto_created):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif old_type is None and new_type is None and (
old_field.remote_field.through and new_field.remote_field.through and
not old_field.remote_field.through._meta.auto_created and
not new_field.remote_field.through._meta.auto_created):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict) | [
"def",
"alter_field",
"(",
"self",
",",
"model",
",",
"old_field",
",",
"new_field",
",",
"strict",
"=",
"False",
")",
":",
"if",
"not",
"self",
".",
"_field_should_be_altered",
"(",
"old_field",
",",
"new_field",
")",
":",
"return",
"# Ensure this field is even column-based",
"old_db_params",
"=",
"old_field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"old_type",
"=",
"old_db_params",
"[",
"'type'",
"]",
"new_db_params",
"=",
"new_field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"new_type",
"=",
"new_db_params",
"[",
"'type'",
"]",
"if",
"(",
"(",
"old_type",
"is",
"None",
"and",
"old_field",
".",
"remote_field",
"is",
"None",
")",
"or",
"(",
"new_type",
"is",
"None",
"and",
"new_field",
".",
"remote_field",
"is",
"None",
")",
")",
":",
"raise",
"ValueError",
"(",
"\"Cannot alter field %s into %s - they do not properly define \"",
"\"db_type (are you using a badly-written custom field?)\"",
"%",
"(",
"old_field",
",",
"new_field",
")",
",",
")",
"elif",
"old_type",
"is",
"None",
"and",
"new_type",
"is",
"None",
"and",
"(",
"old_field",
".",
"remote_field",
".",
"through",
"and",
"new_field",
".",
"remote_field",
".",
"through",
"and",
"old_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
"and",
"new_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
")",
":",
"return",
"self",
".",
"_alter_many_to_many",
"(",
"model",
",",
"old_field",
",",
"new_field",
",",
"strict",
")",
"elif",
"old_type",
"is",
"None",
"and",
"new_type",
"is",
"None",
"and",
"(",
"old_field",
".",
"remote_field",
".",
"through",
"and",
"new_field",
".",
"remote_field",
".",
"through",
"and",
"not",
"old_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
"and",
"not",
"new_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"auto_created",
")",
":",
"# Both sides have through models; this is a no-op.",
"return",
"elif",
"old_type",
"is",
"None",
"or",
"new_type",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Cannot alter field %s into %s - they are not compatible types \"",
"\"(you cannot alter to or from M2M fields, or add or remove \"",
"\"through= on M2M fields)\"",
"%",
"(",
"old_field",
",",
"new_field",
")",
")",
"self",
".",
"_alter_field",
"(",
"model",
",",
"old_field",
",",
"new_field",
",",
"old_type",
",",
"new_type",
",",
"old_db_params",
",",
"new_db_params",
",",
"strict",
")"
] | [
567,
4
] | [
608,
63
] | python | en | ['en', 'error', 'th'] | False |
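A sketch widening a CharField, assuming the hypothetical Book.title currently has a smaller max_length; strict=True makes the editor raise if the existing constraints do not match old_field exactly. The actual work happens in _alter_field below.

    old_field = Book._meta.get_field('title')
    new_field = models.CharField(max_length=500)
    new_field.set_attributes_from_name('title')
    with connection.schema_editor() as schema_editor:
        schema_editor.alter_field(Book, old_field, new_field, strict=True)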
BaseDatabaseSchemaEditor._alter_field | (self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False) | Perform a "physical" (non-ManyToMany) field update. | Perform a "physical" (non-ManyToMany) field update. | def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
"""Perform a "physical" (non-ManyToMany) field update."""
# Drop any FK constraints, we'll remake them later
fks_dropped = set()
if (
self.connection.features.supports_foreign_keys and
old_field.remote_field and
old_field.db_constraint
):
fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
if strict and len(fk_names) != 1:
raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
len(fk_names),
model._meta.db_table,
old_field.column,
))
for fk_name in fk_names:
fks_dropped.add((old_field.column,))
self.execute(self._delete_fk_sql(model, fk_name))
# Has unique been removed?
if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)):
# Find the unique constraint for this field
meta_constraint_names = {constraint.name for constraint in model._meta.constraints}
constraint_names = self._constraint_names(
model, [old_field.column], unique=True, primary_key=False,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
len(constraint_names),
model._meta.db_table,
old_field.column,
))
for constraint_name in constraint_names:
self.execute(self._delete_unique_sql(model, constraint_name))
# Drop incoming FK constraints if the field is a primary key or unique,
# which might be a to_field target, and things are going to change.
drop_foreign_keys = (
self.connection.features.supports_foreign_keys and (
(old_field.primary_key and new_field.primary_key) or
(old_field.unique and new_field.unique)
) and old_type != new_type
)
if drop_foreign_keys:
            # '_meta.related_objects' also contains M2M reverse fields; these
            # will be filtered out
for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
rel_fk_names = self._constraint_names(
new_rel.related_model, [new_rel.field.column], foreign_key=True
)
for fk_name in rel_fk_names:
self.execute(self._delete_fk_sql(new_rel.related_model, fk_name))
# Removed an index? (no strict check, as multiple indexes are possible)
# Remove indexes if db_index switched to False or a unique constraint
# will now be used in lieu of an index. The following lines from the
# truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# True | False | False | False
# True | False | False | True
# True | False | True | True
if old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique):
# Find the index for this field
meta_index_names = {index.name for index in model._meta.indexes}
# Retrieve only BTREE indexes since this is what's created with
# db_index=True.
index_names = self._constraint_names(
model, [old_field.column], index=True, type_=Index.suffix,
exclude=meta_index_names,
)
for index_name in index_names:
# The only way to check if an index was created with
# db_index=True or with Index(['field'], name='foo')
# is to look at its name (refs #28053).
self.execute(self._delete_index_sql(model, index_name))
# Change check constraints?
if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
meta_constraint_names = {constraint.name for constraint in model._meta.constraints}
constraint_names = self._constraint_names(
model, [old_field.column], check=True,
exclude=meta_constraint_names,
)
if strict and len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
len(constraint_names),
model._meta.db_table,
old_field.column,
))
for constraint_name in constraint_names:
self.execute(self._delete_check_sql(model, constraint_name))
# Have they renamed the column?
if old_field.column != new_field.column:
self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
# Rename all references to the renamed column.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column)
# Next, start accumulating actions to do
actions = []
null_actions = []
post_actions = []
# Collation change?
old_collation = getattr(old_field, 'db_collation', None)
new_collation = getattr(new_field, 'db_collation', None)
if old_collation != new_collation:
# Collation change handles also a type change.
fragment = self._alter_column_collation_sql(model, new_field, new_type, new_collation)
actions.append(fragment)
# Type change?
elif old_type != new_type:
fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type)
actions.append(fragment)
post_actions.extend(other_actions)
# When changing a column NULL constraint to NOT NULL with a given
# default value, we need to perform 4 steps:
# 1. Add a default for new incoming writes
# 2. Update existing NULL rows with new default
# 3. Replace NULL constraint with NOT NULL
# 4. Drop the default again.
# Default change?
needs_database_default = False
if old_field.null and not new_field.null:
old_default = self.effective_default(old_field)
new_default = self.effective_default(new_field)
if (
not self.skip_default_on_alter(new_field) and
old_default != new_default and
new_default is not None
):
needs_database_default = True
actions.append(self._alter_column_default_sql(model, old_field, new_field))
# Nullability change?
if old_field.null != new_field.null:
fragment = self._alter_column_null_sql(model, old_field, new_field)
if fragment:
null_actions.append(fragment)
# Only if we have a default and there is a change from NULL to NOT NULL
four_way_default_alteration = (
new_field.has_default() and
(old_field.null and not new_field.null)
)
if actions or null_actions:
if not four_way_default_alteration:
# If we don't have to do a 4-way default alteration we can
# directly run a (NOT) NULL alteration
actions = actions + null_actions
# Combine actions together if we can (e.g. postgres)
if self.connection.features.supports_combined_alters and actions:
sql, params = tuple(zip(*actions))
actions = [(", ".join(sql), sum(params, []))]
# Apply those actions
for sql, params in actions:
self.execute(
self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if four_way_default_alteration:
# Update existing rows with default value
self.execute(
self.sql_update_with_default % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"default": "%s",
},
[new_default],
)
                # Since we didn't run a NOT NULL change before, we need to do it
# now
for sql, params in null_actions:
self.execute(
self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if post_actions:
for sql, params in post_actions:
self.execute(sql, params)
# If primary_key changed to False, delete the primary key constraint.
if old_field.primary_key and not new_field.primary_key:
self._delete_primary_key(model, strict)
# Added a unique?
if self._unique_should_be_added(old_field, new_field):
self.execute(self._create_unique_sql(model, [new_field.column]))
# Added an index? Add an index if db_index switched to True or a unique
# constraint will no longer be used in lieu of an index. The following
# lines from the truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# False | False | True | False
# False | True | True | False
# True | True | True | False
if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique:
self.execute(self._create_index_sql(model, fields=[new_field]))
# Type alteration on primary key? Then we need to alter the column
# referring to us.
rels_to_update = []
if drop_foreign_keys:
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Changed to become primary key?
if self._field_became_primary_key(old_field, new_field):
# Make the new one
self.execute(self._create_primary_key_sql(model, new_field))
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Handle our type alters on the other end of rels from the PK stuff above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params['type']
fragment, other_actions = self._alter_column_type_sql(
new_rel.related_model, old_rel.field, new_rel.field, rel_type
)
self.execute(
self.sql_alter_column % {
"table": self.quote_name(new_rel.related_model._meta.db_table),
"changes": fragment[0],
},
fragment[1],
)
for sql, params in other_actions:
self.execute(sql, params)
# Does it have a foreign key?
if (self.connection.features.supports_foreign_keys and new_field.remote_field and
(fks_dropped or not old_field.remote_field or not old_field.db_constraint) and
new_field.db_constraint):
self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
# Rebuild FKs that pointed to us if we previously had to drop them
if drop_foreign_keys:
for rel in new_field.model._meta.related_objects:
if _is_relevant_relation(rel, new_field) and rel.field.db_constraint:
self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk"))
# Does it have check constraints we need to add?
if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
constraint_name = self._create_index_name(model._meta.db_table, [new_field.column], suffix='_check')
self.execute(self._create_check_sql(model, constraint_name, new_db_params['check']))
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if needs_database_default:
changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close() | [
"def",
"_alter_field",
"(",
"self",
",",
"model",
",",
"old_field",
",",
"new_field",
",",
"old_type",
",",
"new_type",
",",
"old_db_params",
",",
"new_db_params",
",",
"strict",
"=",
"False",
")",
":",
"# Drop any FK constraints, we'll remake them later",
"fks_dropped",
"=",
"set",
"(",
")",
"if",
"(",
"self",
".",
"connection",
".",
"features",
".",
"supports_foreign_keys",
"and",
"old_field",
".",
"remote_field",
"and",
"old_field",
".",
"db_constraint",
")",
":",
"fk_names",
"=",
"self",
".",
"_constraint_names",
"(",
"model",
",",
"[",
"old_field",
".",
"column",
"]",
",",
"foreign_key",
"=",
"True",
")",
"if",
"strict",
"and",
"len",
"(",
"fk_names",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"Found wrong number (%s) of foreign key constraints for %s.%s\"",
"%",
"(",
"len",
"(",
"fk_names",
")",
",",
"model",
".",
"_meta",
".",
"db_table",
",",
"old_field",
".",
"column",
",",
")",
")",
"for",
"fk_name",
"in",
"fk_names",
":",
"fks_dropped",
".",
"add",
"(",
"(",
"old_field",
".",
"column",
",",
")",
")",
"self",
".",
"execute",
"(",
"self",
".",
"_delete_fk_sql",
"(",
"model",
",",
"fk_name",
")",
")",
"# Has unique been removed?",
"if",
"old_field",
".",
"unique",
"and",
"(",
"not",
"new_field",
".",
"unique",
"or",
"self",
".",
"_field_became_primary_key",
"(",
"old_field",
",",
"new_field",
")",
")",
":",
"# Find the unique constraint for this field",
"meta_constraint_names",
"=",
"{",
"constraint",
".",
"name",
"for",
"constraint",
"in",
"model",
".",
"_meta",
".",
"constraints",
"}",
"constraint_names",
"=",
"self",
".",
"_constraint_names",
"(",
"model",
",",
"[",
"old_field",
".",
"column",
"]",
",",
"unique",
"=",
"True",
",",
"primary_key",
"=",
"False",
",",
"exclude",
"=",
"meta_constraint_names",
",",
")",
"if",
"strict",
"and",
"len",
"(",
"constraint_names",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"Found wrong number (%s) of unique constraints for %s.%s\"",
"%",
"(",
"len",
"(",
"constraint_names",
")",
",",
"model",
".",
"_meta",
".",
"db_table",
",",
"old_field",
".",
"column",
",",
")",
")",
"for",
"constraint_name",
"in",
"constraint_names",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_delete_unique_sql",
"(",
"model",
",",
"constraint_name",
")",
")",
"# Drop incoming FK constraints if the field is a primary key or unique,",
"# which might be a to_field target, and things are going to change.",
"drop_foreign_keys",
"=",
"(",
"self",
".",
"connection",
".",
"features",
".",
"supports_foreign_keys",
"and",
"(",
"(",
"old_field",
".",
"primary_key",
"and",
"new_field",
".",
"primary_key",
")",
"or",
"(",
"old_field",
".",
"unique",
"and",
"new_field",
".",
"unique",
")",
")",
"and",
"old_type",
"!=",
"new_type",
")",
"if",
"drop_foreign_keys",
":",
"# '_meta.related_field' also contains M2M reverse fields, these",
"# will be filtered out",
"for",
"_old_rel",
",",
"new_rel",
"in",
"_related_non_m2m_objects",
"(",
"old_field",
",",
"new_field",
")",
":",
"rel_fk_names",
"=",
"self",
".",
"_constraint_names",
"(",
"new_rel",
".",
"related_model",
",",
"[",
"new_rel",
".",
"field",
".",
"column",
"]",
",",
"foreign_key",
"=",
"True",
")",
"for",
"fk_name",
"in",
"rel_fk_names",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_delete_fk_sql",
"(",
"new_rel",
".",
"related_model",
",",
"fk_name",
")",
")",
"# Removed an index? (no strict check, as multiple indexes are possible)",
"# Remove indexes if db_index switched to False or a unique constraint",
"# will now be used in lieu of an index. The following lines from the",
"# truth table show all True cases; the rest are False:",
"#",
"# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique",
"# ------------------------------------------------------------------------------",
"# True | False | False | False",
"# True | False | False | True",
"# True | False | True | True",
"if",
"old_field",
".",
"db_index",
"and",
"not",
"old_field",
".",
"unique",
"and",
"(",
"not",
"new_field",
".",
"db_index",
"or",
"new_field",
".",
"unique",
")",
":",
"# Find the index for this field",
"meta_index_names",
"=",
"{",
"index",
".",
"name",
"for",
"index",
"in",
"model",
".",
"_meta",
".",
"indexes",
"}",
"# Retrieve only BTREE indexes since this is what's created with",
"# db_index=True.",
"index_names",
"=",
"self",
".",
"_constraint_names",
"(",
"model",
",",
"[",
"old_field",
".",
"column",
"]",
",",
"index",
"=",
"True",
",",
"type_",
"=",
"Index",
".",
"suffix",
",",
"exclude",
"=",
"meta_index_names",
",",
")",
"for",
"index_name",
"in",
"index_names",
":",
"# The only way to check if an index was created with",
"# db_index=True or with Index(['field'], name='foo')",
"# is to look at its name (refs #28053).",
"self",
".",
"execute",
"(",
"self",
".",
"_delete_index_sql",
"(",
"model",
",",
"index_name",
")",
")",
"# Change check constraints?",
"if",
"old_db_params",
"[",
"'check'",
"]",
"!=",
"new_db_params",
"[",
"'check'",
"]",
"and",
"old_db_params",
"[",
"'check'",
"]",
":",
"meta_constraint_names",
"=",
"{",
"constraint",
".",
"name",
"for",
"constraint",
"in",
"model",
".",
"_meta",
".",
"constraints",
"}",
"constraint_names",
"=",
"self",
".",
"_constraint_names",
"(",
"model",
",",
"[",
"old_field",
".",
"column",
"]",
",",
"check",
"=",
"True",
",",
"exclude",
"=",
"meta_constraint_names",
",",
")",
"if",
"strict",
"and",
"len",
"(",
"constraint_names",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"Found wrong number (%s) of check constraints for %s.%s\"",
"%",
"(",
"len",
"(",
"constraint_names",
")",
",",
"model",
".",
"_meta",
".",
"db_table",
",",
"old_field",
".",
"column",
",",
")",
")",
"for",
"constraint_name",
"in",
"constraint_names",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_delete_check_sql",
"(",
"model",
",",
"constraint_name",
")",
")",
"# Have they renamed the column?",
"if",
"old_field",
".",
"column",
"!=",
"new_field",
".",
"column",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_rename_field_sql",
"(",
"model",
".",
"_meta",
".",
"db_table",
",",
"old_field",
",",
"new_field",
",",
"new_type",
")",
")",
"# Rename all references to the renamed column.",
"for",
"sql",
"in",
"self",
".",
"deferred_sql",
":",
"if",
"isinstance",
"(",
"sql",
",",
"Statement",
")",
":",
"sql",
".",
"rename_column_references",
"(",
"model",
".",
"_meta",
".",
"db_table",
",",
"old_field",
".",
"column",
",",
"new_field",
".",
"column",
")",
"# Next, start accumulating actions to do",
"actions",
"=",
"[",
"]",
"null_actions",
"=",
"[",
"]",
"post_actions",
"=",
"[",
"]",
"# Collation change?",
"old_collation",
"=",
"getattr",
"(",
"old_field",
",",
"'db_collation'",
",",
"None",
")",
"new_collation",
"=",
"getattr",
"(",
"new_field",
",",
"'db_collation'",
",",
"None",
")",
"if",
"old_collation",
"!=",
"new_collation",
":",
"# Collation change handles also a type change.",
"fragment",
"=",
"self",
".",
"_alter_column_collation_sql",
"(",
"model",
",",
"new_field",
",",
"new_type",
",",
"new_collation",
")",
"actions",
".",
"append",
"(",
"fragment",
")",
"# Type change?",
"elif",
"old_type",
"!=",
"new_type",
":",
"fragment",
",",
"other_actions",
"=",
"self",
".",
"_alter_column_type_sql",
"(",
"model",
",",
"old_field",
",",
"new_field",
",",
"new_type",
")",
"actions",
".",
"append",
"(",
"fragment",
")",
"post_actions",
".",
"extend",
"(",
"other_actions",
")",
"# When changing a column NULL constraint to NOT NULL with a given",
"# default value, we need to perform 4 steps:",
"# 1. Add a default for new incoming writes",
"# 2. Update existing NULL rows with new default",
"# 3. Replace NULL constraint with NOT NULL",
"# 4. Drop the default again.",
"# Default change?",
"needs_database_default",
"=",
"False",
"if",
"old_field",
".",
"null",
"and",
"not",
"new_field",
".",
"null",
":",
"old_default",
"=",
"self",
".",
"effective_default",
"(",
"old_field",
")",
"new_default",
"=",
"self",
".",
"effective_default",
"(",
"new_field",
")",
"if",
"(",
"not",
"self",
".",
"skip_default_on_alter",
"(",
"new_field",
")",
"and",
"old_default",
"!=",
"new_default",
"and",
"new_default",
"is",
"not",
"None",
")",
":",
"needs_database_default",
"=",
"True",
"actions",
".",
"append",
"(",
"self",
".",
"_alter_column_default_sql",
"(",
"model",
",",
"old_field",
",",
"new_field",
")",
")",
"# Nullability change?",
"if",
"old_field",
".",
"null",
"!=",
"new_field",
".",
"null",
":",
"fragment",
"=",
"self",
".",
"_alter_column_null_sql",
"(",
"model",
",",
"old_field",
",",
"new_field",
")",
"if",
"fragment",
":",
"null_actions",
".",
"append",
"(",
"fragment",
")",
"# Only if we have a default and there is a change from NULL to NOT NULL",
"four_way_default_alteration",
"=",
"(",
"new_field",
".",
"has_default",
"(",
")",
"and",
"(",
"old_field",
".",
"null",
"and",
"not",
"new_field",
".",
"null",
")",
")",
"if",
"actions",
"or",
"null_actions",
":",
"if",
"not",
"four_way_default_alteration",
":",
"# If we don't have to do a 4-way default alteration we can",
"# directly run a (NOT) NULL alteration",
"actions",
"=",
"actions",
"+",
"null_actions",
"# Combine actions together if we can (e.g. postgres)",
"if",
"self",
".",
"connection",
".",
"features",
".",
"supports_combined_alters",
"and",
"actions",
":",
"sql",
",",
"params",
"=",
"tuple",
"(",
"zip",
"(",
"*",
"actions",
")",
")",
"actions",
"=",
"[",
"(",
"\", \"",
".",
"join",
"(",
"sql",
")",
",",
"sum",
"(",
"params",
",",
"[",
"]",
")",
")",
"]",
"# Apply those actions",
"for",
"sql",
",",
"params",
"in",
"actions",
":",
"self",
".",
"execute",
"(",
"self",
".",
"sql_alter_column",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"changes\"",
":",
"sql",
",",
"}",
",",
"params",
",",
")",
"if",
"four_way_default_alteration",
":",
"# Update existing rows with default value",
"self",
".",
"execute",
"(",
"self",
".",
"sql_update_with_default",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"column\"",
":",
"self",
".",
"quote_name",
"(",
"new_field",
".",
"column",
")",
",",
"\"default\"",
":",
"\"%s\"",
",",
"}",
",",
"[",
"new_default",
"]",
",",
")",
"# Since we didn't run a NOT NULL change before we need to do it",
"# now",
"for",
"sql",
",",
"params",
"in",
"null_actions",
":",
"self",
".",
"execute",
"(",
"self",
".",
"sql_alter_column",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"changes\"",
":",
"sql",
",",
"}",
",",
"params",
",",
")",
"if",
"post_actions",
":",
"for",
"sql",
",",
"params",
"in",
"post_actions",
":",
"self",
".",
"execute",
"(",
"sql",
",",
"params",
")",
"# If primary_key changed to False, delete the primary key constraint.",
"if",
"old_field",
".",
"primary_key",
"and",
"not",
"new_field",
".",
"primary_key",
":",
"self",
".",
"_delete_primary_key",
"(",
"model",
",",
"strict",
")",
"# Added a unique?",
"if",
"self",
".",
"_unique_should_be_added",
"(",
"old_field",
",",
"new_field",
")",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_create_unique_sql",
"(",
"model",
",",
"[",
"new_field",
".",
"column",
"]",
")",
")",
"# Added an index? Add an index if db_index switched to True or a unique",
"# constraint will no longer be used in lieu of an index. The following",
"# lines from the truth table show all True cases; the rest are False:",
"#",
"# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique",
"# ------------------------------------------------------------------------------",
"# False | False | True | False",
"# False | True | True | False",
"# True | True | True | False",
"if",
"(",
"not",
"old_field",
".",
"db_index",
"or",
"old_field",
".",
"unique",
")",
"and",
"new_field",
".",
"db_index",
"and",
"not",
"new_field",
".",
"unique",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_create_index_sql",
"(",
"model",
",",
"fields",
"=",
"[",
"new_field",
"]",
")",
")",
"# Type alteration on primary key? Then we need to alter the column",
"# referring to us.",
"rels_to_update",
"=",
"[",
"]",
"if",
"drop_foreign_keys",
":",
"rels_to_update",
".",
"extend",
"(",
"_related_non_m2m_objects",
"(",
"old_field",
",",
"new_field",
")",
")",
"# Changed to become primary key?",
"if",
"self",
".",
"_field_became_primary_key",
"(",
"old_field",
",",
"new_field",
")",
":",
"# Make the new one",
"self",
".",
"execute",
"(",
"self",
".",
"_create_primary_key_sql",
"(",
"model",
",",
"new_field",
")",
")",
"# Update all referencing columns",
"rels_to_update",
".",
"extend",
"(",
"_related_non_m2m_objects",
"(",
"old_field",
",",
"new_field",
")",
")",
"# Handle our type alters on the other end of rels from the PK stuff above",
"for",
"old_rel",
",",
"new_rel",
"in",
"rels_to_update",
":",
"rel_db_params",
"=",
"new_rel",
".",
"field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"rel_type",
"=",
"rel_db_params",
"[",
"'type'",
"]",
"fragment",
",",
"other_actions",
"=",
"self",
".",
"_alter_column_type_sql",
"(",
"new_rel",
".",
"related_model",
",",
"old_rel",
".",
"field",
",",
"new_rel",
".",
"field",
",",
"rel_type",
")",
"self",
".",
"execute",
"(",
"self",
".",
"sql_alter_column",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"new_rel",
".",
"related_model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"changes\"",
":",
"fragment",
"[",
"0",
"]",
",",
"}",
",",
"fragment",
"[",
"1",
"]",
",",
")",
"for",
"sql",
",",
"params",
"in",
"other_actions",
":",
"self",
".",
"execute",
"(",
"sql",
",",
"params",
")",
"# Does it have a foreign key?",
"if",
"(",
"self",
".",
"connection",
".",
"features",
".",
"supports_foreign_keys",
"and",
"new_field",
".",
"remote_field",
"and",
"(",
"fks_dropped",
"or",
"not",
"old_field",
".",
"remote_field",
"or",
"not",
"old_field",
".",
"db_constraint",
")",
"and",
"new_field",
".",
"db_constraint",
")",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_create_fk_sql",
"(",
"model",
",",
"new_field",
",",
"\"_fk_%(to_table)s_%(to_column)s\"",
")",
")",
"# Rebuild FKs that pointed to us if we previously had to drop them",
"if",
"drop_foreign_keys",
":",
"for",
"rel",
"in",
"new_field",
".",
"model",
".",
"_meta",
".",
"related_objects",
":",
"if",
"_is_relevant_relation",
"(",
"rel",
",",
"new_field",
")",
"and",
"rel",
".",
"field",
".",
"db_constraint",
":",
"self",
".",
"execute",
"(",
"self",
".",
"_create_fk_sql",
"(",
"rel",
".",
"related_model",
",",
"rel",
".",
"field",
",",
"\"_fk\"",
")",
")",
"# Does it have check constraints we need to add?",
"if",
"old_db_params",
"[",
"'check'",
"]",
"!=",
"new_db_params",
"[",
"'check'",
"]",
"and",
"new_db_params",
"[",
"'check'",
"]",
":",
"constraint_name",
"=",
"self",
".",
"_create_index_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
",",
"[",
"new_field",
".",
"column",
"]",
",",
"suffix",
"=",
"'_check'",
")",
"self",
".",
"execute",
"(",
"self",
".",
"_create_check_sql",
"(",
"model",
",",
"constraint_name",
",",
"new_db_params",
"[",
"'check'",
"]",
")",
")",
"# Drop the default if we need to",
"# (Django usually does not use in-database defaults)",
"if",
"needs_database_default",
":",
"changes_sql",
",",
"params",
"=",
"self",
".",
"_alter_column_default_sql",
"(",
"model",
",",
"old_field",
",",
"new_field",
",",
"drop",
"=",
"True",
")",
"sql",
"=",
"self",
".",
"sql_alter_column",
"%",
"{",
"\"table\"",
":",
"self",
".",
"quote_name",
"(",
"model",
".",
"_meta",
".",
"db_table",
")",
",",
"\"changes\"",
":",
"changes_sql",
",",
"}",
"self",
".",
"execute",
"(",
"sql",
",",
"params",
")",
"# Reset connection if required",
"if",
"self",
".",
"connection",
".",
"features",
".",
"connection_persists_old_columns",
":",
"self",
".",
"connection",
".",
"close",
"(",
")"
] | [
610,
4
] | [
863,
35
] | python | en | ['en', 'en', 'en'] | True |
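The four-step NULL to NOT NULL path documented in the comments (add a default for new writes, backfill NULL rows, switch to NOT NULL, drop the default) is reached through the public alter_field. A sketch, assuming a hypothetical nullable Book.rating column:

    old_field = Book._meta.get_field('rating')   # assumed null=True
    new_field = models.IntegerField(default=0)   # null=False by default
    new_field.set_attributes_from_name('rating')
    with connection.schema_editor() as schema_editor:
        schema_editor.alter_field(Book, old_field, new_field)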
BaseDatabaseSchemaEditor._alter_column_null_sql | (self, model, old_field, new_field) |
Hook to specialize column null alteration.
Return a (sql, params) fragment to set a column to null or non-null
as required by new_field, or None if no changes are required.
|
Hook to specialize column null alteration. | def _alter_column_null_sql(self, model, old_field, new_field):
"""
Hook to specialize column null alteration.
Return a (sql, params) fragment to set a column to null or non-null
as required by new_field, or None if no changes are required.
"""
if (self.connection.features.interprets_empty_strings_as_nulls and
new_field.get_internal_type() in ("CharField", "TextField")):
# The field is nullable in the database anyway, leave it alone.
return
else:
new_db_params = new_field.db_parameters(connection=self.connection)
sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null
return (
sql % {
'column': self.quote_name(new_field.column),
'type': new_db_params['type'],
},
[],
) | [
"def",
"_alter_column_null_sql",
"(",
"self",
",",
"model",
",",
"old_field",
",",
"new_field",
")",
":",
"if",
"(",
"self",
".",
"connection",
".",
"features",
".",
"interprets_empty_strings_as_nulls",
"and",
"new_field",
".",
"get_internal_type",
"(",
")",
"in",
"(",
"\"CharField\"",
",",
"\"TextField\"",
")",
")",
":",
"# The field is nullable in the database anyway, leave it alone.",
"return",
"else",
":",
"new_db_params",
"=",
"new_field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"sql",
"=",
"self",
".",
"sql_alter_column_null",
"if",
"new_field",
".",
"null",
"else",
"self",
".",
"sql_alter_column_not_null",
"return",
"(",
"sql",
"%",
"{",
"'column'",
":",
"self",
".",
"quote_name",
"(",
"new_field",
".",
"column",
")",
",",
"'type'",
":",
"new_db_params",
"[",
"'type'",
"]",
",",
"}",
",",
"[",
"]",
",",
")"
] | [
865,
4
] | [
885,
13
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor._alter_column_default_sql | (self, model, old_field, new_field, drop=False) |
Hook to specialize column default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
|
Hook to specialize column default alteration. | def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
"""
Hook to specialize column default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
"""
new_default = self.effective_default(new_field)
default = self._column_default_sql(new_field)
params = [new_default]
if drop:
params = []
elif self.connection.features.requires_literal_defaults:
# Some databases (Oracle) can't take defaults as a parameter
# If this is the case, the SchemaEditor for that database should
# implement prepare_default().
default = self.prepare_default(new_default)
params = []
new_db_params = new_field.db_parameters(connection=self.connection)
if drop:
if new_field.null:
sql = self.sql_alter_column_no_default_null
else:
sql = self.sql_alter_column_no_default
else:
sql = self.sql_alter_column_default
return (
sql % {
'column': self.quote_name(new_field.column),
'type': new_db_params['type'],
'default': default,
},
params,
) | [
"def",
"_alter_column_default_sql",
"(",
"self",
",",
"model",
",",
"old_field",
",",
"new_field",
",",
"drop",
"=",
"False",
")",
":",
"new_default",
"=",
"self",
".",
"effective_default",
"(",
"new_field",
")",
"default",
"=",
"self",
".",
"_column_default_sql",
"(",
"new_field",
")",
"params",
"=",
"[",
"new_default",
"]",
"if",
"drop",
":",
"params",
"=",
"[",
"]",
"elif",
"self",
".",
"connection",
".",
"features",
".",
"requires_literal_defaults",
":",
"# Some databases (Oracle) can't take defaults as a parameter",
"# If this is the case, the SchemaEditor for that database should",
"# implement prepare_default().",
"default",
"=",
"self",
".",
"prepare_default",
"(",
"new_default",
")",
"params",
"=",
"[",
"]",
"new_db_params",
"=",
"new_field",
".",
"db_parameters",
"(",
"connection",
"=",
"self",
".",
"connection",
")",
"if",
"drop",
":",
"if",
"new_field",
".",
"null",
":",
"sql",
"=",
"self",
".",
"sql_alter_column_no_default_null",
"else",
":",
"sql",
"=",
"self",
".",
"sql_alter_column_no_default",
"else",
":",
"sql",
"=",
"self",
".",
"sql_alter_column_default",
"return",
"(",
"sql",
"%",
"{",
"'column'",
":",
"self",
".",
"quote_name",
"(",
"new_field",
".",
"column",
")",
",",
"'type'",
":",
"new_db_params",
"[",
"'type'",
"]",
",",
"'default'",
":",
"default",
",",
"}",
",",
"params",
",",
")"
] | [
887,
4
] | [
922,
9
] | python | en | ['en', 'error', 'th'] | False |
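Backends with requires_literal_defaults (e.g. Oracle) must implement prepare_default() so the default is inlined rather than passed as a query parameter. A hypothetical minimal override, not any real backend's code:

    from django.db.backends.base.schema import BaseDatabaseSchemaEditor

    class LiteralDefaultSchemaEditor(BaseDatabaseSchemaEditor):
        def prepare_default(self, value):
            # Hypothetical: real backends do type-aware literal formatting;
            # repr() only covers simple int/str demo values.
            return repr(value)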
BaseDatabaseSchemaEditor._alter_column_type_sql | (self, model, old_field, new_field, new_type) |
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Return a two-tuple of: an SQL fragment of (sql, params) to insert into
an ALTER TABLE statement and a list of extra (sql, params) tuples to
run once the field is altered.
|
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields). | def _alter_column_type_sql(self, model, old_field, new_field, new_type):
"""
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Return a two-tuple of: an SQL fragment of (sql, params) to insert into
an ALTER TABLE statement and a list of extra (sql, params) tuples to
run once the field is altered.
"""
return (
(
self.sql_alter_column_type % {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
),
[],
) | [
"def",
"_alter_column_type_sql",
"(",
"self",
",",
"model",
",",
"old_field",
",",
"new_field",
",",
"new_type",
")",
":",
"return",
"(",
"(",
"self",
".",
"sql_alter_column_type",
"%",
"{",
"\"column\"",
":",
"self",
".",
"quote_name",
"(",
"new_field",
".",
"column",
")",
",",
"\"type\"",
":",
"new_type",
",",
"}",
",",
"[",
"]",
",",
")",
",",
"[",
"]",
",",
")"
] | [
924,
4
] | [
943,
9
] | python | en | ['en', 'error', 'th'] | False |
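Backends override this hook, or just the sql_alter_column_type template, when alteration syntax differs from creation syntax. A hypothetical subclass adding a PostgreSQL-style USING cast (the real PostgreSQL editor is more involved):

    from django.db.backends.base.schema import BaseDatabaseSchemaEditor

    class CastingSchemaEditor(BaseDatabaseSchemaEditor):
        sql_alter_column_type = (
            'ALTER COLUMN %(column)s TYPE %(type)s USING %(column)s::%(type)s'
        )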
BaseDatabaseSchemaEditor._alter_many_to_many | (self, model, old_field, new_field, strict) | Alter M2Ms to repoint their to= endpoints. | Alter M2Ms to repoint their to= endpoints. | def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
# Rename the through table
if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table:
self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# We need the field that points to the target model, so we can tell alter_field to change it -
# this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)
old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()),
)
self.alter_field(
new_field.remote_field.through,
# for self-referential models we need to alter field from the other end too
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
) | [
"def",
"_alter_many_to_many",
"(",
"self",
",",
"model",
",",
"old_field",
",",
"new_field",
",",
"strict",
")",
":",
"# Rename the through table",
"if",
"old_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"db_table",
"!=",
"new_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"db_table",
":",
"self",
".",
"alter_db_table",
"(",
"old_field",
".",
"remote_field",
".",
"through",
",",
"old_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"db_table",
",",
"new_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"db_table",
")",
"# Repoint the FK to the other side",
"self",
".",
"alter_field",
"(",
"new_field",
".",
"remote_field",
".",
"through",
",",
"# We need the field that points to the target model, so we can tell alter_field to change it -",
"# this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)",
"old_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"get_field",
"(",
"old_field",
".",
"m2m_reverse_field_name",
"(",
")",
")",
",",
"new_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"get_field",
"(",
"new_field",
".",
"m2m_reverse_field_name",
"(",
")",
")",
",",
")",
"self",
".",
"alter_field",
"(",
"new_field",
".",
"remote_field",
".",
"through",
",",
"# for self-referential models we need to alter field from the other end too",
"old_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"get_field",
"(",
"old_field",
".",
"m2m_field_name",
"(",
")",
")",
",",
"new_field",
".",
"remote_field",
".",
"through",
".",
"_meta",
".",
"get_field",
"(",
"new_field",
".",
"m2m_field_name",
"(",
")",
")",
",",
")"
] | [
955,
4
] | [
974,
9
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseSchemaEditor._create_index_name | (self, table_name, column_names, suffix="") |
Generate a unique name for an index/unique constraint.
The name is divided into 3 parts: the table name, the column names,
and a unique digest and suffix.
|
Generate a unique name for an index/unique constraint. | def _create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for an index/unique constraint.
The name is divided into 3 parts: the table name, the column names,
and a unique digest and suffix.
"""
_, table_name = split_identifier(table_name)
hash_suffix_part = '%s%s' % (names_digest(table_name, *column_names, length=8), suffix)
max_length = self.connection.ops.max_name_length() or 200
# If everything fits into max_length, use that name.
index_name = '%s_%s_%s' % (table_name, '_'.join(column_names), hash_suffix_part)
if len(index_name) <= max_length:
return index_name
# Shorten a long suffix.
if len(hash_suffix_part) > max_length / 3:
hash_suffix_part = hash_suffix_part[:max_length // 3]
other_length = (max_length - len(hash_suffix_part)) // 2 - 1
index_name = '%s_%s_%s' % (
table_name[:other_length],
'_'.join(column_names)[:other_length],
hash_suffix_part,
)
# Prepend D if needed to prevent the name from starting with an
# underscore or a number (not permitted on Oracle).
if index_name[0] == "_" or index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name | [
"def",
"_create_index_name",
"(",
"self",
",",
"table_name",
",",
"column_names",
",",
"suffix",
"=",
"\"\"",
")",
":",
"_",
",",
"table_name",
"=",
"split_identifier",
"(",
"table_name",
")",
"hash_suffix_part",
"=",
"'%s%s'",
"%",
"(",
"names_digest",
"(",
"table_name",
",",
"*",
"column_names",
",",
"length",
"=",
"8",
")",
",",
"suffix",
")",
"max_length",
"=",
"self",
".",
"connection",
".",
"ops",
".",
"max_name_length",
"(",
")",
"or",
"200",
"# If everything fits into max_length, use that name.",
"index_name",
"=",
"'%s_%s_%s'",
"%",
"(",
"table_name",
",",
"'_'",
".",
"join",
"(",
"column_names",
")",
",",
"hash_suffix_part",
")",
"if",
"len",
"(",
"index_name",
")",
"<=",
"max_length",
":",
"return",
"index_name",
"# Shorten a long suffix.",
"if",
"len",
"(",
"hash_suffix_part",
")",
">",
"max_length",
"/",
"3",
":",
"hash_suffix_part",
"=",
"hash_suffix_part",
"[",
":",
"max_length",
"//",
"3",
"]",
"other_length",
"=",
"(",
"max_length",
"-",
"len",
"(",
"hash_suffix_part",
")",
")",
"//",
"2",
"-",
"1",
"index_name",
"=",
"'%s_%s_%s'",
"%",
"(",
"table_name",
"[",
":",
"other_length",
"]",
",",
"'_'",
".",
"join",
"(",
"column_names",
")",
"[",
":",
"other_length",
"]",
",",
"hash_suffix_part",
",",
")",
"# Prepend D if needed to prevent the name from starting with an",
"# underscore or a number (not permitted on Oracle).",
"if",
"index_name",
"[",
"0",
"]",
"==",
"\"_\"",
"or",
"index_name",
"[",
"0",
"]",
".",
"isdigit",
"(",
")",
":",
"index_name",
"=",
"\"D%s\"",
"%",
"index_name",
"[",
":",
"-",
"1",
"]",
"return",
"index_name"
] | [
976,
4
] | [
1003,
25
] | python | en | ['en', 'error', 'th'] | False |
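A standalone sketch of the shortening scheme above. hashlib.md5 stands in for Django's names_digest helper (an assumption -- the real helper feeds each argument to the digest separately and handles FIPS builds), and max_length is shrunk to 30 so the truncation branch actually runs.

import hashlib

def demo_index_name(table_name, column_names, suffix='', max_length=30):
    digest = hashlib.md5('_'.join([table_name, *column_names]).encode()).hexdigest()[:8]
    hash_suffix_part = digest + suffix
    # Use the full name when it fits into the budget.
    index_name = '%s_%s_%s' % (table_name, '_'.join(column_names), hash_suffix_part)
    if len(index_name) <= max_length:
        return index_name
    # Otherwise cap the hash+suffix at a third of the budget and split the
    # remainder between the table part and the joined column part.
    if len(hash_suffix_part) > max_length / 3:
        hash_suffix_part = hash_suffix_part[:max_length // 3]
    other_length = (max_length - len(hash_suffix_part)) // 2 - 1
    index_name = '%s_%s_%s' % (
        table_name[:other_length],
        '_'.join(column_names)[:other_length],
        hash_suffix_part,
    )
    if index_name[0] == '_' or index_name[0].isdigit():
        index_name = 'D%s' % index_name[:-1]
    return index_name

print(demo_index_name('very_long_table_name', ['first_col', 'second_col'], '_idx'))
# -> a 30-character name like 'very_long_first_col_<hash8>_i'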
BaseDatabaseSchemaEditor._create_index_sql | (self, model, *, fields=None, name=None, suffix='', using='',
db_tablespace=None, col_suffixes=(), sql=None, opclasses=(),
condition=None, include=None, expressions=None) |
Return the SQL statement to create the index for one or several fields
or expressions. `sql` can be specified if the syntax differs from the
standard (GIS indexes, ...).
|
Return the SQL statement to create the index for one or several fields
or expressions. `sql` can be specified if the syntax differs from the
standard (GIS indexes, ...).
| def _create_index_sql(self, model, *, fields=None, name=None, suffix='', using='',
db_tablespace=None, col_suffixes=(), sql=None, opclasses=(),
condition=None, include=None, expressions=None):
"""
Return the SQL statement to create the index for one or several fields
or expressions. `sql` can be specified if the syntax differs from the
standard (GIS indexes, ...).
"""
fields = fields or []
expressions = expressions or []
compiler = Query(model, alias_cols=False).get_compiler(
connection=self.connection,
)
tablespace_sql = self._get_index_tablespace_sql(model, fields, db_tablespace=db_tablespace)
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
table = model._meta.db_table
def create_index_name(*args, **kwargs):
nonlocal name
if name is None:
name = self._create_index_name(*args, **kwargs)
return self.quote_name(name)
return Statement(
sql_create_index,
table=Table(table, self.quote_name),
name=IndexName(table, columns, suffix, create_index_name),
using=using,
columns=(
self._index_columns(table, columns, col_suffixes, opclasses)
if columns
else Expressions(table, expressions, compiler, self.quote_value)
),
extra=tablespace_sql,
condition=self._index_condition_sql(condition),
include=self._index_include_sql(model, include),
) | [
"def",
"_create_index_sql",
"(",
"self",
",",
"model",
",",
"*",
",",
"fields",
"=",
"None",
",",
"name",
"=",
"None",
",",
"suffix",
"=",
"''",
",",
"using",
"=",
"''",
",",
"db_tablespace",
"=",
"None",
",",
"col_suffixes",
"=",
"(",
")",
",",
"sql",
"=",
"None",
",",
"opclasses",
"=",
"(",
")",
",",
"condition",
"=",
"None",
",",
"include",
"=",
"None",
",",
"expressions",
"=",
"None",
")",
":",
"fields",
"=",
"fields",
"or",
"[",
"]",
"expressions",
"=",
"expressions",
"or",
"[",
"]",
"compiler",
"=",
"Query",
"(",
"model",
",",
"alias_cols",
"=",
"False",
")",
".",
"get_compiler",
"(",
"connection",
"=",
"self",
".",
"connection",
",",
")",
"tablespace_sql",
"=",
"self",
".",
"_get_index_tablespace_sql",
"(",
"model",
",",
"fields",
",",
"db_tablespace",
"=",
"db_tablespace",
")",
"columns",
"=",
"[",
"field",
".",
"column",
"for",
"field",
"in",
"fields",
"]",
"sql_create_index",
"=",
"sql",
"or",
"self",
".",
"sql_create_index",
"table",
"=",
"model",
".",
"_meta",
".",
"db_table",
"def",
"create_index_name",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"nonlocal",
"name",
"if",
"name",
"is",
"None",
":",
"name",
"=",
"self",
".",
"_create_index_name",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"self",
".",
"quote_name",
"(",
"name",
")",
"return",
"Statement",
"(",
"sql_create_index",
",",
"table",
"=",
"Table",
"(",
"table",
",",
"self",
".",
"quote_name",
")",
",",
"name",
"=",
"IndexName",
"(",
"table",
",",
"columns",
",",
"suffix",
",",
"create_index_name",
")",
",",
"using",
"=",
"using",
",",
"columns",
"=",
"(",
"self",
".",
"_index_columns",
"(",
"table",
",",
"columns",
",",
"col_suffixes",
",",
"opclasses",
")",
"if",
"columns",
"else",
"Expressions",
"(",
"table",
",",
"expressions",
",",
"compiler",
",",
"self",
".",
"quote_value",
")",
")",
",",
"extra",
"=",
"tablespace_sql",
",",
"condition",
"=",
"self",
".",
"_index_condition_sql",
"(",
"condition",
")",
",",
"include",
"=",
"self",
".",
"_index_include_sql",
"(",
"model",
",",
"include",
")",
",",
")"
] | [
1028,
4
] | [
1065,
9
] | python | en | ['en', 'error', 'th'] | False |
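A hedged sketch of rendering a single statement with the method above. _create_index_sql is a private API, MyModel and its two fields are hypothetical, and the lazy Statement object only becomes SQL when converted to a string.

from django.db import connection

with connection.schema_editor() as editor:
    statement = editor._create_index_sql(
        MyModel,  # hypothetical model defining these two fields
        fields=[
            MyModel._meta.get_field('last_name'),
            MyModel._meta.get_field('first_name'),
        ],
        suffix='_idx',
    )
# Something like: CREATE INDEX "app_mymodel_last_name_first_name_<hash>_idx" ON ...
print(str(statement))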
BaseDatabaseSchemaEditor._model_indexes_sql | (self, model) |
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
|
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
| def _model_indexes_sql(self, model):
"""
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
output.extend(self._field_indexes_sql(model, field))
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields=fields, suffix='_idx'))
for index in model._meta.indexes:
if (
not index.contains_expressions or
self.connection.features.supports_expression_indexes
):
output.append(index.create_sql(model, self))
return output | [
"def",
"_model_indexes_sql",
"(",
"self",
",",
"model",
")",
":",
"if",
"not",
"model",
".",
"_meta",
".",
"managed",
"or",
"model",
".",
"_meta",
".",
"proxy",
"or",
"model",
".",
"_meta",
".",
"swapped",
":",
"return",
"[",
"]",
"output",
"=",
"[",
"]",
"for",
"field",
"in",
"model",
".",
"_meta",
".",
"local_fields",
":",
"output",
".",
"extend",
"(",
"self",
".",
"_field_indexes_sql",
"(",
"model",
",",
"field",
")",
")",
"for",
"field_names",
"in",
"model",
".",
"_meta",
".",
"index_together",
":",
"fields",
"=",
"[",
"model",
".",
"_meta",
".",
"get_field",
"(",
"field",
")",
"for",
"field",
"in",
"field_names",
"]",
"output",
".",
"append",
"(",
"self",
".",
"_create_index_sql",
"(",
"model",
",",
"fields",
"=",
"fields",
",",
"suffix",
"=",
"'_idx'",
")",
")",
"for",
"index",
"in",
"model",
".",
"_meta",
".",
"indexes",
":",
"if",
"(",
"not",
"index",
".",
"contains_expressions",
"or",
"self",
".",
"connection",
".",
"features",
".",
"supports_expression_indexes",
")",
":",
"output",
".",
"append",
"(",
"index",
".",
"create_sql",
"(",
"model",
",",
"self",
")",
")",
"return",
"output"
] | [
1077,
4
] | [
1098,
21
] | python | en | ['en', 'error', 'th'] | False |
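A hedged sketch of inspecting everything the method above collects for one model: a statement per indexed field, per index_together group, and per supported Meta.indexes entry. Private API; MyModel is hypothetical.

from django.db import connection

with connection.schema_editor() as editor:
    for statement in editor._model_indexes_sql(MyModel):
        print(statement)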
BaseDatabaseSchemaEditor._field_indexes_sql | (self, model, field) |
Return a list of all index SQL statements for the specified field.
|
Return a list of all index SQL statements for the specified field.
| def _field_indexes_sql(self, model, field):
"""
Return a list of all index SQL statements for the specified field.
"""
output = []
if self._field_should_be_indexed(model, field):
output.append(self._create_index_sql(model, fields=[field]))
return output | [
"def",
"_field_indexes_sql",
"(",
"self",
",",
"model",
",",
"field",
")",
":",
"output",
"=",
"[",
"]",
"if",
"self",
".",
"_field_should_be_indexed",
"(",
"model",
",",
"field",
")",
":",
"output",
".",
"append",
"(",
"self",
".",
"_create_index_sql",
"(",
"model",
",",
"fields",
"=",
"[",
"field",
"]",
")",
")",
"return",
"output"
] | [
1100,
4
] | [
1107,
21
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseSchemaEditor._constraint_names | (self, model, column_names=None, unique=None,
primary_key=None, index=None, foreign_key=None,
check=None, type_=None, exclude=None) | Return all constraint names matching the columns and conditions. | Return all constraint names matching the columns and conditions. | def _constraint_names(self, model, column_names=None, unique=None,
primary_key=None, index=None, foreign_key=None,
check=None, type_=None, exclude=None):
"""Return all constraint names matching the columns and conditions."""
if column_names is not None:
column_names = [
self.connection.introspection.identifier_converter(name)
for name in column_names
]
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict['columns']:
if unique is not None and infodict['unique'] != unique:
continue
if primary_key is not None and infodict['primary_key'] != primary_key:
continue
if index is not None and infodict['index'] != index:
continue
if check is not None and infodict['check'] != check:
continue
if foreign_key is not None and not infodict['foreign_key']:
continue
if type_ is not None and infodict['type'] != type_:
continue
if not exclude or name not in exclude:
result.append(name)
return result | [
"def",
"_constraint_names",
"(",
"self",
",",
"model",
",",
"column_names",
"=",
"None",
",",
"unique",
"=",
"None",
",",
"primary_key",
"=",
"None",
",",
"index",
"=",
"None",
",",
"foreign_key",
"=",
"None",
",",
"check",
"=",
"None",
",",
"type_",
"=",
"None",
",",
"exclude",
"=",
"None",
")",
":",
"if",
"column_names",
"is",
"not",
"None",
":",
"column_names",
"=",
"[",
"self",
".",
"connection",
".",
"introspection",
".",
"identifier_converter",
"(",
"name",
")",
"for",
"name",
"in",
"column_names",
"]",
"with",
"self",
".",
"connection",
".",
"cursor",
"(",
")",
"as",
"cursor",
":",
"constraints",
"=",
"self",
".",
"connection",
".",
"introspection",
".",
"get_constraints",
"(",
"cursor",
",",
"model",
".",
"_meta",
".",
"db_table",
")",
"result",
"=",
"[",
"]",
"for",
"name",
",",
"infodict",
"in",
"constraints",
".",
"items",
"(",
")",
":",
"if",
"column_names",
"is",
"None",
"or",
"column_names",
"==",
"infodict",
"[",
"'columns'",
"]",
":",
"if",
"unique",
"is",
"not",
"None",
"and",
"infodict",
"[",
"'unique'",
"]",
"!=",
"unique",
":",
"continue",
"if",
"primary_key",
"is",
"not",
"None",
"and",
"infodict",
"[",
"'primary_key'",
"]",
"!=",
"primary_key",
":",
"continue",
"if",
"index",
"is",
"not",
"None",
"and",
"infodict",
"[",
"'index'",
"]",
"!=",
"index",
":",
"continue",
"if",
"check",
"is",
"not",
"None",
"and",
"infodict",
"[",
"'check'",
"]",
"!=",
"check",
":",
"continue",
"if",
"foreign_key",
"is",
"not",
"None",
"and",
"not",
"infodict",
"[",
"'foreign_key'",
"]",
":",
"continue",
"if",
"type_",
"is",
"not",
"None",
"and",
"infodict",
"[",
"'type'",
"]",
"!=",
"type_",
":",
"continue",
"if",
"not",
"exclude",
"or",
"name",
"not",
"in",
"exclude",
":",
"result",
".",
"append",
"(",
"name",
")",
"return",
"result"
] | [
1310,
4
] | [
1338,
21
] | python | en | ['en', 'en', 'en'] | True |
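A hedged sketch of the typical use of _constraint_names: recover the database-level name of a constraint before dropping it. Private API; the model and column are hypothetical, and the exact name returned depends on the backend.

from django.db import connection

with connection.schema_editor() as editor:
    names = editor._constraint_names(MyModel, ['email'], unique=True, primary_key=False)
print(names)  # e.g. ['app_mymodel_email_1a2b3c4d_uniq']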
DashboardsServiceClientMeta.get_transport_class | (
cls, label: str = None,
) | Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
| Returns an appropriate transport class. | def get_transport_class(
cls, label: str = None,
) -> Type[DashboardsServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values())) | [
"def",
"get_transport_class",
"(",
"cls",
",",
"label",
":",
"str",
"=",
"None",
",",
")",
"->",
"Type",
"[",
"DashboardsServiceTransport",
"]",
":",
"# If a specific transport is requested, return that one.",
"if",
"label",
":",
"return",
"cls",
".",
"_transport_registry",
"[",
"label",
"]",
"# No transport is requested; return the default (that is, the first one",
"# in the dictionary).",
"return",
"next",
"(",
"iter",
"(",
"cls",
".",
"_transport_registry",
".",
"values",
"(",
")",
")",
")"
] | [
59,
4
] | [
77,
59
] | python | en | ['en', 'lb', 'en'] | True |
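Usage sketch for the metaclass hook above. The registry keys are an assumption based on other GAPIC-generated clients, which register "grpc" first and "grpc_asyncio" second.

# Explicit label:
transport_cls = DashboardsServiceClient.get_transport_class("grpc")
# No label: the first transport registered in the dictionary wins.
default_cls = DashboardsServiceClient.get_transport_class()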
DashboardsServiceClient._get_default_mtls_endpoint | (api_endpoint) | Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
| Converts api endpoint to mTLS endpoint. | def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") | [
"def",
"_get_default_mtls_endpoint",
"(",
"api_endpoint",
")",
":",
"if",
"not",
"api_endpoint",
":",
"return",
"api_endpoint",
"mtls_endpoint_re",
"=",
"re",
".",
"compile",
"(",
"r\"(?P<name>[^.]+)(?P<mtls>\\.mtls)?(?P<sandbox>\\.sandbox)?(?P<googledomain>\\.googleapis\\.com)?\"",
")",
"m",
"=",
"mtls_endpoint_re",
".",
"match",
"(",
"api_endpoint",
")",
"name",
",",
"mtls",
",",
"sandbox",
",",
"googledomain",
"=",
"m",
".",
"groups",
"(",
")",
"if",
"mtls",
"or",
"not",
"googledomain",
":",
"return",
"api_endpoint",
"if",
"sandbox",
":",
"return",
"api_endpoint",
".",
"replace",
"(",
"\"sandbox.googleapis.com\"",
",",
"\"mtls.sandbox.googleapis.com\"",
")",
"return",
"api_endpoint",
".",
"replace",
"(",
"\".googleapis.com\"",
",",
"\".mtls.googleapis.com\"",
")"
] | [
86,
4
] | [
113,
78
] | python | en | ['en', 'en', 'en'] | True |
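Worked examples of the conversion above (a private static helper, so subject to change):

DashboardsServiceClient._get_default_mtls_endpoint("monitoring.googleapis.com")
# -> 'monitoring.mtls.googleapis.com'
DashboardsServiceClient._get_default_mtls_endpoint("monitoring.sandbox.googleapis.com")
# -> 'monitoring.mtls.sandbox.googleapis.com'
DashboardsServiceClient._get_default_mtls_endpoint("monitoring.mtls.googleapis.com")
# -> 'monitoring.mtls.googleapis.com' (already mTLS, returned unchanged)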
DashboardsServiceClient.from_service_account_info | (cls, info: dict, *args, **kwargs) | Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
DashboardsServiceClient: The constructed client.
| Creates an instance of this client using the provided credentials
info. | def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
DashboardsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs) | [
"def",
"from_service_account_info",
"(",
"cls",
",",
"info",
":",
"dict",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"credentials",
"=",
"service_account",
".",
"Credentials",
".",
"from_service_account_info",
"(",
"info",
")",
"kwargs",
"[",
"\"credentials\"",
"]",
"=",
"credentials",
"return",
"cls",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | [
121,
4
] | [
135,
35
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.from_service_account_file | (cls, filename: str, *args, **kwargs) | Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
DashboardsServiceClient: The constructed client.
| Creates an instance of this client using the provided credentials
file. | def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
DashboardsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs) | [
"def",
"from_service_account_file",
"(",
"cls",
",",
"filename",
":",
"str",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"credentials",
"=",
"service_account",
".",
"Credentials",
".",
"from_service_account_file",
"(",
"filename",
")",
"kwargs",
"[",
"\"credentials\"",
"]",
"=",
"credentials",
"return",
"cls",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | [
138,
4
] | [
153,
35
] | python | en | ['en', 'en', 'en'] | True |
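A hedged usage sketch of the two service-account constructors above; the key file name is made up.

client = DashboardsServiceClient.from_service_account_file("service-account.json")

# from_service_account_info takes the already-parsed key instead of a path:
import json
with open("service-account.json") as fh:
    client = DashboardsServiceClient.from_service_account_info(json.load(fh))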
DashboardsServiceClient.transport | (self) | Returns the transport used by the client instance.
Returns:
DashboardsServiceTransport: The transport used by the client
instance.
| Returns the transport used by the client instance. | def transport(self) -> DashboardsServiceTransport:
"""Returns the transport used by the client instance.
Returns:
DashboardsServiceTransport: The transport used by the client
instance.
"""
return self._transport | [
"def",
"transport",
"(",
"self",
")",
"->",
"DashboardsServiceTransport",
":",
"return",
"self",
".",
"_transport"
] | [
158,
4
] | [
165,
30
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.alert_policy_path | (project: str, alert_policy: str,) | Returns a fully-qualified alert_policy string. | Returns a fully-qualified alert_policy string. | def alert_policy_path(project: str, alert_policy: str,) -> str:
"""Returns a fully-qualified alert_policy string."""
return "projects/{project}/alertPolicies/{alert_policy}".format(
project=project, alert_policy=alert_policy,
) | [
"def",
"alert_policy_path",
"(",
"project",
":",
"str",
",",
"alert_policy",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"projects/{project}/alertPolicies/{alert_policy}\"",
".",
"format",
"(",
"project",
"=",
"project",
",",
"alert_policy",
"=",
"alert_policy",
",",
")"
] | [
168,
4
] | [
172,
9
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.parse_alert_policy_path | (path: str) | Parses an alert_policy path into its component segments. | Parses an alert_policy path into its component segments. | def parse_alert_policy_path(path: str) -> Dict[str, str]:
    """Parses an alert_policy path into its component segments."""
m = re.match(
r"^projects/(?P<project>.+?)/alertPolicies/(?P<alert_policy>.+?)$", path
)
return m.groupdict() if m else {} | [
"def",
"parse_alert_policy_path",
"(",
"path",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r\"^projects/(?P<project>.+?)/alertPolicies/(?P<alert_policy>.+?)$\"",
",",
"path",
")",
"return",
"m",
".",
"groupdict",
"(",
")",
"if",
"m",
"else",
"{",
"}"
] | [
175,
4
] | [
180,
41
] | python | en | ['en', 'en', 'en'] | True |
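Round trip of the two alert-policy helpers above; the project and policy IDs are made up.

path = DashboardsServiceClient.alert_policy_path("my-project", "my-policy")
# 'projects/my-project/alertPolicies/my-policy'
DashboardsServiceClient.parse_alert_policy_path(path)
# {'project': 'my-project', 'alert_policy': 'my-policy'}
DashboardsServiceClient.parse_alert_policy_path("not-a-resource-path")
# {} -- no match, so an empty dict comes back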
DashboardsServiceClient.dashboard_path | (project: str, dashboard: str,) | Returns a fully-qualified dashboard string. | Returns a fully-qualified dashboard string. | def dashboard_path(project: str, dashboard: str,) -> str:
"""Returns a fully-qualified dashboard string."""
return "projects/{project}/dashboards/{dashboard}".format(
project=project, dashboard=dashboard,
) | [
"def",
"dashboard_path",
"(",
"project",
":",
"str",
",",
"dashboard",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"projects/{project}/dashboards/{dashboard}\"",
".",
"format",
"(",
"project",
"=",
"project",
",",
"dashboard",
"=",
"dashboard",
",",
")"
] | [
183,
4
] | [
187,
9
] | python | en | ['pt', 'en', 'en'] | True |
DashboardsServiceClient.parse_dashboard_path | (path: str) | Parses a dashboard path into its component segments. | Parses a dashboard path into its component segments. | def parse_dashboard_path(path: str) -> Dict[str, str]:
"""Parses a dashboard path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/dashboards/(?P<dashboard>.+?)$", path)
return m.groupdict() if m else {} | [
"def",
"parse_dashboard_path",
"(",
"path",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r\"^projects/(?P<project>.+?)/dashboards/(?P<dashboard>.+?)$\"",
",",
"path",
")",
"return",
"m",
".",
"groupdict",
"(",
")",
"if",
"m",
"else",
"{",
"}"
] | [
190,
4
] | [
193,
41
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.common_billing_account_path | (billing_account: str,) | Returns a fully-qualified billing_account string. | Returns a fully-qualified billing_account string. | def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
) | [
"def",
"common_billing_account_path",
"(",
"billing_account",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"billingAccounts/{billing_account}\"",
".",
"format",
"(",
"billing_account",
"=",
"billing_account",
",",
")"
] | [
196,
4
] | [
200,
9
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.parse_common_billing_account_path | (path: str) | Parse a billing_account path into its component segments. | Parse a billing_account path into its component segments. | def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {} | [
"def",
"parse_common_billing_account_path",
"(",
"path",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r\"^billingAccounts/(?P<billing_account>.+?)$\"",
",",
"path",
")",
"return",
"m",
".",
"groupdict",
"(",
")",
"if",
"m",
"else",
"{",
"}"
] | [
203,
4
] | [
206,
41
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.common_folder_path | (folder: str,) | Returns a fully-qualified folder string. | Returns a fully-qualified folder string. | def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,) | [
"def",
"common_folder_path",
"(",
"folder",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"folders/{folder}\"",
".",
"format",
"(",
"folder",
"=",
"folder",
",",
")"
] | [
209,
4
] | [
211,
56
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.parse_common_folder_path | (path: str) | Parse a folder path into its component segments. | Parse a folder path into its component segments. | def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {} | [
"def",
"parse_common_folder_path",
"(",
"path",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r\"^folders/(?P<folder>.+?)$\"",
",",
"path",
")",
"return",
"m",
".",
"groupdict",
"(",
")",
"if",
"m",
"else",
"{",
"}"
] | [
214,
4
] | [
217,
41
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.common_organization_path | (organization: str,) | Returns a fully-qualified organization string. | Returns a fully-qualified organization string. | def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,) | [
"def",
"common_organization_path",
"(",
"organization",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"organizations/{organization}\"",
".",
"format",
"(",
"organization",
"=",
"organization",
",",
")"
] | [
220,
4
] | [
222,
80
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.parse_common_organization_path | (path: str) | Parse an organization path into its component segments. | Parse an organization path into its component segments. | def parse_common_organization_path(path: str) -> Dict[str, str]:
    """Parse an organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {} | [
"def",
"parse_common_organization_path",
"(",
"path",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r\"^organizations/(?P<organization>.+?)$\"",
",",
"path",
")",
"return",
"m",
".",
"groupdict",
"(",
")",
"if",
"m",
"else",
"{",
"}"
] | [
225,
4
] | [
228,
41
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.common_project_path | (project: str,) | Returns a fully-qualified project string. | Returns a fully-qualified project string. | def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,) | [
"def",
"common_project_path",
"(",
"project",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"projects/{project}\"",
".",
"format",
"(",
"project",
"=",
"project",
",",
")"
] | [
231,
4
] | [
233,
60
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.parse_common_project_path | (path: str) | Parse a project path into its component segments. | Parse a project path into its component segments. | def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {} | [
"def",
"parse_common_project_path",
"(",
"path",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r\"^projects/(?P<project>.+?)$\"",
",",
"path",
")",
"return",
"m",
".",
"groupdict",
"(",
")",
"if",
"m",
"else",
"{",
"}"
] | [
236,
4
] | [
239,
41
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.common_location_path | (project: str, location: str,) | Returns a fully-qualified location string. | Returns a fully-qualified location string. | def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
) | [
"def",
"common_location_path",
"(",
"project",
":",
"str",
",",
"location",
":",
"str",
",",
")",
"->",
"str",
":",
"return",
"\"projects/{project}/locations/{location}\"",
".",
"format",
"(",
"project",
"=",
"project",
",",
"location",
"=",
"location",
",",
")"
] | [
242,
4
] | [
246,
9
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.parse_common_location_path | (path: str) | Parse a location path into its component segments. | Parse a location path into its component segments. | def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {} | [
"def",
"parse_common_location_path",
"(",
"path",
":",
"str",
")",
"->",
"Dict",
"[",
"str",
",",
"str",
"]",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r\"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$\"",
",",
"path",
")",
"return",
"m",
".",
"groupdict",
"(",
")",
"if",
"m",
"else",
"{",
"}"
] | [
249,
4
] | [
252,
41
] | python | en | ['en', 'en', 'en'] | True |
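The common_* helpers all follow the same build/parse pattern; the values below are made up.

DashboardsServiceClient.common_location_path("my-project", "us-central1")
# 'projects/my-project/locations/us-central1'
DashboardsServiceClient.parse_common_location_path(
    "projects/my-project/locations/us-central1"
)
# {'project': 'my-project', 'location': 'us-central1'}
DashboardsServiceClient.parse_common_folder_path("folders/1234567890")
# {'folder': '1234567890'}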
DashboardsServiceClient.__init__ | (
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, DashboardsServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) | Instantiates the dashboards service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, DashboardsServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
| Instantiates the dashboards service client. | def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, DashboardsServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the dashboards service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, DashboardsServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
# Create SSL credentials for mutual TLS if needed.
if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
"true",
"false",
):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
use_client_cert = (
os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
)
client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
is_mtls = True
client_cert_source_func = client_options.client_cert_source
else:
is_mtls = mtls.has_default_client_cert_source()
if is_mtls:
client_cert_source_func = mtls.default_client_cert_source()
else:
client_cert_source_func = None
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
else:
use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
if is_mtls:
api_endpoint = self.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = self.DEFAULT_ENDPOINT
else:
raise MutualTLSChannelError(
"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
"values: never, auto, always"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, DashboardsServiceTransport):
# transport is a DashboardsServiceTransport instance.
if credentials or client_options.credentials_file:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
) | [
"def",
"__init__",
"(",
"self",
",",
"*",
",",
"credentials",
":",
"Optional",
"[",
"ga_credentials",
".",
"Credentials",
"]",
"=",
"None",
",",
"transport",
":",
"Union",
"[",
"str",
",",
"DashboardsServiceTransport",
",",
"None",
"]",
"=",
"None",
",",
"client_options",
":",
"Optional",
"[",
"client_options_lib",
".",
"ClientOptions",
"]",
"=",
"None",
",",
"client_info",
":",
"gapic_v1",
".",
"client_info",
".",
"ClientInfo",
"=",
"DEFAULT_CLIENT_INFO",
",",
")",
"->",
"None",
":",
"if",
"isinstance",
"(",
"client_options",
",",
"dict",
")",
":",
"client_options",
"=",
"client_options_lib",
".",
"from_dict",
"(",
"client_options",
")",
"if",
"client_options",
"is",
"None",
":",
"client_options",
"=",
"client_options_lib",
".",
"ClientOptions",
"(",
")",
"# Create SSL credentials for mutual TLS if needed.",
"if",
"os",
".",
"getenv",
"(",
"\"GOOGLE_API_USE_CLIENT_CERTIFICATE\"",
",",
"\"false\"",
")",
"not",
"in",
"(",
"\"true\"",
",",
"\"false\"",
",",
")",
":",
"raise",
"ValueError",
"(",
"\"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`\"",
")",
"use_client_cert",
"=",
"(",
"os",
".",
"getenv",
"(",
"\"GOOGLE_API_USE_CLIENT_CERTIFICATE\"",
",",
"\"false\"",
")",
"==",
"\"true\"",
")",
"client_cert_source_func",
"=",
"None",
"is_mtls",
"=",
"False",
"if",
"use_client_cert",
":",
"if",
"client_options",
".",
"client_cert_source",
":",
"is_mtls",
"=",
"True",
"client_cert_source_func",
"=",
"client_options",
".",
"client_cert_source",
"else",
":",
"is_mtls",
"=",
"mtls",
".",
"has_default_client_cert_source",
"(",
")",
"if",
"is_mtls",
":",
"client_cert_source_func",
"=",
"mtls",
".",
"default_client_cert_source",
"(",
")",
"else",
":",
"client_cert_source_func",
"=",
"None",
"# Figure out which api endpoint to use.",
"if",
"client_options",
".",
"api_endpoint",
"is",
"not",
"None",
":",
"api_endpoint",
"=",
"client_options",
".",
"api_endpoint",
"else",
":",
"use_mtls_env",
"=",
"os",
".",
"getenv",
"(",
"\"GOOGLE_API_USE_MTLS_ENDPOINT\"",
",",
"\"auto\"",
")",
"if",
"use_mtls_env",
"==",
"\"never\"",
":",
"api_endpoint",
"=",
"self",
".",
"DEFAULT_ENDPOINT",
"elif",
"use_mtls_env",
"==",
"\"always\"",
":",
"api_endpoint",
"=",
"self",
".",
"DEFAULT_MTLS_ENDPOINT",
"elif",
"use_mtls_env",
"==",
"\"auto\"",
":",
"if",
"is_mtls",
":",
"api_endpoint",
"=",
"self",
".",
"DEFAULT_MTLS_ENDPOINT",
"else",
":",
"api_endpoint",
"=",
"self",
".",
"DEFAULT_ENDPOINT",
"else",
":",
"raise",
"MutualTLSChannelError",
"(",
"\"Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted \"",
"\"values: never, auto, always\"",
")",
"# Save or instantiate the transport.",
"# Ordinarily, we provide the transport, but allowing a custom transport",
"# instance provides an extensibility point for unusual situations.",
"if",
"isinstance",
"(",
"transport",
",",
"DashboardsServiceTransport",
")",
":",
"# transport is a DashboardsServiceTransport instance.",
"if",
"credentials",
"or",
"client_options",
".",
"credentials_file",
":",
"raise",
"ValueError",
"(",
"\"When providing a transport instance, \"",
"\"provide its credentials directly.\"",
")",
"if",
"client_options",
".",
"scopes",
":",
"raise",
"ValueError",
"(",
"\"When providing a transport instance, provide its scopes \"",
"\"directly.\"",
")",
"self",
".",
"_transport",
"=",
"transport",
"else",
":",
"Transport",
"=",
"type",
"(",
"self",
")",
".",
"get_transport_class",
"(",
"transport",
")",
"self",
".",
"_transport",
"=",
"Transport",
"(",
"credentials",
"=",
"credentials",
",",
"credentials_file",
"=",
"client_options",
".",
"credentials_file",
",",
"host",
"=",
"api_endpoint",
",",
"scopes",
"=",
"client_options",
".",
"scopes",
",",
"client_cert_source_for_mtls",
"=",
"client_cert_source_func",
",",
"quota_project_id",
"=",
"client_options",
".",
"quota_project_id",
",",
"client_info",
"=",
"client_info",
",",
"always_use_jwt_access",
"=",
"True",
",",
")"
] | [
254,
4
] | [
376,
13
] | python | en | ['en', 'en', 'en'] | True |
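A hedged sketch of the main configuration paths through __init__. The endpoint override is arbitrary, and with no credentials argument the client falls back to Application Default Credentials.

from google.api_core.client_options import ClientOptions

# Defaults: standard endpoint, credentials resolved from the environment.
client = DashboardsServiceClient()

# Explicit endpoint override; a plain dict is coerced via from_dict().
client = DashboardsServiceClient(
    client_options=ClientOptions(api_endpoint="monitoring.googleapis.com")
)
client = DashboardsServiceClient(
    client_options={"api_endpoint": "monitoring.googleapis.com"}
)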
DashboardsServiceClient.create_dashboard | (
self,
request: Union[dashboards_service.CreateDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) | r"""Creates a new custom dashboard. For examples on how you can use
this API to create dashboards, see `Managing dashboards by
API <https://cloud.google.com/monitoring/dashboards/api-dashboard>`__.
This method requires the ``monitoring.dashboards.create``
permission on the specified project. For more information about
permissions, see `Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.CreateDashboardRequest, dict]):
The request object. The `CreateDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.types.Dashboard:
A Google Stackdriver dashboard.
Dashboards define the content and layout
of pages in the Stackdriver web
application.
| r"""Creates a new custom dashboard. For examples on how you can use
this API to create dashboards, see `Managing dashboards by
API <https://cloud.google.com/monitoring/dashboards/api-dashboard>`__.
This method requires the ``monitoring.dashboards.create``
permission on the specified project. For more information about
permissions, see `Cloud Identity and Access
Management <https://cloud.google.com/iam>`__. | def create_dashboard(
self,
request: Union[dashboards_service.CreateDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> dashboard.Dashboard:
r"""Creates a new custom dashboard. For examples on how you can use
this API to create dashboards, see `Managing dashboards by
API <https://cloud.google.com/monitoring/dashboards/api-dashboard>`__.
This method requires the ``monitoring.dashboards.create``
permission on the specified project. For more information about
permissions, see `Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.CreateDashboardRequest, dict]):
The request object. The `CreateDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.types.Dashboard:
A Google Stackdriver dashboard.
Dashboards define the content and layout
of pages in the Stackdriver web
application.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a dashboards_service.CreateDashboardRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, dashboards_service.CreateDashboardRequest):
request = dashboards_service.CreateDashboardRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.create_dashboard]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response | [
"def",
"create_dashboard",
"(",
"self",
",",
"request",
":",
"Union",
"[",
"dashboards_service",
".",
"CreateDashboardRequest",
",",
"dict",
"]",
"=",
"None",
",",
"*",
",",
"retry",
":",
"OptionalRetry",
"=",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
":",
"float",
"=",
"None",
",",
"metadata",
":",
"Sequence",
"[",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
"=",
"(",
")",
",",
")",
"->",
"dashboard",
".",
"Dashboard",
":",
"# Create or coerce a protobuf request object.",
"# Minor optimization to avoid making a copy if the user passes",
"# in a dashboards_service.CreateDashboardRequest.",
"# There's no risk of modifying the input as we've already verified",
"# there are no flattened fields.",
"if",
"not",
"isinstance",
"(",
"request",
",",
"dashboards_service",
".",
"CreateDashboardRequest",
")",
":",
"request",
"=",
"dashboards_service",
".",
"CreateDashboardRequest",
"(",
"request",
")",
"# Wrap the RPC method; this adds retry and timeout information,",
"# and friendly error handling.",
"rpc",
"=",
"self",
".",
"_transport",
".",
"_wrapped_methods",
"[",
"self",
".",
"_transport",
".",
"create_dashboard",
"]",
"# Certain fields should be provided within the metadata header;",
"# add these here.",
"metadata",
"=",
"tuple",
"(",
"metadata",
")",
"+",
"(",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"(",
"(",
"\"parent\"",
",",
"request",
".",
"parent",
")",
",",
")",
")",
",",
")",
"# Send the request.",
"response",
"=",
"rpc",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
")",
"# Done; return the response.",
"return",
"response"
] | [
378,
4
] | [
433,
23
] | python | en | ['en', 'en', 'en'] | True |
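A hedged usage sketch. The parent-plus-payload shape follows the docstring, but the Dashboard field used here (display_name) is an assumption about the generated types.

from google.cloud import monitoring_dashboard_v1

client = monitoring_dashboard_v1.DashboardsServiceClient()
request = monitoring_dashboard_v1.CreateDashboardRequest(
    parent="projects/my-project",
    dashboard=monitoring_dashboard_v1.Dashboard(display_name="My dashboard"),
)
created = client.create_dashboard(request=request)
print(created.name)  # server-assigned resource name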
DashboardsServiceClient.list_dashboards | (
self,
request: Union[dashboards_service.ListDashboardsRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) | r"""Lists the existing dashboards.
This method requires the ``monitoring.dashboards.list``
permission on the specified project. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.ListDashboardsRequest, dict]):
The request object. The `ListDashboards` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.services.dashboards_service.pagers.ListDashboardsPager:
The ListDashboards response.
Iterating over this object will yield results and
resolve additional pages automatically.
| r"""Lists the existing dashboards. | def list_dashboards(
self,
request: Union[dashboards_service.ListDashboardsRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> pagers.ListDashboardsPager:
r"""Lists the existing dashboards.
This method requires the ``monitoring.dashboards.list``
permission on the specified project. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.ListDashboardsRequest, dict]):
The request object. The `ListDashboards` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.services.dashboards_service.pagers.ListDashboardsPager:
The ListDashboards response.
Iterating over this object will yield results and
resolve additional pages automatically.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a dashboards_service.ListDashboardsRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, dashboards_service.ListDashboardsRequest):
request = dashboards_service.ListDashboardsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.list_dashboards]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# This method is paged; wrap the response in a pager, which provides
# an `__iter__` convenience method.
response = pagers.ListDashboardsPager(
method=rpc, request=request, response=response, metadata=metadata,
)
# Done; return the response.
return response | [
"def",
"list_dashboards",
"(",
"self",
",",
"request",
":",
"Union",
"[",
"dashboards_service",
".",
"ListDashboardsRequest",
",",
"dict",
"]",
"=",
"None",
",",
"*",
",",
"retry",
":",
"OptionalRetry",
"=",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
":",
"float",
"=",
"None",
",",
"metadata",
":",
"Sequence",
"[",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
"=",
"(",
")",
",",
")",
"->",
"pagers",
".",
"ListDashboardsPager",
":",
"# Create or coerce a protobuf request object.",
"# Minor optimization to avoid making a copy if the user passes",
"# in a dashboards_service.ListDashboardsRequest.",
"# There's no risk of modifying the input as we've already verified",
"# there are no flattened fields.",
"if",
"not",
"isinstance",
"(",
"request",
",",
"dashboards_service",
".",
"ListDashboardsRequest",
")",
":",
"request",
"=",
"dashboards_service",
".",
"ListDashboardsRequest",
"(",
"request",
")",
"# Wrap the RPC method; this adds retry and timeout information,",
"# and friendly error handling.",
"rpc",
"=",
"self",
".",
"_transport",
".",
"_wrapped_methods",
"[",
"self",
".",
"_transport",
".",
"list_dashboards",
"]",
"# Certain fields should be provided within the metadata header;",
"# add these here.",
"metadata",
"=",
"tuple",
"(",
"metadata",
")",
"+",
"(",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"(",
"(",
"\"parent\"",
",",
"request",
".",
"parent",
")",
",",
")",
")",
",",
")",
"# Send the request.",
"response",
"=",
"rpc",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
")",
"# This method is paged; wrap the response in a pager, which provides",
"# an `__iter__` convenience method.",
"response",
"=",
"pagers",
".",
"ListDashboardsPager",
"(",
"method",
"=",
"rpc",
",",
"request",
"=",
"request",
",",
"response",
"=",
"response",
",",
"metadata",
"=",
"metadata",
",",
")",
"# Done; return the response.",
"return",
"response"
] | [
435,
4
] | [
495,
23
] | python | en | ['en', 'en', 'en'] | True |
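Usage sketch for the pager above: plain iteration resolves further pages lazily, and .pages yields the raw responses. The dashboards field on each page is an assumption about ListDashboardsResponse.

request = monitoring_dashboard_v1.ListDashboardsRequest(parent="projects/my-project")
for dashboard in client.list_dashboards(request=request):
    print(dashboard.name)

# Page at a time, when the raw responses are needed:
for page in client.list_dashboards(request=request).pages:
    print(len(page.dashboards))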
DashboardsServiceClient.get_dashboard | (
self,
request: Union[dashboards_service.GetDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) | r"""Fetches a specific dashboard.
This method requires the ``monitoring.dashboards.get``
permission on the specified dashboard. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.GetDashboardRequest, dict]):
The request object. The `GetDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.types.Dashboard:
A Google Stackdriver dashboard.
Dashboards define the content and layout
of pages in the Stackdriver web
application.
| r"""Fetches a specific dashboard. | def get_dashboard(
self,
request: Union[dashboards_service.GetDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> dashboard.Dashboard:
r"""Fetches a specific dashboard.
This method requires the ``monitoring.dashboards.get``
permission on the specified dashboard. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.GetDashboardRequest, dict]):
The request object. The `GetDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.types.Dashboard:
A Google Stackdriver dashboard.
Dashboards define the content and layout
of pages in the Stackdriver web
application.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a dashboards_service.GetDashboardRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, dashboards_service.GetDashboardRequest):
request = dashboards_service.GetDashboardRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.get_dashboard]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response | [
"def",
"get_dashboard",
"(",
"self",
",",
"request",
":",
"Union",
"[",
"dashboards_service",
".",
"GetDashboardRequest",
",",
"dict",
"]",
"=",
"None",
",",
"*",
",",
"retry",
":",
"OptionalRetry",
"=",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
":",
"float",
"=",
"None",
",",
"metadata",
":",
"Sequence",
"[",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
"=",
"(",
")",
",",
")",
"->",
"dashboard",
".",
"Dashboard",
":",
"# Create or coerce a protobuf request object.",
"# Minor optimization to avoid making a copy if the user passes",
"# in a dashboards_service.GetDashboardRequest.",
"# There's no risk of modifying the input as we've already verified",
"# there are no flattened fields.",
"if",
"not",
"isinstance",
"(",
"request",
",",
"dashboards_service",
".",
"GetDashboardRequest",
")",
":",
"request",
"=",
"dashboards_service",
".",
"GetDashboardRequest",
"(",
"request",
")",
"# Wrap the RPC method; this adds retry and timeout information,",
"# and friendly error handling.",
"rpc",
"=",
"self",
".",
"_transport",
".",
"_wrapped_methods",
"[",
"self",
".",
"_transport",
".",
"get_dashboard",
"]",
"# Certain fields should be provided within the metadata header;",
"# add these here.",
"metadata",
"=",
"tuple",
"(",
"metadata",
")",
"+",
"(",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"(",
"(",
"\"name\"",
",",
"request",
".",
"name",
")",
",",
")",
")",
",",
")",
"# Send the request.",
"response",
"=",
"rpc",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
")",
"# Done; return the response.",
"return",
"response"
] | [
497,
4
] | [
551,
23
] | python | en | ['en', 'en', 'en'] | True |
DashboardsServiceClient.delete_dashboard | (
self,
request: Union[dashboards_service.DeleteDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) | r"""Deletes an existing custom dashboard.
This method requires the ``monitoring.dashboards.delete``
permission on the specified dashboard. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.DeleteDashboardRequest, dict]):
The request object. The `DeleteDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
| r"""Deletes an existing custom dashboard. | def delete_dashboard(
self,
request: Union[dashboards_service.DeleteDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> None:
r"""Deletes an existing custom dashboard.
This method requires the ``monitoring.dashboards.delete``
permission on the specified dashboard. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.DeleteDashboardRequest, dict]):
The request object. The `DeleteDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a dashboards_service.DeleteDashboardRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, dashboards_service.DeleteDashboardRequest):
request = dashboards_service.DeleteDashboardRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.delete_dashboard]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
)
# Send the request.
rpc(
request, retry=retry, timeout=timeout, metadata=metadata,
) | [
"def",
"delete_dashboard",
"(",
"self",
",",
"request",
":",
"Union",
"[",
"dashboards_service",
".",
"DeleteDashboardRequest",
",",
"dict",
"]",
"=",
"None",
",",
"*",
",",
"retry",
":",
"OptionalRetry",
"=",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
":",
"float",
"=",
"None",
",",
"metadata",
":",
"Sequence",
"[",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
"=",
"(",
")",
",",
")",
"->",
"None",
":",
"# Create or coerce a protobuf request object.",
"# Minor optimization to avoid making a copy if the user passes",
"# in a dashboards_service.DeleteDashboardRequest.",
"# There's no risk of modifying the input as we've already verified",
"# there are no flattened fields.",
"if",
"not",
"isinstance",
"(",
"request",
",",
"dashboards_service",
".",
"DeleteDashboardRequest",
")",
":",
"request",
"=",
"dashboards_service",
".",
"DeleteDashboardRequest",
"(",
"request",
")",
"# Wrap the RPC method; this adds retry and timeout information,",
"# and friendly error handling.",
"rpc",
"=",
"self",
".",
"_transport",
".",
"_wrapped_methods",
"[",
"self",
".",
"_transport",
".",
"delete_dashboard",
"]",
"# Certain fields should be provided within the metadata header;",
"# add these here.",
"metadata",
"=",
"tuple",
"(",
"metadata",
")",
"+",
"(",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"(",
"(",
"\"name\"",
",",
"request",
".",
"name",
")",
",",
")",
")",
",",
")",
"# Send the request.",
"rpc",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
")"
] | [
553,
4
] | [
598,
9
] | python | en | ['en', 'cy', 'en'] | True |
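A matching sketch for delete_dashboard (again with a placeholder resource name): the call returns None on success, consistent with the -> None annotation in the row.

from google.cloud import monitoring_dashboard_v1

client = monitoring_dashboard_v1.DashboardsServiceClient()
# Requires the monitoring.dashboards.delete permission on the
# dashboard, as the docstring above notes.
client.delete_dashboard(
    request={"name": "projects/my-project/dashboards/my-dashboard"}
)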
DashboardsServiceClient.update_dashboard | (
self,
request: Union[dashboards_service.UpdateDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) | r"""Replaces an existing custom dashboard with a new definition.
This method requires the ``monitoring.dashboards.update``
permission on the specified dashboard. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.UpdateDashboardRequest, dict]):
The request object. The `UpdateDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.types.Dashboard:
A Google Stackdriver dashboard.
Dashboards define the content and layout
of pages in the Stackdriver web
application.
| r"""Replaces an existing custom dashboard with a new definition. | def update_dashboard(
self,
request: Union[dashboards_service.UpdateDashboardRequest, dict] = None,
*,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> dashboard.Dashboard:
r"""Replaces an existing custom dashboard with a new definition.
This method requires the ``monitoring.dashboards.update``
permission on the specified dashboard. For more information, see
`Cloud Identity and Access
Management <https://cloud.google.com/iam>`__.
Args:
request (Union[google.cloud.monitoring_dashboard_v1.types.UpdateDashboardRequest, dict]):
The request object. The `UpdateDashboard` request.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.monitoring_dashboard_v1.types.Dashboard:
A Google Stackdriver dashboard.
Dashboards define the content and layout
of pages in the Stackdriver web
application.
"""
# Create or coerce a protobuf request object.
# Minor optimization to avoid making a copy if the user passes
# in a dashboards_service.UpdateDashboardRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, dashboards_service.UpdateDashboardRequest):
request = dashboards_service.UpdateDashboardRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.update_dashboard]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("dashboard.name", request.dashboard.name),)
),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response | [
"def",
"update_dashboard",
"(",
"self",
",",
"request",
":",
"Union",
"[",
"dashboards_service",
".",
"UpdateDashboardRequest",
",",
"dict",
"]",
"=",
"None",
",",
"*",
",",
"retry",
":",
"OptionalRetry",
"=",
"gapic_v1",
".",
"method",
".",
"DEFAULT",
",",
"timeout",
":",
"float",
"=",
"None",
",",
"metadata",
":",
"Sequence",
"[",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
"=",
"(",
")",
",",
")",
"->",
"dashboard",
".",
"Dashboard",
":",
"# Create or coerce a protobuf request object.",
"# Minor optimization to avoid making a copy if the user passes",
"# in a dashboards_service.UpdateDashboardRequest.",
"# There's no risk of modifying the input as we've already verified",
"# there are no flattened fields.",
"if",
"not",
"isinstance",
"(",
"request",
",",
"dashboards_service",
".",
"UpdateDashboardRequest",
")",
":",
"request",
"=",
"dashboards_service",
".",
"UpdateDashboardRequest",
"(",
"request",
")",
"# Wrap the RPC method; this adds retry and timeout information,",
"# and friendly error handling.",
"rpc",
"=",
"self",
".",
"_transport",
".",
"_wrapped_methods",
"[",
"self",
".",
"_transport",
".",
"update_dashboard",
"]",
"# Certain fields should be provided within the metadata header;",
"# add these here.",
"metadata",
"=",
"tuple",
"(",
"metadata",
")",
"+",
"(",
"gapic_v1",
".",
"routing_header",
".",
"to_grpc_metadata",
"(",
"(",
"(",
"\"dashboard.name\"",
",",
"request",
".",
"dashboard",
".",
"name",
")",
",",
")",
")",
",",
")",
"# Send the request.",
"response",
"=",
"rpc",
"(",
"request",
",",
"retry",
"=",
"retry",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
")",
"# Done; return the response.",
"return",
"response"
] | [
600,
4
] | [
656,
23
] | python | en | ['en', 'en', 'en'] | True |
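A sketch of the read-modify-write flow that update_dashboard implies (placeholder names; fetching before updating is an assumed workflow, not something this row prescribes):

from google.cloud import monitoring_dashboard_v1

client = monitoring_dashboard_v1.DashboardsServiceClient()
dashboard = client.get_dashboard(
    request={"name": "projects/my-project/dashboards/my-dashboard"}
)
dashboard.display_name = "Updated title"
# UpdateDashboardRequest carries the full replacement definition; the
# routing header is built from dashboard.name, as the function body shows.
response = client.update_dashboard(request={"dashboard": dashboard})
print(response.name)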
DashboardsServiceClient.__exit__ | (self, type, value, traceback) | Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
| Releases underlying transport's resources. | def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close() | [
"def",
"__exit__",
"(",
"self",
",",
"type",
",",
"value",
",",
"traceback",
")",
":",
"self",
".",
"transport",
".",
"close",
"(",
")"
] | [
661,
4
] | [
669,
30
] | python | en | ['en', 'en', 'en'] | True |
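A sketch of the context-manager usage that __exit__ enables (the project ID is a placeholder, and list_dashboards is assumed from the same client): exiting the with-block closes the transport, so, per the warning above, the client must not share its transport with other clients.

from google.cloud import monitoring_dashboard_v1

with monitoring_dashboard_v1.DashboardsServiceClient() as client:
    for d in client.list_dashboards(request={"parent": "projects/my-project"}):
        print(d.name)
# The underlying transport is closed here; further calls on `client` fail.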
responder | (f) | Marks a function as responder. Decorate a function with it and it
will automatically call the return value as WSGI application.
Example::
@responder
def application(environ, start_response):
return Response('Hello World!')
| Marks a function as responder. Decorate a function with it and it
will automatically call the return value as WSGI application. | def responder(f):
"""Marks a function as responder. Decorate a function with it and it
will automatically call the return value as WSGI application.
Example::
@responder
def application(environ, start_response):
return Response('Hello World!')
"""
return update_wrapper(lambda *a: f(*a)(*a[-2:]), f) | [
"def",
"responder",
"(",
"f",
")",
":",
"return",
"update_wrapper",
"(",
"lambda",
"*",
"a",
":",
"f",
"(",
"*",
"a",
")",
"(",
"*",
"a",
"[",
"-",
"2",
":",
"]",
")",
",",
"f",
")"
] | [
32,
0
] | [
42,
55
] | python | en | ['en', 'en', 'en'] | True |
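A self-contained version of the docstring's own example, assuming responder is importable from werkzeug.wsgi (its historical location) and Response from werkzeug.wrappers:

from werkzeug.wsgi import responder
from werkzeug.wrappers import Response

@responder
def application(environ, start_response):
    # The decorated function returns a WSGI application (a Response is
    # one); responder then calls it with (environ, start_response).
    return Response("Hello World!")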