identifier (string, 1-155 chars) | parameters (string, 2-6.09k chars) | docstring (string, 11-63.4k chars) | docstring_summary (string, 0-63.4k chars) | function (string, 29-99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (string, 1 class) | docstring_language (string, 2-7 chars) | docstring_language_predictions (string, 18-23 chars) | is_langid_reliable (string, 2 classes) |
---|---|---|---|---|---|---|---|---|---|---|---|
SSLTransport._ssl_io_loop | (self, func, *args) | Performs an I/O loop between incoming/outgoing and the socket. | Performs an I/O loop between incoming/outgoing and the socket. | def _ssl_io_loop(self, func, *args):
"""Performs an I/O loop between incoming/outgoing and the socket."""
should_loop = True
ret = None
while should_loop:
errno = None
try:
ret = func(*args)
except ssl.SSLError as e:
if e.errno not in (ssl.SSL_ERROR_WANT_READ, ssl.SSL_ERROR_WANT_WRITE):
# WANT_READ, and WANT_WRITE are expected, others are not.
raise e
errno = e.errno
buf = self.outgoing.read()
self.socket.sendall(buf)
if errno is None:
should_loop = False
elif errno == ssl.SSL_ERROR_WANT_READ:
buf = self.socket.recv(SSL_BLOCKSIZE)
if buf:
self.incoming.write(buf)
else:
self.incoming.write_eof()
return ret | [
"def",
"_ssl_io_loop",
"(",
"self",
",",
"func",
",",
"*",
"args",
")",
":",
"should_loop",
"=",
"True",
"ret",
"=",
"None",
"while",
"should_loop",
":",
"errno",
"=",
"None",
"try",
":",
"ret",
"=",
"func",
"(",
"*",
"args",
")",
"except",
"ssl",
".",
"SSLError",
"as",
"e",
":",
"if",
"e",
".",
"errno",
"not",
"in",
"(",
"ssl",
".",
"SSL_ERROR_WANT_READ",
",",
"ssl",
".",
"SSL_ERROR_WANT_WRITE",
")",
":",
"# WANT_READ, and WANT_WRITE are expected, others are not.",
"raise",
"e",
"errno",
"=",
"e",
".",
"errno",
"buf",
"=",
"self",
".",
"outgoing",
".",
"read",
"(",
")",
"self",
".",
"socket",
".",
"sendall",
"(",
"buf",
")",
"if",
"errno",
"is",
"None",
":",
"should_loop",
"=",
"False",
"elif",
"errno",
"==",
"ssl",
".",
"SSL_ERROR_WANT_READ",
":",
"buf",
"=",
"self",
".",
"socket",
".",
"recv",
"(",
"SSL_BLOCKSIZE",
")",
"if",
"buf",
":",
"self",
".",
"incoming",
".",
"write",
"(",
"buf",
")",
"else",
":",
"self",
".",
"incoming",
".",
"write_eof",
"(",
")",
"return",
"ret"
] | [
194,
4
] | [
220,
18
] | python | en | ['en', 'en', 'en'] | True |
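The row above is urllib3's memory-BIO pump: ciphertext produced by the `SSLObject` is flushed from the outgoing BIO to the raw socket, and a `WANT_READ` error pulls more ciphertext from the socket into the incoming BIO. A minimal standalone sketch of the same loop shape, driving only the handshake; the host, port, and `SSL_BLOCKSIZE` value here are assumptions, not urllib3's code.

```python
import socket
import ssl

SSL_BLOCKSIZE = 16384  # assumed buffer size; urllib3 defines its own constant

ctx = ssl.create_default_context()
incoming, outgoing = ssl.MemoryBIO(), ssl.MemoryBIO()

with socket.create_connection(("example.org", 443)) as sock:
    sslobj = ctx.wrap_bio(incoming, outgoing, server_hostname="example.org")
    while True:
        try:
            sslobj.do_handshake()
            break
        except (ssl.SSLWantReadError, ssl.SSLWantWriteError):
            # Flush ciphertext we produced, then feed ciphertext the peer sent.
            sock.sendall(outgoing.read())
            incoming.write(sock.recv(SSL_BLOCKSIZE))
    print(sslobj.version())  # e.g. 'TLSv1.3'
```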
SessionBase.encode | (self, session_dict) | Return the given session dictionary serialized and encoded as a string. | Return the given session dictionary serialized and encoded as a string. | def encode(self, session_dict):
"Return the given session dictionary serialized and encoded as a string."
# RemovedInDjango40Warning: DEFAULT_HASHING_ALGORITHM will be removed.
if settings.DEFAULT_HASHING_ALGORITHM == 'sha1':
return self._legacy_encode(session_dict)
return signing.dumps(
session_dict, salt=self.key_salt, serializer=self.serializer,
compress=True,
) | [
"def",
"encode",
"(",
"self",
",",
"session_dict",
")",
":",
"# RemovedInDjango40Warning: DEFAULT_HASHING_ALGORITHM will be removed.",
"if",
"settings",
".",
"DEFAULT_HASHING_ALGORITHM",
"==",
"'sha1'",
":",
"return",
"self",
".",
"_legacy_encode",
"(",
"session_dict",
")",
"return",
"signing",
".",
"dumps",
"(",
"session_dict",
",",
"salt",
"=",
"self",
".",
"key_salt",
",",
"serializer",
"=",
"self",
".",
"serializer",
",",
"compress",
"=",
"True",
",",
")"
] | [
108,
4
] | [
116,
9
] | python | en | ['en', 'en', 'en'] | True |
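The non-legacy branch above delegates to `django.core.signing`. A small round-trip sketch, assuming a standalone script with a throwaway `SECRET_KEY` and a placeholder salt (the real backend passes its `key_salt` attribute and its configured serializer):

```python
from django.conf import settings

settings.configure(SECRET_KEY="throwaway-demo-key")  # assumption: standalone demo settings

from django.core import signing

payload = signing.dumps({"cart": [1, 2, 3]}, salt="demo.salt", compress=True)
print(payload)                                   # opaque signed string, optionally zlib-compressed
print(signing.loads(payload, salt="demo.salt"))  # {'cart': [1, 2, 3]}
```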
SessionBase.is_empty | (self) | Return True when there is no session_key and the session is empty. | Return True when there is no session_key and the session is empty. | def is_empty(self):
"Return True when there is no session_key and the session is empty."
try:
return not self._session_key and not self._session_cache
except AttributeError:
return True | [
"def",
"is_empty",
"(",
"self",
")",
":",
"try",
":",
"return",
"not",
"self",
".",
"_session_key",
"and",
"not",
"self",
".",
"_session_cache",
"except",
"AttributeError",
":",
"return",
"True"
] | [
184,
4
] | [
189,
23
] | python | en | ['en', 'en', 'en'] | True |
SessionBase._get_new_session_key | (self) | Return session key that isn't being used. | Return session key that isn't being used. | def _get_new_session_key(self):
"Return session key that isn't being used."
while True:
session_key = get_random_string(32, VALID_KEY_CHARS)
if not self.exists(session_key):
return session_key | [
"def",
"_get_new_session_key",
"(",
"self",
")",
":",
"while",
"True",
":",
"session_key",
"=",
"get_random_string",
"(",
"32",
",",
"VALID_KEY_CHARS",
")",
"if",
"not",
"self",
".",
"exists",
"(",
"session_key",
")",
":",
"return",
"session_key"
] | [
191,
4
] | [
196,
34
] | python | en | ['en', 'en', 'en'] | True |
SessionBase._validate_session_key | (self, key) |
Key must be truthy and at least 8 characters long. 8 characters is an
arbitrary lower bound for some minimal key security.
|
Key must be truthy and at least 8 characters long. 8 characters is an
arbitrary lower bound for some minimal key security.
| def _validate_session_key(self, key):
"""
Key must be truthy and at least 8 characters long. 8 characters is an
arbitrary lower bound for some minimal key security.
"""
return key and len(key) >= 8 | [
"def",
"_validate_session_key",
"(",
"self",
",",
"key",
")",
":",
"return",
"key",
"and",
"len",
"(",
"key",
")",
">=",
"8"
] | [
203,
4
] | [
208,
36
] | python | en | ['en', 'error', 'th'] | False |
SessionBase._set_session_key | (self, value) |
Validate session key on assignment. Invalid values will set to None.
|
Validate session key on assignment. Invalid values will set to None.
| def _set_session_key(self, value):
"""
Validate session key on assignment. Invalid values will set to None.
"""
if self._validate_session_key(value):
self.__session_key = value
else:
self.__session_key = None | [
"def",
"_set_session_key",
"(",
"self",
",",
"value",
")",
":",
"if",
"self",
".",
"_validate_session_key",
"(",
"value",
")",
":",
"self",
".",
"__session_key",
"=",
"value",
"else",
":",
"self",
".",
"__session_key",
"=",
"None"
] | [
213,
4
] | [
220,
37
] | python | en | ['en', 'error', 'th'] | False |
SessionBase._get_session | (self, no_load=False) |
Lazily load session from storage (unless "no_load" is True, when only
an empty dict is stored) and store it in the current instance.
|
Lazily load session from storage (unless "no_load" is True, when only
an empty dict is stored) and store it in the current instance.
| def _get_session(self, no_load=False):
"""
Lazily load session from storage (unless "no_load" is True, when only
an empty dict is stored) and store it in the current instance.
"""
self.accessed = True
try:
return self._session_cache
except AttributeError:
if self.session_key is None or no_load:
self._session_cache = {}
else:
self._session_cache = self.load()
return self._session_cache | [
"def",
"_get_session",
"(",
"self",
",",
"no_load",
"=",
"False",
")",
":",
"self",
".",
"accessed",
"=",
"True",
"try",
":",
"return",
"self",
".",
"_session_cache",
"except",
"AttributeError",
":",
"if",
"self",
".",
"session_key",
"is",
"None",
"or",
"no_load",
":",
"self",
".",
"_session_cache",
"=",
"{",
"}",
"else",
":",
"self",
".",
"_session_cache",
"=",
"self",
".",
"load",
"(",
")",
"return",
"self",
".",
"_session_cache"
] | [
225,
4
] | [
238,
34
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.get_expiry_age | (self, **kwargs) | Get the number of seconds until the session expires.
Optionally, this function accepts `modification` and `expiry` keyword
arguments specifying the modification and expiry of the session.
| Get the number of seconds until the session expires. | def get_expiry_age(self, **kwargs):
"""Get the number of seconds until the session expires.
Optionally, this function accepts `modification` and `expiry` keyword
arguments specifying the modification and expiry of the session.
"""
try:
modification = kwargs['modification']
except KeyError:
modification = timezone.now()
# Make the difference between "expiry=None passed in kwargs" and
# "expiry not passed in kwargs", in order to guarantee not to trigger
# self.load() when expiry is provided.
try:
expiry = kwargs['expiry']
except KeyError:
expiry = self.get('_session_expiry')
if not expiry: # Checks both None and 0 cases
return self.get_session_cookie_age()
if not isinstance(expiry, datetime):
return expiry
delta = expiry - modification
return delta.days * 86400 + delta.seconds | [
"def",
"get_expiry_age",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"modification",
"=",
"kwargs",
"[",
"'modification'",
"]",
"except",
"KeyError",
":",
"modification",
"=",
"timezone",
".",
"now",
"(",
")",
"# Make the difference between \"expiry=None passed in kwargs\" and",
"# \"expiry not passed in kwargs\", in order to guarantee not to trigger",
"# self.load() when expiry is provided.",
"try",
":",
"expiry",
"=",
"kwargs",
"[",
"'expiry'",
"]",
"except",
"KeyError",
":",
"expiry",
"=",
"self",
".",
"get",
"(",
"'_session_expiry'",
")",
"if",
"not",
"expiry",
":",
"# Checks both None and 0 cases",
"return",
"self",
".",
"get_session_cookie_age",
"(",
")",
"if",
"not",
"isinstance",
"(",
"expiry",
",",
"datetime",
")",
":",
"return",
"expiry",
"delta",
"=",
"expiry",
"-",
"modification",
"return",
"delta",
".",
"days",
"*",
"86400",
"+",
"delta",
".",
"seconds"
] | [
245,
4
] | [
268,
49
] | python | en | ['en', 'en', 'en'] | True |
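The return value in the row above is plain timedelta arithmetic; a worked example (note that microseconds are dropped):

```python
from datetime import datetime, timedelta

modification = datetime(2021, 1, 1, 12, 0, 0)
expiry = modification + timedelta(days=1, seconds=30)

delta = expiry - modification
print(delta.days * 86400 + delta.seconds)  # 86430 seconds until expiry
```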
SessionBase.get_expiry_date | (self, **kwargs) | Get session the expiry date (as a datetime object).
Optionally, this function accepts `modification` and `expiry` keyword
arguments specifying the modification and expiry of the session.
| Get session the expiry date (as a datetime object). | def get_expiry_date(self, **kwargs):
"""Get session the expiry date (as a datetime object).
Optionally, this function accepts `modification` and `expiry` keyword
arguments specifying the modification and expiry of the session.
"""
try:
modification = kwargs['modification']
except KeyError:
modification = timezone.now()
# Same comment as in get_expiry_age
try:
expiry = kwargs['expiry']
except KeyError:
expiry = self.get('_session_expiry')
if isinstance(expiry, datetime):
return expiry
expiry = expiry or self.get_session_cookie_age()
return modification + timedelta(seconds=expiry) | [
"def",
"get_expiry_date",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"modification",
"=",
"kwargs",
"[",
"'modification'",
"]",
"except",
"KeyError",
":",
"modification",
"=",
"timezone",
".",
"now",
"(",
")",
"# Same comment as in get_expiry_age",
"try",
":",
"expiry",
"=",
"kwargs",
"[",
"'expiry'",
"]",
"except",
"KeyError",
":",
"expiry",
"=",
"self",
".",
"get",
"(",
"'_session_expiry'",
")",
"if",
"isinstance",
"(",
"expiry",
",",
"datetime",
")",
":",
"return",
"expiry",
"expiry",
"=",
"expiry",
"or",
"self",
".",
"get_session_cookie_age",
"(",
")",
"return",
"modification",
"+",
"timedelta",
"(",
"seconds",
"=",
"expiry",
")"
] | [
270,
4
] | [
289,
55
] | python | en | ['en', 'en', 'en'] | True |
SessionBase.set_expiry | (self, value) |
Set a custom expiration for the session. ``value`` can be an integer,
a Python ``datetime`` or ``timedelta`` object or ``None``.
If ``value`` is an integer, the session will expire after that many
seconds of inactivity. If set to ``0`` then the session will expire on
browser close.
If ``value`` is a ``datetime`` or ``timedelta`` object, the session
will expire at that specific future time.
If ``value`` is ``None``, the session uses the global session expiry
policy.
|
Set a custom expiration for the session. ``value`` can be an integer,
a Python ``datetime`` or ``timedelta`` object or ``None``. | def set_expiry(self, value):
"""
Set a custom expiration for the session. ``value`` can be an integer,
a Python ``datetime`` or ``timedelta`` object or ``None``.
If ``value`` is an integer, the session will expire after that many
seconds of inactivity. If set to ``0`` then the session will expire on
browser close.
If ``value`` is a ``datetime`` or ``timedelta`` object, the session
will expire at that specific future time.
If ``value`` is ``None``, the session uses the global session expiry
policy.
"""
if value is None:
# Remove any custom expiration for this session.
try:
del self['_session_expiry']
except KeyError:
pass
return
if isinstance(value, timedelta):
value = timezone.now() + value
self['_session_expiry'] = value | [
"def",
"set_expiry",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"is",
"None",
":",
"# Remove any custom expiration for this session.",
"try",
":",
"del",
"self",
"[",
"'_session_expiry'",
"]",
"except",
"KeyError",
":",
"pass",
"return",
"if",
"isinstance",
"(",
"value",
",",
"timedelta",
")",
":",
"value",
"=",
"timezone",
".",
"now",
"(",
")",
"+",
"value",
"self",
"[",
"'_session_expiry'",
"]",
"=",
"value"
] | [
291,
4
] | [
315,
39
] | python | en | ['en', 'error', 'th'] | False |
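A hedged usage sketch for the expiry policies described above; `request` stands for an ordinary view argument with the session middleware enabled, and the view body is illustrative only:

```python
from datetime import timedelta

def example_view(request):
    request.session.set_expiry(300)                 # expire after 300 s of inactivity
    request.session.set_expiry(0)                   # expire when the browser closes
    request.session.set_expiry(timedelta(weeks=2))  # expire at a specific future time
    request.session.set_expiry(None)                # revert to the global expiry policy
```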
SessionBase.get_expire_at_browser_close | (self) |
Return ``True`` if the session is set to expire when the browser
closes, and ``False`` if there's an expiry date. Use
``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry
date/age, if there is one.
|
Return ``True`` if the session is set to expire when the browser
closes, and ``False`` if there's an expiry date. Use
``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry
date/age, if there is one.
| def get_expire_at_browser_close(self):
"""
Return ``True`` if the session is set to expire when the browser
closes, and ``False`` if there's an expiry date. Use
``get_expiry_date()`` or ``get_expiry_age()`` to find the actual expiry
date/age, if there is one.
"""
if self.get('_session_expiry') is None:
return settings.SESSION_EXPIRE_AT_BROWSER_CLOSE
return self.get('_session_expiry') == 0 | [
"def",
"get_expire_at_browser_close",
"(",
"self",
")",
":",
"if",
"self",
".",
"get",
"(",
"'_session_expiry'",
")",
"is",
"None",
":",
"return",
"settings",
".",
"SESSION_EXPIRE_AT_BROWSER_CLOSE",
"return",
"self",
".",
"get",
"(",
"'_session_expiry'",
")",
"==",
"0"
] | [
317,
4
] | [
326,
47
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.flush | (self) |
Remove the current session data from the database and regenerate the
key.
|
Remove the current session data from the database and regenerate the
key.
| def flush(self):
"""
Remove the current session data from the database and regenerate the
key.
"""
self.clear()
self.delete()
self._session_key = None | [
"def",
"flush",
"(",
"self",
")",
":",
"self",
".",
"clear",
"(",
")",
"self",
".",
"delete",
"(",
")",
"self",
".",
"_session_key",
"=",
"None"
] | [
328,
4
] | [
335,
32
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.cycle_key | (self) |
Create a new session key, while retaining the current session data.
|
Create a new session key, while retaining the current session data.
| def cycle_key(self):
"""
Create a new session key, while retaining the current session data.
"""
data = self._session
key = self.session_key
self.create()
self._session_cache = data
if key:
self.delete(key) | [
"def",
"cycle_key",
"(",
"self",
")",
":",
"data",
"=",
"self",
".",
"_session",
"key",
"=",
"self",
".",
"session_key",
"self",
".",
"create",
"(",
")",
"self",
".",
"_session_cache",
"=",
"data",
"if",
"key",
":",
"self",
".",
"delete",
"(",
"key",
")"
] | [
337,
4
] | [
346,
28
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.exists | (self, session_key) |
Return True if the given session_key already exists.
|
Return True if the given session_key already exists.
| def exists(self, session_key):
"""
Return True if the given session_key already exists.
"""
raise NotImplementedError('subclasses of SessionBase must provide an exists() method') | [
"def",
"exists",
"(",
"self",
",",
"session_key",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of SessionBase must provide an exists() method'",
")"
] | [
350,
4
] | [
354,
94
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.create | (self) |
Create a new session instance. Guaranteed to create a new object with
a unique key and will have saved the result once (with empty data)
before the method returns.
|
Create a new session instance. Guaranteed to create a new object with
a unique key and will have saved the result once (with empty data)
before the method returns.
| def create(self):
"""
Create a new session instance. Guaranteed to create a new object with
a unique key and will have saved the result once (with empty data)
before the method returns.
"""
raise NotImplementedError('subclasses of SessionBase must provide a create() method') | [
"def",
"create",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of SessionBase must provide a create() method'",
")"
] | [
356,
4
] | [
362,
93
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.save | (self, must_create=False) |
Save the session data. If 'must_create' is True, create a new session
object (or raise CreateError). Otherwise, only update an existing
object and don't create one (raise UpdateError if needed).
|
Save the session data. If 'must_create' is True, create a new session
object (or raise CreateError). Otherwise, only update an existing
object and don't create one (raise UpdateError if needed).
| def save(self, must_create=False):
"""
Save the session data. If 'must_create' is True, create a new session
object (or raise CreateError). Otherwise, only update an existing
object and don't create one (raise UpdateError if needed).
"""
raise NotImplementedError('subclasses of SessionBase must provide a save() method') | [
"def",
"save",
"(",
"self",
",",
"must_create",
"=",
"False",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of SessionBase must provide a save() method'",
")"
] | [
364,
4
] | [
370,
91
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.delete | (self, session_key=None) |
Delete the session data under this key. If the key is None, use the
current session key value.
|
Delete the session data under this key. If the key is None, use the
current session key value.
| def delete(self, session_key=None):
"""
Delete the session data under this key. If the key is None, use the
current session key value.
"""
raise NotImplementedError('subclasses of SessionBase must provide a delete() method') | [
"def",
"delete",
"(",
"self",
",",
"session_key",
"=",
"None",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of SessionBase must provide a delete() method'",
")"
] | [
372,
4
] | [
377,
93
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.load | (self) |
Load the session data and return a dictionary.
|
Load the session data and return a dictionary.
| def load(self):
"""
Load the session data and return a dictionary.
"""
raise NotImplementedError('subclasses of SessionBase must provide a load() method') | [
"def",
"load",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of SessionBase must provide a load() method'",
")"
] | [
379,
4
] | [
383,
91
] | python | en | ['en', 'error', 'th'] | False |
SessionBase.clear_expired | (cls) |
Remove expired sessions from the session store.
If this operation isn't possible on a given backend, it should raise
NotImplementedError. If it isn't necessary, because the backend has
a built-in expiration mechanism, it should be a no-op.
|
Remove expired sessions from the session store. | def clear_expired(cls):
"""
Remove expired sessions from the session store.
If this operation isn't possible on a given backend, it should raise
NotImplementedError. If it isn't necessary, because the backend has
a built-in expiration mechanism, it should be a no-op.
"""
raise NotImplementedError('This backend does not support clear_expired().') | [
"def",
"clear_expired",
"(",
"cls",
")",
":",
"raise",
"NotImplementedError",
"(",
"'This backend does not support clear_expired().'",
")"
] | [
386,
4
] | [
394,
83
] | python | en | ['en', 'error', 'th'] | False |
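The six stub rows above (`exists`, `create`, `save`, `delete`, `load`, `clear_expired`) are the whole storage contract for a session backend. A minimal in-memory sketch of that contract, for illustration only: the class name and module-level dict are made up, nothing is persisted, and error handling is pared down.

```python
from django.contrib.sessions.backends.base import CreateError, SessionBase

_STORE = {}  # hypothetical process-local storage

class InMemorySessionStore(SessionBase):
    def exists(self, session_key):
        return session_key in _STORE

    def create(self):
        self._session_key = self._get_new_session_key()
        self.save(must_create=True)
        self.modified = True

    def save(self, must_create=False):
        if self.session_key is None:
            return self.create()
        if must_create and self.exists(self.session_key):
            raise CreateError
        _STORE[self.session_key] = self._get_session(no_load=must_create)

    def delete(self, session_key=None):
        _STORE.pop(session_key or self.session_key, None)

    def load(self):
        return dict(_STORE.get(self.session_key, {}))

    @classmethod
    def clear_expired(cls):
        pass  # nothing persisted, so nothing to cull
```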
BaseDatabaseOperations.autoinc_sql | (self, table, column) |
Return any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
|
Return any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary. | def autoinc_sql(self, table, column):
"""
Return any SQL needed to support auto-incrementing primary keys, or
None if no SQL is necessary.
This SQL is executed when a table is created.
"""
return None | [
"def",
"autoinc_sql",
"(",
"self",
",",
"table",
",",
"column",
")",
":",
"return",
"None"
] | [
59,
4
] | [
66,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.bulk_batch_size | (self, fields, objs) |
Return the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
|
Return the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
| def bulk_batch_size(self, fields, objs):
"""
Return the maximum allowed batch size for the backend. The fields
are the fields going to be inserted in the batch, the objs contains
all the objects to be inserted.
"""
return len(objs) | [
"def",
"bulk_batch_size",
"(",
"self",
",",
"fields",
",",
"objs",
")",
":",
"return",
"len",
"(",
"objs",
")"
] | [
68,
4
] | [
74,
24
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.cache_key_culling_sql | (self) |
Return an SQL query that retrieves the first cache key greater than the
n smallest.
This is used by the 'db' cache backend to determine where to start
culling.
|
Return an SQL query that retrieves the first cache key greater than the
n smallest. | def cache_key_culling_sql(self):
"""
Return an SQL query that retrieves the first cache key greater than the
n smallest.
This is used by the 'db' cache backend to determine where to start
culling.
"""
return "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s" | [
"def",
"cache_key_culling_sql",
"(",
"self",
")",
":",
"return",
"\"SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s\""
] | [
76,
4
] | [
84,
79
] | python | en | ['en', 'error', 'th'] | False |
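The doubled percent sign in the row above is deliberate: the table name is interpolated first with `%`, which leaves a single `%s` behind as the real query parameter for the OFFSET. Illustration:

```python
sql = "SELECT cache_key FROM %s ORDER BY cache_key LIMIT 1 OFFSET %%s"
print(sql % '"my_cache_table"')
# SELECT cache_key FROM "my_cache_table" ORDER BY cache_key LIMIT 1 OFFSET %s
```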
BaseDatabaseOperations.unification_cast_sql | (self, output_field) |
Given a field instance, return the SQL that casts the result of a union
to that type. The resulting string should contain a '%s' placeholder
for the expression being cast.
|
Given a field instance, return the SQL that casts the result of a union
to that type. The resulting string should contain a '%s' placeholder
for the expression being cast.
| def unification_cast_sql(self, output_field):
"""
Given a field instance, return the SQL that casts the result of a union
to that type. The resulting string should contain a '%s' placeholder
for the expression being cast.
"""
return '%s' | [
"def",
"unification_cast_sql",
"(",
"self",
",",
"output_field",
")",
":",
"return",
"'%s'"
] | [
86,
4
] | [
92,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.date_extract_sql | (self, lookup_type, field_name) |
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
extracts a value from the given date field field_name.
|
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
extracts a value from the given date field field_name.
| def date_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
extracts a value from the given date field field_name.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_extract_sql() method') | [
"def",
"date_extract_sql",
"(",
"self",
",",
"lookup_type",
",",
"field_name",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a date_extract_sql() method'",
")"
] | [
94,
4
] | [
99,
113
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.date_trunc_sql | (self, lookup_type, field_name, tzname=None) |
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
truncates the given date or datetime field field_name to a date object
with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
|
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
truncates the given date or datetime field field_name to a date object
with only the given specificity. | def date_trunc_sql(self, lookup_type, field_name, tzname=None):
"""
Given a lookup_type of 'year', 'month', or 'day', return the SQL that
truncates the given date or datetime field field_name to a date object
with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a date_trunc_sql() method.') | [
"def",
"date_trunc_sql",
"(",
"self",
",",
"lookup_type",
",",
"field_name",
",",
"tzname",
"=",
"None",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a date_trunc_sql() method.'",
")"
] | [
101,
4
] | [
110,
112
] | python | en | ['en', 'error', 'th'] | False |
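The extract/trunc rows in this block are backend hooks left unimplemented in the base class. As a purely hypothetical illustration of what a concrete backend returns, a dialect with a `DATE_TRUNC(unit, expr)` function might do something like the following; this is not Django's actual PostgreSQL or MySQL code, and `tzname` handling is omitted.

```python
def date_trunc_sql(lookup_type, field_name, tzname=None):
    # Hypothetical backend: assumes a DATE_TRUNC(unit, expr) SQL function.
    return "DATE_TRUNC('%s', %s)" % (lookup_type, field_name)

print(date_trunc_sql('month', '"app_model"."created"'))
# DATE_TRUNC('month', "app_model"."created")
```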
BaseDatabaseOperations.datetime_cast_date_sql | (self, field_name, tzname) |
Return the SQL to cast a datetime value to date value.
|
Return the SQL to cast a datetime value to date value.
| def datetime_cast_date_sql(self, field_name, tzname):
"""
Return the SQL to cast a datetime value to date value.
"""
raise NotImplementedError(
'subclasses of BaseDatabaseOperations may require a '
'datetime_cast_date_sql() method.'
) | [
"def",
"datetime_cast_date_sql",
"(",
"self",
",",
"field_name",
",",
"tzname",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a '",
"'datetime_cast_date_sql() method.'",
")"
] | [
112,
4
] | [
119,
9
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.datetime_cast_time_sql | (self, field_name, tzname) |
Return the SQL to cast a datetime value to time value.
|
Return the SQL to cast a datetime value to time value.
| def datetime_cast_time_sql(self, field_name, tzname):
"""
Return the SQL to cast a datetime value to time value.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_cast_time_sql() method') | [
"def",
"datetime_cast_time_sql",
"(",
"self",
",",
"field_name",
",",
"tzname",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a datetime_cast_time_sql() method'",
")"
] | [
121,
4
] | [
125,
119
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.datetime_extract_sql | (self, lookup_type, field_name, tzname) |
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that extracts a value from the given
datetime field field_name.
|
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that extracts a value from the given
datetime field field_name.
| def datetime_extract_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that extracts a value from the given
datetime field field_name.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method') | [
"def",
"datetime_extract_sql",
"(",
"self",
",",
"lookup_type",
",",
"field_name",
",",
"tzname",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a datetime_extract_sql() method'",
")"
] | [
127,
4
] | [
133,
117
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.datetime_trunc_sql | (self, lookup_type, field_name, tzname) |
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that truncates the given datetime field
field_name to a datetime object with only the given specificity.
|
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that truncates the given datetime field
field_name to a datetime object with only the given specificity.
| def datetime_trunc_sql(self, lookup_type, field_name, tzname):
"""
Given a lookup_type of 'year', 'month', 'day', 'hour', 'minute', or
'second', return the SQL that truncates the given datetime field
field_name to a datetime object with only the given specificity.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method') | [
"def",
"datetime_trunc_sql",
"(",
"self",
",",
"lookup_type",
",",
"field_name",
",",
"tzname",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a datetime_trunc_sql() method'",
")"
] | [
135,
4
] | [
141,
115
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.time_trunc_sql | (self, lookup_type, field_name, tzname=None) |
Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
that truncates the given time or datetime field field_name to a time
object with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
|
Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
that truncates the given time or datetime field field_name to a time
object with only the given specificity. | def time_trunc_sql(self, lookup_type, field_name, tzname=None):
"""
Given a lookup_type of 'hour', 'minute' or 'second', return the SQL
that truncates the given time or datetime field field_name to a time
object with only the given specificity.
If `tzname` is provided, the given value is truncated in a specific
timezone.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a time_trunc_sql() method') | [
"def",
"time_trunc_sql",
"(",
"self",
",",
"lookup_type",
",",
"field_name",
",",
"tzname",
"=",
"None",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a time_trunc_sql() method'",
")"
] | [
143,
4
] | [
152,
111
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.time_extract_sql | (self, lookup_type, field_name) |
Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
that extracts a value from the given time field field_name.
|
Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
that extracts a value from the given time field field_name.
| def time_extract_sql(self, lookup_type, field_name):
"""
Given a lookup_type of 'hour', 'minute', or 'second', return the SQL
that extracts a value from the given time field field_name.
"""
return self.date_extract_sql(lookup_type, field_name) | [
"def",
"time_extract_sql",
"(",
"self",
",",
"lookup_type",
",",
"field_name",
")",
":",
"return",
"self",
".",
"date_extract_sql",
"(",
"lookup_type",
",",
"field_name",
")"
] | [
154,
4
] | [
159,
61
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.deferrable_sql | (self) |
Return the SQL to make a constraint "initially deferred" during a
CREATE TABLE statement.
|
Return the SQL to make a constraint "initially deferred" during a
CREATE TABLE statement.
| def deferrable_sql(self):
"""
Return the SQL to make a constraint "initially deferred" during a
CREATE TABLE statement.
"""
return '' | [
"def",
"deferrable_sql",
"(",
"self",
")",
":",
"return",
"''"
] | [
161,
4
] | [
166,
17
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.distinct_sql | (self, fields, params) |
Return an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only check the given fields for
duplicates.
|
Return an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only check the given fields for
duplicates.
| def distinct_sql(self, fields, params):
"""
Return an SQL DISTINCT clause which removes duplicate rows from the
result set. If any fields are given, only check the given fields for
duplicates.
"""
if fields:
raise NotSupportedError('DISTINCT ON fields is not supported by this database backend')
else:
return ['DISTINCT'], [] | [
"def",
"distinct_sql",
"(",
"self",
",",
"fields",
",",
"params",
")",
":",
"if",
"fields",
":",
"raise",
"NotSupportedError",
"(",
"'DISTINCT ON fields is not supported by this database backend'",
")",
"else",
":",
"return",
"[",
"'DISTINCT'",
"]",
",",
"[",
"]"
] | [
168,
4
] | [
177,
35
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.fetch_returned_insert_columns | (self, cursor, returning_params) |
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the newly created data.
|
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the newly created data.
| def fetch_returned_insert_columns(self, cursor, returning_params):
"""
Given a cursor object that has just performed an INSERT...RETURNING
statement into a table, return the newly created data.
"""
return cursor.fetchone() | [
"def",
"fetch_returned_insert_columns",
"(",
"self",
",",
"cursor",
",",
"returning_params",
")",
":",
"return",
"cursor",
".",
"fetchone",
"(",
")"
] | [
179,
4
] | [
184,
32
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.field_cast_sql | (self, db_type, internal_type) |
Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
(e.g. 'GenericIPAddressField'), return the SQL to cast it before using
it in a WHERE statement. The resulting string should contain a '%s'
placeholder for the column being searched against.
|
Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
(e.g. 'GenericIPAddressField'), return the SQL to cast it before using
it in a WHERE statement. The resulting string should contain a '%s'
placeholder for the column being searched against.
| def field_cast_sql(self, db_type, internal_type):
"""
Given a column type (e.g. 'BLOB', 'VARCHAR') and an internal type
(e.g. 'GenericIPAddressField'), return the SQL to cast it before using
it in a WHERE statement. The resulting string should contain a '%s'
placeholder for the column being searched against.
"""
return '%s' | [
"def",
"field_cast_sql",
"(",
"self",
",",
"db_type",
",",
"internal_type",
")",
":",
"return",
"'%s'"
] | [
186,
4
] | [
193,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.force_no_ordering | (self) |
Return a list used in the "ORDER BY" clause to force no ordering at
all. Return an empty list to include nothing in the ordering.
|
Return a list used in the "ORDER BY" clause to force no ordering at
all. Return an empty list to include nothing in the ordering.
| def force_no_ordering(self):
"""
Return a list used in the "ORDER BY" clause to force no ordering at
all. Return an empty list to include nothing in the ordering.
"""
return [] | [
"def",
"force_no_ordering",
"(",
"self",
")",
":",
"return",
"[",
"]"
] | [
195,
4
] | [
200,
17
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.for_update_sql | (self, nowait=False, skip_locked=False, of=(), no_key=False) |
Return the FOR UPDATE SQL clause to lock rows for an update operation.
|
Return the FOR UPDATE SQL clause to lock rows for an update operation.
| def for_update_sql(self, nowait=False, skip_locked=False, of=(), no_key=False):
"""
Return the FOR UPDATE SQL clause to lock rows for an update operation.
"""
return 'FOR%s UPDATE%s%s%s' % (
' NO KEY' if no_key else '',
' OF %s' % ', '.join(of) if of else '',
' NOWAIT' if nowait else '',
' SKIP LOCKED' if skip_locked else '',
) | [
"def",
"for_update_sql",
"(",
"self",
",",
"nowait",
"=",
"False",
",",
"skip_locked",
"=",
"False",
",",
"of",
"=",
"(",
")",
",",
"no_key",
"=",
"False",
")",
":",
"return",
"'FOR%s UPDATE%s%s%s'",
"%",
"(",
"' NO KEY'",
"if",
"no_key",
"else",
"''",
",",
"' OF %s'",
"%",
"', '",
".",
"join",
"(",
"of",
")",
"if",
"of",
"else",
"''",
",",
"' NOWAIT'",
"if",
"nowait",
"else",
"''",
",",
"' SKIP LOCKED'",
"if",
"skip_locked",
"else",
"''",
",",
")"
] | [
202,
4
] | [
211,
9
] | python | en | ['en', 'error', 'th'] | False |
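The clause builder above is pure string formatting; reproducing it inline as a free function shows the SQL fragments a few flag combinations yield (real callers reach it through `QuerySet.select_for_update()`):

```python
def for_update_sql(nowait=False, skip_locked=False, of=(), no_key=False):
    return 'FOR%s UPDATE%s%s%s' % (
        ' NO KEY' if no_key else '',
        ' OF %s' % ', '.join(of) if of else '',
        ' NOWAIT' if nowait else '',
        ' SKIP LOCKED' if skip_locked else '',
    )

print(for_update_sql())                                  # FOR UPDATE
print(for_update_sql(nowait=True))                       # FOR UPDATE NOWAIT
print(for_update_sql(no_key=True, of=('"self"',),
                     skip_locked=True))                  # FOR NO KEY UPDATE OF "self" SKIP LOCKED
```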
BaseDatabaseOperations.limit_offset_sql | (self, low_mark, high_mark) | Return LIMIT/OFFSET SQL clause. | Return LIMIT/OFFSET SQL clause. | def limit_offset_sql(self, low_mark, high_mark):
"""Return LIMIT/OFFSET SQL clause."""
limit, offset = self._get_limit_offset_params(low_mark, high_mark)
return ' '.join(sql for sql in (
('LIMIT %d' % limit) if limit else None,
('OFFSET %d' % offset) if offset else None,
) if sql) | [
"def",
"limit_offset_sql",
"(",
"self",
",",
"low_mark",
",",
"high_mark",
")",
":",
"limit",
",",
"offset",
"=",
"self",
".",
"_get_limit_offset_params",
"(",
"low_mark",
",",
"high_mark",
")",
"return",
"' '",
".",
"join",
"(",
"sql",
"for",
"sql",
"in",
"(",
"(",
"'LIMIT %d'",
"%",
"limit",
")",
"if",
"limit",
"else",
"None",
",",
"(",
"'OFFSET %d'",
"%",
"offset",
")",
"if",
"offset",
"else",
"None",
",",
")",
"if",
"sql",
")"
] | [
221,
4
] | [
227,
17
] | python | de | ['de', 'fr', 'en'] | False |
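The final join in the row above silently drops whichever of LIMIT/OFFSET is falsy. The `(limit, offset)` pairs below stand in for what the private `_get_limit_offset_params()` helper returns:

```python
for limit, offset in [(10, 0), (10, 20), (None, 20)]:
    clause = ' '.join(sql for sql in (
        ('LIMIT %d' % limit) if limit else None,
        ('OFFSET %d' % offset) if offset else None,
    ) if sql)
    print(repr(clause))
# 'LIMIT 10'
# 'LIMIT 10 OFFSET 20'
# 'OFFSET 20'
```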
BaseDatabaseOperations.last_executed_query | (self, cursor, sql, params) |
Return a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
|
Return a string of the query last executed by the given cursor, with
placeholders replaced with actual values. | def last_executed_query(self, cursor, sql, params):
"""
Return a string of the query last executed by the given cursor, with
placeholders replaced with actual values.
`sql` is the raw query containing placeholders and `params` is the
sequence of parameters. These are used by default, but this method
exists for database backends to provide a better implementation
according to their own quoting schemes.
"""
# Convert params to contain string values.
def to_string(s):
return force_str(s, strings_only=True, errors='replace')
if isinstance(params, (list, tuple)):
u_params = tuple(to_string(val) for val in params)
elif params is None:
u_params = ()
else:
u_params = {to_string(k): to_string(v) for k, v in params.items()}
return "QUERY = %r - PARAMS = %r" % (sql, u_params) | [
"def",
"last_executed_query",
"(",
"self",
",",
"cursor",
",",
"sql",
",",
"params",
")",
":",
"# Convert params to contain string values.",
"def",
"to_string",
"(",
"s",
")",
":",
"return",
"force_str",
"(",
"s",
",",
"strings_only",
"=",
"True",
",",
"errors",
"=",
"'replace'",
")",
"if",
"isinstance",
"(",
"params",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"u_params",
"=",
"tuple",
"(",
"to_string",
"(",
"val",
")",
"for",
"val",
"in",
"params",
")",
"elif",
"params",
"is",
"None",
":",
"u_params",
"=",
"(",
")",
"else",
":",
"u_params",
"=",
"{",
"to_string",
"(",
"k",
")",
":",
"to_string",
"(",
"v",
")",
"for",
"k",
",",
"v",
"in",
"params",
".",
"items",
"(",
")",
"}",
"return",
"\"QUERY = %r - PARAMS = %r\"",
"%",
"(",
"sql",
",",
"u_params",
")"
] | [
229,
4
] | [
249,
59
] | python | en | ['en', 'error', 'th'] | False |
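A sketch of the three parameter shapes the row above normalises, with plain `str()` standing in for `django.utils.encoding.force_str`:

```python
def to_string(s):
    return str(s)  # stand-in for force_str(s, strings_only=True, errors='replace')

for params in (["a", 1], None, {"pk": 7}):
    if isinstance(params, (list, tuple)):
        u_params = tuple(to_string(val) for val in params)
    elif params is None:
        u_params = ()
    else:
        u_params = {to_string(k): to_string(v) for k, v in params.items()}
    print("QUERY = %r - PARAMS = %r" % ("SELECT 1", u_params))
```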
BaseDatabaseOperations.last_insert_id | (self, cursor, table_name, pk_name) |
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, return the newly created ID.
`pk_name` is the name of the primary-key column.
|
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, return the newly created ID. | def last_insert_id(self, cursor, table_name, pk_name):
"""
Given a cursor object that has just performed an INSERT statement into
a table that has an auto-incrementing ID, return the newly created ID.
`pk_name` is the name of the primary-key column.
"""
return cursor.lastrowid | [
"def",
"last_insert_id",
"(",
"self",
",",
"cursor",
",",
"table_name",
",",
"pk_name",
")",
":",
"return",
"cursor",
".",
"lastrowid"
] | [
251,
4
] | [
258,
31
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.lookup_cast | (self, lookup_type, internal_type=None) |
Return the string to use in a query when performing lookups
("contains", "like", etc.). It should contain a '%s' placeholder for
the column being searched against.
|
Return the string to use in a query when performing lookups
("contains", "like", etc.). It should contain a '%s' placeholder for
the column being searched against.
| def lookup_cast(self, lookup_type, internal_type=None):
"""
Return the string to use in a query when performing lookups
("contains", "like", etc.). It should contain a '%s' placeholder for
the column being searched against.
"""
return "%s" | [
"def",
"lookup_cast",
"(",
"self",
",",
"lookup_type",
",",
"internal_type",
"=",
"None",
")",
":",
"return",
"\"%s\""
] | [
260,
4
] | [
266,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.max_in_list_size | (self) |
Return the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
|
Return the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
| def max_in_list_size(self):
"""
Return the maximum number of items that can be passed in a single 'IN'
list condition, or None if the backend does not impose a limit.
"""
return None | [
"def",
"max_in_list_size",
"(",
"self",
")",
":",
"return",
"None"
] | [
268,
4
] | [
273,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.max_name_length | (self) |
Return the maximum length of table and column names, or None if there
is no limit.
|
Return the maximum length of table and column names, or None if there
is no limit.
| def max_name_length(self):
"""
Return the maximum length of table and column names, or None if there
is no limit.
"""
return None | [
"def",
"max_name_length",
"(",
"self",
")",
":",
"return",
"None"
] | [
275,
4
] | [
280,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.no_limit_value | (self) |
Return the value to use for the LIMIT when we are wanting "LIMIT
infinity". Return None if the limit clause can be omitted in this case.
|
Return the value to use for the LIMIT when we are wanting "LIMIT
infinity". Return None if the limit clause can be omitted in this case.
| def no_limit_value(self):
"""
Return the value to use for the LIMIT when we are wanting "LIMIT
infinity". Return None if the limit clause can be omitted in this case.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a no_limit_value() method') | [
"def",
"no_limit_value",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a no_limit_value() method'",
")"
] | [
282,
4
] | [
287,
111
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.pk_default_value | (self) |
Return the value to use during an INSERT statement to specify that
the field should use its default value.
|
Return the value to use during an INSERT statement to specify that
the field should use its default value.
| def pk_default_value(self):
"""
Return the value to use during an INSERT statement to specify that
the field should use its default value.
"""
return 'DEFAULT' | [
"def",
"pk_default_value",
"(",
"self",
")",
":",
"return",
"'DEFAULT'"
] | [
289,
4
] | [
294,
24
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.prepare_sql_script | (self, sql) |
Take an SQL script that may contain multiple lines and return a list
of statements to feed to successive cursor.execute() calls.
Since few databases are able to process raw SQL scripts in a single
cursor.execute() call and PEP 249 doesn't talk about this use case,
the default implementation is conservative.
|
Take an SQL script that may contain multiple lines and return a list
of statements to feed to successive cursor.execute() calls. | def prepare_sql_script(self, sql):
"""
Take an SQL script that may contain multiple lines and return a list
of statements to feed to successive cursor.execute() calls.
Since few databases are able to process raw SQL scripts in a single
cursor.execute() call and PEP 249 doesn't talk about this use case,
the default implementation is conservative.
"""
return [
sqlparse.format(statement, strip_comments=True)
for statement in sqlparse.split(sql) if statement
] | [
"def",
"prepare_sql_script",
"(",
"self",
",",
"sql",
")",
":",
"return",
"[",
"sqlparse",
".",
"format",
"(",
"statement",
",",
"strip_comments",
"=",
"True",
")",
"for",
"statement",
"in",
"sqlparse",
".",
"split",
"(",
"sql",
")",
"if",
"statement",
"]"
] | [
296,
4
] | [
308,
9
] | python | en | ['en', 'error', 'th'] | False |
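A quick look at what the `sqlparse` calls above do to a two-statement script; `sqlparse` is a third-party package, and the exact whitespace in its output may differ slightly from the comment shown:

```python
import sqlparse

script = """
-- schema
CREATE TABLE t (id integer);
INSERT INTO t VALUES (1);
"""

statements = [
    sqlparse.format(statement, strip_comments=True)
    for statement in sqlparse.split(script) if statement
]
print(statements)  # roughly: ['CREATE TABLE t (id integer);', 'INSERT INTO t VALUES (1);']
```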
BaseDatabaseOperations.process_clob | (self, value) |
Return the value of a CLOB column, for backends that return a locator
object that requires additional processing.
|
Return the value of a CLOB column, for backends that return a locator
object that requires additional processing.
| def process_clob(self, value):
"""
Return the value of a CLOB column, for backends that return a locator
object that requires additional processing.
"""
return value | [
"def",
"process_clob",
"(",
"self",
",",
"value",
")",
":",
"return",
"value"
] | [
310,
4
] | [
315,
20
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.return_insert_columns | (self, fields) |
For backends that support returning columns as part of an insert query,
return the SQL and params to append to the INSERT query. The returned
fragment should contain a format string to hold the appropriate column.
|
For backends that support returning columns as part of an insert query,
return the SQL and params to append to the INSERT query. The returned
fragment should contain a format string to hold the appropriate column.
| def return_insert_columns(self, fields):
"""
For backends that support returning columns as part of an insert query,
return the SQL and params to append to the INSERT query. The returned
fragment should contain a format string to hold the appropriate column.
"""
pass | [
"def",
"return_insert_columns",
"(",
"self",
",",
"fields",
")",
":",
"pass"
] | [
317,
4
] | [
323,
12
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.compiler | (self, compiler_name) |
Return the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
|
Return the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
| def compiler(self, compiler_name):
"""
Return the SQLCompiler class corresponding to the given name,
in the namespace corresponding to the `compiler_module` attribute
on this backend.
"""
if self._cache is None:
self._cache = import_module(self.compiler_module)
return getattr(self._cache, compiler_name) | [
"def",
"compiler",
"(",
"self",
",",
"compiler_name",
")",
":",
"if",
"self",
".",
"_cache",
"is",
"None",
":",
"self",
".",
"_cache",
"=",
"import_module",
"(",
"self",
".",
"compiler_module",
")",
"return",
"getattr",
"(",
"self",
".",
"_cache",
",",
"compiler_name",
")"
] | [
325,
4
] | [
333,
50
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.quote_name | (self, name) |
Return a quoted version of the given table, index, or column name. Do
not quote the given name if it's already been quoted.
|
Return a quoted version of the given table, index, or column name. Do
not quote the given name if it's already been quoted.
| def quote_name(self, name):
"""
Return a quoted version of the given table, index, or column name. Do
not quote the given name if it's already been quoted.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a quote_name() method') | [
"def",
"quote_name",
"(",
"self",
",",
"name",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a quote_name() method'",
")"
] | [
335,
4
] | [
340,
107
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.regex_lookup | (self, lookup_type) |
Return the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). It should contain a '%s'
placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), raise
NotImplementedError.
|
Return the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). It should contain a '%s'
placeholder for the column being searched against. | def regex_lookup(self, lookup_type):
"""
Return the string to use in a query when performing regular expression
lookups (using "regex" or "iregex"). It should contain a '%s'
placeholder for the column being searched against.
If the feature is not supported (or part of it is not supported), raise
NotImplementedError.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations may require a regex_lookup() method') | [
"def",
"regex_lookup",
"(",
"self",
",",
"lookup_type",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations may require a regex_lookup() method'",
")"
] | [
342,
4
] | [
351,
109
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.savepoint_create_sql | (self, sid) |
Return the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
|
Return the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
| def savepoint_create_sql(self, sid):
"""
Return the SQL for starting a new savepoint. Only required if the
"uses_savepoints" feature is True. The "sid" parameter is a string
for the savepoint id.
"""
return "SAVEPOINT %s" % self.quote_name(sid) | [
"def",
"savepoint_create_sql",
"(",
"self",
",",
"sid",
")",
":",
"return",
"\"SAVEPOINT %s\"",
"%",
"self",
".",
"quote_name",
"(",
"sid",
")"
] | [
353,
4
] | [
359,
52
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.savepoint_commit_sql | (self, sid) |
Return the SQL for committing the given savepoint.
|
Return the SQL for committing the given savepoint.
| def savepoint_commit_sql(self, sid):
"""
Return the SQL for committing the given savepoint.
"""
return "RELEASE SAVEPOINT %s" % self.quote_name(sid) | [
"def",
"savepoint_commit_sql",
"(",
"self",
",",
"sid",
")",
":",
"return",
"\"RELEASE SAVEPOINT %s\"",
"%",
"self",
".",
"quote_name",
"(",
"sid",
")"
] | [
361,
4
] | [
365,
60
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.savepoint_rollback_sql | (self, sid) |
Return the SQL for rolling back the given savepoint.
|
Return the SQL for rolling back the given savepoint.
| def savepoint_rollback_sql(self, sid):
"""
Return the SQL for rolling back the given savepoint.
"""
return "ROLLBACK TO SAVEPOINT %s" % self.quote_name(sid) | [
"def",
"savepoint_rollback_sql",
"(",
"self",
",",
"sid",
")",
":",
"return",
"\"ROLLBACK TO SAVEPOINT %s\"",
"%",
"self",
".",
"quote_name",
"(",
"sid",
")"
] | [
367,
4
] | [
371,
64
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.set_time_zone_sql | (self) |
Return the SQL that will set the connection's time zone.
Return '' if the backend doesn't support time zones.
|
Return the SQL that will set the connection's time zone. | def set_time_zone_sql(self):
"""
Return the SQL that will set the connection's time zone.
Return '' if the backend doesn't support time zones.
"""
return '' | [
"def",
"set_time_zone_sql",
"(",
"self",
")",
":",
"return",
"''"
] | [
373,
4
] | [
379,
17
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.sql_flush | (self, style, tables, *, reset_sequences=False, allow_cascade=False) |
Return a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
If `reset_sequences` is True, the list includes SQL statements required
to reset the sequences.
The `allow_cascade` argument determines whether truncation may cascade
to tables with foreign keys pointing the tables being truncated.
PostgreSQL requires a cascade even if these tables are empty.
|
Return a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves). | def sql_flush(self, style, tables, *, reset_sequences=False, allow_cascade=False):
"""
Return a list of SQL statements required to remove all data from
the given database tables (without actually removing the tables
themselves).
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
If `reset_sequences` is True, the list includes SQL statements required
to reset the sequences.
The `allow_cascade` argument determines whether truncation may cascade
to tables with foreign keys pointing the tables being truncated.
PostgreSQL requires a cascade even if these tables are empty.
"""
raise NotImplementedError('subclasses of BaseDatabaseOperations must provide an sql_flush() method') | [
"def",
"sql_flush",
"(",
"self",
",",
"style",
",",
"tables",
",",
"*",
",",
"reset_sequences",
"=",
"False",
",",
"allow_cascade",
"=",
"False",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseDatabaseOperations must provide an sql_flush() method'",
")"
] | [
381,
4
] | [
397,
108
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.execute_sql_flush | (self, sql_list) | Execute a list of SQL statements to flush the database. | Execute a list of SQL statements to flush the database. | def execute_sql_flush(self, sql_list):
"""Execute a list of SQL statements to flush the database."""
with transaction.atomic(
using=self.connection.alias,
savepoint=self.connection.features.can_rollback_ddl,
):
with self.connection.cursor() as cursor:
for sql in sql_list:
cursor.execute(sql) | [
"def",
"execute_sql_flush",
"(",
"self",
",",
"sql_list",
")",
":",
"with",
"transaction",
".",
"atomic",
"(",
"using",
"=",
"self",
".",
"connection",
".",
"alias",
",",
"savepoint",
"=",
"self",
".",
"connection",
".",
"features",
".",
"can_rollback_ddl",
",",
")",
":",
"with",
"self",
".",
"connection",
".",
"cursor",
"(",
")",
"as",
"cursor",
":",
"for",
"sql",
"in",
"sql_list",
":",
"cursor",
".",
"execute",
"(",
"sql",
")"
] | [
399,
4
] | [
407,
39
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseOperations.sequence_reset_by_name_sql | (self, style, sequences) |
Return a list of the SQL statements required to reset sequences
passed in `sequences`.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
|
Return a list of the SQL statements required to reset sequences
passed in `sequences`. | def sequence_reset_by_name_sql(self, style, sequences):
"""
Return a list of the SQL statements required to reset sequences
passed in `sequences`.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] | [
"def",
"sequence_reset_by_name_sql",
"(",
"self",
",",
"style",
",",
"sequences",
")",
":",
"return",
"[",
"]"
] | [
409,
4
] | [
417,
17
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.sequence_reset_sql | (self, style, model_list) |
Return a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
|
Return a list of the SQL statements required to reset sequences for
the given models. | def sequence_reset_sql(self, style, model_list):
"""
Return a list of the SQL statements required to reset sequences for
the given models.
The `style` argument is a Style object as returned by either
color_style() or no_style() in django.core.management.color.
"""
return [] | [
"def",
"sequence_reset_sql",
"(",
"self",
",",
"style",
",",
"model_list",
")",
":",
"return",
"[",
"]"
] | [
419,
4
] | [
427,
17
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.start_transaction_sql | (self) | Return the SQL statement required to start a transaction. | Return the SQL statement required to start a transaction. | def start_transaction_sql(self):
"""Return the SQL statement required to start a transaction."""
return "BEGIN;" | [
"def",
"start_transaction_sql",
"(",
"self",
")",
":",
"return",
"\"BEGIN;\""
] | [
429,
4
] | [
431,
23
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseOperations.end_transaction_sql | (self, success=True) | Return the SQL statement required to end a transaction. | Return the SQL statement required to end a transaction. | def end_transaction_sql(self, success=True):
"""Return the SQL statement required to end a transaction."""
if not success:
return "ROLLBACK;"
return "COMMIT;" | [
"def",
"end_transaction_sql",
"(",
"self",
",",
"success",
"=",
"True",
")",
":",
"if",
"not",
"success",
":",
"return",
"\"ROLLBACK;\"",
"return",
"\"COMMIT;\""
] | [
433,
4
] | [
437,
24
] | python | en | ['en', 'en', 'en'] | True |
BaseDatabaseOperations.tablespace_sql | (self, tablespace, inline=False) |
Return the SQL that will be used in a query to define the tablespace.
Return '' if the backend doesn't support tablespaces.
If `inline` is True, append the SQL to a row; otherwise append it to
the entire CREATE TABLE or CREATE INDEX statement.
|
Return the SQL that will be used in a query to define the tablespace. | def tablespace_sql(self, tablespace, inline=False):
"""
Return the SQL that will be used in a query to define the tablespace.
Return '' if the backend doesn't support tablespaces.
If `inline` is True, append the SQL to a row; otherwise append it to
the entire CREATE TABLE or CREATE INDEX statement.
"""
return '' | [
"def",
"tablespace_sql",
"(",
"self",
",",
"tablespace",
",",
"inline",
"=",
"False",
")",
":",
"return",
"''"
] | [
439,
4
] | [
448,
17
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.prep_for_like_query | (self, x) | Prepare a value for use in a LIKE query. | Prepare a value for use in a LIKE query. | def prep_for_like_query(self, x):
"""Prepare a value for use in a LIKE query."""
return str(x).replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_") | [
"def",
"prep_for_like_query",
"(",
"self",
",",
"x",
")",
":",
"return",
"str",
"(",
"x",
")",
".",
"replace",
"(",
"\"\\\\\"",
",",
"\"\\\\\\\\\"",
")",
".",
"replace",
"(",
"\"%\"",
",",
"r\"\\%\"",
")",
".",
"replace",
"(",
"\"_\"",
",",
"r\"\\_\"",
")"
] | [
450,
4
] | [
452,
83
] | python | en | ['en', 'en', 'en'] | True |
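A small sketch of the escaping performed by prep_for_like_query() above, using a hypothetical search string; the three replace() calls double backslashes and escape the LIKE wildcards % and _.

raw = r"50%_off\price"
escaped = raw.replace("\\", "\\\\").replace("%", r"\%").replace("_", r"\_")
assert escaped == r"50\%\_off\\price"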
BaseDatabaseOperations.validate_autopk_value | (self, value) |
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). Raise a ValueError if the value is
invalid, otherwise return the validated value.
|
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). Raise a ValueError if the value is
invalid, otherwise return the validated value.
| def validate_autopk_value(self, value):
"""
Certain backends do not accept some values for "serial" fields
(for example zero in MySQL). Raise a ValueError if the value is
invalid, otherwise return the validated value.
"""
return value | [
"def",
"validate_autopk_value",
"(",
"self",
",",
"value",
")",
":",
"return",
"value"
] | [
458,
4
] | [
464,
20
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.adapt_unknown_value | (self, value) |
Transform a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
|
Transform a value to something compatible with the backend driver. | def adapt_unknown_value(self, value):
"""
Transform a value to something compatible with the backend driver.
This method only depends on the type of the value. It's designed for
cases where the target type isn't known, such as .raw() SQL queries.
As a consequence it may not work perfectly in all circumstances.
"""
if isinstance(value, datetime.datetime): # must be before date
return self.adapt_datetimefield_value(value)
elif isinstance(value, datetime.date):
return self.adapt_datefield_value(value)
elif isinstance(value, datetime.time):
return self.adapt_timefield_value(value)
elif isinstance(value, decimal.Decimal):
return self.adapt_decimalfield_value(value)
else:
return value | [
"def",
"adapt_unknown_value",
"(",
"self",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"datetime",
".",
"datetime",
")",
":",
"# must be before date",
"return",
"self",
".",
"adapt_datetimefield_value",
"(",
"value",
")",
"elif",
"isinstance",
"(",
"value",
",",
"datetime",
".",
"date",
")",
":",
"return",
"self",
".",
"adapt_datefield_value",
"(",
"value",
")",
"elif",
"isinstance",
"(",
"value",
",",
"datetime",
".",
"time",
")",
":",
"return",
"self",
".",
"adapt_timefield_value",
"(",
"value",
")",
"elif",
"isinstance",
"(",
"value",
",",
"decimal",
".",
"Decimal",
")",
":",
"return",
"self",
".",
"adapt_decimalfield_value",
"(",
"value",
")",
"else",
":",
"return",
"value"
] | [
466,
4
] | [
483,
24
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.adapt_datefield_value | (self, value) |
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
|
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
| def adapt_datefield_value(self, value):
"""
Transform a date value to an object compatible with what is expected
by the backend driver for date columns.
"""
if value is None:
return None
return str(value) | [
"def",
"adapt_datefield_value",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"None",
"return",
"str",
"(",
"value",
")"
] | [
485,
4
] | [
492,
25
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.adapt_datetimefield_value | (self, value) |
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
|
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
| def adapt_datetimefield_value(self, value):
"""
Transform a datetime value to an object compatible with what is expected
by the backend driver for datetime columns.
"""
if value is None:
return None
return str(value) | [
"def",
"adapt_datetimefield_value",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"None",
"return",
"str",
"(",
"value",
")"
] | [
494,
4
] | [
501,
25
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.adapt_timefield_value | (self, value) |
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
|
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
| def adapt_timefield_value(self, value):
"""
Transform a time value to an object compatible with what is expected
by the backend driver for time columns.
"""
if value is None:
return None
if timezone.is_aware(value):
raise ValueError("Django does not support timezone-aware times.")
return str(value) | [
"def",
"adapt_timefield_value",
"(",
"self",
",",
"value",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"None",
"if",
"timezone",
".",
"is_aware",
"(",
"value",
")",
":",
"raise",
"ValueError",
"(",
"\"Django does not support timezone-aware times.\"",
")",
"return",
"str",
"(",
"value",
")"
] | [
503,
4
] | [
512,
25
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.adapt_decimalfield_value | (self, value, max_digits=None, decimal_places=None) |
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
|
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
| def adapt_decimalfield_value(self, value, max_digits=None, decimal_places=None):
"""
Transform a decimal.Decimal value to an object compatible with what is
expected by the backend driver for decimal (numeric) columns.
"""
return utils.format_number(value, max_digits, decimal_places) | [
"def",
"adapt_decimalfield_value",
"(",
"self",
",",
"value",
",",
"max_digits",
"=",
"None",
",",
"decimal_places",
"=",
"None",
")",
":",
"return",
"utils",
".",
"format_number",
"(",
"value",
",",
"max_digits",
",",
"decimal_places",
")"
] | [
514,
4
] | [
519,
69
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.adapt_ipaddressfield_value | (self, value) |
Transform a string representation of an IP address into the expected
type for the backend driver.
|
Transform a string representation of an IP address into the expected
type for the backend driver.
| def adapt_ipaddressfield_value(self, value):
"""
Transform a string representation of an IP address into the expected
type for the backend driver.
"""
return value or None | [
"def",
"adapt_ipaddressfield_value",
"(",
"self",
",",
"value",
")",
":",
"return",
"value",
"or",
"None"
] | [
521,
4
] | [
526,
28
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.year_lookup_bounds_for_date_field | (self, value) |
Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year
lookup.
`value` is an int, containing the looked-up year.
|
Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year
lookup. | def year_lookup_bounds_for_date_field(self, value):
"""
Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.date(value, 1, 1)
second = datetime.date(value, 12, 31)
first = self.adapt_datefield_value(first)
second = self.adapt_datefield_value(second)
return [first, second] | [
"def",
"year_lookup_bounds_for_date_field",
"(",
"self",
",",
"value",
")",
":",
"first",
"=",
"datetime",
".",
"date",
"(",
"value",
",",
"1",
",",
"1",
")",
"second",
"=",
"datetime",
".",
"date",
"(",
"value",
",",
"12",
",",
"31",
")",
"first",
"=",
"self",
".",
"adapt_datefield_value",
"(",
"first",
")",
"second",
"=",
"self",
".",
"adapt_datefield_value",
"(",
"second",
")",
"return",
"[",
"first",
",",
"second",
"]"
] | [
528,
4
] | [
540,
30
] | python | en | ['en', 'error', 'th'] | False |
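A quick sketch of what the base implementation above produces for a __year=2021 lookup on a DateField, given that adapt_datefield_value() simply stringifies the dates; the year value is an arbitrary example.

import datetime

value = 2021
bounds = [str(datetime.date(value, 1, 1)), str(datetime.date(value, 12, 31))]
assert bounds == ['2021-01-01', '2021-12-31']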
BaseDatabaseOperations.year_lookup_bounds_for_datetime_field | (self, value) |
Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup.
`value` is an int, containing the looked-up year.
|
Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup. | def year_lookup_bounds_for_datetime_field(self, value):
"""
Return a two-element list with the lower and upper bound to be used
with a BETWEEN operator to query a DateTimeField value using a year
lookup.
`value` is an int, containing the looked-up year.
"""
first = datetime.datetime(value, 1, 1)
second = datetime.datetime(value, 12, 31, 23, 59, 59, 999999)
if settings.USE_TZ:
tz = timezone.get_current_timezone()
first = timezone.make_aware(first, tz)
second = timezone.make_aware(second, tz)
first = self.adapt_datetimefield_value(first)
second = self.adapt_datetimefield_value(second)
return [first, second] | [
"def",
"year_lookup_bounds_for_datetime_field",
"(",
"self",
",",
"value",
")",
":",
"first",
"=",
"datetime",
".",
"datetime",
"(",
"value",
",",
"1",
",",
"1",
")",
"second",
"=",
"datetime",
".",
"datetime",
"(",
"value",
",",
"12",
",",
"31",
",",
"23",
",",
"59",
",",
"59",
",",
"999999",
")",
"if",
"settings",
".",
"USE_TZ",
":",
"tz",
"=",
"timezone",
".",
"get_current_timezone",
"(",
")",
"first",
"=",
"timezone",
".",
"make_aware",
"(",
"first",
",",
"tz",
")",
"second",
"=",
"timezone",
".",
"make_aware",
"(",
"second",
",",
"tz",
")",
"first",
"=",
"self",
".",
"adapt_datetimefield_value",
"(",
"first",
")",
"second",
"=",
"self",
".",
"adapt_datetimefield_value",
"(",
"second",
")",
"return",
"[",
"first",
",",
"second",
"]"
] | [
542,
4
] | [
558,
30
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.get_db_converters | (self, expression) |
Return a list of functions needed to convert field data.
Some field types on some backends do not provide data in the correct
format, this is the hook for converter functions.
|
Return a list of functions needed to convert field data. | def get_db_converters(self, expression):
"""
Return a list of functions needed to convert field data.
Some field types on some backends do not provide data in the correct
format, this is the hook for converter functions.
"""
return [] | [
"def",
"get_db_converters",
"(",
"self",
",",
"expression",
")",
":",
"return",
"[",
"]"
] | [
560,
4
] | [
567,
17
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.check_expression_support | (self, expression) |
Check that the backend supports the provided expression.
This is used on specific backends to rule out known expressions
that have problematic or nonexistent implementations. If the
expression has a known problem, the backend should raise
NotSupportedError.
|
Check that the backend supports the provided expression. | def check_expression_support(self, expression):
"""
Check that the backend supports the provided expression.
This is used on specific backends to rule out known expressions
that have problematic or nonexistent implementations. If the
expression has a known problem, the backend should raise
NotSupportedError.
"""
pass | [
"def",
"check_expression_support",
"(",
"self",
",",
"expression",
")",
":",
"pass"
] | [
573,
4
] | [
582,
12
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.conditional_expression_supported_in_where_clause | (self, expression) |
Return True, if the conditional expression is supported in the WHERE
clause.
|
Return True, if the conditional expression is supported in the WHERE
clause.
| def conditional_expression_supported_in_where_clause(self, expression):
"""
Return True, if the conditional expression is supported in the WHERE
clause.
"""
return True | [
"def",
"conditional_expression_supported_in_where_clause",
"(",
"self",
",",
"expression",
")",
":",
"return",
"True"
] | [
584,
4
] | [
589,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.combine_expression | (self, connector, sub_expressions) |
Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions).
|
Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions).
| def combine_expression(self, connector, sub_expressions):
"""
Combine a list of subexpressions into a single expression, using
the provided connecting operator. This is required because operators
can vary between backends (e.g., Oracle with %% and &) and between
subexpression types (e.g., date expressions).
"""
conn = ' %s ' % connector
return conn.join(sub_expressions) | [
"def",
"combine_expression",
"(",
"self",
",",
"connector",
",",
"sub_expressions",
")",
":",
"conn",
"=",
"' %s '",
"%",
"connector",
"return",
"conn",
".",
"join",
"(",
"sub_expressions",
")"
] | [
591,
4
] | [
599,
41
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.binary_placeholder_sql | (self, value) |
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
|
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
| def binary_placeholder_sql(self, value):
"""
Some backends require special syntax to insert binary content (MySQL
for example uses '_binary %s').
"""
return '%s' | [
"def",
"binary_placeholder_sql",
"(",
"self",
",",
"value",
")",
":",
"return",
"'%s'"
] | [
604,
4
] | [
609,
19
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.modify_insert_params | (self, placeholder, params) |
Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
|
Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
| def modify_insert_params(self, placeholder, params):
"""
Allow modification of insert parameters. Needed for Oracle Spatial
backend due to #10888.
"""
return params | [
"def",
"modify_insert_params",
"(",
"self",
",",
"placeholder",
",",
"params",
")",
":",
"return",
"params"
] | [
611,
4
] | [
616,
21
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.integer_field_range | (self, internal_type) |
Given an integer field internal type (e.g. 'PositiveIntegerField'),
return a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
|
Given an integer field internal type (e.g. 'PositiveIntegerField'),
return a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
| def integer_field_range(self, internal_type):
"""
Given an integer field internal type (e.g. 'PositiveIntegerField'),
return a tuple of the (min_value, max_value) form representing the
range of the column type bound to the field.
"""
return self.integer_field_ranges[internal_type] | [
"def",
"integer_field_range",
"(",
"self",
",",
"internal_type",
")",
":",
"return",
"self",
".",
"integer_field_ranges",
"[",
"internal_type",
"]"
] | [
618,
4
] | [
624,
55
] | python | en | ['en', 'error', 'th'] | False |
BaseDatabaseOperations.window_frame_rows_start_end | (self, start=None, end=None) |
Return SQL for start and end points in an OVER clause window frame.
|
Return SQL for start and end points in an OVER clause window frame.
| def window_frame_rows_start_end(self, start=None, end=None):
"""
Return SQL for start and end points in an OVER clause window frame.
"""
if not self.connection.features.supports_over_clause:
raise NotSupportedError('This backend does not support window expressions.')
return self.window_frame_start(start), self.window_frame_end(end) | [
"def",
"window_frame_rows_start_end",
"(",
"self",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"connection",
".",
"features",
".",
"supports_over_clause",
":",
"raise",
"NotSupportedError",
"(",
"'This backend does not support window expressions.'",
")",
"return",
"self",
".",
"window_frame_start",
"(",
"start",
")",
",",
"self",
".",
"window_frame_end",
"(",
"end",
")"
] | [
653,
4
] | [
659,
73
] | python | en | ['en', 'error', 'th'] | False |
find_p_q | (nbits, getprime_func=rsa.prime.getprime, accurate=True) | Returns a tuple of two different primes of nbits bits each.
The resulting p * q has exactly 2 * nbits bits, and the returned p and q
will not be equal.
:param nbits: the number of bits in each of p and q.
:param getprime_func: the getprime function, defaults to
:py:func:`rsa.prime.getprime`.
*Introduced in Python-RSA 3.1*
:param accurate: whether to enable accurate mode or not.
:returns: (p, q), where p > q
>>> (p, q) = find_p_q(128)
>>> from rsa import common
>>> common.bit_size(p * q)
256
When not in accurate mode, the number of bits can be slightly less
>>> (p, q) = find_p_q(128, accurate=False)
>>> from rsa import common
>>> common.bit_size(p * q) <= 256
True
>>> common.bit_size(p * q) > 240
True
| Returns a tuple of two different primes of nbits bits each. | def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True):
"""Returns a tuple of two different primes of nbits bits each.
The resulting p * q has exactly 2 * nbits bits, and the returned p and q
will not be equal.
:param nbits: the number of bits in each of p and q.
:param getprime_func: the getprime function, defaults to
:py:func:`rsa.prime.getprime`.
*Introduced in Python-RSA 3.1*
:param accurate: whether to enable accurate mode or not.
:returns: (p, q), where p > q
>>> (p, q) = find_p_q(128)
>>> from rsa import common
>>> common.bit_size(p * q)
256
When not in accurate mode, the number of bits can be slightly less
>>> (p, q) = find_p_q(128, accurate=False)
>>> from rsa import common
>>> common.bit_size(p * q) <= 256
True
>>> common.bit_size(p * q) > 240
True
"""
total_bits = nbits * 2
# Make sure that p and q aren't too close or the factoring programs can
# factor n.
shift = nbits // 16
pbits = nbits + shift
qbits = nbits - shift
# Choose the two initial primes
log.debug('find_p_q(%i): Finding p', nbits)
p = getprime_func(pbits)
log.debug('find_p_q(%i): Finding q', nbits)
q = getprime_func(qbits)
def is_acceptable(p, q):
"""Returns True iff p and q are acceptable:
- p and q differ
- (p * q) has the right nr of bits (when accurate=True)
"""
if p == q:
return False
if not accurate:
return True
# Make sure we have just the right amount of bits
found_size = rsa.common.bit_size(p * q)
return total_bits == found_size
# Keep choosing other primes until they match our requirements.
change_p = False
while not is_acceptable(p, q):
# Change p on one iteration and q on the other
if change_p:
p = getprime_func(pbits)
else:
q = getprime_func(qbits)
change_p = not change_p
# We want p > q as described on
# http://www.di-mgt.com.au/rsa_alg.html#crt
return max(p, q), min(p, q) | [
"def",
"find_p_q",
"(",
"nbits",
",",
"getprime_func",
"=",
"rsa",
".",
"prime",
".",
"getprime",
",",
"accurate",
"=",
"True",
")",
":",
"total_bits",
"=",
"nbits",
"*",
"2",
"# Make sure that p and q aren't too close or the factoring programs can",
"# factor n.",
"shift",
"=",
"nbits",
"//",
"16",
"pbits",
"=",
"nbits",
"+",
"shift",
"qbits",
"=",
"nbits",
"-",
"shift",
"# Choose the two initial primes",
"log",
".",
"debug",
"(",
"'find_p_q(%i): Finding p'",
",",
"nbits",
")",
"p",
"=",
"getprime_func",
"(",
"pbits",
")",
"log",
".",
"debug",
"(",
"'find_p_q(%i): Finding q'",
",",
"nbits",
")",
"q",
"=",
"getprime_func",
"(",
"qbits",
")",
"def",
"is_acceptable",
"(",
"p",
",",
"q",
")",
":",
"\"\"\"Returns True iff p and q are acceptable:\n\n - p and q differ\n - (p * q) has the right nr of bits (when accurate=True)\n \"\"\"",
"if",
"p",
"==",
"q",
":",
"return",
"False",
"if",
"not",
"accurate",
":",
"return",
"True",
"# Make sure we have just the right amount of bits",
"found_size",
"=",
"rsa",
".",
"common",
".",
"bit_size",
"(",
"p",
"*",
"q",
")",
"return",
"total_bits",
"==",
"found_size",
"# Keep choosing other primes until they match our requirements.",
"change_p",
"=",
"False",
"while",
"not",
"is_acceptable",
"(",
"p",
",",
"q",
")",
":",
"# Change p on one iteration and q on the other",
"if",
"change_p",
":",
"p",
"=",
"getprime_func",
"(",
"pbits",
")",
"else",
":",
"q",
"=",
"getprime_func",
"(",
"qbits",
")",
"change_p",
"=",
"not",
"change_p",
"# We want p > q as described on",
"# http://www.di-mgt.com.au/rsa_alg.html#crt",
"return",
"max",
"(",
"p",
",",
"q",
")",
",",
"min",
"(",
"p",
",",
"q",
")"
] | [
572,
0
] | [
647,
31
] | python | en | ['en', 'en', 'en'] | True |
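A short sketch of the bit-split used by find_p_q() above: p and q are deliberately generated with slightly different sizes so that they do not end up too close together. The numbers below are just the arithmetic from the function for nbits=128.

nbits = 128
shift = nbits // 16                      # 8
pbits, qbits = nbits + shift, nbits - shift
assert (pbits, qbits) == (136, 120)      # p gets 136 bits, q gets 120 bits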
calculate_keys_custom_exponent | (p, q, exponent) | Calculates an encryption and a decryption key given p, q and an exponent,
and returns them as a tuple (e, d)
:param p: the first large prime
:param q: the second large prime
:param exponent: the exponent for the key; only change this if you know
what you're doing, as the exponent influences how difficult your
private key can be cracked. A very common choice for e is 65537.
:type exponent: int
| Calculates an encryption and a decryption key given p, q and an exponent,
and returns them as a tuple (e, d) | def calculate_keys_custom_exponent(p, q, exponent):
"""Calculates an encryption and a decryption key given p, q and an exponent,
and returns them as a tuple (e, d)
:param p: the first large prime
:param q: the second large prime
:param exponent: the exponent for the key; only change this if you know
what you're doing, as the exponent influences how difficult your
private key can be cracked. A very common choice for e is 65537.
:type exponent: int
"""
phi_n = (p - 1) * (q - 1)
try:
d = rsa.common.inverse(exponent, phi_n)
except rsa.common.NotRelativePrimeError as ex:
raise rsa.common.NotRelativePrimeError(
exponent, phi_n, ex.d,
msg="e (%d) and phi_n (%d) are not relatively prime (divider=%i)" %
(exponent, phi_n, ex.d))
if (exponent * d) % phi_n != 1:
raise ValueError("e (%d) and d (%d) are not mult. inv. modulo "
"phi_n (%d)" % (exponent, d, phi_n))
return exponent, d | [
"def",
"calculate_keys_custom_exponent",
"(",
"p",
",",
"q",
",",
"exponent",
")",
":",
"phi_n",
"=",
"(",
"p",
"-",
"1",
")",
"*",
"(",
"q",
"-",
"1",
")",
"try",
":",
"d",
"=",
"rsa",
".",
"common",
".",
"inverse",
"(",
"exponent",
",",
"phi_n",
")",
"except",
"rsa",
".",
"common",
".",
"NotRelativePrimeError",
"as",
"ex",
":",
"raise",
"rsa",
".",
"common",
".",
"NotRelativePrimeError",
"(",
"exponent",
",",
"phi_n",
",",
"ex",
".",
"d",
",",
"msg",
"=",
"\"e (%d) and phi_n (%d) are not relatively prime (divider=%i)\"",
"%",
"(",
"exponent",
",",
"phi_n",
",",
"ex",
".",
"d",
")",
")",
"if",
"(",
"exponent",
"*",
"d",
")",
"%",
"phi_n",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"e (%d) and d (%d) are not mult. inv. modulo \"",
"\"phi_n (%d)\"",
"%",
"(",
"exponent",
",",
"d",
",",
"phi_n",
")",
")",
"return",
"exponent",
",",
"d"
] | [
650,
0
] | [
677,
22
] | python | en | ['en', 'en', 'en'] | True |
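A worked toy example of the arithmetic above, using the small textbook primes p=61 and q=53 (far too small for real use): phi_n = 60 * 52 = 3120, and the modular inverse of e=17 modulo 3120 is d=2753, so (17 * 2753) % 3120 == 1. The sketch assumes the rsa package's key module is importable as rsa.key, as in the source shown.

import rsa.key

(e, d) = rsa.key.calculate_keys_custom_exponent(61, 53, 17)
assert (e, d) == (17, 2753)
assert (e * d) % ((61 - 1) * (53 - 1)) == 1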
calculate_keys | (p, q) | Calculates an encryption and a decryption key given p and q, and
returns them as a tuple (e, d)
:param p: the first large prime
:param q: the second large prime
:return: tuple (e, d) with the encryption and decryption exponents.
| Calculates an encryption and a decryption key given p and q, and
returns them as a tuple (e, d) | def calculate_keys(p, q):
"""Calculates an encryption and a decryption key given p and q, and
returns them as a tuple (e, d)
:param p: the first large prime
:param q: the second large prime
:return: tuple (e, d) with the encryption and decryption exponents.
"""
return calculate_keys_custom_exponent(p, q, DEFAULT_EXPONENT) | [
"def",
"calculate_keys",
"(",
"p",
",",
"q",
")",
":",
"return",
"calculate_keys_custom_exponent",
"(",
"p",
",",
"q",
",",
"DEFAULT_EXPONENT",
")"
] | [
680,
0
] | [
690,
65
] | python | en | ['en', 'en', 'en'] | True |
gen_keys | (nbits, getprime_func, accurate=True, exponent=DEFAULT_EXPONENT) | Generate RSA keys of nbits bits. Returns (p, q, e, d).
Note: this can take a long time, depending on the key size.
:param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and
``q`` will use ``nbits/2`` bits.
:param getprime_func: either :py:func:`rsa.prime.getprime` or a function
with similar signature.
:param exponent: the exponent for the key; only change this if you know
what you're doing, as the exponent influences how difficult your
private key can be cracked. A very common choice for e is 65537.
:type exponent: int
| Generate RSA keys of nbits bits. Returns (p, q, e, d). | def gen_keys(nbits, getprime_func, accurate=True, exponent=DEFAULT_EXPONENT):
"""Generate RSA keys of nbits bits. Returns (p, q, e, d).
Note: this can take a long time, depending on the key size.
:param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and
``q`` will use ``nbits/2`` bits.
:param getprime_func: either :py:func:`rsa.prime.getprime` or a function
with similar signature.
:param exponent: the exponent for the key; only change this if you know
what you're doing, as the exponent influences how difficult your
private key can be cracked. A very common choice for e is 65537.
:type exponent: int
"""
# Regenerate p and q values, until calculate_keys doesn't raise a
# ValueError.
while True:
(p, q) = find_p_q(nbits // 2, getprime_func, accurate)
try:
(e, d) = calculate_keys_custom_exponent(p, q, exponent=exponent)
break
except ValueError:
pass
return p, q, e, d | [
"def",
"gen_keys",
"(",
"nbits",
",",
"getprime_func",
",",
"accurate",
"=",
"True",
",",
"exponent",
"=",
"DEFAULT_EXPONENT",
")",
":",
"# Regenerate p and q values, until calculate_keys doesn't raise a",
"# ValueError.",
"while",
"True",
":",
"(",
"p",
",",
"q",
")",
"=",
"find_p_q",
"(",
"nbits",
"//",
"2",
",",
"getprime_func",
",",
"accurate",
")",
"try",
":",
"(",
"e",
",",
"d",
")",
"=",
"calculate_keys_custom_exponent",
"(",
"p",
",",
"q",
",",
"exponent",
"=",
"exponent",
")",
"break",
"except",
"ValueError",
":",
"pass",
"return",
"p",
",",
"q",
",",
"e",
",",
"d"
] | [
693,
0
] | [
718,
21
] | python | en | ['en', 'ca', 'en'] | True |
newkeys | (nbits, accurate=True, poolsize=1, exponent=DEFAULT_EXPONENT) | Generates public and private keys, and returns them as (pub, priv).
The public key is also known as the 'encryption key', and is a
:py:class:`rsa.PublicKey` object. The private key is also known as the
'decryption key' and is a :py:class:`rsa.PrivateKey` object.
:param nbits: the number of bits required to store ``n = p*q``.
:param accurate: when True, ``n`` will have exactly the number of bits you
asked for. However, this makes key generation much slower. When False,
``n`` may have slightly fewer bits.
:param poolsize: the number of processes to use to generate the prime
numbers. If set to a number > 1, a parallel algorithm will be used.
This requires Python 2.6 or newer.
:param exponent: the exponent for the key; only change this if you know
what you're doing, as the exponent influences how difficult your
private key can be cracked. A very common choice for e is 65537.
:type exponent: int
:returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`)
The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires
Python 2.6 or newer.
| Generates public and private keys, and returns them as (pub, priv). | def newkeys(nbits, accurate=True, poolsize=1, exponent=DEFAULT_EXPONENT):
"""Generates public and private keys, and returns them as (pub, priv).
The public key is also known as the 'encryption key', and is a
:py:class:`rsa.PublicKey` object. The private key is also known as the
'decryption key' and is a :py:class:`rsa.PrivateKey` object.
:param nbits: the number of bits required to store ``n = p*q``.
:param accurate: when True, ``n`` will have exactly the number of bits you
asked for. However, this makes key generation much slower. When False,
``n`` may have slightly fewer bits.
:param poolsize: the number of processes to use to generate the prime
numbers. If set to a number > 1, a parallel algorithm will be used.
This requires Python 2.6 or newer.
:param exponent: the exponent for the key; only change this if you know
what you're doing, as the exponent influences how difficult your
private key can be cracked. A very common choice for e is 65537.
:type exponent: int
:returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`)
The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires
Python 2.6 or newer.
"""
if nbits < 16:
raise ValueError('Key too small')
if poolsize < 1:
raise ValueError('Pool size (%i) should be >= 1' % poolsize)
# Determine which getprime function to use
if poolsize > 1:
from rsa import parallel
import functools
getprime_func = functools.partial(parallel.getprime, poolsize=poolsize)
else:
getprime_func = rsa.prime.getprime
# Generate the key components
(p, q, e, d) = gen_keys(nbits, getprime_func, accurate=accurate, exponent=exponent)
# Create the key objects
n = p * q
return (
PublicKey(n, e),
PrivateKey(n, e, d, p, q)
) | [
"def",
"newkeys",
"(",
"nbits",
",",
"accurate",
"=",
"True",
",",
"poolsize",
"=",
"1",
",",
"exponent",
"=",
"DEFAULT_EXPONENT",
")",
":",
"if",
"nbits",
"<",
"16",
":",
"raise",
"ValueError",
"(",
"'Key too small'",
")",
"if",
"poolsize",
"<",
"1",
":",
"raise",
"ValueError",
"(",
"'Pool size (%i) should be >= 1'",
"%",
"poolsize",
")",
"# Determine which getprime function to use",
"if",
"poolsize",
">",
"1",
":",
"from",
"rsa",
"import",
"parallel",
"import",
"functools",
"getprime_func",
"=",
"functools",
".",
"partial",
"(",
"parallel",
".",
"getprime",
",",
"poolsize",
"=",
"poolsize",
")",
"else",
":",
"getprime_func",
"=",
"rsa",
".",
"prime",
".",
"getprime",
"# Generate the key components",
"(",
"p",
",",
"q",
",",
"e",
",",
"d",
")",
"=",
"gen_keys",
"(",
"nbits",
",",
"getprime_func",
",",
"accurate",
"=",
"accurate",
",",
"exponent",
"=",
"exponent",
")",
"# Create the key objects",
"n",
"=",
"p",
"*",
"q",
"return",
"(",
"PublicKey",
"(",
"n",
",",
"e",
")",
",",
"PrivateKey",
"(",
"n",
",",
"e",
",",
"d",
",",
"p",
",",
"q",
")",
")"
] | [
721,
0
] | [
771,
5
] | python | en | ['en', 'en', 'en'] | True |
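A minimal end-to-end sketch of newkeys() through the public rsa API; 512 bits is far too small for real security and is used only to keep the example fast.

import rsa

(pub, priv) = rsa.newkeys(512)
ciphertext = rsa.encrypt(b'hello', pub)    # encrypt with the public key
assert rsa.decrypt(ciphertext, priv) == b'hello'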
AbstractKey._load_pkcs1_pem | (cls, keyfile) | Loads a key in PKCS#1 PEM format, implement in a subclass.
:param keyfile: contents of a PEM-encoded file that contains
the public key.
:type keyfile: bytes
:return: the loaded key
:rtype: AbstractKey
| Loads a key in PKCS#1 PEM format, implement in a subclass. | def _load_pkcs1_pem(cls, keyfile):
"""Loads a key in PKCS#1 PEM format, implement in a subclass.
:param keyfile: contents of a PEM-encoded file that contains
the public key.
:type keyfile: bytes
:return: the loaded key
:rtype: AbstractKey
""" | [
"def",
"_load_pkcs1_pem",
"(",
"cls",
",",
"keyfile",
")",
":"
] | [
60,
4
] | [
69,
11
] | python | en | ['en', 'en', 'en'] | True |
AbstractKey._load_pkcs1_der | (cls, keyfile) | Loads a key in PKCS#1 DER format, implement in a subclass.
:param keyfile: contents of a DER-encoded file that contains
the public key.
:type keyfile: bytes
:return: the loaded key
:rtype: AbstractKey
| Loads a key in PKCS#1 DER format, implement in a subclass. | def _load_pkcs1_der(cls, keyfile):
"""Loads a key in PKCS#1 DER format, implement in a subclass.
:param keyfile: contents of a DER-encoded file that contains
the public key.
:type keyfile: bytes
:return: the loaded key
:rtype: AbstractKey
""" | [
"def",
"_load_pkcs1_der",
"(",
"cls",
",",
"keyfile",
")",
":"
] | [
72,
4
] | [
81,
11
] | python | en | ['en', 'en', 'en'] | True |
AbstractKey._save_pkcs1_pem | (self) | Saves the key in PKCS#1 PEM format, implement in a subclass.
:returns: the PEM-encoded key.
:rtype: bytes
| Saves the key in PKCS#1 PEM format, implement in a subclass. | def _save_pkcs1_pem(self):
"""Saves the key in PKCS#1 PEM format, implement in a subclass.
:returns: the PEM-encoded key.
:rtype: bytes
""" | [
"def",
"_save_pkcs1_pem",
"(",
"self",
")",
":"
] | [
83,
4
] | [
88,
11
] | python | en | ['en', 'en', 'en'] | True |
AbstractKey._save_pkcs1_der | (self) | Saves the key in PKCS#1 DER format, implement in a subclass.
:returns: the DER-encoded key.
:rtype: bytes
| Saves the key in PKCS#1 DER format, implement in a subclass. | def _save_pkcs1_der(self):
"""Saves the key in PKCS#1 DER format, implement in a subclass.
:returns: the DER-encoded key.
:rtype: bytes
""" | [
"def",
"_save_pkcs1_der",
"(",
"self",
")",
":"
] | [
90,
4
] | [
95,
11
] | python | en | ['en', 'en', 'en'] | True |
AbstractKey.load_pkcs1 | (cls, keyfile, format='PEM') | Loads a key in PKCS#1 DER or PEM format.
:param keyfile: contents of a DER- or PEM-encoded file that contains
the key.
:type keyfile: bytes
:param format: the format of the file to load; 'PEM' or 'DER'
:type format: str
:return: the loaded key
:rtype: AbstractKey
| Loads a key in PKCS#1 DER or PEM format. | def load_pkcs1(cls, keyfile, format='PEM'):
"""Loads a key in PKCS#1 DER or PEM format.
:param keyfile: contents of a DER- or PEM-encoded file that contains
the key.
:type keyfile: bytes
:param format: the format of the file to load; 'PEM' or 'DER'
:type format: str
:return: the loaded key
:rtype: AbstractKey
"""
methods = {
'PEM': cls._load_pkcs1_pem,
'DER': cls._load_pkcs1_der,
}
method = cls._assert_format_exists(format, methods)
return method(keyfile) | [
"def",
"load_pkcs1",
"(",
"cls",
",",
"keyfile",
",",
"format",
"=",
"'PEM'",
")",
":",
"methods",
"=",
"{",
"'PEM'",
":",
"cls",
".",
"_load_pkcs1_pem",
",",
"'DER'",
":",
"cls",
".",
"_load_pkcs1_der",
",",
"}",
"method",
"=",
"cls",
".",
"_assert_format_exists",
"(",
"format",
",",
"methods",
")",
"return",
"method",
"(",
"keyfile",
")"
] | [
98,
4
] | [
117,
30
] | python | en | ['en', 'da', 'en'] | True |
AbstractKey._assert_format_exists | (file_format, methods) | Checks whether the given file format exists in 'methods'.
| Checks whether the given file format exists in 'methods'.
| def _assert_format_exists(file_format, methods):
"""Checks whether the given file format exists in 'methods'.
"""
try:
return methods[file_format]
except KeyError:
formats = ', '.join(sorted(methods.keys()))
raise ValueError('Unsupported format: %r, try one of %s' % (file_format,
formats)) | [
"def",
"_assert_format_exists",
"(",
"file_format",
",",
"methods",
")",
":",
"try",
":",
"return",
"methods",
"[",
"file_format",
"]",
"except",
"KeyError",
":",
"formats",
"=",
"', '",
".",
"join",
"(",
"sorted",
"(",
"methods",
".",
"keys",
"(",
")",
")",
")",
"raise",
"ValueError",
"(",
"'Unsupported format: %r, try one of %s'",
"%",
"(",
"file_format",
",",
"formats",
")",
")"
] | [
120,
4
] | [
129,
81
] | python | en | ['en', 'en', 'en'] | True |
AbstractKey.save_pkcs1 | (self, format='PEM') | Saves the key in PKCS#1 DER or PEM format.
:param format: the format to save; 'PEM' or 'DER'
:type format: str
:returns: the DER- or PEM-encoded key.
:rtype: bytes
| Saves the key in PKCS#1 DER or PEM format. | def save_pkcs1(self, format='PEM'):
"""Saves the key in PKCS#1 DER or PEM format.
:param format: the format to save; 'PEM' or 'DER'
:type format: str
:returns: the DER- or PEM-encoded key.
:rtype: bytes
"""
methods = {
'PEM': self._save_pkcs1_pem,
'DER': self._save_pkcs1_der,
}
method = self._assert_format_exists(format, methods)
return method() | [
"def",
"save_pkcs1",
"(",
"self",
",",
"format",
"=",
"'PEM'",
")",
":",
"methods",
"=",
"{",
"'PEM'",
":",
"self",
".",
"_save_pkcs1_pem",
",",
"'DER'",
":",
"self",
".",
"_save_pkcs1_der",
",",
"}",
"method",
"=",
"self",
".",
"_assert_format_exists",
"(",
"format",
",",
"methods",
")",
"return",
"method",
"(",
")"
] | [
131,
4
] | [
146,
23
] | python | en | ['en', 'da', 'en'] | True |
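A sketch of the save/load dispatch above, round-tripping a freshly generated private key through PEM (again with a toy key size); it assumes the standard PKCS#1 PEM marker used by the rsa package.

import rsa

(_pub, priv) = rsa.newkeys(512)
pem = priv.save_pkcs1(format='PEM')
assert pem.startswith(b'-----BEGIN RSA PRIVATE KEY-----')
assert rsa.PrivateKey.load_pkcs1(pem, format='PEM') == priv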
AbstractKey.blind | (self, message, r) | Performs blinding on the message using random number 'r'.
:param message: the message, as integer, to blind.
:type message: int
:param r: the random number to blind with.
:type r: int
:return: the blinded message.
:rtype: int
The blinding is such that message = unblind(decrypt(blind(encrypt(message)))).
See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29
| Performs blinding on the message using random number 'r'. | def blind(self, message, r):
"""Performs blinding on the message using random number 'r'.
:param message: the message, as integer, to blind.
:type message: int
:param r: the random number to blind with.
:type r: int
:return: the blinded message.
:rtype: int
The blinding is such that message = unblind(decrypt(blind(encrypt(message)))).
See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29
"""
return (message * pow(r, self.e, self.n)) % self.n | [
"def",
"blind",
"(",
"self",
",",
"message",
",",
"r",
")",
":",
"return",
"(",
"message",
"*",
"pow",
"(",
"r",
",",
"self",
".",
"e",
",",
"self",
".",
"n",
")",
")",
"%",
"self",
".",
"n"
] | [
148,
4
] | [
163,
58
] | python | en | ['en', 'en', 'en'] | True |
AbstractKey.unblind | (self, blinded, r) | Performs unblinding on the message using random number 'r'.
:param blinded: the blinded message, as integer, to unblind.
:param r: the random number to unblind with.
:return: the original message.
The blinding is such that message = unblind(decrypt(blind(encrypt(message)))).
See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29
| Performs unblinding on the message using random number 'r'. | def unblind(self, blinded, r):
"""Performs unblinding on the message using random number 'r'.
:param blinded: the blinded message, as integer, to unblind.
:param r: the random number to unblind with.
:return: the original message.
The blinding is such that message = unblind(decrypt(blind(encrypt(message)))).
See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29
"""
return (rsa.common.inverse(r, self.n) * blinded) % self.n | [
"def",
"unblind",
"(",
"self",
",",
"blinded",
",",
"r",
")",
":",
"return",
"(",
"rsa",
".",
"common",
".",
"inverse",
"(",
"r",
",",
"self",
".",
"n",
")",
"*",
"blinded",
")",
"%",
"self",
".",
"n"
] | [
165,
4
] | [
177,
65
] | python | en | ['en', 'en', 'en'] | True |
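A toy numeric illustration of the blinding identity documented in blind()/unblind() above, with the insecure textbook key n=3233 (= 61 * 53), e=17, d=2753 and blinding factor r=7; only the modular arithmetic from the two methods is reproduced, using rsa.common.inverse as the code itself does.

import rsa.common

n, e, d = 3233, 17, 2753   # toy textbook key, not usable for real security
message, r = 65, 7

blinded = (message * pow(r, e, n)) % n                     # blind(message, r)
signed_blinded = pow(blinded, d, n)                        # private-key operation on the blinded value
signed = (rsa.common.inverse(r, n) * signed_blinded) % n   # unblind(..., r)
assert signed == pow(message, d, n)                        # same result as operating on message directly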
PublicKey.__getstate__ | (self) | Returns the key as tuple for pickling. | Returns the key as tuple for pickling. | def __getstate__(self):
"""Returns the key as tuple for pickling."""
return self.n, self.e | [
"def",
"__getstate__",
"(",
"self",
")",
":",
"return",
"self",
".",
"n",
",",
"self",
".",
"e"
] | [
212,
4
] | [
214,
29
] | python | en | ['en', 'en', 'en'] | True |
PublicKey.__setstate__ | (self, state) | Sets the key from tuple. | Sets the key from tuple. | def __setstate__(self, state):
"""Sets the key from tuple."""
self.n, self.e = state | [
"def",
"__setstate__",
"(",
"self",
",",
"state",
")",
":",
"self",
".",
"n",
",",
"self",
".",
"e",
"=",
"state"
] | [
216,
4
] | [
218,
30
] | python | en | ['en', 'en', 'en'] | True |
PublicKey._load_pkcs1_der | (cls, keyfile) | Loads a key in PKCS#1 DER format.
:param keyfile: contents of a DER-encoded file that contains the public
key.
:return: a PublicKey object
First let's construct a DER encoded key:
>>> import base64
>>> b64der = 'MAwCBQCNGmYtAgMBAAE='
>>> der = base64.standard_b64decode(b64der)
This loads the file:
>>> PublicKey._load_pkcs1_der(der)
PublicKey(2367317549, 65537)
| Loads a key in PKCS#1 DER format. | def _load_pkcs1_der(cls, keyfile):
"""Loads a key in PKCS#1 DER format.
:param keyfile: contents of a DER-encoded file that contains the public
key.
:return: a PublicKey object
First let's construct a DER encoded key:
>>> import base64
>>> b64der = 'MAwCBQCNGmYtAgMBAAE='
>>> der = base64.standard_b64decode(b64der)
This loads the file:
>>> PublicKey._load_pkcs1_der(der)
PublicKey(2367317549, 65537)
"""
from pyasn1.codec.der import decoder
from rsa.asn1 import AsnPubKey
(priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey())
return cls(n=int(priv['modulus']), e=int(priv['publicExponent'])) | [
"def",
"_load_pkcs1_der",
"(",
"cls",
",",
"keyfile",
")",
":",
"from",
"pyasn1",
".",
"codec",
".",
"der",
"import",
"decoder",
"from",
"rsa",
".",
"asn1",
"import",
"AsnPubKey",
"(",
"priv",
",",
"_",
")",
"=",
"decoder",
".",
"decode",
"(",
"keyfile",
",",
"asn1Spec",
"=",
"AsnPubKey",
"(",
")",
")",
"return",
"cls",
"(",
"n",
"=",
"int",
"(",
"priv",
"[",
"'modulus'",
"]",
")",
",",
"e",
"=",
"int",
"(",
"priv",
"[",
"'publicExponent'",
"]",
")",
")"
] | [
236,
4
] | [
260,
73
] | python | en | ['en', 'fy', 'en'] | True |
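The doctest above, repeated as a standalone sketch: decode the base64 DER blob and check the recovered modulus and exponent.

import base64
import rsa

der = base64.standard_b64decode('MAwCBQCNGmYtAgMBAAE=')
key = rsa.PublicKey._load_pkcs1_der(der)
assert (key.n, key.e) == (2367317549, 65537)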
PublicKey._save_pkcs1_der | (self) | Saves the public key in PKCS#1 DER format.
:returns: the DER-encoded public key.
:rtype: bytes
| Saves the public key in PKCS#1 DER format. | def _save_pkcs1_der(self):
"""Saves the public key in PKCS#1 DER format.
:returns: the DER-encoded public key.
:rtype: bytes
"""
from pyasn1.codec.der import encoder
from rsa.asn1 import AsnPubKey
# Create the ASN object
asn_key = AsnPubKey()
asn_key.setComponentByName('modulus', self.n)
asn_key.setComponentByName('publicExponent', self.e)
return encoder.encode(asn_key) | [
"def",
"_save_pkcs1_der",
"(",
"self",
")",
":",
"from",
"pyasn1",
".",
"codec",
".",
"der",
"import",
"encoder",
"from",
"rsa",
".",
"asn1",
"import",
"AsnPubKey",
"# Create the ASN object",
"asn_key",
"=",
"AsnPubKey",
"(",
")",
"asn_key",
".",
"setComponentByName",
"(",
"'modulus'",
",",
"self",
".",
"n",
")",
"asn_key",
".",
"setComponentByName",
"(",
"'publicExponent'",
",",
"self",
".",
"e",
")",
"return",
"encoder",
".",
"encode",
"(",
"asn_key",
")"
] | [
262,
4
] | [
277,
38
] | python | en | ['en', 'da', 'en'] | True |
PublicKey._load_pkcs1_pem | (cls, keyfile) | Loads a PKCS#1 PEM-encoded public key file.
The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and
after the "-----END RSA PUBLIC KEY-----" lines is ignored.
:param keyfile: contents of a PEM-encoded file that contains the public
key.
:return: a PublicKey object
| Loads a PKCS#1 PEM-encoded public key file. | def _load_pkcs1_pem(cls, keyfile):
"""Loads a PKCS#1 PEM-encoded public key file.
The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and
after the "-----END RSA PUBLIC KEY-----" lines is ignored.
:param keyfile: contents of a PEM-encoded file that contains the public
key.
:return: a PublicKey object
"""
der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY')
return cls._load_pkcs1_der(der) | [
"def",
"_load_pkcs1_pem",
"(",
"cls",
",",
"keyfile",
")",
":",
"der",
"=",
"rsa",
".",
"pem",
".",
"load_pem",
"(",
"keyfile",
",",
"'RSA PUBLIC KEY'",
")",
"return",
"cls",
".",
"_load_pkcs1_der",
"(",
"der",
")"
] | [
280,
4
] | [
292,
39
] | python | en | ['en', 'sq', 'en'] | True |
PublicKey._save_pkcs1_pem | (self) | Saves a PKCS#1 PEM-encoded public key file.
:return: contents of a PEM-encoded file that contains the public key.
:rtype: bytes
| Saves a PKCS#1 PEM-encoded public key file. | def _save_pkcs1_pem(self):
"""Saves a PKCS#1 PEM-encoded public key file.
:return: contents of a PEM-encoded file that contains the public key.
:rtype: bytes
"""
der = self._save_pkcs1_der()
return rsa.pem.save_pem(der, 'RSA PUBLIC KEY') | [
"def",
"_save_pkcs1_pem",
"(",
"self",
")",
":",
"der",
"=",
"self",
".",
"_save_pkcs1_der",
"(",
")",
"return",
"rsa",
".",
"pem",
".",
"save_pem",
"(",
"der",
",",
"'RSA PUBLIC KEY'",
")"
] | [
294,
4
] | [
302,
54
] | python | en | ['en', 'sq', 'en'] | True |