Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
logical_and
(image1, image2)
Logical AND between two images. Both of the images must have mode "1". If you would like to perform a logical AND on an image with a mode other than "1", try :py:meth:`~PIL.ImageChops.multiply` instead, using a black-and-white mask as the second image. .. code-block:: python out = ((image1 and image2) % MAX) :rtype: :py:class:`~PIL.Image.Image`
Logical AND between two images.
def logical_and(image1, image2):
    """Compute the pixelwise logical AND of two images.

    Both images must have mode "1". To AND an image of any other mode
    against a black-and-white mask, use
    :py:meth:`~PIL.ImageChops.multiply` instead.

    .. code-block:: python

        out = ((image1 and image2) % MAX)

    :rtype: :py:class:`~PIL.Image.Image`
    """
    # Force both images to be fully loaded before touching the C cores.
    for img in (image1, image2):
        img.load()
    return image1._new(image1.im.chop_and(image2.im))
[ "def", "logical_and", "(", "image1", ",", "image2", ")", ":", "image1", ".", "load", "(", ")", "image2", ".", "load", "(", ")", "return", "image1", ".", "_new", "(", "image1", ".", "im", ".", "chop_and", "(", "image2", ".", "im", ")", ")" ]
[ 239, 0 ]
[ 256, 53 ]
python
en
['en', 'en', 'en']
True
logical_or
(image1, image2)
Logical OR between two images. Both of the images must have mode "1". .. code-block:: python out = ((image1 or image2) % MAX) :rtype: :py:class:`~PIL.Image.Image`
Logical OR between two images.
def logical_or(image1, image2):
    """Compute the pixelwise logical OR of two images.

    Both images must have mode "1".

    .. code-block:: python

        out = ((image1 or image2) % MAX)

    :rtype: :py:class:`~PIL.Image.Image`
    """
    # Force both images to be fully loaded before touching the C cores.
    for img in (image1, image2):
        img.load()
    return image1._new(image1.im.chop_or(image2.im))
[ "def", "logical_or", "(", "image1", ",", "image2", ")", ":", "image1", ".", "load", "(", ")", "image2", ".", "load", "(", ")", "return", "image1", ".", "_new", "(", "image1", ".", "im", ".", "chop_or", "(", "image2", ".", "im", ")", ")" ]
[ 259, 0 ]
[ 273, 52 ]
python
en
['en', 'en', 'en']
True
logical_xor
(image1, image2)
Logical XOR between two images. Both of the images must have mode "1". .. code-block:: python out = ((bool(image1) != bool(image2)) % MAX) :rtype: :py:class:`~PIL.Image.Image`
Logical XOR between two images.
def logical_xor(image1, image2):
    """Compute the pixelwise logical XOR of two images.

    Both images must have mode "1".

    .. code-block:: python

        out = ((bool(image1) != bool(image2)) % MAX)

    :rtype: :py:class:`~PIL.Image.Image`
    """
    # Force both images to be fully loaded before touching the C cores.
    for img in (image1, image2):
        img.load()
    return image1._new(image1.im.chop_xor(image2.im))
[ "def", "logical_xor", "(", "image1", ",", "image2", ")", ":", "image1", ".", "load", "(", ")", "image2", ".", "load", "(", ")", "return", "image1", ".", "_new", "(", "image1", ".", "im", ".", "chop_xor", "(", "image2", ".", "im", ")", ")" ]
[ 276, 0 ]
[ 290, 53 ]
python
en
['en', 'en', 'en']
True
blend
(image1, image2, alpha)
Blend images using constant transparency weight. Alias for :py:func:`PIL.Image.blend`. :rtype: :py:class:`~PIL.Image.Image`
Blend images using constant transparency weight. Alias for :py:func:`PIL.Image.blend`.
def blend(image1, image2, alpha):
    """Interpolate between two images using a constant transparency weight.

    Thin convenience alias for :py:func:`PIL.Image.blend`.

    :rtype: :py:class:`~PIL.Image.Image`
    """
    return Image.blend(image1, image2, alpha)
[ "def", "blend", "(", "image1", ",", "image2", ",", "alpha", ")", ":", "return", "Image", ".", "blend", "(", "image1", ",", "image2", ",", "alpha", ")" ]
[ 293, 0 ]
[ 300, 45 ]
python
en
['en', 'en', 'en']
True
composite
(image1, image2, mask)
Create composite using transparency mask. Alias for :py:func:`PIL.Image.composite`. :rtype: :py:class:`~PIL.Image.Image`
Create composite using transparency mask. Alias for :py:func:`PIL.Image.composite`.
def composite(image1, image2, mask):
    """Combine two images using a transparency mask.

    Thin convenience alias for :py:func:`PIL.Image.composite`.

    :rtype: :py:class:`~PIL.Image.Image`
    """
    return Image.composite(image1, image2, mask)
[ "def", "composite", "(", "image1", ",", "image2", ",", "mask", ")", ":", "return", "Image", ".", "composite", "(", "image1", ",", "image2", ",", "mask", ")" ]
[ 303, 0 ]
[ 310, 48 ]
python
en
['en', 'en', 'en']
True
offset
(image, xoffset, yoffset=None)
Returns a copy of the image where data has been offset by the given distances. Data wraps around the edges. If ``yoffset`` is omitted, it is assumed to be equal to ``xoffset``. :param xoffset: The horizontal distance. :param yoffset: The vertical distance. If omitted, both distances are set to the same value. :rtype: :py:class:`~PIL.Image.Image`
Returns a copy of the image where data has been offset by the given distances. Data wraps around the edges. If ``yoffset`` is omitted, it is assumed to be equal to ``xoffset``.
def offset(image, xoffset, yoffset=None):
    """Return a copy of the image shifted by the given distances.

    Data wraps around the edges. If ``yoffset`` is omitted, it defaults to
    ``xoffset``.

    :param xoffset: The horizontal distance.
    :param yoffset: The vertical distance.  If omitted, both
        distances are set to the same value.
    :rtype: :py:class:`~PIL.Image.Image`
    """
    # A single argument shifts both axes by the same amount.
    yoffset = xoffset if yoffset is None else yoffset
    image.load()
    return image._new(image.im.offset(xoffset, yoffset))
[ "def", "offset", "(", "image", ",", "xoffset", ",", "yoffset", "=", "None", ")", ":", "if", "yoffset", "is", "None", ":", "yoffset", "=", "xoffset", "image", ".", "load", "(", ")", "return", "image", ".", "_new", "(", "image", ".", "im", ".", "offset", "(", "xoffset", ",", "yoffset", ")", ")" ]
[ 313, 0 ]
[ 327, 56 ]
python
en
['en', 'en', 'en']
True
to_genshi
(walker)
Convert a tree to a genshi tree :arg walker: the treewalker to use to walk the tree to convert it :returns: generator of genshi nodes
Convert a tree to a genshi tree
def to_genshi(walker):
    """Convert a tree to a genshi tree

    :arg walker: the treewalker to use to walk the tree to convert it

    :returns: generator of genshi nodes

    """
    # Buffer of consecutive character-token strings; they are coalesced into
    # a single genshi TEXT event when a non-character token (or the end of
    # the stream) is reached.
    text = []
    for token in walker:
        type = token["type"]  # NOTE: shadows the `type` builtin inside this loop
        if type in ("Characters", "SpaceCharacters"):
            text.append(token["data"])
        elif text:
            # A non-character token ends the current text run: flush it first.
            # (None, -1, -1) is the genshi position tuple — no source location.
            yield TEXT, "".join(text), (None, -1, -1)
            text = []

        if type in ("StartTag", "EmptyTag"):
            if token["namespace"]:
                # Clark notation: "{namespace}localname".
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]
            # Attribute keys are (namespace, localname) pairs; namespaced
            # attributes get Clark notation, plain ones keep just the name.
            attrs = Attrs([(QName("{%s}%s" % attr if attr[0] is not None else attr[1]), value)
                           for attr, value in token["data"].items()])
            yield (START, (QName(name), attrs), (None, -1, -1))
            if type == "EmptyTag":
                # An empty tag emits START immediately followed by END;
                # rewriting `type` makes it fall into the EndTag branch below.
                type = "EndTag"

        if type == "EndTag":
            if token["namespace"]:
                name = "{%s}%s" % (token["namespace"], token["name"])
            else:
                name = token["name"]

            yield END, QName(name), (None, -1, -1)

        elif type == "Comment":
            yield COMMENT, token["data"], (None, -1, -1)

        elif type == "Doctype":
            yield DOCTYPE, (token["name"], token["publicId"],
                            token["systemId"]), (None, -1, -1)

        else:
            pass  # FIXME: What to do?

    # Flush any trailing text run once the walker is exhausted.
    if text:
        yield TEXT, "".join(text), (None, -1, -1)
[ "def", "to_genshi", "(", "walker", ")", ":", "text", "=", "[", "]", "for", "token", "in", "walker", ":", "type", "=", "token", "[", "\"type\"", "]", "if", "type", "in", "(", "\"Characters\"", ",", "\"SpaceCharacters\"", ")", ":", "text", ".", "append", "(", "token", "[", "\"data\"", "]", ")", "elif", "text", ":", "yield", "TEXT", ",", "\"\"", ".", "join", "(", "text", ")", ",", "(", "None", ",", "-", "1", ",", "-", "1", ")", "text", "=", "[", "]", "if", "type", "in", "(", "\"StartTag\"", ",", "\"EmptyTag\"", ")", ":", "if", "token", "[", "\"namespace\"", "]", ":", "name", "=", "\"{%s}%s\"", "%", "(", "token", "[", "\"namespace\"", "]", ",", "token", "[", "\"name\"", "]", ")", "else", ":", "name", "=", "token", "[", "\"name\"", "]", "attrs", "=", "Attrs", "(", "[", "(", "QName", "(", "\"{%s}%s\"", "%", "attr", "if", "attr", "[", "0", "]", "is", "not", "None", "else", "attr", "[", "1", "]", ")", ",", "value", ")", "for", "attr", ",", "value", "in", "token", "[", "\"data\"", "]", ".", "items", "(", ")", "]", ")", "yield", "(", "START", ",", "(", "QName", "(", "name", ")", ",", "attrs", ")", ",", "(", "None", ",", "-", "1", ",", "-", "1", ")", ")", "if", "type", "==", "\"EmptyTag\"", ":", "type", "=", "\"EndTag\"", "if", "type", "==", "\"EndTag\"", ":", "if", "token", "[", "\"namespace\"", "]", ":", "name", "=", "\"{%s}%s\"", "%", "(", "token", "[", "\"namespace\"", "]", ",", "token", "[", "\"name\"", "]", ")", "else", ":", "name", "=", "token", "[", "\"name\"", "]", "yield", "END", ",", "QName", "(", "name", ")", ",", "(", "None", ",", "-", "1", ",", "-", "1", ")", "elif", "type", "==", "\"Comment\"", ":", "yield", "COMMENT", ",", "token", "[", "\"data\"", "]", ",", "(", "None", ",", "-", "1", ",", "-", "1", ")", "elif", "type", "==", "\"Doctype\"", ":", "yield", "DOCTYPE", ",", "(", "token", "[", "\"name\"", "]", ",", "token", "[", "\"publicId\"", "]", ",", "token", "[", "\"systemId\"", "]", ")", ",", "(", "None", ",", "-", "1", ",", "-", "1", ")", "else", ":", 
"pass", "# FIXME: What to do?", "if", "text", ":", "yield", "TEXT", ",", "\"\"", ".", "join", "(", "text", ")", ",", "(", "None", ",", "-", "1", ",", "-", "1", ")" ]
[ 6, 0 ]
[ 53, 49 ]
python
en
['en', 'mk', 'en']
True
urlquote
(url, safe='/')
A legacy compatibility wrapper to Python's urllib.parse.quote() function. (was used for unicode handling on Python 2)
A legacy compatibility wrapper to Python's urllib.parse.quote() function. (was used for unicode handling on Python 2)
def urlquote(url, safe='/'):
    """Deprecated alias for :func:`urllib.parse.quote`.

    Kept only for backwards compatibility (it existed for unicode handling
    on Python 2); every call emits a deprecation warning.
    """
    warnings.warn(
        'django.utils.http.urlquote() is deprecated in favor of urllib.parse.quote().',
        RemovedInDjango40Warning,
        stacklevel=2,
    )
    return quote(url, safe)
[ "def", "urlquote", "(", "url", ",", "safe", "=", "'/'", ")", ":", "warnings", ".", "warn", "(", "'django.utils.http.urlquote() is deprecated in favor of '", "'urllib.parse.quote().'", ",", "RemovedInDjango40Warning", ",", "stacklevel", "=", "2", ",", ")", "return", "quote", "(", "url", ",", "safe", ")" ]
[ 45, 0 ]
[ 55, 27 ]
python
en
['en', 'error', 'th']
False
urlquote_plus
(url, safe='')
A legacy compatibility wrapper to Python's urllib.parse.quote_plus() function. (was used for unicode handling on Python 2)
A legacy compatibility wrapper to Python's urllib.parse.quote_plus() function. (was used for unicode handling on Python 2)
def urlquote_plus(url, safe=''):
    """
    A legacy compatibility wrapper to Python's urllib.parse.quote_plus()
    function. (was used for unicode handling on Python 2)
    """
    warnings.warn(
        'django.utils.http.urlquote_plus() is deprecated in favor of '
        # Fixed: message previously ended with a stray comma
        # ("...quote_plus(),") — now ends with a period like the sibling
        # urlquote()/urlunquote()/urlunquote_plus() deprecation messages.
        'urllib.parse.quote_plus().',
        RemovedInDjango40Warning, stacklevel=2,
    )
    return quote_plus(url, safe)
[ "def", "urlquote_plus", "(", "url", ",", "safe", "=", "''", ")", ":", "warnings", ".", "warn", "(", "'django.utils.http.urlquote_plus() is deprecated in favor of '", "'urllib.parse.quote_plus(),'", ",", "RemovedInDjango40Warning", ",", "stacklevel", "=", "2", ",", ")", "return", "quote_plus", "(", "url", ",", "safe", ")" ]
[ 59, 0 ]
[ 69, 32 ]
python
en
['en', 'error', 'th']
False
urlunquote
(quoted_url)
A legacy compatibility wrapper to Python's urllib.parse.unquote() function. (was used for unicode handling on Python 2)
A legacy compatibility wrapper to Python's urllib.parse.unquote() function. (was used for unicode handling on Python 2)
def urlunquote(quoted_url):
    """Deprecated alias for :func:`urllib.parse.unquote`.

    Kept only for backwards compatibility (it existed for unicode handling
    on Python 2); every call emits a deprecation warning.
    """
    warnings.warn(
        'django.utils.http.urlunquote() is deprecated in favor of urllib.parse.unquote().',
        RemovedInDjango40Warning,
        stacklevel=2,
    )
    return unquote(quoted_url)
[ "def", "urlunquote", "(", "quoted_url", ")", ":", "warnings", ".", "warn", "(", "'django.utils.http.urlunquote() is deprecated in favor of '", "'urllib.parse.unquote().'", ",", "RemovedInDjango40Warning", ",", "stacklevel", "=", "2", ",", ")", "return", "unquote", "(", "quoted_url", ")" ]
[ 73, 0 ]
[ 83, 30 ]
python
en
['en', 'error', 'th']
False
urlunquote_plus
(quoted_url)
A legacy compatibility wrapper to Python's urllib.parse.unquote_plus() function. (was used for unicode handling on Python 2)
A legacy compatibility wrapper to Python's urllib.parse.unquote_plus() function. (was used for unicode handling on Python 2)
def urlunquote_plus(quoted_url):
    """Deprecated alias for :func:`urllib.parse.unquote_plus`.

    Kept only for backwards compatibility (it existed for unicode handling
    on Python 2); every call emits a deprecation warning.
    """
    warnings.warn(
        'django.utils.http.urlunquote_plus() is deprecated in favor of '
        'urllib.parse.unquote_plus().',
        RemovedInDjango40Warning,
        stacklevel=2,
    )
    return unquote_plus(quoted_url)
[ "def", "urlunquote_plus", "(", "quoted_url", ")", ":", "warnings", ".", "warn", "(", "'django.utils.http.urlunquote_plus() is deprecated in favor of '", "'urllib.parse.unquote_plus().'", ",", "RemovedInDjango40Warning", ",", "stacklevel", "=", "2", ",", ")", "return", "unquote_plus", "(", "quoted_url", ")" ]
[ 87, 0 ]
[ 97, 35 ]
python
en
['en', 'error', 'th']
False
urlencode
(query, doseq=False)
A version of Python's urllib.parse.urlencode() function that can operate on MultiValueDict and non-string values.
A version of Python's urllib.parse.urlencode() function that can operate on MultiValueDict and non-string values.
def urlencode(query, doseq=False):
    """
    A version of Python's urllib.parse.urlencode() function that can operate on
    MultiValueDict and non-string values.

    Raises TypeError when a value (or, with doseq=True, an item inside an
    iterable value) is None, since None has no sensible query-string encoding.
    """
    # Normalize the input to an iterable of (key, value) pairs.
    if isinstance(query, MultiValueDict):
        query = query.lists()  # yields (key, list_of_values) pairs
    elif hasattr(query, 'items'):
        query = query.items()
    query_params = []
    for key, value in query:
        if value is None:
            # Fail loudly instead of silently emitting the literal "None".
            raise TypeError(
                "Cannot encode None for key '%s' in a query string. Did you "
                "mean to pass an empty string or omit the value?" % key
            )
        elif not doseq or isinstance(value, (str, bytes)):
            # Strings and bytes are treated atomically even when doseq is on.
            query_val = value
        else:
            try:
                itr = iter(value)
            except TypeError:
                # Non-iterable scalar: pass through unchanged.
                query_val = value
            else:
                # Consume generators and iterators, when doseq=True, to
                # work around https://bugs.python.org/issue31706.
                query_val = []
                for item in itr:
                    if item is None:
                        raise TypeError(
                            "Cannot encode None for key '%s' in a query "
                            "string. Did you mean to pass an empty string or "
                            "omit the value?" % key
                        )
                    elif not isinstance(item, bytes):
                        item = str(item)
                    query_val.append(item)
        query_params.append((key, query_val))
    # Delegate the actual percent-encoding to the stdlib implementation.
    return original_urlencode(query_params, doseq)
[ "def", "urlencode", "(", "query", ",", "doseq", "=", "False", ")", ":", "if", "isinstance", "(", "query", ",", "MultiValueDict", ")", ":", "query", "=", "query", ".", "lists", "(", ")", "elif", "hasattr", "(", "query", ",", "'items'", ")", ":", "query", "=", "query", ".", "items", "(", ")", "query_params", "=", "[", "]", "for", "key", ",", "value", "in", "query", ":", "if", "value", "is", "None", ":", "raise", "TypeError", "(", "\"Cannot encode None for key '%s' in a query string. Did you \"", "\"mean to pass an empty string or omit the value?\"", "%", "key", ")", "elif", "not", "doseq", "or", "isinstance", "(", "value", ",", "(", "str", ",", "bytes", ")", ")", ":", "query_val", "=", "value", "else", ":", "try", ":", "itr", "=", "iter", "(", "value", ")", "except", "TypeError", ":", "query_val", "=", "value", "else", ":", "# Consume generators and iterators, when doseq=True, to", "# work around https://bugs.python.org/issue31706.", "query_val", "=", "[", "]", "for", "item", "in", "itr", ":", "if", "item", "is", "None", ":", "raise", "TypeError", "(", "\"Cannot encode None for key '%s' in a query \"", "\"string. Did you mean to pass an empty string or \"", "\"omit the value?\"", "%", "key", ")", "elif", "not", "isinstance", "(", "item", ",", "bytes", ")", ":", "item", "=", "str", "(", "item", ")", "query_val", ".", "append", "(", "item", ")", "query_params", ".", "append", "(", "(", "key", ",", "query_val", ")", ")", "return", "original_urlencode", "(", "query_params", ",", "doseq", ")" ]
[ 100, 0 ]
[ 138, 50 ]
python
en
['en', 'error', 'th']
False
http_date
(epoch_seconds=None)
Format the time to match the RFC1123 date format as specified by HTTP RFC7231 section 7.1.1.1. `epoch_seconds` is a floating point number expressed in seconds since the epoch, in UTC - such as that outputted by time.time(). If set to None, it defaults to the current time. Output a string in the format 'Wdy, DD Mon YYYY HH:MM:SS GMT'.
Format the time to match the RFC1123 date format as specified by HTTP RFC7231 section 7.1.1.1.
def http_date(epoch_seconds=None):
    """Render a timestamp in the RFC1123 date format required by HTTP
    (RFC7231 section 7.1.1.1), e.g. 'Wdy, DD Mon YYYY HH:MM:SS GMT'.

    `epoch_seconds` is a float of seconds since the epoch in UTC (as
    produced by time.time()); None means "now".
    """
    # email.utils.formatdate with usegmt=True emits exactly the RFC1123 shape.
    return formatdate(epoch_seconds, usegmt=True)
[ "def", "http_date", "(", "epoch_seconds", "=", "None", ")", ":", "return", "formatdate", "(", "epoch_seconds", ",", "usegmt", "=", "True", ")" ]
[ 141, 0 ]
[ 152, 49 ]
python
en
['en', 'error', 'th']
False
parse_http_date
(date)
Parse a date format as specified by HTTP RFC7231 section 7.1.1.1. The three formats allowed by the RFC are accepted, even if only the first one is still in widespread use. Return an integer expressed in seconds since the epoch, in UTC.
Parse a date format as specified by HTTP RFC7231 section 7.1.1.1.
def parse_http_date(date): """ Parse a date format as specified by HTTP RFC7231 section 7.1.1.1. The three formats allowed by the RFC are accepted, even if only the first one is still in widespread use. Return an integer expressed in seconds since the epoch, in UTC. """ # email.utils.parsedate() does the job for RFC1123 dates; unfortunately # RFC7231 makes it mandatory to support RFC850 dates too. So we roll # our own RFC-compliant parsing. for regex in RFC1123_DATE, RFC850_DATE, ASCTIME_DATE: m = regex.match(date) if m is not None: break else: raise ValueError("%r is not in a valid HTTP date format" % date) try: year = int(m['year']) if year < 100: current_year = datetime.datetime.utcnow().year current_century = current_year - (current_year % 100) if year - (current_year % 100) > 50: # year that appears to be more than 50 years in the future are # interpreted as representing the past. year += current_century - 100 else: year += current_century month = MONTHS.index(m['mon'].lower()) + 1 day = int(m['day']) hour = int(m['hour']) min = int(m['min']) sec = int(m['sec']) result = datetime.datetime(year, month, day, hour, min, sec) return calendar.timegm(result.utctimetuple()) except Exception as exc: raise ValueError("%r is not a valid date" % date) from exc
[ "def", "parse_http_date", "(", "date", ")", ":", "# email.utils.parsedate() does the job for RFC1123 dates; unfortunately", "# RFC7231 makes it mandatory to support RFC850 dates too. So we roll", "# our own RFC-compliant parsing.", "for", "regex", "in", "RFC1123_DATE", ",", "RFC850_DATE", ",", "ASCTIME_DATE", ":", "m", "=", "regex", ".", "match", "(", "date", ")", "if", "m", "is", "not", "None", ":", "break", "else", ":", "raise", "ValueError", "(", "\"%r is not in a valid HTTP date format\"", "%", "date", ")", "try", ":", "year", "=", "int", "(", "m", "[", "'year'", "]", ")", "if", "year", "<", "100", ":", "current_year", "=", "datetime", ".", "datetime", ".", "utcnow", "(", ")", ".", "year", "current_century", "=", "current_year", "-", "(", "current_year", "%", "100", ")", "if", "year", "-", "(", "current_year", "%", "100", ")", ">", "50", ":", "# year that appears to be more than 50 years in the future are", "# interpreted as representing the past.", "year", "+=", "current_century", "-", "100", "else", ":", "year", "+=", "current_century", "month", "=", "MONTHS", ".", "index", "(", "m", "[", "'mon'", "]", ".", "lower", "(", ")", ")", "+", "1", "day", "=", "int", "(", "m", "[", "'day'", "]", ")", "hour", "=", "int", "(", "m", "[", "'hour'", "]", ")", "min", "=", "int", "(", "m", "[", "'min'", "]", ")", "sec", "=", "int", "(", "m", "[", "'sec'", "]", ")", "result", "=", "datetime", ".", "datetime", "(", "year", ",", "month", ",", "day", ",", "hour", ",", "min", ",", "sec", ")", "return", "calendar", ".", "timegm", "(", "result", ".", "utctimetuple", "(", ")", ")", "except", "Exception", "as", "exc", ":", "raise", "ValueError", "(", "\"%r is not a valid date\"", "%", "date", ")", "from", "exc" ]
[ 155, 0 ]
[ 192, 66 ]
python
en
['en', 'error', 'th']
False
parse_http_date_safe
(date)
Same as parse_http_date, but return None if the input is invalid.
Same as parse_http_date, but return None if the input is invalid.
def parse_http_date_safe(date):
    """Best-effort variant of parse_http_date: return None on invalid input
    instead of raising."""
    try:
        return parse_http_date(date)
    except Exception:
        # Deliberate swallow: any failure means "unparseable" -> None.
        return None
[ "def", "parse_http_date_safe", "(", "date", ")", ":", "try", ":", "return", "parse_http_date", "(", "date", ")", "except", "Exception", ":", "pass" ]
[ 195, 0 ]
[ 202, 12 ]
python
en
['en', 'error', 'th']
False
base36_to_int
(s)
Convert a base 36 string to an int. Raise ValueError if the input won't fit into an int.
Convert a base 36 string to an int. Raise ValueError if the input won't fit into an int.
def base36_to_int(s):
    """Convert a base 36 string to an int.

    Raise ValueError if the input won't fit into an int.
    """
    # 13 base36 digits are enough to encode any 64-bit integer; longer input
    # is rejected to prevent overconsumption of server resources.
    if len(s) <= 13:
        return int(s, 36)
    raise ValueError("Base36 input too large")
[ "def", "base36_to_int", "(", "s", ")", ":", "# To prevent overconsumption of server resources, reject any", "# base36 string that is longer than 13 base36 digits (13 digits", "# is sufficient to base36-encode any 64-bit integer)", "if", "len", "(", "s", ")", ">", "13", ":", "raise", "ValueError", "(", "\"Base36 input too large\"", ")", "return", "int", "(", "s", ",", "36", ")" ]
[ 207, 0 ]
[ 217, 21 ]
python
en
['en', 'error', 'th']
False
int_to_base36
(i)
Convert an integer to a base36 string.
Convert an integer to a base36 string.
def int_to_base36(i):
    """Convert a non-negative integer to a base36 string."""
    digits = '0123456789abcdefghijklmnopqrstuvwxyz'
    if i < 0:
        raise ValueError("Negative base36 conversion input.")
    # Single-digit fast path (also covers i == 0).
    if i < 36:
        return digits[i]
    # Collect digits least-significant first, then reverse.
    chunks = []
    while i:
        i, rem = divmod(i, 36)
        chunks.append(digits[rem])
    return ''.join(reversed(chunks))
[ "def", "int_to_base36", "(", "i", ")", ":", "char_set", "=", "'0123456789abcdefghijklmnopqrstuvwxyz'", "if", "i", "<", "0", ":", "raise", "ValueError", "(", "\"Negative base36 conversion input.\"", ")", "if", "i", "<", "36", ":", "return", "char_set", "[", "i", "]", "b36", "=", "''", "while", "i", "!=", "0", ":", "i", ",", "n", "=", "divmod", "(", "i", ",", "36", ")", "b36", "=", "char_set", "[", "n", "]", "+", "b36", "return", "b36" ]
[ 220, 0 ]
[ 231, 14 ]
python
en
['en', 'lb', 'en']
True
urlsafe_base64_encode
(s)
Encode a bytestring to a base64 string for use in URLs. Strip any trailing equal signs.
Encode a bytestring to a base64 string for use in URLs. Strip any trailing equal signs.
def urlsafe_base64_encode(s):
    """Encode a bytestring as a URL-safe base64 str with trailing '='
    padding (and newlines) stripped."""
    encoded = base64.urlsafe_b64encode(s)
    return encoded.rstrip(b'\n=').decode('ascii')
[ "def", "urlsafe_base64_encode", "(", "s", ")", ":", "return", "base64", ".", "urlsafe_b64encode", "(", "s", ")", ".", "rstrip", "(", "b'\\n='", ")", ".", "decode", "(", "'ascii'", ")" ]
[ 234, 0 ]
[ 239, 69 ]
python
en
['en', 'error', 'th']
False
urlsafe_base64_decode
(s)
Decode a base64 encoded string. Add back any trailing equal signs that might have been stripped.
Decode a base64 encoded string. Add back any trailing equal signs that might have been stripped.
def urlsafe_base64_decode(s):
    """Decode a URL-safe base64 string, restoring any '=' padding that
    urlsafe_base64_encode() stripped.

    Raise ValueError on malformed input.
    """
    raw = s.encode()
    # Re-pad to a multiple of 4 before decoding.
    padded = raw.ljust(len(raw) + len(raw) % 4, b'=')
    try:
        return base64.urlsafe_b64decode(padded)
    except (LookupError, BinasciiError) as e:
        raise ValueError(e)
[ "def", "urlsafe_base64_decode", "(", "s", ")", ":", "s", "=", "s", ".", "encode", "(", ")", "try", ":", "return", "base64", ".", "urlsafe_b64decode", "(", "s", ".", "ljust", "(", "len", "(", "s", ")", "+", "len", "(", "s", ")", "%", "4", ",", "b'='", ")", ")", "except", "(", "LookupError", ",", "BinasciiError", ")", "as", "e", ":", "raise", "ValueError", "(", "e", ")" ]
[ 242, 0 ]
[ 251, 27 ]
python
en
['en', 'error', 'th']
False
parse_etags
(etag_str)
Parse a string of ETags given in an If-None-Match or If-Match header as defined by RFC 7232. Return a list of quoted ETags, or ['*'] if all ETags should be matched.
Parse a string of ETags given in an If-None-Match or If-Match header as defined by RFC 7232. Return a list of quoted ETags, or ['*'] if all ETags should be matched.
def parse_etags(etag_str):
    """Parse an If-None-Match / If-Match header value (RFC 7232).

    Return a list of quoted ETags, or ['*'] if all ETags should be matched.
    Invalid entries are silently dropped.
    """
    if etag_str.strip() == '*':
        return ['*']
    # Match each comma-separated candidate individually; keep only the
    # quoted-etag capture group of those that are valid.
    candidates = (ETAG_MATCH.match(piece.strip()) for piece in etag_str.split(','))
    return [m[1] for m in candidates if m]
[ "def", "parse_etags", "(", "etag_str", ")", ":", "if", "etag_str", ".", "strip", "(", ")", "==", "'*'", ":", "return", "[", "'*'", "]", "else", ":", "# Parse each ETag individually, and return any that are valid.", "etag_matches", "=", "(", "ETAG_MATCH", ".", "match", "(", "etag", ".", "strip", "(", ")", ")", "for", "etag", "in", "etag_str", ".", "split", "(", "','", ")", ")", "return", "[", "match", "[", "1", "]", "for", "match", "in", "etag_matches", "if", "match", "]" ]
[ 254, 0 ]
[ 265, 60 ]
python
en
['en', 'error', 'th']
False
quote_etag
(etag_str)
If the provided string is already a quoted ETag, return it. Otherwise, wrap the string in quotes, making it a strong ETag.
If the provided string is already a quoted ETag, return it. Otherwise, wrap the string in quotes, making it a strong ETag.
def quote_etag(etag_str):
    """Return the string unchanged if it is already a valid (possibly weak)
    quoted ETag; otherwise wrap it in double quotes, making a strong ETag."""
    return etag_str if ETAG_MATCH.match(etag_str) else '"%s"' % etag_str
[ "def", "quote_etag", "(", "etag_str", ")", ":", "if", "ETAG_MATCH", ".", "match", "(", "etag_str", ")", ":", "return", "etag_str", "else", ":", "return", "'\"%s\"'", "%", "etag_str" ]
[ 268, 0 ]
[ 276, 32 ]
python
en
['en', 'error', 'th']
False
is_same_domain
(host, pattern)
Return ``True`` if the host is either an exact match or a match to the wildcard pattern. Any pattern beginning with a period matches a domain and all of its subdomains. (e.g. ``.example.com`` matches ``example.com`` and ``foo.example.com``). Anything else is an exact string match.
Return ``True`` if the host is either an exact match or a match to the wildcard pattern.
def is_same_domain(host, pattern):
    """
    Return ``True`` if the host is either an exact match or a match
    to the wildcard pattern.

    A pattern beginning with a period matches that domain and every
    subdomain of it (``.example.com`` matches ``example.com`` and
    ``foo.example.com``); anything else must match exactly.
    """
    if not pattern:
        return False
    pattern = pattern.lower()
    if pattern.startswith('.'):
        # Wildcard: suffix match, or the bare domain with the dot removed.
        return host.endswith(pattern) or host == pattern[1:]
    return pattern == host
[ "def", "is_same_domain", "(", "host", ",", "pattern", ")", ":", "if", "not", "pattern", ":", "return", "False", "pattern", "=", "pattern", ".", "lower", "(", ")", "return", "(", "pattern", "[", "0", "]", "==", "'.'", "and", "(", "host", ".", "endswith", "(", "pattern", ")", "or", "host", "==", "pattern", "[", "1", ":", "]", ")", "or", "pattern", "==", "host", ")" ]
[ 279, 0 ]
[ 295, 5 ]
python
en
['en', 'error', 'th']
False
url_has_allowed_host_and_scheme
(url, allowed_hosts, require_https=False)
Return ``True`` if the url uses an allowed host and a safe scheme. Always return ``False`` on an empty url. If ``require_https`` is ``True``, only 'https' will be considered a valid scheme, as opposed to 'http' and 'https' with the default, ``False``. Note: "True" doesn't entail that a URL is "safe". It may still be e.g. quoted incorrectly. Ensure to also use django.utils.encoding.iri_to_uri() on the path component of untrusted URLs.
Return ``True`` if the url uses an allowed host and a safe scheme.
def url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=False):
    """
    Return ``True`` if the url uses an allowed host and a safe scheme.

    Always return ``False`` on an empty url.

    If ``require_https`` is ``True``, only 'https' is considered a valid
    scheme (the default accepts both 'http' and 'https').

    Note: "True" doesn't entail that a URL is "safe". It may still be e.g.
    quoted incorrectly. Ensure to also use django.utils.encoding.iri_to_uri()
    on the path component of untrusted URLs.
    """
    url = url.strip() if url is not None else url
    if not url:
        return False
    # Normalize allowed_hosts to a set.
    if allowed_hosts is None:
        allowed_hosts = set()
    elif isinstance(allowed_hosts, str):
        allowed_hosts = {allowed_hosts}
    # Chrome treats \ completely as / in paths but it could be part of some
    # basic auth credentials so we need to check both URLs.
    if not _url_has_allowed_host_and_scheme(url, allowed_hosts, require_https=require_https):
        return False
    return _url_has_allowed_host_and_scheme(
        url.replace('\\', '/'), allowed_hosts, require_https=require_https,
    )
[ "def", "url_has_allowed_host_and_scheme", "(", "url", ",", "allowed_hosts", ",", "require_https", "=", "False", ")", ":", "if", "url", "is", "not", "None", ":", "url", "=", "url", ".", "strip", "(", ")", "if", "not", "url", ":", "return", "False", "if", "allowed_hosts", "is", "None", ":", "allowed_hosts", "=", "set", "(", ")", "elif", "isinstance", "(", "allowed_hosts", ",", "str", ")", ":", "allowed_hosts", "=", "{", "allowed_hosts", "}", "# Chrome treats \\ completely as / in paths but it could be part of some", "# basic auth credentials so we need to check both URLs.", "return", "(", "_url_has_allowed_host_and_scheme", "(", "url", ",", "allowed_hosts", ",", "require_https", "=", "require_https", ")", "and", "_url_has_allowed_host_and_scheme", "(", "url", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", ",", "allowed_hosts", ",", "require_https", "=", "require_https", ")", ")" ]
[ 298, 0 ]
[ 324, 5 ]
python
en
['en', 'error', 'th']
False
_urlparse
(url, scheme='', allow_fragments=True)
Parse a URL into 6 components: <scheme>://<netloc>/<path>;<params>?<query>#<fragment> Return a 6-tuple: (scheme, netloc, path, params, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.
Parse a URL into 6 components: <scheme>://<netloc>/<path>;<params>?<query>#<fragment> Return a 6-tuple: (scheme, netloc, path, params, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.
def _urlparse(url, scheme='', allow_fragments=True): """Parse a URL into 6 components: <scheme>://<netloc>/<path>;<params>?<query>#<fragment> Return a 6-tuple: (scheme, netloc, path, params, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.""" url, scheme, _coerce_result = _coerce_args(url, scheme) splitresult = _urlsplit(url, scheme, allow_fragments) scheme, netloc, url, query, fragment = splitresult if scheme in uses_params and ';' in url: url, params = _splitparams(url) else: params = '' result = ParseResult(scheme, netloc, url, params, query, fragment) return _coerce_result(result)
[ "def", "_urlparse", "(", "url", ",", "scheme", "=", "''", ",", "allow_fragments", "=", "True", ")", ":", "url", ",", "scheme", ",", "_coerce_result", "=", "_coerce_args", "(", "url", ",", "scheme", ")", "splitresult", "=", "_urlsplit", "(", "url", ",", "scheme", ",", "allow_fragments", ")", "scheme", ",", "netloc", ",", "url", ",", "query", ",", "fragment", "=", "splitresult", "if", "scheme", "in", "uses_params", "and", "';'", "in", "url", ":", "url", ",", "params", "=", "_splitparams", "(", "url", ")", "else", ":", "params", "=", "''", "result", "=", "ParseResult", "(", "scheme", ",", "netloc", ",", "url", ",", "params", ",", "query", ",", "fragment", ")", "return", "_coerce_result", "(", "result", ")" ]
[ 337, 0 ]
[ 351, 33 ]
python
en
['en', 'en', 'en']
True
_urlsplit
(url, scheme='', allow_fragments=True)
Parse a URL into 5 components: <scheme>://<netloc>/<path>?<query>#<fragment> Return a 5-tuple: (scheme, netloc, path, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.
Parse a URL into 5 components: <scheme>://<netloc>/<path>?<query>#<fragment> Return a 5-tuple: (scheme, netloc, path, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.
def _urlsplit(url, scheme='', allow_fragments=True): """Parse a URL into 5 components: <scheme>://<netloc>/<path>?<query>#<fragment> Return a 5-tuple: (scheme, netloc, path, query, fragment). Note that we don't break the components up in smaller bits (e.g. netloc is a single string) and we don't expand % escapes.""" url, scheme, _coerce_result = _coerce_args(url, scheme) netloc = query = fragment = '' i = url.find(':') if i > 0: for c in url[:i]: if c not in scheme_chars: break else: scheme, url = url[:i].lower(), url[i + 1:] if url[:2] == '//': netloc, url = _splitnetloc(url, 2) if (('[' in netloc and ']' not in netloc) or (']' in netloc and '[' not in netloc)): raise ValueError("Invalid IPv6 URL") if allow_fragments and '#' in url: url, fragment = url.split('#', 1) if '?' in url: url, query = url.split('?', 1) v = SplitResult(scheme, netloc, url, query, fragment) return _coerce_result(v)
[ "def", "_urlsplit", "(", "url", ",", "scheme", "=", "''", ",", "allow_fragments", "=", "True", ")", ":", "url", ",", "scheme", ",", "_coerce_result", "=", "_coerce_args", "(", "url", ",", "scheme", ")", "netloc", "=", "query", "=", "fragment", "=", "''", "i", "=", "url", ".", "find", "(", "':'", ")", "if", "i", ">", "0", ":", "for", "c", "in", "url", "[", ":", "i", "]", ":", "if", "c", "not", "in", "scheme_chars", ":", "break", "else", ":", "scheme", ",", "url", "=", "url", "[", ":", "i", "]", ".", "lower", "(", ")", ",", "url", "[", "i", "+", "1", ":", "]", "if", "url", "[", ":", "2", "]", "==", "'//'", ":", "netloc", ",", "url", "=", "_splitnetloc", "(", "url", ",", "2", ")", "if", "(", "(", "'['", "in", "netloc", "and", "']'", "not", "in", "netloc", ")", "or", "(", "']'", "in", "netloc", "and", "'['", "not", "in", "netloc", ")", ")", ":", "raise", "ValueError", "(", "\"Invalid IPv6 URL\"", ")", "if", "allow_fragments", "and", "'#'", "in", "url", ":", "url", ",", "fragment", "=", "url", ".", "split", "(", "'#'", ",", "1", ")", "if", "'?'", "in", "url", ":", "url", ",", "query", "=", "url", ".", "split", "(", "'?'", ",", "1", ")", "v", "=", "SplitResult", "(", "scheme", ",", "netloc", ",", "url", ",", "query", ",", "fragment", ")", "return", "_coerce_result", "(", "v", ")" ]
[ 356, 0 ]
[ 382, 28 ]
python
en
['en', 'en', 'en']
True
parse_qsl
( qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&', )
Return a list of key/value tuples parsed from query string. Backport of urllib.parse.parse_qsl() from Python 3.8.8. Copyright (C) 2021 Python Software Foundation (see LICENSE.python). ---- Parse a query given as a string argument. Arguments: qs: percent-encoded query string to be parsed keep_blank_values: flag indicating whether blank values in percent-encoded queries should be treated as blank strings. A true value indicates that blanks should be retained as blank strings. The default false value indicates that blank values are to be ignored and treated as if they were not included. strict_parsing: flag indicating what to do with parsing errors. If false (the default), errors are silently ignored. If true, errors raise a ValueError exception. encoding and errors: specify how to decode percent-encoded sequences into Unicode characters, as accepted by the bytes.decode() method. max_num_fields: int. If set, then throws a ValueError if there are more than n fields read by parse_qsl(). separator: str. The symbol to use for separating the query arguments. Defaults to &. Returns a list, as G-d intended.
Return a list of key/value tuples parsed from query string.
def parse_qsl( qs, keep_blank_values=False, strict_parsing=False, encoding='utf-8', errors='replace', max_num_fields=None, separator='&', ): """ Return a list of key/value tuples parsed from query string. Backport of urllib.parse.parse_qsl() from Python 3.8.8. Copyright (C) 2021 Python Software Foundation (see LICENSE.python). ---- Parse a query given as a string argument. Arguments: qs: percent-encoded query string to be parsed keep_blank_values: flag indicating whether blank values in percent-encoded queries should be treated as blank strings. A true value indicates that blanks should be retained as blank strings. The default false value indicates that blank values are to be ignored and treated as if they were not included. strict_parsing: flag indicating what to do with parsing errors. If false (the default), errors are silently ignored. If true, errors raise a ValueError exception. encoding and errors: specify how to decode percent-encoded sequences into Unicode characters, as accepted by the bytes.decode() method. max_num_fields: int. If set, then throws a ValueError if there are more than n fields read by parse_qsl(). separator: str. The symbol to use for separating the query arguments. Defaults to &. Returns a list, as G-d intended. """ qs, _coerce_result = _coerce_args(qs) if not separator or not isinstance(separator, (str, bytes)): raise ValueError('Separator must be of type string or bytes.') # If max_num_fields is defined then check that the number of fields is less # than max_num_fields. This prevents a memory exhaustion DOS attack via # post bodies with many fields. 
if max_num_fields is not None: num_fields = 1 + qs.count(separator) if max_num_fields < num_fields: raise ValueError('Max number of fields exceeded') pairs = [s1 for s1 in qs.split(separator)] r = [] for name_value in pairs: if not name_value and not strict_parsing: continue nv = name_value.split('=', 1) if len(nv) != 2: if strict_parsing: raise ValueError("bad query field: %r" % (name_value,)) # Handle case of a control-name with no equal sign. if keep_blank_values: nv.append('') else: continue if len(nv[1]) or keep_blank_values: name = nv[0].replace('+', ' ') name = unquote(name, encoding=encoding, errors=errors) name = _coerce_result(name) value = nv[1].replace('+', ' ') value = unquote(value, encoding=encoding, errors=errors) value = _coerce_result(value) r.append((name, value)) return r
[ "def", "parse_qsl", "(", "qs", ",", "keep_blank_values", "=", "False", ",", "strict_parsing", "=", "False", ",", "encoding", "=", "'utf-8'", ",", "errors", "=", "'replace'", ",", "max_num_fields", "=", "None", ",", "separator", "=", "'&'", ",", ")", ":", "qs", ",", "_coerce_result", "=", "_coerce_args", "(", "qs", ")", "if", "not", "separator", "or", "not", "isinstance", "(", "separator", ",", "(", "str", ",", "bytes", ")", ")", ":", "raise", "ValueError", "(", "'Separator must be of type string or bytes.'", ")", "# If max_num_fields is defined then check that the number of fields is less", "# than max_num_fields. This prevents a memory exhaustion DOS attack via", "# post bodies with many fields.", "if", "max_num_fields", "is", "not", "None", ":", "num_fields", "=", "1", "+", "qs", ".", "count", "(", "separator", ")", "if", "max_num_fields", "<", "num_fields", ":", "raise", "ValueError", "(", "'Max number of fields exceeded'", ")", "pairs", "=", "[", "s1", "for", "s1", "in", "qs", ".", "split", "(", "separator", ")", "]", "r", "=", "[", "]", "for", "name_value", "in", "pairs", ":", "if", "not", "name_value", "and", "not", "strict_parsing", ":", "continue", "nv", "=", "name_value", ".", "split", "(", "'='", ",", "1", ")", "if", "len", "(", "nv", ")", "!=", "2", ":", "if", "strict_parsing", ":", "raise", "ValueError", "(", "\"bad query field: %r\"", "%", "(", "name_value", ",", ")", ")", "# Handle case of a control-name with no equal sign.", "if", "keep_blank_values", ":", "nv", ".", "append", "(", "''", ")", "else", ":", "continue", "if", "len", "(", "nv", "[", "1", "]", ")", "or", "keep_blank_values", ":", "name", "=", "nv", "[", "0", "]", ".", "replace", "(", "'+'", ",", "' '", ")", "name", "=", "unquote", "(", "name", ",", "encoding", "=", "encoding", ",", "errors", "=", "errors", ")", "name", "=", "_coerce_result", "(", "name", ")", "value", "=", "nv", "[", "1", "]", ".", "replace", "(", "'+'", ",", "' '", ")", "value", "=", "unquote", "(", "value", ",", 
"encoding", "=", "encoding", ",", "errors", "=", "errors", ")", "value", "=", "_coerce_result", "(", "value", ")", "r", ".", "append", "(", "(", "name", ",", "value", ")", ")", "return", "r" ]
[ 415, 0 ]
[ 489, 12 ]
python
en
['en', 'error', 'th']
False
escape_leading_slashes
(url)
If redirecting to an absolute path (two leading slashes), a slash must be escaped to prevent browsers from handling the path as schemaless and redirecting to another host.
If redirecting to an absolute path (two leading slashes), a slash must be escaped to prevent browsers from handling the path as schemaless and redirecting to another host.
def escape_leading_slashes(url): """ If redirecting to an absolute path (two leading slashes), a slash must be escaped to prevent browsers from handling the path as schemaless and redirecting to another host. """ if url.startswith('//'): url = '/%2F{}'.format(url[2:]) return url
[ "def", "escape_leading_slashes", "(", "url", ")", ":", "if", "url", ".", "startswith", "(", "'//'", ")", ":", "url", "=", "'/%2F{}'", ".", "format", "(", "url", "[", "2", ":", "]", ")", "return", "url" ]
[ 492, 0 ]
[ 500, 14 ]
python
en
['en', 'error', 'th']
False
ForeignObjectRel.target_field
(self)
When filtering against this relation, return the field on the remote model against which the filtering should happen.
When filtering against this relation, return the field on the remote model against which the filtering should happen.
def target_field(self): """ When filtering against this relation, return the field on the remote model against which the filtering should happen. """ target_fields = self.get_path_info()[-1].target_fields if len(target_fields) > 1: raise exceptions.FieldError("Can't use target_field for multicolumn relations.") return target_fields[0]
[ "def", "target_field", "(", "self", ")", ":", "target_fields", "=", "self", ".", "get_path_info", "(", ")", "[", "-", "1", "]", ".", "target_fields", "if", "len", "(", "target_fields", ")", ">", "1", ":", "raise", "exceptions", ".", "FieldError", "(", "\"Can't use target_field for multicolumn relations.\"", ")", "return", "target_fields", "[", "0", "]" ]
[ 68, 4 ]
[ 76, 31 ]
python
en
['en', 'error', 'th']
False
ForeignObjectRel.get_choices
( self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=(), )
Return choices with a default blank choices included, for use as <select> choices for this field. Analog of django.db.models.fields.Field.get_choices(), provided initially for utilization by RelatedFieldListFilter.
Return choices with a default blank choices included, for use as <select> choices for this field.
def get_choices( self, include_blank=True, blank_choice=BLANK_CHOICE_DASH, limit_choices_to=None, ordering=(), ): """ Return choices with a default blank choices included, for use as <select> choices for this field. Analog of django.db.models.fields.Field.get_choices(), provided initially for utilization by RelatedFieldListFilter. """ limit_choices_to = limit_choices_to or self.limit_choices_to qs = self.related_model._default_manager.complex_filter(limit_choices_to) if ordering: qs = qs.order_by(*ordering) return (blank_choice if include_blank else []) + [ (x.pk, str(x)) for x in qs ]
[ "def", "get_choices", "(", "self", ",", "include_blank", "=", "True", ",", "blank_choice", "=", "BLANK_CHOICE_DASH", ",", "limit_choices_to", "=", "None", ",", "ordering", "=", "(", ")", ",", ")", ":", "limit_choices_to", "=", "limit_choices_to", "or", "self", ".", "limit_choices_to", "qs", "=", "self", ".", "related_model", ".", "_default_manager", ".", "complex_filter", "(", "limit_choices_to", ")", "if", "ordering", ":", "qs", "=", "qs", ".", "order_by", "(", "*", "ordering", ")", "return", "(", "blank_choice", "if", "include_blank", "else", "[", "]", ")", "+", "[", "(", "x", ".", "pk", ",", "str", "(", "x", ")", ")", "for", "x", "in", "qs", "]" ]
[ 140, 4 ]
[ 157, 9 ]
python
en
['en', 'error', 'th']
False
ForeignObjectRel.is_hidden
(self)
Should the related object be hidden?
Should the related object be hidden?
def is_hidden(self): """Should the related object be hidden?""" return bool(self.related_name) and self.related_name[-1] == '+'
[ "def", "is_hidden", "(", "self", ")", ":", "return", "bool", "(", "self", ".", "related_name", ")", "and", "self", ".", "related_name", "[", "-", "1", "]", "==", "'+'" ]
[ 159, 4 ]
[ 161, 71 ]
python
en
['en', 'en', 'en']
True
ForeignObjectRel.set_field_name
(self)
Set the related field's name, this is not available until later stages of app loading, so set_field_name is called from set_attributes_from_rel()
Set the related field's name, this is not available until later stages of app loading, so set_field_name is called from set_attributes_from_rel()
def set_field_name(self): """ Set the related field's name, this is not available until later stages of app loading, so set_field_name is called from set_attributes_from_rel() """ # By default foreign object doesn't relate to any remote field (for # example custom multicolumn joins currently have no remote field). self.field_name = None
[ "def", "set_field_name", "(", "self", ")", ":", "# By default foreign object doesn't relate to any remote field (for", "# example custom multicolumn joins currently have no remote field).", "self", ".", "field_name", "=", "None" ]
[ 169, 4 ]
[ 177, 30 ]
python
en
['en', 'error', 'th']
False
ForeignObjectRel.get_cache_name
(self)
Return the name of the cache key to use for storing an instance of the forward model on the reverse model.
Return the name of the cache key to use for storing an instance of the forward model on the reverse model.
def get_cache_name(self): """ Return the name of the cache key to use for storing an instance of the forward model on the reverse model. """ return self.get_accessor_name()
[ "def", "get_cache_name", "(", "self", ")", ":", "return", "self", ".", "get_accessor_name", "(", ")" ]
[ 199, 4 ]
[ 204, 39 ]
python
en
['en', 'error', 'th']
False
ManyToOneRel.get_related_field
(self)
Return the Field in the 'to' object to which this relationship is tied.
Return the Field in the 'to' object to which this relationship is tied.
def get_related_field(self): """ Return the Field in the 'to' object to which this relationship is tied. """ field = self.model._meta.get_field(self.field_name) if not field.concrete: raise exceptions.FieldDoesNotExist("No related field named '%s'" % self.field_name) return field
[ "def", "get_related_field", "(", "self", ")", ":", "field", "=", "self", ".", "model", ".", "_meta", ".", "get_field", "(", "self", ".", "field_name", ")", "if", "not", "field", ".", "concrete", ":", "raise", "exceptions", ".", "FieldDoesNotExist", "(", "\"No related field named '%s'\"", "%", "self", ".", "field_name", ")", "return", "field" ]
[ 244, 4 ]
[ 251, 20 ]
python
en
['en', 'error', 'th']
False
ManyToManyRel.get_related_field
(self)
Return the field in the 'to' object to which this relationship is tied. Provided for symmetry with ManyToOneRel.
Return the field in the 'to' object to which this relationship is tied. Provided for symmetry with ManyToOneRel.
def get_related_field(self): """ Return the field in the 'to' object to which this relationship is tied. Provided for symmetry with ManyToOneRel. """ opts = self.through._meta if self.through_fields: field = opts.get_field(self.through_fields[0]) else: for field in opts.fields: rel = getattr(field, 'remote_field', None) if rel and rel.model == self.model: break return field.foreign_related_fields[0]
[ "def", "get_related_field", "(", "self", ")", ":", "opts", "=", "self", ".", "through", ".", "_meta", "if", "self", ".", "through_fields", ":", "field", "=", "opts", ".", "get_field", "(", "self", ".", "through_fields", "[", "0", "]", ")", "else", ":", "for", "field", "in", "opts", ".", "fields", ":", "rel", "=", "getattr", "(", "field", ",", "'remote_field'", ",", "None", ")", "if", "rel", "and", "rel", ".", "model", "==", "self", ".", "model", ":", "break", "return", "field", ".", "foreign_related_fields", "[", "0", "]" ]
[ 316, 4 ]
[ 329, 46 ]
python
en
['en', 'error', 'th']
False
_verify_python3_env
()
Ensures that the environment is good for unicode on Python 3.
Ensures that the environment is good for unicode on Python 3.
def _verify_python3_env(): """Ensures that the environment is good for unicode on Python 3.""" if PY2: return try: import locale fs_enc = codecs.lookup(locale.getpreferredencoding()).name except Exception: fs_enc = 'ascii' if fs_enc != 'ascii': return extra = '' if os.name == 'posix': import subprocess try: rv = subprocess.Popen(['locale', '-a'], stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()[0] except OSError: rv = b'' good_locales = set() has_c_utf8 = False # Make sure we're operating on text here. if isinstance(rv, bytes): rv = rv.decode('ascii', 'replace') for line in rv.splitlines(): locale = line.strip() if locale.lower().endswith(('.utf-8', '.utf8')): good_locales.add(locale) if locale.lower() in ('c.utf8', 'c.utf-8'): has_c_utf8 = True extra += '\n\n' if not good_locales: extra += ( 'Additional information: on this system no suitable UTF-8\n' 'locales were discovered. This most likely requires resolving\n' 'by reconfiguring the locale system.' ) elif has_c_utf8: extra += ( 'This system supports the C.UTF-8 locale which is recommended.\n' 'You might be able to resolve your issue by exporting the\n' 'following environment variables:\n\n' ' export LC_ALL=C.UTF-8\n' ' export LANG=C.UTF-8' ) else: extra += ( 'This system lists a couple of UTF-8 supporting locales that\n' 'you can pick from. The following suitable locales were\n' 'discovered: %s' ) % ', '.join(sorted(good_locales)) bad_locale = None for locale in os.environ.get('LC_ALL'), os.environ.get('LANG'): if locale and locale.lower().endswith(('.utf-8', '.utf8')): bad_locale = locale if locale is not None: break if bad_locale is not None: extra += ( '\n\nClick discovered that you exported a UTF-8 locale\n' 'but the locale system could not pick up from it because\n' 'it does not exist. The exported locale is "%s" but it\n' 'is not supported' ) % bad_locale raise RuntimeError( 'Click will abort further execution because Python 3 was' ' configured to use ASCII as encoding for the environment.' 
' Consult https://click.palletsprojects.com/en/7.x/python3/ for' ' mitigation steps.' + extra )
[ "def", "_verify_python3_env", "(", ")", ":", "if", "PY2", ":", "return", "try", ":", "import", "locale", "fs_enc", "=", "codecs", ".", "lookup", "(", "locale", ".", "getpreferredencoding", "(", ")", ")", ".", "name", "except", "Exception", ":", "fs_enc", "=", "'ascii'", "if", "fs_enc", "!=", "'ascii'", ":", "return", "extra", "=", "''", "if", "os", ".", "name", "==", "'posix'", ":", "import", "subprocess", "try", ":", "rv", "=", "subprocess", ".", "Popen", "(", "[", "'locale'", ",", "'-a'", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ")", ".", "communicate", "(", ")", "[", "0", "]", "except", "OSError", ":", "rv", "=", "b''", "good_locales", "=", "set", "(", ")", "has_c_utf8", "=", "False", "# Make sure we're operating on text here.", "if", "isinstance", "(", "rv", ",", "bytes", ")", ":", "rv", "=", "rv", ".", "decode", "(", "'ascii'", ",", "'replace'", ")", "for", "line", "in", "rv", ".", "splitlines", "(", ")", ":", "locale", "=", "line", ".", "strip", "(", ")", "if", "locale", ".", "lower", "(", ")", ".", "endswith", "(", "(", "'.utf-8'", ",", "'.utf8'", ")", ")", ":", "good_locales", ".", "add", "(", "locale", ")", "if", "locale", ".", "lower", "(", ")", "in", "(", "'c.utf8'", ",", "'c.utf-8'", ")", ":", "has_c_utf8", "=", "True", "extra", "+=", "'\\n\\n'", "if", "not", "good_locales", ":", "extra", "+=", "(", "'Additional information: on this system no suitable UTF-8\\n'", "'locales were discovered. This most likely requires resolving\\n'", "'by reconfiguring the locale system.'", ")", "elif", "has_c_utf8", ":", "extra", "+=", "(", "'This system supports the C.UTF-8 locale which is recommended.\\n'", "'You might be able to resolve your issue by exporting the\\n'", "'following environment variables:\\n\\n'", "' export LC_ALL=C.UTF-8\\n'", "' export LANG=C.UTF-8'", ")", "else", ":", "extra", "+=", "(", "'This system lists a couple of UTF-8 supporting locales that\\n'", "'you can pick from. 
The following suitable locales were\\n'", "'discovered: %s'", ")", "%", "', '", ".", "join", "(", "sorted", "(", "good_locales", ")", ")", "bad_locale", "=", "None", "for", "locale", "in", "os", ".", "environ", ".", "get", "(", "'LC_ALL'", ")", ",", "os", ".", "environ", ".", "get", "(", "'LANG'", ")", ":", "if", "locale", "and", "locale", ".", "lower", "(", ")", ".", "endswith", "(", "(", "'.utf-8'", ",", "'.utf8'", ")", ")", ":", "bad_locale", "=", "locale", "if", "locale", "is", "not", "None", ":", "break", "if", "bad_locale", "is", "not", "None", ":", "extra", "+=", "(", "'\\n\\nClick discovered that you exported a UTF-8 locale\\n'", "'but the locale system could not pick up from it because\\n'", "'it does not exist. The exported locale is \"%s\" but it\\n'", "'is not supported'", ")", "%", "bad_locale", "raise", "RuntimeError", "(", "'Click will abort further execution because Python 3 was'", "' configured to use ASCII as encoding for the environment.'", "' Consult https://click.palletsprojects.com/en/7.x/python3/ for'", "' mitigation steps.'", "+", "extra", ")" ]
[ 49, 0 ]
[ 124, 5 ]
python
en
['en', 'en', 'en']
True
pre_sql_setup
(self)
Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time.
Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time.
def pre_sql_setup(self): """ Do any necessary class setup immediately prior to producing SQL. This is for things that can't necessarily be done in __init__ because we might not have all the pieces in place at that time. """ self.setup_query() order_by = self.get_order_by() self.where, self.having = self.query.where.split_having() extra_select = self.get_extra_select(order_by, self.select) self.has_extra_select = bool(extra_select) group_by = self.get_group_by(self.select + extra_select, order_by) return extra_select, order_by, group_by
[ "def", "pre_sql_setup", "(", "self", ")", ":", "self", ".", "setup_query", "(", ")", "order_by", "=", "self", ".", "get_order_by", "(", ")", "self", ".", "where", ",", "self", ".", "having", "=", "self", ".", "query", ".", "where", ".", "split_having", "(", ")", "extra_select", "=", "self", ".", "get_extra_select", "(", "order_by", ",", "self", ".", "select", ")", "self", ".", "has_extra_select", "=", "bool", "(", "extra_select", ")", "group_by", "=", "self", ".", "get_group_by", "(", "self", ".", "select", "+", "extra_select", ",", "order_by", ")", "return", "extra_select", ",", "order_by", ",", "group_by" ]
[ 48, 4 ]
[ 60, 47 ]
python
en
['en', 'error', 'th']
False
get_group_by
(self, select, order_by)
Return a list of 2-tuples of form (sql, params). The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, then it is correct".
Return a list of 2-tuples of form (sql, params).
def get_group_by(self, select, order_by): """ Return a list of 2-tuples of form (sql, params). The logic of what exactly the GROUP BY clause contains is hard to describe in other words than "if it passes the test suite, then it is correct". """ # Some examples: # SomeModel.objects.annotate(Count('somecol')) # GROUP BY: all fields of the model # # SomeModel.objects.values('name').annotate(Count('somecol')) # GROUP BY: name # # SomeModel.objects.annotate(Count('somecol')).values('name') # GROUP BY: all cols of the model # # SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # SomeModel.objects.values('name').annotate(Count('somecol')).values('pk') # GROUP BY: name, pk # # In fact, the self.query.group_by is the minimal set to GROUP BY. It # can't be ever restricted to a smaller set, but additional columns in # HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately # the end result is that it is impossible to force the query to have # a chosen GROUP BY clause - you can almost do this by using the form: # .values(*wanted_cols).annotate(AnAggregate()) # but any later annotations, extra selects, values calls that # refer some column outside of the wanted_cols, order_by, or even # filter calls can alter the GROUP BY clause. # The query.group_by is either None (no GROUP BY at all), True # (group by select fields), or a list of expressions to be added # to the group by. if self.query.group_by is None: return [] expressions = [] if self.query.group_by is not True: # If the group by is set to a list (by .values() call most likely), # then we need to add everything in it to the GROUP BY clause. # Backwards compatibility hack for setting query.group_by. Remove # when we have public API way of forcing the GROUP BY clause. # Converts string references to expressions. 
for expr in self.query.group_by: if not hasattr(expr, 'as_sql'): expressions.append(self.query.resolve_ref(expr)) else: expressions.append(expr) # Note that even if the group_by is set, it is only the minimal # set to group by. So, we need to add cols in select, order_by, and # having into the select in any case. ref_sources = { expr.source for expr in expressions if isinstance(expr, Ref) } for expr, _, _ in select: # Skip members of the select clause that are already included # by reference. if expr in ref_sources: continue cols = expr.get_group_by_cols() for col in cols: expressions.append(col) for expr, (sql, params, is_ref) in order_by: # Skip References to the select clause, as all expressions in the # select clause are already part of the group by. if not is_ref: expressions.extend(expr.get_group_by_cols()) having_group_by = self.having.get_group_by_cols() if self.having else () for expr in having_group_by: expressions.append(expr) result = [] seen = set() expressions = self.collapse_group_by(expressions, having_group_by) for expr in expressions: sql, params = self.compile(expr) sql, params = expr.select_format(self, sql, params) params_hash = make_hashable(params) if (sql, params_hash) not in seen: result.append((sql, params)) seen.add((sql, params_hash)) return result
[ "def", "get_group_by", "(", "self", ",", "select", ",", "order_by", ")", ":", "# Some examples:", "# SomeModel.objects.annotate(Count('somecol'))", "# GROUP BY: all fields of the model", "#", "# SomeModel.objects.values('name').annotate(Count('somecol'))", "# GROUP BY: name", "#", "# SomeModel.objects.annotate(Count('somecol')).values('name')", "# GROUP BY: all cols of the model", "#", "# SomeModel.objects.values('name', 'pk').annotate(Count('somecol')).values('pk')", "# GROUP BY: name, pk", "#", "# SomeModel.objects.values('name').annotate(Count('somecol')).values('pk')", "# GROUP BY: name, pk", "#", "# In fact, the self.query.group_by is the minimal set to GROUP BY. It", "# can't be ever restricted to a smaller set, but additional columns in", "# HAVING, ORDER BY, and SELECT clauses are added to it. Unfortunately", "# the end result is that it is impossible to force the query to have", "# a chosen GROUP BY clause - you can almost do this by using the form:", "# .values(*wanted_cols).annotate(AnAggregate())", "# but any later annotations, extra selects, values calls that", "# refer some column outside of the wanted_cols, order_by, or even", "# filter calls can alter the GROUP BY clause.", "# The query.group_by is either None (no GROUP BY at all), True", "# (group by select fields), or a list of expressions to be added", "# to the group by.", "if", "self", ".", "query", ".", "group_by", "is", "None", ":", "return", "[", "]", "expressions", "=", "[", "]", "if", "self", ".", "query", ".", "group_by", "is", "not", "True", ":", "# If the group by is set to a list (by .values() call most likely),", "# then we need to add everything in it to the GROUP BY clause.", "# Backwards compatibility hack for setting query.group_by. 
Remove", "# when we have public API way of forcing the GROUP BY clause.", "# Converts string references to expressions.", "for", "expr", "in", "self", ".", "query", ".", "group_by", ":", "if", "not", "hasattr", "(", "expr", ",", "'as_sql'", ")", ":", "expressions", ".", "append", "(", "self", ".", "query", ".", "resolve_ref", "(", "expr", ")", ")", "else", ":", "expressions", ".", "append", "(", "expr", ")", "# Note that even if the group_by is set, it is only the minimal", "# set to group by. So, we need to add cols in select, order_by, and", "# having into the select in any case.", "ref_sources", "=", "{", "expr", ".", "source", "for", "expr", "in", "expressions", "if", "isinstance", "(", "expr", ",", "Ref", ")", "}", "for", "expr", ",", "_", ",", "_", "in", "select", ":", "# Skip members of the select clause that are already included", "# by reference.", "if", "expr", "in", "ref_sources", ":", "continue", "cols", "=", "expr", ".", "get_group_by_cols", "(", ")", "for", "col", "in", "cols", ":", "expressions", ".", "append", "(", "col", ")", "for", "expr", ",", "(", "sql", ",", "params", ",", "is_ref", ")", "in", "order_by", ":", "# Skip References to the select clause, as all expressions in the", "# select clause are already part of the group by.", "if", "not", "is_ref", ":", "expressions", ".", "extend", "(", "expr", ".", "get_group_by_cols", "(", ")", ")", "having_group_by", "=", "self", ".", "having", ".", "get_group_by_cols", "(", ")", "if", "self", ".", "having", "else", "(", ")", "for", "expr", "in", "having_group_by", ":", "expressions", ".", "append", "(", "expr", ")", "result", "=", "[", "]", "seen", "=", "set", "(", ")", "expressions", "=", "self", ".", "collapse_group_by", "(", "expressions", ",", "having_group_by", ")", "for", "expr", "in", "expressions", ":", "sql", ",", "params", "=", "self", ".", "compile", "(", "expr", ")", "sql", ",", "params", "=", "expr", ".", "select_format", "(", "self", ",", "sql", ",", "params", ")", "params_hash", "=", 
"make_hashable", "(", "params", ")", "if", "(", "sql", ",", "params_hash", ")", "not", "in", "seen", ":", "result", ".", "append", "(", "(", "sql", ",", "params", ")", ")", "seen", ".", "add", "(", "(", "sql", ",", "params_hash", ")", ")", "return", "result" ]
[ 62, 4 ]
[ 146, 21 ]
python
en
['en', 'error', 'th']
False
get_select
(self)
Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations The (sql, params) is what the expression will produce, and alias is the "AS alias" for the column (possibly None). The klass_info structure contains the following information: - The base model of the query. - Which columns for that model are present in the query (by position of the select clause). - related_klass_infos: [f, klass_info] to descent into The annotations is a dictionary of {'attname': column position} values.
Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations
def get_select(self): """ Return three values: - a list of 3-tuples of (expression, (sql, params), alias) - a klass_info structure, - a dictionary of annotations The (sql, params) is what the expression will produce, and alias is the "AS alias" for the column (possibly None). The klass_info structure contains the following information: - The base model of the query. - Which columns for that model are present in the query (by position of the select clause). - related_klass_infos: [f, klass_info] to descent into The annotations is a dictionary of {'attname': column position} values. """ select = [] klass_info = None annotations = {} select_idx = 0 for alias, (sql, params) in self.query.extra_select.items(): annotations[alias] = select_idx select.append((RawSQL(sql, params), alias)) select_idx += 1 assert not (self.query.select and self.query.default_cols) if self.query.default_cols: cols = self.get_default_columns() else: # self.query.select is a special case. These columns never go to # any model. cols = self.query.select if cols: select_list = [] for col in cols: select_list.append(select_idx) select.append((col, None)) select_idx += 1 klass_info = { 'model': self.query.model, 'select_fields': select_list, } for alias, annotation in self.query.annotation_select.items(): annotations[alias] = select_idx select.append((annotation, alias)) select_idx += 1 if self.query.select_related: related_klass_infos = self.get_related_selections(select) klass_info['related_klass_infos'] = related_klass_infos def get_select_from_parent(klass_info): for ki in klass_info['related_klass_infos']: if ki['from_parent']: ki['select_fields'] = (klass_info['select_fields'] + ki['select_fields']) get_select_from_parent(ki) get_select_from_parent(klass_info) ret = [] for col, alias in select: try: sql, params = self.compile(col) except EmptyResultSet: # Select a predicate that's always False. 
sql, params = '0', () else: sql, params = col.select_format(self, sql, params) ret.append((col, (sql, params), alias)) return ret, klass_info, annotations
[ "def", "get_select", "(", "self", ")", ":", "select", "=", "[", "]", "klass_info", "=", "None", "annotations", "=", "{", "}", "select_idx", "=", "0", "for", "alias", ",", "(", "sql", ",", "params", ")", "in", "self", ".", "query", ".", "extra_select", ".", "items", "(", ")", ":", "annotations", "[", "alias", "]", "=", "select_idx", "select", ".", "append", "(", "(", "RawSQL", "(", "sql", ",", "params", ")", ",", "alias", ")", ")", "select_idx", "+=", "1", "assert", "not", "(", "self", ".", "query", ".", "select", "and", "self", ".", "query", ".", "default_cols", ")", "if", "self", ".", "query", ".", "default_cols", ":", "cols", "=", "self", ".", "get_default_columns", "(", ")", "else", ":", "# self.query.select is a special case. These columns never go to", "# any model.", "cols", "=", "self", ".", "query", ".", "select", "if", "cols", ":", "select_list", "=", "[", "]", "for", "col", "in", "cols", ":", "select_list", ".", "append", "(", "select_idx", ")", "select", ".", "append", "(", "(", "col", ",", "None", ")", ")", "select_idx", "+=", "1", "klass_info", "=", "{", "'model'", ":", "self", ".", "query", ".", "model", ",", "'select_fields'", ":", "select_list", ",", "}", "for", "alias", ",", "annotation", "in", "self", ".", "query", ".", "annotation_select", ".", "items", "(", ")", ":", "annotations", "[", "alias", "]", "=", "select_idx", "select", ".", "append", "(", "(", "annotation", ",", "alias", ")", ")", "select_idx", "+=", "1", "if", "self", ".", "query", ".", "select_related", ":", "related_klass_infos", "=", "self", ".", "get_related_selections", "(", "select", ")", "klass_info", "[", "'related_klass_infos'", "]", "=", "related_klass_infos", "def", "get_select_from_parent", "(", "klass_info", ")", ":", "for", "ki", "in", "klass_info", "[", "'related_klass_infos'", "]", ":", "if", "ki", "[", "'from_parent'", "]", ":", "ki", "[", "'select_fields'", "]", "=", "(", "klass_info", "[", "'select_fields'", "]", "+", "ki", "[", "'select_fields'", "]", ")", 
"get_select_from_parent", "(", "ki", ")", "get_select_from_parent", "(", "klass_info", ")", "ret", "=", "[", "]", "for", "col", ",", "alias", "in", "select", ":", "try", ":", "sql", ",", "params", "=", "self", ".", "compile", "(", "col", ")", "except", "EmptyResultSet", ":", "# Select a predicate that's always False.", "sql", ",", "params", "=", "'0'", ",", "(", ")", "else", ":", "sql", ",", "params", "=", "col", ".", "select_format", "(", "self", ",", "sql", ",", "params", ")", "ret", ".", "append", "(", "(", "col", ",", "(", "sql", ",", "params", ")", ",", "alias", ")", ")", "return", "ret", ",", "klass_info", ",", "annotations" ]
[ 198, 4 ]
[ 268, 43 ]
python
en
['en', 'error', 'th']
False
get_order_by
(self)
Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it can add aliases to clauses that do not yet have one, or it can add totally new select clauses).
Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the ORDER BY clause.
def get_order_by(self): """ Return a list of 2-tuples of form (expr, (sql, params, is_ref)) for the ORDER BY clause. The order_by clause can alter the select clause (for example it can add aliases to clauses that do not yet have one, or it can add totally new select clauses). """ if self.query.extra_order_by: ordering = self.query.extra_order_by elif not self.query.default_ordering: ordering = self.query.order_by elif self.query.order_by: ordering = self.query.order_by elif self.query.get_meta().ordering: ordering = self.query.get_meta().ordering self._meta_ordering = ordering else: ordering = [] if self.query.standard_ordering: asc, desc = ORDER_DIR['ASC'] else: asc, desc = ORDER_DIR['DESC'] order_by = [] for field in ordering: if hasattr(field, 'resolve_expression'): if isinstance(field, Value): # output_field must be resolved for constants. field = Cast(field, field.output_field) if not isinstance(field, OrderBy): field = field.asc() if not self.query.standard_ordering: field = field.copy() field.reverse_ordering() order_by.append((field, False)) continue if field == '?': # random order_by.append((OrderBy(Random()), False)) continue col, order = get_order_dir(field, asc) descending = order == 'DESC' if col in self.query.annotation_select: # Reference to expression in SELECT clause order_by.append(( OrderBy(Ref(col, self.query.annotation_select[col]), descending=descending), True)) continue if col in self.query.annotations: # References to an expression which is masked out of the SELECT # clause. if self.query.combinator and self.select: # Don't use the resolved annotation because other # combinated queries might define it differently. expr = F(col) else: expr = self.query.annotations[col] if isinstance(expr, Value): # output_field must be resolved for constants. expr = Cast(expr, expr.output_field) order_by.append((OrderBy(expr, descending=descending), False)) continue if '.' in field: # This came in through an extra(order_by=...) addition. 
Pass it # on verbatim. table, col = col.split('.', 1) order_by.append(( OrderBy( RawSQL('%s.%s' % (self.quote_name_unless_alias(table), col), []), descending=descending ), False)) continue if not self.query.extra or col not in self.query.extra: if self.query.combinator and self.select: # Don't use the first model's field because other # combinated queries might define it differently. order_by.append((OrderBy(F(col), descending=descending), False)) else: # 'col' is of the form 'field' or 'field1__field2' or # '-field1__field2__field', etc. order_by.extend(self.find_ordering_name( field, self.query.get_meta(), default_order=asc, )) else: if col not in self.query.extra_select: order_by.append(( OrderBy(RawSQL(*self.query.extra[col]), descending=descending), False)) else: order_by.append(( OrderBy(Ref(col, RawSQL(*self.query.extra[col])), descending=descending), True)) result = [] seen = set() for expr, is_ref in order_by: resolved = expr.resolve_expression(self.query, allow_joins=True, reuse=None) if self.query.combinator and self.select: src = resolved.get_source_expressions()[0] expr_src = expr.get_source_expressions()[0] # Relabel order by columns to raw numbers if this is a combined # query; necessary since the columns can't be referenced by the # fully qualified name and the simple column names may collide. for idx, (sel_expr, _, col_alias) in enumerate(self.select): if is_ref and col_alias == src.refs: src = src.source elif col_alias and not ( isinstance(expr_src, F) and col_alias == expr_src.name ): continue if src == sel_expr: resolved.set_source_expressions([RawSQL('%d' % (idx + 1), ())]) break else: if col_alias: raise DatabaseError('ORDER BY term does not match any column in the result set.') # Add column used in ORDER BY clause to the selected # columns and to each combined query. 
order_by_idx = len(self.query.select) + 1 col_name = f'__orderbycol{order_by_idx}' for q in self.query.combined_queries: q.add_annotation(expr_src, col_name) self.query.add_select_col(resolved, col_name) resolved.set_source_expressions([RawSQL(f'{order_by_idx}', ())]) sql, params = self.compile(resolved) # Don't add the same column twice, but the order direction is # not taken into account so we strip it. When this entire method # is refactored into expressions, then we can check each part as we # generate it. without_ordering = self.ordering_parts.search(sql)[1] params_hash = make_hashable(params) if (without_ordering, params_hash) in seen: continue seen.add((without_ordering, params_hash)) result.append((resolved, (sql, params, is_ref))) return result
[ "def", "get_order_by", "(", "self", ")", ":", "if", "self", ".", "query", ".", "extra_order_by", ":", "ordering", "=", "self", ".", "query", ".", "extra_order_by", "elif", "not", "self", ".", "query", ".", "default_ordering", ":", "ordering", "=", "self", ".", "query", ".", "order_by", "elif", "self", ".", "query", ".", "order_by", ":", "ordering", "=", "self", ".", "query", ".", "order_by", "elif", "self", ".", "query", ".", "get_meta", "(", ")", ".", "ordering", ":", "ordering", "=", "self", ".", "query", ".", "get_meta", "(", ")", ".", "ordering", "self", ".", "_meta_ordering", "=", "ordering", "else", ":", "ordering", "=", "[", "]", "if", "self", ".", "query", ".", "standard_ordering", ":", "asc", ",", "desc", "=", "ORDER_DIR", "[", "'ASC'", "]", "else", ":", "asc", ",", "desc", "=", "ORDER_DIR", "[", "'DESC'", "]", "order_by", "=", "[", "]", "for", "field", "in", "ordering", ":", "if", "hasattr", "(", "field", ",", "'resolve_expression'", ")", ":", "if", "isinstance", "(", "field", ",", "Value", ")", ":", "# output_field must be resolved for constants.", "field", "=", "Cast", "(", "field", ",", "field", ".", "output_field", ")", "if", "not", "isinstance", "(", "field", ",", "OrderBy", ")", ":", "field", "=", "field", ".", "asc", "(", ")", "if", "not", "self", ".", "query", ".", "standard_ordering", ":", "field", "=", "field", ".", "copy", "(", ")", "field", ".", "reverse_ordering", "(", ")", "order_by", ".", "append", "(", "(", "field", ",", "False", ")", ")", "continue", "if", "field", "==", "'?'", ":", "# random", "order_by", ".", "append", "(", "(", "OrderBy", "(", "Random", "(", ")", ")", ",", "False", ")", ")", "continue", "col", ",", "order", "=", "get_order_dir", "(", "field", ",", "asc", ")", "descending", "=", "order", "==", "'DESC'", "if", "col", "in", "self", ".", "query", ".", "annotation_select", ":", "# Reference to expression in SELECT clause", "order_by", ".", "append", "(", "(", "OrderBy", "(", "Ref", "(", "col", ",", "self", ".", "query", 
".", "annotation_select", "[", "col", "]", ")", ",", "descending", "=", "descending", ")", ",", "True", ")", ")", "continue", "if", "col", "in", "self", ".", "query", ".", "annotations", ":", "# References to an expression which is masked out of the SELECT", "# clause.", "if", "self", ".", "query", ".", "combinator", "and", "self", ".", "select", ":", "# Don't use the resolved annotation because other", "# combinated queries might define it differently.", "expr", "=", "F", "(", "col", ")", "else", ":", "expr", "=", "self", ".", "query", ".", "annotations", "[", "col", "]", "if", "isinstance", "(", "expr", ",", "Value", ")", ":", "# output_field must be resolved for constants.", "expr", "=", "Cast", "(", "expr", ",", "expr", ".", "output_field", ")", "order_by", ".", "append", "(", "(", "OrderBy", "(", "expr", ",", "descending", "=", "descending", ")", ",", "False", ")", ")", "continue", "if", "'.'", "in", "field", ":", "# This came in through an extra(order_by=...) addition. Pass it", "# on verbatim.", "table", ",", "col", "=", "col", ".", "split", "(", "'.'", ",", "1", ")", "order_by", ".", "append", "(", "(", "OrderBy", "(", "RawSQL", "(", "'%s.%s'", "%", "(", "self", ".", "quote_name_unless_alias", "(", "table", ")", ",", "col", ")", ",", "[", "]", ")", ",", "descending", "=", "descending", ")", ",", "False", ")", ")", "continue", "if", "not", "self", ".", "query", ".", "extra", "or", "col", "not", "in", "self", ".", "query", ".", "extra", ":", "if", "self", ".", "query", ".", "combinator", "and", "self", ".", "select", ":", "# Don't use the first model's field because other", "# combinated queries might define it differently.", "order_by", ".", "append", "(", "(", "OrderBy", "(", "F", "(", "col", ")", ",", "descending", "=", "descending", ")", ",", "False", ")", ")", "else", ":", "# 'col' is of the form 'field' or 'field1__field2' or", "# '-field1__field2__field', etc.", "order_by", ".", "extend", "(", "self", ".", "find_ordering_name", "(", "field", ",", 
"self", ".", "query", ".", "get_meta", "(", ")", ",", "default_order", "=", "asc", ",", ")", ")", "else", ":", "if", "col", "not", "in", "self", ".", "query", ".", "extra_select", ":", "order_by", ".", "append", "(", "(", "OrderBy", "(", "RawSQL", "(", "*", "self", ".", "query", ".", "extra", "[", "col", "]", ")", ",", "descending", "=", "descending", ")", ",", "False", ")", ")", "else", ":", "order_by", ".", "append", "(", "(", "OrderBy", "(", "Ref", "(", "col", ",", "RawSQL", "(", "*", "self", ".", "query", ".", "extra", "[", "col", "]", ")", ")", ",", "descending", "=", "descending", ")", ",", "True", ")", ")", "result", "=", "[", "]", "seen", "=", "set", "(", ")", "for", "expr", ",", "is_ref", "in", "order_by", ":", "resolved", "=", "expr", ".", "resolve_expression", "(", "self", ".", "query", ",", "allow_joins", "=", "True", ",", "reuse", "=", "None", ")", "if", "self", ".", "query", ".", "combinator", "and", "self", ".", "select", ":", "src", "=", "resolved", ".", "get_source_expressions", "(", ")", "[", "0", "]", "expr_src", "=", "expr", ".", "get_source_expressions", "(", ")", "[", "0", "]", "# Relabel order by columns to raw numbers if this is a combined", "# query; necessary since the columns can't be referenced by the", "# fully qualified name and the simple column names may collide.", "for", "idx", ",", "(", "sel_expr", ",", "_", ",", "col_alias", ")", "in", "enumerate", "(", "self", ".", "select", ")", ":", "if", "is_ref", "and", "col_alias", "==", "src", ".", "refs", ":", "src", "=", "src", ".", "source", "elif", "col_alias", "and", "not", "(", "isinstance", "(", "expr_src", ",", "F", ")", "and", "col_alias", "==", "expr_src", ".", "name", ")", ":", "continue", "if", "src", "==", "sel_expr", ":", "resolved", ".", "set_source_expressions", "(", "[", "RawSQL", "(", "'%d'", "%", "(", "idx", "+", "1", ")", ",", "(", ")", ")", "]", ")", "break", "else", ":", "if", "col_alias", ":", "raise", "DatabaseError", "(", "'ORDER BY term does not match any column in 
the result set.'", ")", "# Add column used in ORDER BY clause to the selected", "# columns and to each combined query.", "order_by_idx", "=", "len", "(", "self", ".", "query", ".", "select", ")", "+", "1", "col_name", "=", "f'__orderbycol{order_by_idx}'", "for", "q", "in", "self", ".", "query", ".", "combined_queries", ":", "q", ".", "add_annotation", "(", "expr_src", ",", "col_name", ")", "self", ".", "query", ".", "add_select_col", "(", "resolved", ",", "col_name", ")", "resolved", ".", "set_source_expressions", "(", "[", "RawSQL", "(", "f'{order_by_idx}'", ",", "(", ")", ")", "]", ")", "sql", ",", "params", "=", "self", ".", "compile", "(", "resolved", ")", "# Don't add the same column twice, but the order direction is", "# not taken into account so we strip it. When this entire method", "# is refactored into expressions, then we can check each part as we", "# generate it.", "without_ordering", "=", "self", ".", "ordering_parts", ".", "search", "(", "sql", ")", "[", "1", "]", "params_hash", "=", "make_hashable", "(", "params", ")", "if", "(", "without_ordering", ",", "params_hash", ")", "in", "seen", ":", "continue", "seen", ".", "add", "(", "(", "without_ordering", ",", "params_hash", ")", ")", "result", ".", "append", "(", "(", "resolved", ",", "(", "sql", ",", "params", ",", "is_ref", ")", ")", ")", "return", "result" ]
[ 270, 4 ]
[ 410, 21 ]
python
en
['en', 'error', 'th']
False
quote_name_unless_alias
(self, name)
A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL).
A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL).
def quote_name_unless_alias(self, name): """ A wrapper around connection.ops.quote_name that doesn't quote aliases for table names. This avoids problems with some SQL dialects that treat quoted strings specially (e.g. PostgreSQL). """ if name in self.quote_cache: return self.quote_cache[name] if ((name in self.query.alias_map and name not in self.query.table_map) or name in self.query.extra_select or ( self.query.external_aliases.get(name) and name not in self.query.table_map)): self.quote_cache[name] = name return name r = self.connection.ops.quote_name(name) self.quote_cache[name] = r return r
[ "def", "quote_name_unless_alias", "(", "self", ",", "name", ")", ":", "if", "name", "in", "self", ".", "quote_cache", ":", "return", "self", ".", "quote_cache", "[", "name", "]", "if", "(", "(", "name", "in", "self", ".", "query", ".", "alias_map", "and", "name", "not", "in", "self", ".", "query", ".", "table_map", ")", "or", "name", "in", "self", ".", "query", ".", "extra_select", "or", "(", "self", ".", "query", ".", "external_aliases", ".", "get", "(", "name", ")", "and", "name", "not", "in", "self", ".", "query", ".", "table_map", ")", ")", ":", "self", ".", "quote_cache", "[", "name", "]", "=", "name", "return", "name", "r", "=", "self", ".", "connection", ".", "ops", ".", "quote_name", "(", "name", ")", "self", ".", "quote_cache", "[", "name", "]", "=", "r", "return", "r" ]
[ 422, 4 ]
[ 437, 16 ]
python
en
['en', 'error', 'th']
False
pixelwise_weighted_loss
(original_loss_func, y_true_channels=None, weight_channels=None, sum_channels=True, reshape_axis=None)
This function implements pixel-wise weighted loss if y_true has 2n channels, weight maps are the Weight map are [n, 2n) channels; otherwise y_true channels and weight channels can be specified as lists of length 2
This function implements pixel-wise weighted loss if y_true has 2n channels, weight maps are the Weight map are [n, 2n) channels; otherwise y_true channels and weight channels can be specified as lists of length 2
def pixelwise_weighted_loss(original_loss_func, y_true_channels=None, weight_channels=None, sum_channels=True, reshape_axis=None): ''' This function implements pixel-wise weighted loss if y_true has 2n channels, weight maps are the Weight map are [n, 2n) channels; otherwise y_true channels and weight channels can be specified as lists of length 2 ''' #@tf.function if y_true_channels is None: def loss_func(y_true, y_pred): y_true, weightMap = tf.split(y_true, 2, axis=-1) loss = original_loss_func(y_true, y_pred) if reshape_axis is not None: loss = K.reshape(loss, reshape_axis) loss = loss * weightMap if sum_channels: return K.sum(loss, -1) else: return loss else: def loss_func(y_true, y_pred): weightMap = y_true[...,weight_channels[0]:weight_channels[1]] y_true = y_true[...,y_true_channels[0]:y_true_channels[1]] loss = original_loss_func(y_true, y_pred) if reshape_axis is not None: loss = K.reshape(loss, reshape_axis) loss = loss * weightMap if sum_channels: return K.sum(loss, -1) else: return loss return loss_func
[ "def", "pixelwise_weighted_loss", "(", "original_loss_func", ",", "y_true_channels", "=", "None", ",", "weight_channels", "=", "None", ",", "sum_channels", "=", "True", ",", "reshape_axis", "=", "None", ")", ":", "#@tf.function", "if", "y_true_channels", "is", "None", ":", "def", "loss_func", "(", "y_true", ",", "y_pred", ")", ":", "y_true", ",", "weightMap", "=", "tf", ".", "split", "(", "y_true", ",", "2", ",", "axis", "=", "-", "1", ")", "loss", "=", "original_loss_func", "(", "y_true", ",", "y_pred", ")", "if", "reshape_axis", "is", "not", "None", ":", "loss", "=", "K", ".", "reshape", "(", "loss", ",", "reshape_axis", ")", "loss", "=", "loss", "*", "weightMap", "if", "sum_channels", ":", "return", "K", ".", "sum", "(", "loss", ",", "-", "1", ")", "else", ":", "return", "loss", "else", ":", "def", "loss_func", "(", "y_true", ",", "y_pred", ")", ":", "weightMap", "=", "y_true", "[", "...", ",", "weight_channels", "[", "0", "]", ":", "weight_channels", "[", "1", "]", "]", "y_true", "=", "y_true", "[", "...", ",", "y_true_channels", "[", "0", "]", ":", "y_true_channels", "[", "1", "]", "]", "loss", "=", "original_loss_func", "(", "y_true", ",", "y_pred", ")", "if", "reshape_axis", "is", "not", "None", ":", "loss", "=", "K", ".", "reshape", "(", "loss", ",", "reshape_axis", ")", "loss", "=", "loss", "*", "weightMap", "if", "sum_channels", ":", "return", "K", ".", "sum", "(", "loss", ",", "-", "1", ")", "else", ":", "return", "loss", "return", "loss_func" ]
[ 101, 0 ]
[ 130, 20 ]
python
en
['en', 'error', 'th']
False
soft_generalized_dice_loss
(batch_mean = False, square_weights = True, sparse = True, exclude_background=False, spatial_dim_axes=[1, 2])
Generalised Dice Loss function defined in Sudre, C. et. al. (2017) Generalised Dice overlap as a deep learning loss function for highly unbalanced segmentations. DLMIA 2017 TF1x implementation : https://github.com/NifTK/NiftyNet/blob/dev/niftynet/layer/loss_segmentation.py Assumes class axis = -1 Parameters ---------- batch_mean : type if true loss is computed for the whole batch else for each image of the batch square_weights : bool if true class weight is inverse squared volume of the class else inverse volume sparse : boolean wheter y_true is sparse Returns ------- type loss function
Generalised Dice Loss function defined in Sudre, C. et. al. (2017) Generalised Dice overlap as a deep learning loss function for highly unbalanced segmentations. DLMIA 2017
def soft_generalized_dice_loss(batch_mean = False, square_weights = True, sparse = True, exclude_background=False, spatial_dim_axes=[1, 2]): """ Generalised Dice Loss function defined in Sudre, C. et. al. (2017) Generalised Dice overlap as a deep learning loss function for highly unbalanced segmentations. DLMIA 2017 TF1x implementation : https://github.com/NifTK/NiftyNet/blob/dev/niftynet/layer/loss_segmentation.py Assumes class axis = -1 Parameters ---------- batch_mean : type if true loss is computed for the whole batch else for each image of the batch square_weights : bool if true class weight is inverse squared volume of the class else inverse volume sparse : boolean wheter y_true is sparse Returns ------- type loss function """ def loss_fun(y_true, y_pred): if sparse: y_true = K.cast(y_true[...,0], "int32") y_true = K.one_hot(y_true, K.shape(y_pred)[-1]) if exclude_background: y_true = y_true[...,1:] y_pred = y_pred[...,1:] inter = K.sum(y_true * y_pred, spatial_dim_axes) tv = K.sum(y_true, spatial_dim_axes) pv = K.sum(y_true, spatial_dim_axes) if batch_mean: tv = K.mean(tv, 0, keepdims=True) pv = K.mean(pv, 0, keepdims=True) if square_weights: w = 1. / K.square(tv) else: w = 1. / tv w = tf.where(tf.math.is_inf(w), K.ones_like(w), w) # regularize 0 div by zero return 1 - (K.sum(w * inter, -1) + 1e-10) / (K.sum(w * 0.5 * (tv + pv), -1) + 1e-10) # sum for each class return loss_fun
[ "def", "soft_generalized_dice_loss", "(", "batch_mean", "=", "False", ",", "square_weights", "=", "True", ",", "sparse", "=", "True", ",", "exclude_background", "=", "False", ",", "spatial_dim_axes", "=", "[", "1", ",", "2", "]", ")", ":", "def", "loss_fun", "(", "y_true", ",", "y_pred", ")", ":", "if", "sparse", ":", "y_true", "=", "K", ".", "cast", "(", "y_true", "[", "...", ",", "0", "]", ",", "\"int32\"", ")", "y_true", "=", "K", ".", "one_hot", "(", "y_true", ",", "K", ".", "shape", "(", "y_pred", ")", "[", "-", "1", "]", ")", "if", "exclude_background", ":", "y_true", "=", "y_true", "[", "...", ",", "1", ":", "]", "y_pred", "=", "y_pred", "[", "...", ",", "1", ":", "]", "inter", "=", "K", ".", "sum", "(", "y_true", "*", "y_pred", ",", "spatial_dim_axes", ")", "tv", "=", "K", ".", "sum", "(", "y_true", ",", "spatial_dim_axes", ")", "pv", "=", "K", ".", "sum", "(", "y_true", ",", "spatial_dim_axes", ")", "if", "batch_mean", ":", "tv", "=", "K", ".", "mean", "(", "tv", ",", "0", ",", "keepdims", "=", "True", ")", "pv", "=", "K", ".", "mean", "(", "pv", ",", "0", ",", "keepdims", "=", "True", ")", "if", "square_weights", ":", "w", "=", "1.", "/", "K", ".", "square", "(", "tv", ")", "else", ":", "w", "=", "1.", "/", "tv", "w", "=", "tf", ".", "where", "(", "tf", ".", "math", ".", "is_inf", "(", "w", ")", ",", "K", ".", "ones_like", "(", "w", ")", ",", "w", ")", "# regularize 0 div by zero", "return", "1", "-", "(", "K", ".", "sum", "(", "w", "*", "inter", ",", "-", "1", ")", "+", "1e-10", ")", "/", "(", "K", ".", "sum", "(", "w", "*", "0.5", "*", "(", "tv", "+", "pv", ")", ",", "-", "1", ")", "+", "1e-10", ")", "# sum for each class", "return", "loss_fun" ]
[ 187, 0 ]
[ 229, 19 ]
python
en
['en', 'en', 'en']
True
binary_tversky_loss
(alpha=0.3, beta=0.7, smooth=1e-7, batch_mean=False)
Return the Tversky loss for imbalanced data Sadegh et al. (2017) Tversky loss function for image segmentation using 3D fully convolutional deep networks Parameters ---------- alpha : type weight of false positives (penalize false positives) beta : type weight of false negatives (penalize false negatives) smooth : type Description of parameter `smooth`. batch_mean : type sum over batch dimension Returns ------- type loss function
Return the Tversky loss for imbalanced data Sadegh et al. (2017) Tversky loss function for image segmentation using 3D fully convolutional deep networks Parameters ---------- alpha : type weight of false positives (penalize false positives) beta : type weight of false negatives (penalize false negatives) smooth : type Description of parameter `smooth`. batch_mean : type sum over batch dimension
def binary_tversky_loss(alpha=0.3, beta=0.7, smooth=1e-7, batch_mean=False): """Return the Tversky loss for imbalanced data Sadegh et al. (2017) Tversky loss function for image segmentation using 3D fully convolutional deep networks Parameters ---------- alpha : type weight of false positives (penalize false positives) beta : type weight of false negatives (penalize false negatives) smooth : type Description of parameter `smooth`. batch_mean : type sum over batch dimension Returns ------- type loss function """ def loss_fun(y_true, y_pred): batchSize = K.shape(y_true)[0] t = K.reshape(y_true, shape=(batchSize, -1)) p = K.reshape(y_pred, shape=(batchSize, -1)) tp = K.sum(t * p, -1) fp = K.sum((1 - t) * p, -1) fn = K.sum(t * (1 - p), -1) if batch_mean: fp = K.mean(fp, 0, keepdims=True) fn = K.mean(fn, 0, keepdims=True) tpm = K.mean(tp, 0, keepdims=True) else: tpm = tp return 1 - (tp + smooth) / (tpm + alpha * fp + beta * fn + smooth) return loss_fun
[ "def", "binary_tversky_loss", "(", "alpha", "=", "0.3", ",", "beta", "=", "0.7", ",", "smooth", "=", "1e-7", ",", "batch_mean", "=", "False", ")", ":", "def", "loss_fun", "(", "y_true", ",", "y_pred", ")", ":", "batchSize", "=", "K", ".", "shape", "(", "y_true", ")", "[", "0", "]", "t", "=", "K", ".", "reshape", "(", "y_true", ",", "shape", "=", "(", "batchSize", ",", "-", "1", ")", ")", "p", "=", "K", ".", "reshape", "(", "y_pred", ",", "shape", "=", "(", "batchSize", ",", "-", "1", ")", ")", "tp", "=", "K", ".", "sum", "(", "t", "*", "p", ",", "-", "1", ")", "fp", "=", "K", ".", "sum", "(", "(", "1", "-", "t", ")", "*", "p", ",", "-", "1", ")", "fn", "=", "K", ".", "sum", "(", "t", "*", "(", "1", "-", "p", ")", ",", "-", "1", ")", "if", "batch_mean", ":", "fp", "=", "K", ".", "mean", "(", "fp", ",", "0", ",", "keepdims", "=", "True", ")", "fn", "=", "K", ".", "mean", "(", "fn", ",", "0", ",", "keepdims", "=", "True", ")", "tpm", "=", "K", ".", "mean", "(", "tp", ",", "0", ",", "keepdims", "=", "True", ")", "else", ":", "tpm", "=", "tp", "return", "1", "-", "(", "tp", "+", "smooth", ")", "/", "(", "tpm", "+", "alpha", "*", "fp", "+", "beta", "*", "fn", "+", "smooth", ")", "return", "loss_fun" ]
[ 231, 0 ]
[ 267, 19 ]
python
en
['en', 'ceb', 'en']
True
boundary_regional_loss
(alpha, regional_loss, mul_coeff=1, y_true_channels=None, levelset_channels=None)
Mixed boundary loss with regional loss function as in https://arxiv.org/abs/1812.07032 Parameters ---------- alpha : type number / Keras variable in range [0,1] importance given to regional loss over boundary loss regional_loss : function. returns a tensor with shape (batch_size, ) mul_coeff : multiplicative coefficient applied to the boundary loss to balance with regional loss Returns ------- type loss function that inputs: y_true : type ground truth tensor, concatenated with level sel (distance map from bounds, negative inside and positive outside) y_pred : type predicted tensor PyTorch implementation : https://github.com/LIVIAETS/surface-loss/blob/master/losses.py
Mixed boundary loss with regional loss function as in https://arxiv.org/abs/1812.07032
def boundary_regional_loss(alpha, regional_loss, mul_coeff=1, y_true_channels=None, levelset_channels=None): """Mixed boundary loss with regional loss function as in https://arxiv.org/abs/1812.07032 Parameters ---------- alpha : type number / Keras variable in range [0,1] importance given to regional loss over boundary loss regional_loss : function. returns a tensor with shape (batch_size, ) mul_coeff : multiplicative coefficient applied to the boundary loss to balance with regional loss Returns ------- type loss function that inputs: y_true : type ground truth tensor, concatenated with level sel (distance map from bounds, negative inside and positive outside) y_pred : type predicted tensor PyTorch implementation : https://github.com/LIVIAETS/surface-loss/blob/master/losses.py """ def loss_fun(y_true, y_pred): if y_true_channels is None: channels = K.shape(y_true)[-1] mid = channels // 2 levelset = y_true[...,mid:] y_true = y_true[...,0:mid] else: levelset = y_true[...,levelset_channels[0]:levelset_channels[1]] y_true = y_true[...,y_true_channels[0]:y_true_channels[1]] rl = regional_loss(y_true, y_pred) bl = K.sum(levelset * y_pred, [1, 2, 3]) if mul_coeff!=1: bl = bl * mul_coeff return alpha * rl + (1 - alpha) * bl return loss_fun
[ "def", "boundary_regional_loss", "(", "alpha", ",", "regional_loss", ",", "mul_coeff", "=", "1", ",", "y_true_channels", "=", "None", ",", "levelset_channels", "=", "None", ")", ":", "def", "loss_fun", "(", "y_true", ",", "y_pred", ")", ":", "if", "y_true_channels", "is", "None", ":", "channels", "=", "K", ".", "shape", "(", "y_true", ")", "[", "-", "1", "]", "mid", "=", "channels", "//", "2", "levelset", "=", "y_true", "[", "...", ",", "mid", ":", "]", "y_true", "=", "y_true", "[", "...", ",", "0", ":", "mid", "]", "else", ":", "levelset", "=", "y_true", "[", "...", ",", "levelset_channels", "[", "0", "]", ":", "levelset_channels", "[", "1", "]", "]", "y_true", "=", "y_true", "[", "...", ",", "y_true_channels", "[", "0", "]", ":", "y_true_channels", "[", "1", "]", "]", "rl", "=", "regional_loss", "(", "y_true", ",", "y_pred", ")", "bl", "=", "K", ".", "sum", "(", "levelset", "*", "y_pred", ",", "[", "1", ",", "2", ",", "3", "]", ")", "if", "mul_coeff", "!=", "1", ":", "bl", "=", "bl", "*", "mul_coeff", "return", "alpha", "*", "rl", "+", "(", "1", "-", "alpha", ")", "*", "bl", "return", "loss_fun" ]
[ 269, 0 ]
[ 303, 19 ]
python
en
['en', 'en', 'en']
True
Apps.populate
(self, installed_apps=None)
Load application configurations and models. Import each application module and then each model module. It is thread-safe and idempotent, but not reentrant.
Load application configurations and models.
def populate(self, installed_apps=None): """ Load application configurations and models. Import each application module and then each model module. It is thread-safe and idempotent, but not reentrant. """ if self.ready: return # populate() might be called by two threads in parallel on servers # that create threads before initializing the WSGI callable. with self._lock: if self.ready: return # An RLock prevents other threads from entering this section. The # compare and set operation below is atomic. if self.loading: # Prevent reentrant calls to avoid running AppConfig.ready() # methods twice. raise RuntimeError("populate() isn't reentrant") self.loading = True # Phase 1: initialize app configs and import app modules. for entry in installed_apps: if isinstance(entry, AppConfig): app_config = entry else: app_config = AppConfig.create(entry) if app_config.label in self.app_configs: raise ImproperlyConfigured( "Application labels aren't unique, " "duplicates: %s" % app_config.label) self.app_configs[app_config.label] = app_config app_config.apps = self # Check for duplicate app names. counts = Counter( app_config.name for app_config in self.app_configs.values()) duplicates = [ name for name, count in counts.most_common() if count > 1] if duplicates: raise ImproperlyConfigured( "Application names aren't unique, " "duplicates: %s" % ", ".join(duplicates)) self.apps_ready = True # Phase 2: import models modules. for app_config in self.app_configs.values(): app_config.import_models() self.clear_cache() self.models_ready = True # Phase 3: run ready() methods of app configs. for app_config in self.get_app_configs(): app_config.ready() self.ready = True self.ready_event.set()
[ "def", "populate", "(", "self", ",", "installed_apps", "=", "None", ")", ":", "if", "self", ".", "ready", ":", "return", "# populate() might be called by two threads in parallel on servers", "# that create threads before initializing the WSGI callable.", "with", "self", ".", "_lock", ":", "if", "self", ".", "ready", ":", "return", "# An RLock prevents other threads from entering this section. The", "# compare and set operation below is atomic.", "if", "self", ".", "loading", ":", "# Prevent reentrant calls to avoid running AppConfig.ready()", "# methods twice.", "raise", "RuntimeError", "(", "\"populate() isn't reentrant\"", ")", "self", ".", "loading", "=", "True", "# Phase 1: initialize app configs and import app modules.", "for", "entry", "in", "installed_apps", ":", "if", "isinstance", "(", "entry", ",", "AppConfig", ")", ":", "app_config", "=", "entry", "else", ":", "app_config", "=", "AppConfig", ".", "create", "(", "entry", ")", "if", "app_config", ".", "label", "in", "self", ".", "app_configs", ":", "raise", "ImproperlyConfigured", "(", "\"Application labels aren't unique, \"", "\"duplicates: %s\"", "%", "app_config", ".", "label", ")", "self", ".", "app_configs", "[", "app_config", ".", "label", "]", "=", "app_config", "app_config", ".", "apps", "=", "self", "# Check for duplicate app names.", "counts", "=", "Counter", "(", "app_config", ".", "name", "for", "app_config", "in", "self", ".", "app_configs", ".", "values", "(", ")", ")", "duplicates", "=", "[", "name", "for", "name", ",", "count", "in", "counts", ".", "most_common", "(", ")", "if", "count", ">", "1", "]", "if", "duplicates", ":", "raise", "ImproperlyConfigured", "(", "\"Application names aren't unique, \"", "\"duplicates: %s\"", "%", "\", \"", ".", "join", "(", "duplicates", ")", ")", "self", ".", "apps_ready", "=", "True", "# Phase 2: import models modules.", "for", "app_config", "in", "self", ".", "app_configs", ".", "values", "(", ")", ":", "app_config", ".", "import_models", "(", ")", 
"self", ".", "clear_cache", "(", ")", "self", ".", "models_ready", "=", "True", "# Phase 3: run ready() methods of app configs.", "for", "app_config", "in", "self", ".", "get_app_configs", "(", ")", ":", "app_config", ".", "ready", "(", ")", "self", ".", "ready", "=", "True", "self", ".", "ready_event", ".", "set", "(", ")" ]
[ 60, 4 ]
[ 124, 34 ]
python
en
['en', 'error', 'th']
False
Apps.check_apps_ready
(self)
Raise an exception if all apps haven't been imported yet.
Raise an exception if all apps haven't been imported yet.
def check_apps_ready(self): """Raise an exception if all apps haven't been imported yet.""" if not self.apps_ready: from django.conf import settings # If "not ready" is due to unconfigured settings, accessing # INSTALLED_APPS raises a more helpful ImproperlyConfigured # exception. settings.INSTALLED_APPS raise AppRegistryNotReady("Apps aren't loaded yet.")
[ "def", "check_apps_ready", "(", "self", ")", ":", "if", "not", "self", ".", "apps_ready", ":", "from", "django", ".", "conf", "import", "settings", "# If \"not ready\" is due to unconfigured settings, accessing", "# INSTALLED_APPS raises a more helpful ImproperlyConfigured", "# exception.", "settings", ".", "INSTALLED_APPS", "raise", "AppRegistryNotReady", "(", "\"Apps aren't loaded yet.\"", ")" ]
[ 126, 4 ]
[ 135, 64 ]
python
en
['en', 'en', 'en']
True
Apps.check_models_ready
(self)
Raise an exception if all models haven't been imported yet.
Raise an exception if all models haven't been imported yet.
def check_models_ready(self): """Raise an exception if all models haven't been imported yet.""" if not self.models_ready: raise AppRegistryNotReady("Models aren't loaded yet.")
[ "def", "check_models_ready", "(", "self", ")", ":", "if", "not", "self", ".", "models_ready", ":", "raise", "AppRegistryNotReady", "(", "\"Models aren't loaded yet.\"", ")" ]
[ 137, 4 ]
[ 140, 66 ]
python
en
['en', 'en', 'en']
True
Apps.get_app_configs
(self)
Import applications and return an iterable of app configs.
Import applications and return an iterable of app configs.
def get_app_configs(self): """Import applications and return an iterable of app configs.""" self.check_apps_ready() return self.app_configs.values()
[ "def", "get_app_configs", "(", "self", ")", ":", "self", ".", "check_apps_ready", "(", ")", "return", "self", ".", "app_configs", ".", "values", "(", ")" ]
[ 142, 4 ]
[ 145, 40 ]
python
en
['en', 'en', 'en']
True
Apps.get_app_config
(self, app_label)
Import applications and returns an app config for the given label. Raise LookupError if no application exists with this label.
Import applications and returns an app config for the given label.
def get_app_config(self, app_label): """ Import applications and returns an app config for the given label. Raise LookupError if no application exists with this label. """ self.check_apps_ready() try: return self.app_configs[app_label] except KeyError: message = "No installed app with label '%s'." % app_label for app_config in self.get_app_configs(): if app_config.name == app_label: message += " Did you mean '%s'?" % app_config.label break raise LookupError(message)
[ "def", "get_app_config", "(", "self", ",", "app_label", ")", ":", "self", ".", "check_apps_ready", "(", ")", "try", ":", "return", "self", ".", "app_configs", "[", "app_label", "]", "except", "KeyError", ":", "message", "=", "\"No installed app with label '%s'.\"", "%", "app_label", "for", "app_config", "in", "self", ".", "get_app_configs", "(", ")", ":", "if", "app_config", ".", "name", "==", "app_label", ":", "message", "+=", "\" Did you mean '%s'?\"", "%", "app_config", ".", "label", "break", "raise", "LookupError", "(", "message", ")" ]
[ 147, 4 ]
[ 162, 38 ]
python
en
['en', 'error', 'th']
False
Apps.get_models
(self, include_auto_created=False, include_swapped=False)
Return a list of all installed models. By default, the following models aren't included: - auto-created models for many-to-many relations without an explicit intermediate table, - models that have been swapped out. Set the corresponding keyword argument to True to include such models.
Return a list of all installed models.
def get_models(self, include_auto_created=False, include_swapped=False): """ Return a list of all installed models. By default, the following models aren't included: - auto-created models for many-to-many relations without an explicit intermediate table, - models that have been swapped out. Set the corresponding keyword argument to True to include such models. """ self.check_models_ready() result = [] for app_config in self.app_configs.values(): result.extend(app_config.get_models(include_auto_created, include_swapped)) return result
[ "def", "get_models", "(", "self", ",", "include_auto_created", "=", "False", ",", "include_swapped", "=", "False", ")", ":", "self", ".", "check_models_ready", "(", ")", "result", "=", "[", "]", "for", "app_config", "in", "self", ".", "app_configs", ".", "values", "(", ")", ":", "result", ".", "extend", "(", "app_config", ".", "get_models", "(", "include_auto_created", ",", "include_swapped", ")", ")", "return", "result" ]
[ 166, 4 ]
[ 183, 21 ]
python
en
['en', 'error', 'th']
False
Apps.get_model
(self, app_label, model_name=None, require_ready=True)
Return the model matching the given app_label and model_name. As a shortcut, app_label may be in the form <app_label>.<model_name>. model_name is case-insensitive. Raise LookupError if no application exists with this label, or no model exists with this name in the application. Raise ValueError if called with a single argument that doesn't contain exactly one dot.
Return the model matching the given app_label and model_name.
def get_model(self, app_label, model_name=None, require_ready=True): """ Return the model matching the given app_label and model_name. As a shortcut, app_label may be in the form <app_label>.<model_name>. model_name is case-insensitive. Raise LookupError if no application exists with this label, or no model exists with this name in the application. Raise ValueError if called with a single argument that doesn't contain exactly one dot. """ if require_ready: self.check_models_ready() else: self.check_apps_ready() if model_name is None: app_label, model_name = app_label.split('.') app_config = self.get_app_config(app_label) if not require_ready and app_config.models is None: app_config.import_models() return app_config.get_model(model_name, require_ready=require_ready)
[ "def", "get_model", "(", "self", ",", "app_label", ",", "model_name", "=", "None", ",", "require_ready", "=", "True", ")", ":", "if", "require_ready", ":", "self", ".", "check_models_ready", "(", ")", "else", ":", "self", ".", "check_apps_ready", "(", ")", "if", "model_name", "is", "None", ":", "app_label", ",", "model_name", "=", "app_label", ".", "split", "(", "'.'", ")", "app_config", "=", "self", ".", "get_app_config", "(", "app_label", ")", "if", "not", "require_ready", "and", "app_config", ".", "models", "is", "None", ":", "app_config", ".", "import_models", "(", ")", "return", "app_config", ".", "get_model", "(", "model_name", ",", "require_ready", "=", "require_ready", ")" ]
[ 185, 4 ]
[ 210, 76 ]
python
en
['en', 'error', 'th']
False
Apps.is_installed
(self, app_name)
Check whether an application with this name exists in the registry. app_name is the full name of the app e.g. 'django.contrib.admin'.
Check whether an application with this name exists in the registry.
def is_installed(self, app_name): """ Check whether an application with this name exists in the registry. app_name is the full name of the app e.g. 'django.contrib.admin'. """ self.check_apps_ready() return any(ac.name == app_name for ac in self.app_configs.values())
[ "def", "is_installed", "(", "self", ",", "app_name", ")", ":", "self", ".", "check_apps_ready", "(", ")", "return", "any", "(", "ac", ".", "name", "==", "app_name", "for", "ac", "in", "self", ".", "app_configs", ".", "values", "(", ")", ")" ]
[ 234, 4 ]
[ 241, 75 ]
python
en
['en', 'error', 'th']
False
Apps.get_containing_app_config
(self, object_name)
Look for an app config containing a given object. object_name is the dotted Python path to the object. Return the app config for the inner application in case of nesting. Return None if the object isn't in any registered app config.
Look for an app config containing a given object.
def get_containing_app_config(self, object_name): """ Look for an app config containing a given object. object_name is the dotted Python path to the object. Return the app config for the inner application in case of nesting. Return None if the object isn't in any registered app config. """ self.check_apps_ready() candidates = [] for app_config in self.app_configs.values(): if object_name.startswith(app_config.name): subpath = object_name[len(app_config.name):] if subpath == '' or subpath[0] == '.': candidates.append(app_config) if candidates: return sorted(candidates, key=lambda ac: -len(ac.name))[0]
[ "def", "get_containing_app_config", "(", "self", ",", "object_name", ")", ":", "self", ".", "check_apps_ready", "(", ")", "candidates", "=", "[", "]", "for", "app_config", "in", "self", ".", "app_configs", ".", "values", "(", ")", ":", "if", "object_name", ".", "startswith", "(", "app_config", ".", "name", ")", ":", "subpath", "=", "object_name", "[", "len", "(", "app_config", ".", "name", ")", ":", "]", "if", "subpath", "==", "''", "or", "subpath", "[", "0", "]", "==", "'.'", ":", "candidates", ".", "append", "(", "app_config", ")", "if", "candidates", ":", "return", "sorted", "(", "candidates", ",", "key", "=", "lambda", "ac", ":", "-", "len", "(", "ac", ".", "name", ")", ")", "[", "0", "]" ]
[ 243, 4 ]
[ 260, 70 ]
python
en
['en', 'error', 'th']
False
Apps.get_registered_model
(self, app_label, model_name)
Similar to get_model(), but doesn't require that an app exists with the given app_label. It's safe to call this method at import time, even while the registry is being populated.
Similar to get_model(), but doesn't require that an app exists with the given app_label.
def get_registered_model(self, app_label, model_name): """ Similar to get_model(), but doesn't require that an app exists with the given app_label. It's safe to call this method at import time, even while the registry is being populated. """ model = self.all_models[app_label].get(model_name.lower()) if model is None: raise LookupError( "Model '%s.%s' not registered." % (app_label, model_name)) return model
[ "def", "get_registered_model", "(", "self", ",", "app_label", ",", "model_name", ")", ":", "model", "=", "self", ".", "all_models", "[", "app_label", "]", ".", "get", "(", "model_name", ".", "lower", "(", ")", ")", "if", "model", "is", "None", ":", "raise", "LookupError", "(", "\"Model '%s.%s' not registered.\"", "%", "(", "app_label", ",", "model_name", ")", ")", "return", "model" ]
[ 262, 4 ]
[ 274, 20 ]
python
en
['en', 'error', 'th']
False
Apps.get_swappable_settings_name
(self, to_string)
For a given model string (e.g. "auth.User"), return the name of the corresponding settings name if it refers to a swappable model. If the referred model is not swappable, return None. This method is decorated with lru_cache because it's performance critical when it comes to migrations. Since the swappable settings don't change after Django has loaded the settings, there is no reason to get the respective settings attribute over and over again.
For a given model string (e.g. "auth.User"), return the name of the corresponding settings name if it refers to a swappable model. If the referred model is not swappable, return None.
def get_swappable_settings_name(self, to_string): """ For a given model string (e.g. "auth.User"), return the name of the corresponding settings name if it refers to a swappable model. If the referred model is not swappable, return None. This method is decorated with lru_cache because it's performance critical when it comes to migrations. Since the swappable settings don't change after Django has loaded the settings, there is no reason to get the respective settings attribute over and over again. """ for model in self.get_models(include_swapped=True): swapped = model._meta.swapped # Is this model swapped out for the model given by to_string? if swapped and swapped == to_string: return model._meta.swappable # Is this model swappable and the one given by to_string? if model._meta.swappable and model._meta.label == to_string: return model._meta.swappable return None
[ "def", "get_swappable_settings_name", "(", "self", ",", "to_string", ")", ":", "for", "model", "in", "self", ".", "get_models", "(", "include_swapped", "=", "True", ")", ":", "swapped", "=", "model", ".", "_meta", ".", "swapped", "# Is this model swapped out for the model given by to_string?", "if", "swapped", "and", "swapped", "==", "to_string", ":", "return", "model", ".", "_meta", ".", "swappable", "# Is this model swappable and the one given by to_string?", "if", "model", ".", "_meta", ".", "swappable", "and", "model", ".", "_meta", ".", "label", "==", "to_string", ":", "return", "model", ".", "_meta", ".", "swappable", "return", "None" ]
[ 277, 4 ]
[ 296, 19 ]
python
en
['en', 'error', 'th']
False
Apps.set_available_apps
(self, available)
Restrict the set of installed apps used by get_app_config[s]. available must be an iterable of application names. set_available_apps() must be balanced with unset_available_apps(). Primarily used for performance optimization in TransactionTestCase. This method is safe in the sense that it doesn't trigger any imports.
Restrict the set of installed apps used by get_app_config[s].
def set_available_apps(self, available): """ Restrict the set of installed apps used by get_app_config[s]. available must be an iterable of application names. set_available_apps() must be balanced with unset_available_apps(). Primarily used for performance optimization in TransactionTestCase. This method is safe in the sense that it doesn't trigger any imports. """ available = set(available) installed = {app_config.name for app_config in self.get_app_configs()} if not available.issubset(installed): raise ValueError( "Available apps isn't a subset of installed apps, extra apps: %s" % ", ".join(available - installed) ) self.stored_app_configs.append(self.app_configs) self.app_configs = { label: app_config for label, app_config in self.app_configs.items() if app_config.name in available } self.clear_cache()
[ "def", "set_available_apps", "(", "self", ",", "available", ")", ":", "available", "=", "set", "(", "available", ")", "installed", "=", "{", "app_config", ".", "name", "for", "app_config", "in", "self", ".", "get_app_configs", "(", ")", "}", "if", "not", "available", ".", "issubset", "(", "installed", ")", ":", "raise", "ValueError", "(", "\"Available apps isn't a subset of installed apps, extra apps: %s\"", "%", "\", \"", ".", "join", "(", "available", "-", "installed", ")", ")", "self", ".", "stored_app_configs", ".", "append", "(", "self", ".", "app_configs", ")", "self", ".", "app_configs", "=", "{", "label", ":", "app_config", "for", "label", ",", "app_config", "in", "self", ".", "app_configs", ".", "items", "(", ")", "if", "app_config", ".", "name", "in", "available", "}", "self", ".", "clear_cache", "(", ")" ]
[ 298, 4 ]
[ 324, 26 ]
python
en
['en', 'error', 'th']
False
Apps.unset_available_apps
(self)
Cancel a previous call to set_available_apps().
Cancel a previous call to set_available_apps().
def unset_available_apps(self): """Cancel a previous call to set_available_apps().""" self.app_configs = self.stored_app_configs.pop() self.clear_cache()
[ "def", "unset_available_apps", "(", "self", ")", ":", "self", ".", "app_configs", "=", "self", ".", "stored_app_configs", ".", "pop", "(", ")", "self", ".", "clear_cache", "(", ")" ]
[ 326, 4 ]
[ 329, 26 ]
python
en
['en', 'en', 'en']
True
Apps.set_installed_apps
(self, installed)
Enable a different set of installed apps for get_app_config[s]. installed must be an iterable in the same format as INSTALLED_APPS. set_installed_apps() must be balanced with unset_installed_apps(), even if it exits with an exception. Primarily used as a receiver of the setting_changed signal in tests. This method may trigger new imports, which may add new models to the registry of all imported models. They will stay in the registry even after unset_installed_apps(). Since it isn't possible to replay imports safely (e.g. that could lead to registering listeners twice), models are registered when they're imported and never removed.
Enable a different set of installed apps for get_app_config[s].
def set_installed_apps(self, installed): """ Enable a different set of installed apps for get_app_config[s]. installed must be an iterable in the same format as INSTALLED_APPS. set_installed_apps() must be balanced with unset_installed_apps(), even if it exits with an exception. Primarily used as a receiver of the setting_changed signal in tests. This method may trigger new imports, which may add new models to the registry of all imported models. They will stay in the registry even after unset_installed_apps(). Since it isn't possible to replay imports safely (e.g. that could lead to registering listeners twice), models are registered when they're imported and never removed. """ if not self.ready: raise AppRegistryNotReady("App registry isn't ready yet.") self.stored_app_configs.append(self.app_configs) self.app_configs = {} self.apps_ready = self.models_ready = self.loading = self.ready = False self.clear_cache() self.populate(installed)
[ "def", "set_installed_apps", "(", "self", ",", "installed", ")", ":", "if", "not", "self", ".", "ready", ":", "raise", "AppRegistryNotReady", "(", "\"App registry isn't ready yet.\"", ")", "self", ".", "stored_app_configs", ".", "append", "(", "self", ".", "app_configs", ")", "self", ".", "app_configs", "=", "{", "}", "self", ".", "apps_ready", "=", "self", ".", "models_ready", "=", "self", ".", "loading", "=", "self", ".", "ready", "=", "False", "self", ".", "clear_cache", "(", ")", "self", ".", "populate", "(", "installed", ")" ]
[ 331, 4 ]
[ 354, 32 ]
python
en
['en', 'error', 'th']
False
Apps.unset_installed_apps
(self)
Cancel a previous call to set_installed_apps().
Cancel a previous call to set_installed_apps().
def unset_installed_apps(self): """Cancel a previous call to set_installed_apps().""" self.app_configs = self.stored_app_configs.pop() self.apps_ready = self.models_ready = self.ready = True self.clear_cache()
[ "def", "unset_installed_apps", "(", "self", ")", ":", "self", ".", "app_configs", "=", "self", ".", "stored_app_configs", ".", "pop", "(", ")", "self", ".", "apps_ready", "=", "self", ".", "models_ready", "=", "self", ".", "ready", "=", "True", "self", ".", "clear_cache", "(", ")" ]
[ 356, 4 ]
[ 360, 26 ]
python
en
['en', 'en', 'en']
True
Apps.clear_cache
(self)
Clear all internal caches, for methods that alter the app registry. This is mostly used in tests.
Clear all internal caches, for methods that alter the app registry.
def clear_cache(self): """ Clear all internal caches, for methods that alter the app registry. This is mostly used in tests. """ # Call expire cache on each model. This will purge # the relation tree and the fields cache. self.get_models.cache_clear() if self.ready: # Circumvent self.get_models() to prevent that the cache is refilled. # This particularly prevents that an empty value is cached while cloning. for app_config in self.app_configs.values(): for model in app_config.get_models(include_auto_created=True): model._meta._expire_cache()
[ "def", "clear_cache", "(", "self", ")", ":", "# Call expire cache on each model. This will purge", "# the relation tree and the fields cache.", "self", ".", "get_models", ".", "cache_clear", "(", ")", "if", "self", ".", "ready", ":", "# Circumvent self.get_models() to prevent that the cache is refilled.", "# This particularly prevents that an empty value is cached while cloning.", "for", "app_config", "in", "self", ".", "app_configs", ".", "values", "(", ")", ":", "for", "model", "in", "app_config", ".", "get_models", "(", "include_auto_created", "=", "True", ")", ":", "model", ".", "_meta", ".", "_expire_cache", "(", ")" ]
[ 362, 4 ]
[ 376, 47 ]
python
en
['en', 'error', 'th']
False
Apps.lazy_model_operation
(self, function, *model_keys)
Take a function and a number of ("app_label", "modelname") tuples, and when all the corresponding models have been imported and registered, call the function with the model classes as its arguments. The function passed to this method must accept exactly n models as arguments, where n=len(model_keys).
Take a function and a number of ("app_label", "modelname") tuples, and when all the corresponding models have been imported and registered, call the function with the model classes as its arguments.
def lazy_model_operation(self, function, *model_keys): """ Take a function and a number of ("app_label", "modelname") tuples, and when all the corresponding models have been imported and registered, call the function with the model classes as its arguments. The function passed to this method must accept exactly n models as arguments, where n=len(model_keys). """ # Base case: no arguments, just execute the function. if not model_keys: function() # Recursive case: take the head of model_keys, wait for the # corresponding model class to be imported and registered, then apply # that argument to the supplied function. Pass the resulting partial # to lazy_model_operation() along with the remaining model args and # repeat until all models are loaded and all arguments are applied. else: next_model, *more_models = model_keys # This will be executed after the class corresponding to next_model # has been imported and registered. The `func` attribute provides # duck-type compatibility with partials. def apply_next_model(model): next_function = partial(apply_next_model.func, model) self.lazy_model_operation(next_function, *more_models) apply_next_model.func = function # If the model has already been imported and registered, partially # apply it to the function now. If not, add it to the list of # pending operations for the model, where it will be executed with # the model class as its sole argument once the model is ready. try: model_class = self.get_registered_model(*next_model) except LookupError: self._pending_operations[next_model].append(apply_next_model) else: apply_next_model(model_class)
[ "def", "lazy_model_operation", "(", "self", ",", "function", ",", "*", "model_keys", ")", ":", "# Base case: no arguments, just execute the function.", "if", "not", "model_keys", ":", "function", "(", ")", "# Recursive case: take the head of model_keys, wait for the", "# corresponding model class to be imported and registered, then apply", "# that argument to the supplied function. Pass the resulting partial", "# to lazy_model_operation() along with the remaining model args and", "# repeat until all models are loaded and all arguments are applied.", "else", ":", "next_model", ",", "", "*", "more_models", "=", "model_keys", "# This will be executed after the class corresponding to next_model", "# has been imported and registered. The `func` attribute provides", "# duck-type compatibility with partials.", "def", "apply_next_model", "(", "model", ")", ":", "next_function", "=", "partial", "(", "apply_next_model", ".", "func", ",", "model", ")", "self", ".", "lazy_model_operation", "(", "next_function", ",", "*", "more_models", ")", "apply_next_model", ".", "func", "=", "function", "# If the model has already been imported and registered, partially", "# apply it to the function now. If not, add it to the list of", "# pending operations for the model, where it will be executed with", "# the model class as its sole argument once the model is ready.", "try", ":", "model_class", "=", "self", ".", "get_registered_model", "(", "*", "next_model", ")", "except", "LookupError", ":", "self", ".", "_pending_operations", "[", "next_model", "]", ".", "append", "(", "apply_next_model", ")", "else", ":", "apply_next_model", "(", "model_class", ")" ]
[ 378, 4 ]
[ 415, 45 ]
python
en
['en', 'error', 'th']
False
Apps.do_pending_operations
(self, model)
Take a newly-prepared model and pass it to each function waiting for it. This is called at the very end of Apps.register_model().
Take a newly-prepared model and pass it to each function waiting for it. This is called at the very end of Apps.register_model().
def do_pending_operations(self, model): """ Take a newly-prepared model and pass it to each function waiting for it. This is called at the very end of Apps.register_model(). """ key = model._meta.app_label, model._meta.model_name for function in self._pending_operations.pop(key, []): function(model)
[ "def", "do_pending_operations", "(", "self", ",", "model", ")", ":", "key", "=", "model", ".", "_meta", ".", "app_label", ",", "model", ".", "_meta", ".", "model_name", "for", "function", "in", "self", ".", "_pending_operations", ".", "pop", "(", "key", ",", "[", "]", ")", ":", "function", "(", "model", ")" ]
[ 417, 4 ]
[ 424, 27 ]
python
en
['en', 'error', 'th']
False
staticfiles_urlpatterns
(prefix=None)
Helper function to return a URL pattern for serving static files.
Helper function to return a URL pattern for serving static files.
def staticfiles_urlpatterns(prefix=None): """ Helper function to return a URL pattern for serving static files. """ if prefix is None: prefix = settings.STATIC_URL return static(prefix, view=serve)
[ "def", "staticfiles_urlpatterns", "(", "prefix", "=", "None", ")", ":", "if", "prefix", "is", "None", ":", "prefix", "=", "settings", ".", "STATIC_URL", "return", "static", "(", "prefix", ",", "view", "=", "serve", ")" ]
[ 7, 0 ]
[ 13, 37 ]
python
en
['en', 'error', 'th']
False
bytes2int
(raw_bytes)
r"""Converts a list of bytes or an 8-bit string to an integer. When using unicode strings, encode it to some encoding like UTF8 first. >>> (((128 * 256) + 64) * 256) + 15 8405007 >>> bytes2int(b'\x80@\x0f') 8405007
r"""Converts a list of bytes or an 8-bit string to an integer.
def bytes2int(raw_bytes): r"""Converts a list of bytes or an 8-bit string to an integer. When using unicode strings, encode it to some encoding like UTF8 first. >>> (((128 * 256) + 64) * 256) + 15 8405007 >>> bytes2int(b'\x80@\x0f') 8405007 """ return int(binascii.hexlify(raw_bytes), 16)
[ "def", "bytes2int", "(", "raw_bytes", ")", ":", "return", "int", "(", "binascii", ".", "hexlify", "(", "raw_bytes", ")", ",", "16", ")" ]
[ 30, 0 ]
[ 42, 47 ]
python
en
['en', 'en', 'en']
True
_int2bytes
(number, block_size=None)
r"""Converts a number to a string of bytes. Usage:: >>> _int2bytes(123456789) b'\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789)) 123456789 >>> _int2bytes(123456789, 6) b'\x00\x00\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789, 128)) 123456789 >>> _int2bytes(123456789, 3) Traceback (most recent call last): ... OverflowError: Needed 4 bytes for number, but block size is 3 @param number: the number to convert @param block_size: the number of bytes to output. If the number encoded to bytes is less than this, the block will be zero-padded. When not given, the returned block is not padded. @throws OverflowError when block_size is given and the number takes up more bytes than fit into the block.
r"""Converts a number to a string of bytes.
def _int2bytes(number, block_size=None): r"""Converts a number to a string of bytes. Usage:: >>> _int2bytes(123456789) b'\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789)) 123456789 >>> _int2bytes(123456789, 6) b'\x00\x00\x07[\xcd\x15' >>> bytes2int(_int2bytes(123456789, 128)) 123456789 >>> _int2bytes(123456789, 3) Traceback (most recent call last): ... OverflowError: Needed 4 bytes for number, but block size is 3 @param number: the number to convert @param block_size: the number of bytes to output. If the number encoded to bytes is less than this, the block will be zero-padded. When not given, the returned block is not padded. @throws OverflowError when block_size is given and the number takes up more bytes than fit into the block. """ # Type checking if not is_integer(number): raise TypeError("You must pass an integer for 'number', not %s" % number.__class__) if number < 0: raise ValueError('Negative numbers cannot be used: %i' % number) # Do some bounds checking if number == 0: needed_bytes = 1 raw_bytes = [b'\x00'] else: needed_bytes = common.byte_size(number) raw_bytes = [] # You cannot compare None > 0 in Python 3x. It will fail with a TypeError. if block_size and block_size > 0: if needed_bytes > block_size: raise OverflowError('Needed %i bytes for number, but block size ' 'is %i' % (needed_bytes, block_size)) # Convert the number to bytes. while number > 0: raw_bytes.insert(0, byte(number & 0xFF)) number >>= 8 # Pad with zeroes to fill the block if block_size and block_size > 0: padding = (block_size - needed_bytes) * b'\x00' else: padding = b'' return padding + b''.join(raw_bytes)
[ "def", "_int2bytes", "(", "number", ",", "block_size", "=", "None", ")", ":", "# Type checking", "if", "not", "is_integer", "(", "number", ")", ":", "raise", "TypeError", "(", "\"You must pass an integer for 'number', not %s\"", "%", "number", ".", "__class__", ")", "if", "number", "<", "0", ":", "raise", "ValueError", "(", "'Negative numbers cannot be used: %i'", "%", "number", ")", "# Do some bounds checking", "if", "number", "==", "0", ":", "needed_bytes", "=", "1", "raw_bytes", "=", "[", "b'\\x00'", "]", "else", ":", "needed_bytes", "=", "common", ".", "byte_size", "(", "number", ")", "raw_bytes", "=", "[", "]", "# You cannot compare None > 0 in Python 3x. It will fail with a TypeError.", "if", "block_size", "and", "block_size", ">", "0", ":", "if", "needed_bytes", ">", "block_size", ":", "raise", "OverflowError", "(", "'Needed %i bytes for number, but block size '", "'is %i'", "%", "(", "needed_bytes", ",", "block_size", ")", ")", "# Convert the number to bytes.", "while", "number", ">", "0", ":", "raw_bytes", ".", "insert", "(", "0", ",", "byte", "(", "number", "&", "0xFF", ")", ")", "number", ">>=", "8", "# Pad with zeroes to fill the block", "if", "block_size", "and", "block_size", ">", "0", ":", "padding", "=", "(", "block_size", "-", "needed_bytes", ")", "*", "b'\\x00'", "else", ":", "padding", "=", "b''", "return", "padding", "+", "b''", ".", "join", "(", "raw_bytes", ")" ]
[ 45, 0 ]
[ 107, 40 ]
python
en
['en', 'en', 'en']
True
bytes_leading
(raw_bytes, needle=b'\x00')
Finds the number of prefixed byte occurrences in the haystack. Useful when you want to deal with padding. :param raw_bytes: Raw bytes. :param needle: The byte to count. Default \x00. :returns: The number of leading needle bytes.
Finds the number of prefixed byte occurrences in the haystack.
def bytes_leading(raw_bytes, needle=b'\x00'): """ Finds the number of prefixed byte occurrences in the haystack. Useful when you want to deal with padding. :param raw_bytes: Raw bytes. :param needle: The byte to count. Default \x00. :returns: The number of leading needle bytes. """ leading = 0 # Indexing keeps compatibility between Python 2.x and Python 3.x _byte = needle[0] for x in raw_bytes: if x == _byte: leading += 1 else: break return leading
[ "def", "bytes_leading", "(", "raw_bytes", ",", "needle", "=", "b'\\x00'", ")", ":", "leading", "=", "0", "# Indexing keeps compatibility between Python 2.x and Python 3.x", "_byte", "=", "needle", "[", "0", "]", "for", "x", "in", "raw_bytes", ":", "if", "x", "==", "_byte", ":", "leading", "+=", "1", "else", ":", "break", "return", "leading" ]
[ 110, 0 ]
[ 132, 18 ]
python
en
['en', 'error', 'th']
False
int2bytes
(number, fill_size=None, chunk_size=None, overflow=False)
Convert an unsigned integer to bytes (base-256 representation):: Does not preserve leading zeros if you don't specify a chunk size or fill size. .. NOTE: You must not specify both fill_size and chunk_size. Only one of them is allowed. :param number: Integer value :param fill_size: If the optional fill size is given the length of the resulting byte string is expected to be the fill size and will be padded with prefix zero bytes to satisfy that length. :param chunk_size: If optional chunk size is given and greater than zero, pad the front of the byte string with binary zeros so that the length is a multiple of ``chunk_size``. :param overflow: ``False`` (default). If this is ``True``, no ``OverflowError`` will be raised when the fill_size is shorter than the length of the generated byte sequence. Instead the byte sequence will be returned as is. :returns: Raw bytes (base-256 representation). :raises: ``OverflowError`` when fill_size is given and the number takes up more bytes than fit into the block. This requires the ``overflow`` argument to this function to be set to ``False`` otherwise, no error will be raised.
Convert an unsigned integer to bytes (base-256 representation)::
def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): """ Convert an unsigned integer to bytes (base-256 representation):: Does not preserve leading zeros if you don't specify a chunk size or fill size. .. NOTE: You must not specify both fill_size and chunk_size. Only one of them is allowed. :param number: Integer value :param fill_size: If the optional fill size is given the length of the resulting byte string is expected to be the fill size and will be padded with prefix zero bytes to satisfy that length. :param chunk_size: If optional chunk size is given and greater than zero, pad the front of the byte string with binary zeros so that the length is a multiple of ``chunk_size``. :param overflow: ``False`` (default). If this is ``True``, no ``OverflowError`` will be raised when the fill_size is shorter than the length of the generated byte sequence. Instead the byte sequence will be returned as is. :returns: Raw bytes (base-256 representation). :raises: ``OverflowError`` when fill_size is given and the number takes up more bytes than fit into the block. This requires the ``overflow`` argument to this function to be set to ``False`` otherwise, no error will be raised. """ if number < 0: raise ValueError("Number must be an unsigned integer: %d" % number) if fill_size and chunk_size: raise ValueError("You can either fill or pad chunks, but not both") # Ensure these are integers. number & 1 raw_bytes = b'' # Pack the integer one machine word at a time into bytes. num = number word_bits, _, max_uint, pack_type = machine_size.get_word_alignment(num) pack_format = ">%s" % pack_type while num > 0: raw_bytes = pack(pack_format, num & max_uint) + raw_bytes num >>= word_bits # Obtain the index of the first non-zero byte. zero_leading = bytes_leading(raw_bytes) if number == 0: raw_bytes = b'\x00' # De-padding. 
raw_bytes = raw_bytes[zero_leading:] length = len(raw_bytes) if fill_size and fill_size > 0: if not overflow and length > fill_size: raise OverflowError( "Need %d bytes for number, but fill size is %d" % (length, fill_size) ) raw_bytes = raw_bytes.rjust(fill_size, b'\x00') elif chunk_size and chunk_size > 0: remainder = length % chunk_size if remainder: padding_size = chunk_size - remainder raw_bytes = raw_bytes.rjust(length + padding_size, b'\x00') return raw_bytes
[ "def", "int2bytes", "(", "number", ",", "fill_size", "=", "None", ",", "chunk_size", "=", "None", ",", "overflow", "=", "False", ")", ":", "if", "number", "<", "0", ":", "raise", "ValueError", "(", "\"Number must be an unsigned integer: %d\"", "%", "number", ")", "if", "fill_size", "and", "chunk_size", ":", "raise", "ValueError", "(", "\"You can either fill or pad chunks, but not both\"", ")", "# Ensure these are integers.", "number", "&", "1", "raw_bytes", "=", "b''", "# Pack the integer one machine word at a time into bytes.", "num", "=", "number", "word_bits", ",", "_", ",", "max_uint", ",", "pack_type", "=", "machine_size", ".", "get_word_alignment", "(", "num", ")", "pack_format", "=", "\">%s\"", "%", "pack_type", "while", "num", ">", "0", ":", "raw_bytes", "=", "pack", "(", "pack_format", ",", "num", "&", "max_uint", ")", "+", "raw_bytes", "num", ">>=", "word_bits", "# Obtain the index of the first non-zero byte.", "zero_leading", "=", "bytes_leading", "(", "raw_bytes", ")", "if", "number", "==", "0", ":", "raw_bytes", "=", "b'\\x00'", "# De-padding.", "raw_bytes", "=", "raw_bytes", "[", "zero_leading", ":", "]", "length", "=", "len", "(", "raw_bytes", ")", "if", "fill_size", "and", "fill_size", ">", "0", ":", "if", "not", "overflow", "and", "length", ">", "fill_size", ":", "raise", "OverflowError", "(", "\"Need %d bytes for number, but fill size is %d\"", "%", "(", "length", ",", "fill_size", ")", ")", "raw_bytes", "=", "raw_bytes", ".", "rjust", "(", "fill_size", ",", "b'\\x00'", ")", "elif", "chunk_size", "and", "chunk_size", ">", "0", ":", "remainder", "=", "length", "%", "chunk_size", "if", "remainder", ":", "padding_size", "=", "chunk_size", "-", "remainder", "raw_bytes", "=", "raw_bytes", ".", "rjust", "(", "length", "+", "padding_size", ",", "b'\\x00'", ")", "return", "raw_bytes" ]
[ 135, 0 ]
[ 208, 20 ]
python
en
['en', 'error', 'th']
False
getimage
(photo)
Copies the contents of a PhotoImage to a PIL image memory.
Copies the contents of a PhotoImage to a PIL image memory.
def getimage(photo): """Copies the contents of a PhotoImage to a PIL image memory.""" im = Image.new("RGBA", (photo.width(), photo.height())) block = im.im photo.tk.call("PyImagingPhotoGet", photo, block.id) return im
[ "def", "getimage", "(", "photo", ")", ":", "im", "=", "Image", ".", "new", "(", "\"RGBA\"", ",", "(", "photo", ".", "width", "(", ")", ",", "photo", ".", "height", "(", ")", ")", ")", "block", "=", "im", ".", "im", "photo", ".", "tk", ".", "call", "(", "\"PyImagingPhotoGet\"", ",", "photo", ",", "block", ".", "id", ")", "return", "im" ]
[ 273, 0 ]
[ 280, 13 ]
python
en
['en', 'en', 'en']
True
_show
(image, title)
Helper for the Image.show method.
Helper for the Image.show method.
def _show(image, title): """Helper for the Image.show method.""" class UI(tkinter.Label): def __init__(self, master, im): if im.mode == "1": self.image = BitmapImage(im, foreground="white", master=master) else: self.image = PhotoImage(im, master=master) super().__init__(master, image=self.image, bg="black", bd=0) if not tkinter._default_root: raise OSError("tkinter not initialized") top = tkinter.Toplevel() if title: top.title(title) UI(top, image).pack()
[ "def", "_show", "(", "image", ",", "title", ")", ":", "class", "UI", "(", "tkinter", ".", "Label", ")", ":", "def", "__init__", "(", "self", ",", "master", ",", "im", ")", ":", "if", "im", ".", "mode", "==", "\"1\"", ":", "self", ".", "image", "=", "BitmapImage", "(", "im", ",", "foreground", "=", "\"white\"", ",", "master", "=", "master", ")", "else", ":", "self", ".", "image", "=", "PhotoImage", "(", "im", ",", "master", "=", "master", ")", "super", "(", ")", ".", "__init__", "(", "master", ",", "image", "=", "self", ".", "image", ",", "bg", "=", "\"black\"", ",", "bd", "=", "0", ")", "if", "not", "tkinter", ".", "_default_root", ":", "raise", "OSError", "(", "\"tkinter not initialized\"", ")", "top", "=", "tkinter", ".", "Toplevel", "(", ")", "if", "title", ":", "top", ".", "title", "(", "title", ")", "UI", "(", "top", ",", "image", ")", ".", "pack", "(", ")" ]
[ 283, 0 ]
[ 299, 25 ]
python
en
['en', 'en', 'en']
True
PhotoImage.__str__
(self)
Get the Tkinter photo image identifier. This method is automatically called by Tkinter whenever a PhotoImage object is passed to a Tkinter method. :return: A Tkinter photo image identifier (a string).
Get the Tkinter photo image identifier. This method is automatically called by Tkinter whenever a PhotoImage object is passed to a Tkinter method.
def __str__(self): """ Get the Tkinter photo image identifier. This method is automatically called by Tkinter whenever a PhotoImage object is passed to a Tkinter method. :return: A Tkinter photo image identifier (a string). """ return str(self.__photo)
[ "def", "__str__", "(", "self", ")", ":", "return", "str", "(", "self", ".", "__photo", ")" ]
[ 124, 4 ]
[ 132, 32 ]
python
en
['en', 'error', 'th']
False
PhotoImage.width
(self)
Get the width of the image. :return: The width, in pixels.
Get the width of the image.
def width(self): """ Get the width of the image. :return: The width, in pixels. """ return self.__size[0]
[ "def", "width", "(", "self", ")", ":", "return", "self", ".", "__size", "[", "0", "]" ]
[ 134, 4 ]
[ 140, 29 ]
python
en
['en', 'error', 'th']
False
PhotoImage.height
(self)
Get the height of the image. :return: The height, in pixels.
Get the height of the image.
def height(self): """ Get the height of the image. :return: The height, in pixels. """ return self.__size[1]
[ "def", "height", "(", "self", ")", ":", "return", "self", ".", "__size", "[", "1", "]" ]
[ 142, 4 ]
[ 148, 29 ]
python
en
['en', 'error', 'th']
False
PhotoImage.paste
(self, im, box=None)
Paste a PIL image into the photo image. Note that this can be very slow if the photo image is displayed. :param im: A PIL image. The size must match the target region. If the mode does not match, the image is converted to the mode of the bitmap image. :param box: A 4-tuple defining the left, upper, right, and lower pixel coordinate. See :ref:`coordinate-system`. If None is given instead of a tuple, all of the image is assumed.
Paste a PIL image into the photo image. Note that this can be very slow if the photo image is displayed.
def paste(self, im, box=None): """ Paste a PIL image into the photo image. Note that this can be very slow if the photo image is displayed. :param im: A PIL image. The size must match the target region. If the mode does not match, the image is converted to the mode of the bitmap image. :param box: A 4-tuple defining the left, upper, right, and lower pixel coordinate. See :ref:`coordinate-system`. If None is given instead of a tuple, all of the image is assumed. """ # convert to blittable im.load() image = im.im if image.isblock() and im.mode == self.__mode: block = image else: block = image.new_block(self.__mode, im.size) image.convert2(block, image) # convert directly between buffers tk = self.__photo.tk try: tk.call("PyImagingPhoto", self.__photo, block.id) except tkinter.TclError: # activate Tkinter hook try: from . import _imagingtk try: if hasattr(tk, "interp"): # Required for PyPy, which always has CFFI installed from cffi import FFI ffi = FFI() # PyPy is using an FFI CDATA element # (Pdb) self.tk.interp # <cdata 'Tcl_Interp *' 0x3061b50> _imagingtk.tkinit(int(ffi.cast("uintptr_t", tk.interp)), 1) else: _imagingtk.tkinit(tk.interpaddr(), 1) except AttributeError: _imagingtk.tkinit(id(tk), 0) tk.call("PyImagingPhoto", self.__photo, block.id) except (ImportError, AttributeError, tkinter.TclError): raise
[ "def", "paste", "(", "self", ",", "im", ",", "box", "=", "None", ")", ":", "# convert to blittable", "im", ".", "load", "(", ")", "image", "=", "im", ".", "im", "if", "image", ".", "isblock", "(", ")", "and", "im", ".", "mode", "==", "self", ".", "__mode", ":", "block", "=", "image", "else", ":", "block", "=", "image", ".", "new_block", "(", "self", ".", "__mode", ",", "im", ".", "size", ")", "image", ".", "convert2", "(", "block", ",", "image", ")", "# convert directly between buffers", "tk", "=", "self", ".", "__photo", ".", "tk", "try", ":", "tk", ".", "call", "(", "\"PyImagingPhoto\"", ",", "self", ".", "__photo", ",", "block", ".", "id", ")", "except", "tkinter", ".", "TclError", ":", "# activate Tkinter hook", "try", ":", "from", ".", "import", "_imagingtk", "try", ":", "if", "hasattr", "(", "tk", ",", "\"interp\"", ")", ":", "# Required for PyPy, which always has CFFI installed", "from", "cffi", "import", "FFI", "ffi", "=", "FFI", "(", ")", "# PyPy is using an FFI CDATA element", "# (Pdb) self.tk.interp", "# <cdata 'Tcl_Interp *' 0x3061b50>", "_imagingtk", ".", "tkinit", "(", "int", "(", "ffi", ".", "cast", "(", "\"uintptr_t\"", ",", "tk", ".", "interp", ")", ")", ",", "1", ")", "else", ":", "_imagingtk", ".", "tkinit", "(", "tk", ".", "interpaddr", "(", ")", ",", "1", ")", "except", "AttributeError", ":", "_imagingtk", ".", "tkinit", "(", "id", "(", "tk", ")", ",", "0", ")", "tk", ".", "call", "(", "\"PyImagingPhoto\"", ",", "self", ".", "__photo", ",", "block", ".", "id", ")", "except", "(", "ImportError", ",", "AttributeError", ",", "tkinter", ".", "TclError", ")", ":", "raise" ]
[ 150, 4 ]
[ 198, 21 ]
python
en
['en', 'error', 'th']
False
BitmapImage.width
(self)
Get the width of the image. :return: The width, in pixels.
Get the width of the image.
def width(self): """ Get the width of the image. :return: The width, in pixels. """ return self.__size[0]
[ "def", "width", "(", "self", ")", ":", "return", "self", ".", "__size", "[", "0", "]" ]
[ 246, 4 ]
[ 252, 29 ]
python
en
['en', 'error', 'th']
False
BitmapImage.height
(self)
Get the height of the image. :return: The height, in pixels.
Get the height of the image.
def height(self): """ Get the height of the image. :return: The height, in pixels. """ return self.__size[1]
[ "def", "height", "(", "self", ")", ":", "return", "self", ".", "__size", "[", "1", "]" ]
[ 254, 4 ]
[ 260, 29 ]
python
en
['en', 'error', 'th']
False
BitmapImage.__str__
(self)
Get the Tkinter bitmap image identifier. This method is automatically called by Tkinter whenever a BitmapImage object is passed to a Tkinter method. :return: A Tkinter bitmap image identifier (a string).
Get the Tkinter bitmap image identifier. This method is automatically called by Tkinter whenever a BitmapImage object is passed to a Tkinter method.
def __str__(self): """ Get the Tkinter bitmap image identifier. This method is automatically called by Tkinter whenever a BitmapImage object is passed to a Tkinter method. :return: A Tkinter bitmap image identifier (a string). """ return str(self.__photo)
[ "def", "__str__", "(", "self", ")", ":", "return", "str", "(", "self", ".", "__photo", ")" ]
[ 262, 4 ]
[ 270, 32 ]
python
en
['en', 'error', 'th']
False
_date_from_string
(year, year_format, month='', month_format='', day='', day_format='', delim='__')
Get a datetime.date object given a format string and a year, month, and day (only year is mandatory). Raise a 404 for an invalid date.
Get a datetime.date object given a format string and a year, month, and day (only year is mandatory). Raise a 404 for an invalid date.
def _date_from_string(year, year_format, month='', month_format='', day='', day_format='', delim='__'): """ Get a datetime.date object given a format string and a year, month, and day (only year is mandatory). Raise a 404 for an invalid date. """ format = year_format + delim + month_format + delim + day_format datestr = str(year) + delim + str(month) + delim + str(day) try: return datetime.datetime.strptime(datestr, format).date() except ValueError: raise Http404(_('Invalid date string “%(datestr)s” given format “%(format)s”') % { 'datestr': datestr, 'format': format, })
[ "def", "_date_from_string", "(", "year", ",", "year_format", ",", "month", "=", "''", ",", "month_format", "=", "''", ",", "day", "=", "''", ",", "day_format", "=", "''", ",", "delim", "=", "'__'", ")", ":", "format", "=", "year_format", "+", "delim", "+", "month_format", "+", "delim", "+", "day_format", "datestr", "=", "str", "(", "year", ")", "+", "delim", "+", "str", "(", "month", ")", "+", "delim", "+", "str", "(", "day", ")", "try", ":", "return", "datetime", ".", "datetime", ".", "strptime", "(", "datestr", ",", "format", ")", ".", "date", "(", ")", "except", "ValueError", ":", "raise", "Http404", "(", "_", "(", "'Invalid date string “%(datestr)s” given format “%(format)s”') % {", "", "", "", "'datestr'", ":", "datestr", ",", "'format'", ":", "format", ",", "}", ")" ]
[ 617, 0 ]
[ 630, 10 ]
python
en
['en', 'error', 'th']
False
_get_next_prev
(generic_view, date, is_previous, period)
Get the next or the previous valid date. The idea is to allow links on month/day views to never be 404s by never providing a date that'll be invalid for the given view. This is a bit complicated since it handles different intervals of time, hence the coupling to generic_view. However in essence the logic comes down to: * If allow_empty and allow_future are both true, this is easy: just return the naive result (just the next/previous day/week/month, regardless of object existence.) * If allow_empty is true, allow_future is false, and the naive result isn't in the future, then return it; otherwise return None. * If allow_empty is false and allow_future is true, return the next date *that contains a valid object*, even if it's in the future. If there are no next objects, return None. * If allow_empty is false and allow_future is false, return the next date that contains a valid object. If that date is in the future, or if there are no next objects, return None.
Get the next or the previous valid date. The idea is to allow links on month/day views to never be 404s by never providing a date that'll be invalid for the given view.
def _get_next_prev(generic_view, date, is_previous, period): """ Get the next or the previous valid date. The idea is to allow links on month/day views to never be 404s by never providing a date that'll be invalid for the given view. This is a bit complicated since it handles different intervals of time, hence the coupling to generic_view. However in essence the logic comes down to: * If allow_empty and allow_future are both true, this is easy: just return the naive result (just the next/previous day/week/month, regardless of object existence.) * If allow_empty is true, allow_future is false, and the naive result isn't in the future, then return it; otherwise return None. * If allow_empty is false and allow_future is true, return the next date *that contains a valid object*, even if it's in the future. If there are no next objects, return None. * If allow_empty is false and allow_future is false, return the next date that contains a valid object. If that date is in the future, or if there are no next objects, return None. """ date_field = generic_view.get_date_field() allow_empty = generic_view.get_allow_empty() allow_future = generic_view.get_allow_future() get_current = getattr(generic_view, '_get_current_%s' % period) get_next = getattr(generic_view, '_get_next_%s' % period) # Bounds of the current interval start, end = get_current(date), get_next(date) # If allow_empty is True, the naive result will be valid if allow_empty: if is_previous: result = get_current(start - datetime.timedelta(days=1)) else: result = end if allow_future or result <= timezone_today(): return result else: return None # Otherwise, we'll need to go to the database to look for an object # whose date_field is at least (greater than/less than) the given # naive result else: # Construct a lookup and an ordering depending on whether we're doing # a previous date or a next date lookup. 
if is_previous: lookup = {'%s__lt' % date_field: generic_view._make_date_lookup_arg(start)} ordering = '-%s' % date_field else: lookup = {'%s__gte' % date_field: generic_view._make_date_lookup_arg(end)} ordering = date_field # Filter out objects in the future if appropriate. if not allow_future: # Fortunately, to match the implementation of allow_future, # we need __lte, which doesn't conflict with __lt above. if generic_view.uses_datetime_field: now = timezone.now() else: now = timezone_today() lookup['%s__lte' % date_field] = now qs = generic_view.get_queryset().filter(**lookup).order_by(ordering) # Snag the first object from the queryset; if it doesn't exist that # means there's no next/previous link available. try: result = getattr(qs[0], date_field) except IndexError: return None # Convert datetimes to dates in the current time zone. if generic_view.uses_datetime_field: if settings.USE_TZ: result = timezone.localtime(result) result = result.date() # Return the first day of the period. return get_current(result)
[ "def", "_get_next_prev", "(", "generic_view", ",", "date", ",", "is_previous", ",", "period", ")", ":", "date_field", "=", "generic_view", ".", "get_date_field", "(", ")", "allow_empty", "=", "generic_view", ".", "get_allow_empty", "(", ")", "allow_future", "=", "generic_view", ".", "get_allow_future", "(", ")", "get_current", "=", "getattr", "(", "generic_view", ",", "'_get_current_%s'", "%", "period", ")", "get_next", "=", "getattr", "(", "generic_view", ",", "'_get_next_%s'", "%", "period", ")", "# Bounds of the current interval", "start", ",", "end", "=", "get_current", "(", "date", ")", ",", "get_next", "(", "date", ")", "# If allow_empty is True, the naive result will be valid", "if", "allow_empty", ":", "if", "is_previous", ":", "result", "=", "get_current", "(", "start", "-", "datetime", ".", "timedelta", "(", "days", "=", "1", ")", ")", "else", ":", "result", "=", "end", "if", "allow_future", "or", "result", "<=", "timezone_today", "(", ")", ":", "return", "result", "else", ":", "return", "None", "# Otherwise, we'll need to go to the database to look for an object", "# whose date_field is at least (greater than/less than) the given", "# naive result", "else", ":", "# Construct a lookup and an ordering depending on whether we're doing", "# a previous date or a next date lookup.", "if", "is_previous", ":", "lookup", "=", "{", "'%s__lt'", "%", "date_field", ":", "generic_view", ".", "_make_date_lookup_arg", "(", "start", ")", "}", "ordering", "=", "'-%s'", "%", "date_field", "else", ":", "lookup", "=", "{", "'%s__gte'", "%", "date_field", ":", "generic_view", ".", "_make_date_lookup_arg", "(", "end", ")", "}", "ordering", "=", "date_field", "# Filter out objects in the future if appropriate.", "if", "not", "allow_future", ":", "# Fortunately, to match the implementation of allow_future,", "# we need __lte, which doesn't conflict with __lt above.", "if", "generic_view", ".", "uses_datetime_field", ":", "now", "=", "timezone", ".", "now", "(", ")", "else", 
":", "now", "=", "timezone_today", "(", ")", "lookup", "[", "'%s__lte'", "%", "date_field", "]", "=", "now", "qs", "=", "generic_view", ".", "get_queryset", "(", ")", ".", "filter", "(", "*", "*", "lookup", ")", ".", "order_by", "(", "ordering", ")", "# Snag the first object from the queryset; if it doesn't exist that", "# means there's no next/previous link available.", "try", ":", "result", "=", "getattr", "(", "qs", "[", "0", "]", ",", "date_field", ")", "except", "IndexError", ":", "return", "None", "# Convert datetimes to dates in the current time zone.", "if", "generic_view", ".", "uses_datetime_field", ":", "if", "settings", ".", "USE_TZ", ":", "result", "=", "timezone", ".", "localtime", "(", "result", ")", "result", "=", "result", ".", "date", "(", ")", "# Return the first day of the period.", "return", "get_current", "(", "result", ")" ]
[ 633, 0 ]
[ 720, 34 ]
python
en
['en', 'error', 'th']
False
timezone_today
()
Return the current date in the current time zone.
Return the current date in the current time zone.
def timezone_today(): """Return the current date in the current time zone.""" if settings.USE_TZ: return timezone.localdate() else: return datetime.date.today()
[ "def", "timezone_today", "(", ")", ":", "if", "settings", ".", "USE_TZ", ":", "return", "timezone", ".", "localdate", "(", ")", "else", ":", "return", "datetime", ".", "date", ".", "today", "(", ")" ]
[ 723, 0 ]
[ 728, 36 ]
python
en
['en', 'en', 'en']
True
YearMixin.get_year_format
(self)
Get a year format string in strptime syntax to be used to parse the year from url variables.
Get a year format string in strptime syntax to be used to parse the year from url variables.
def get_year_format(self): """ Get a year format string in strptime syntax to be used to parse the year from url variables. """ return self.year_format
[ "def", "get_year_format", "(", "self", ")", ":", "return", "self", ".", "year_format" ]
[ 23, 4 ]
[ 28, 31 ]
python
en
['en', 'error', 'th']
False
YearMixin.get_year
(self)
Return the year for which this view should display data.
Return the year for which this view should display data.
def get_year(self): """Return the year for which this view should display data.""" year = self.year if year is None: try: year = self.kwargs['year'] except KeyError: try: year = self.request.GET['year'] except KeyError: raise Http404(_("No year specified")) return year
[ "def", "get_year", "(", "self", ")", ":", "year", "=", "self", ".", "year", "if", "year", "is", "None", ":", "try", ":", "year", "=", "self", ".", "kwargs", "[", "'year'", "]", "except", "KeyError", ":", "try", ":", "year", "=", "self", ".", "request", ".", "GET", "[", "'year'", "]", "except", "KeyError", ":", "raise", "Http404", "(", "_", "(", "\"No year specified\"", ")", ")", "return", "year" ]
[ 30, 4 ]
[ 41, 19 ]
python
en
['en', 'en', 'en']
True
YearMixin.get_next_year
(self, date)
Get the next valid year.
Get the next valid year.
def get_next_year(self, date): """Get the next valid year.""" return _get_next_prev(self, date, is_previous=False, period='year')
[ "def", "get_next_year", "(", "self", ",", "date", ")", ":", "return", "_get_next_prev", "(", "self", ",", "date", ",", "is_previous", "=", "False", ",", "period", "=", "'year'", ")" ]
[ 43, 4 ]
[ 45, 75 ]
python
en
['en', 'en', 'en']
True
YearMixin.get_previous_year
(self, date)
Get the previous valid year.
Get the previous valid year.
def get_previous_year(self, date): """Get the previous valid year.""" return _get_next_prev(self, date, is_previous=True, period='year')
[ "def", "get_previous_year", "(", "self", ",", "date", ")", ":", "return", "_get_next_prev", "(", "self", ",", "date", ",", "is_previous", "=", "True", ",", "period", "=", "'year'", ")" ]
[ 47, 4 ]
[ 49, 74 ]
python
en
['en', 'en', 'en']
True
YearMixin._get_next_year
(self, date)
Return the start date of the next interval. The interval is defined by start date <= item date < next start date.
Return the start date of the next interval.
def _get_next_year(self, date): """ Return the start date of the next interval. The interval is defined by start date <= item date < next start date. """ try: return date.replace(year=date.year + 1, month=1, day=1) except ValueError: raise Http404(_("Date out of range"))
[ "def", "_get_next_year", "(", "self", ",", "date", ")", ":", "try", ":", "return", "date", ".", "replace", "(", "year", "=", "date", ".", "year", "+", "1", ",", "month", "=", "1", ",", "day", "=", "1", ")", "except", "ValueError", ":", "raise", "Http404", "(", "_", "(", "\"Date out of range\"", ")", ")" ]
[ 51, 4 ]
[ 60, 49 ]
python
en
['en', 'error', 'th']
False
YearMixin._get_current_year
(self, date)
Return the start date of the current interval.
Return the start date of the current interval.
def _get_current_year(self, date): """Return the start date of the current interval.""" return date.replace(month=1, day=1)
[ "def", "_get_current_year", "(", "self", ",", "date", ")", ":", "return", "date", ".", "replace", "(", "month", "=", "1", ",", "day", "=", "1", ")" ]
[ 62, 4 ]
[ 64, 43 ]
python
en
['en', 'en', 'en']
True
MonthMixin.get_month_format
(self)
Get a month format string in strptime syntax to be used to parse the month from url variables.
Get a month format string in strptime syntax to be used to parse the month from url variables.
def get_month_format(self): """ Get a month format string in strptime syntax to be used to parse the month from url variables. """ return self.month_format
[ "def", "get_month_format", "(", "self", ")", ":", "return", "self", ".", "month_format" ]
[ 72, 4 ]
[ 77, 32 ]
python
en
['en', 'error', 'th']
False
MonthMixin.get_month
(self)
Return the month for which this view should display data.
Return the month for which this view should display data.
def get_month(self): """Return the month for which this view should display data.""" month = self.month if month is None: try: month = self.kwargs['month'] except KeyError: try: month = self.request.GET['month'] except KeyError: raise Http404(_("No month specified")) return month
[ "def", "get_month", "(", "self", ")", ":", "month", "=", "self", ".", "month", "if", "month", "is", "None", ":", "try", ":", "month", "=", "self", ".", "kwargs", "[", "'month'", "]", "except", "KeyError", ":", "try", ":", "month", "=", "self", ".", "request", ".", "GET", "[", "'month'", "]", "except", "KeyError", ":", "raise", "Http404", "(", "_", "(", "\"No month specified\"", ")", ")", "return", "month" ]
[ 79, 4 ]
[ 90, 20 ]
python
en
['en', 'en', 'en']
True
MonthMixin.get_next_month
(self, date)
Get the next valid month.
Get the next valid month.
def get_next_month(self, date): """Get the next valid month.""" return _get_next_prev(self, date, is_previous=False, period='month')
[ "def", "get_next_month", "(", "self", ",", "date", ")", ":", "return", "_get_next_prev", "(", "self", ",", "date", ",", "is_previous", "=", "False", ",", "period", "=", "'month'", ")" ]
[ 92, 4 ]
[ 94, 76 ]
python
en
['en', 'en', 'en']
True
MonthMixin.get_previous_month
(self, date)
Get the previous valid month.
Get the previous valid month.
def get_previous_month(self, date): """Get the previous valid month.""" return _get_next_prev(self, date, is_previous=True, period='month')
[ "def", "get_previous_month", "(", "self", ",", "date", ")", ":", "return", "_get_next_prev", "(", "self", ",", "date", ",", "is_previous", "=", "True", ",", "period", "=", "'month'", ")" ]
[ 96, 4 ]
[ 98, 75 ]
python
en
['en', 'en', 'en']
True
MonthMixin._get_next_month
(self, date)
Return the start date of the next interval. The interval is defined by start date <= item date < next start date.
Return the start date of the next interval.
def _get_next_month(self, date): """ Return the start date of the next interval. The interval is defined by start date <= item date < next start date. """ if date.month == 12: try: return date.replace(year=date.year + 1, month=1, day=1) except ValueError: raise Http404(_("Date out of range")) else: return date.replace(month=date.month + 1, day=1)
[ "def", "_get_next_month", "(", "self", ",", "date", ")", ":", "if", "date", ".", "month", "==", "12", ":", "try", ":", "return", "date", ".", "replace", "(", "year", "=", "date", ".", "year", "+", "1", ",", "month", "=", "1", ",", "day", "=", "1", ")", "except", "ValueError", ":", "raise", "Http404", "(", "_", "(", "\"Date out of range\"", ")", ")", "else", ":", "return", "date", ".", "replace", "(", "month", "=", "date", ".", "month", "+", "1", ",", "day", "=", "1", ")" ]
[ 100, 4 ]
[ 112, 60 ]
python
en
['en', 'error', 'th']
False
MonthMixin._get_current_month
(self, date)
Return the start date of the previous interval.
Return the start date of the previous interval.
def _get_current_month(self, date): """Return the start date of the previous interval.""" return date.replace(day=1)
[ "def", "_get_current_month", "(", "self", ",", "date", ")", ":", "return", "date", ".", "replace", "(", "day", "=", "1", ")" ]
[ 114, 4 ]
[ 116, 34 ]
python
en
['en', 'en', 'en']
True
DayMixin.get_day_format
(self)
Get a day format string in strptime syntax to be used to parse the day from url variables.
Get a day format string in strptime syntax to be used to parse the day from url variables.
def get_day_format(self): """ Get a day format string in strptime syntax to be used to parse the day from url variables. """ return self.day_format
[ "def", "get_day_format", "(", "self", ")", ":", "return", "self", ".", "day_format" ]
[ 124, 4 ]
[ 129, 30 ]
python
en
['en', 'error', 'th']
False
DayMixin.get_day
(self)
Return the day for which this view should display data.
Return the day for which this view should display data.
def get_day(self): """Return the day for which this view should display data.""" day = self.day if day is None: try: day = self.kwargs['day'] except KeyError: try: day = self.request.GET['day'] except KeyError: raise Http404(_("No day specified")) return day
[ "def", "get_day", "(", "self", ")", ":", "day", "=", "self", ".", "day", "if", "day", "is", "None", ":", "try", ":", "day", "=", "self", ".", "kwargs", "[", "'day'", "]", "except", "KeyError", ":", "try", ":", "day", "=", "self", ".", "request", ".", "GET", "[", "'day'", "]", "except", "KeyError", ":", "raise", "Http404", "(", "_", "(", "\"No day specified\"", ")", ")", "return", "day" ]
[ 131, 4 ]
[ 142, 18 ]
python
en
['en', 'en', 'en']
True
DayMixin.get_next_day
(self, date)
Get the next valid day.
Get the next valid day.
def get_next_day(self, date): """Get the next valid day.""" return _get_next_prev(self, date, is_previous=False, period='day')
[ "def", "get_next_day", "(", "self", ",", "date", ")", ":", "return", "_get_next_prev", "(", "self", ",", "date", ",", "is_previous", "=", "False", ",", "period", "=", "'day'", ")" ]
[ 144, 4 ]
[ 146, 74 ]
python
en
['en', 'en', 'en']
True
DayMixin.get_previous_day
(self, date)
Get the previous valid day.
Get the previous valid day.
def get_previous_day(self, date): """Get the previous valid day.""" return _get_next_prev(self, date, is_previous=True, period='day')
[ "def", "get_previous_day", "(", "self", ",", "date", ")", ":", "return", "_get_next_prev", "(", "self", ",", "date", ",", "is_previous", "=", "True", ",", "period", "=", "'day'", ")" ]
[ 148, 4 ]
[ 150, 73 ]
python
en
['en', 'en', 'en']
True
DayMixin._get_next_day
(self, date)
Return the start date of the next interval. The interval is defined by start date <= item date < next start date.
Return the start date of the next interval.
def _get_next_day(self, date): """ Return the start date of the next interval. The interval is defined by start date <= item date < next start date. """ return date + datetime.timedelta(days=1)
[ "def", "_get_next_day", "(", "self", ",", "date", ")", ":", "return", "date", "+", "datetime", ".", "timedelta", "(", "days", "=", "1", ")" ]
[ 152, 4 ]
[ 158, 48 ]
python
en
['en', 'error', 'th']
False
DayMixin._get_current_day
(self, date)
Return the start date of the current interval.
Return the start date of the current interval.
def _get_current_day(self, date): """Return the start date of the current interval.""" return date
[ "def", "_get_current_day", "(", "self", ",", "date", ")", ":", "return", "date" ]
[ 160, 4 ]
[ 162, 19 ]
python
en
['en', 'en', 'en']
True
WeekMixin.get_week_format
(self)
Get a week format string in strptime syntax to be used to parse the week from url variables.
Get a week format string in strptime syntax to be used to parse the week from url variables.
def get_week_format(self): """ Get a week format string in strptime syntax to be used to parse the week from url variables. """ return self.week_format
[ "def", "get_week_format", "(", "self", ")", ":", "return", "self", ".", "week_format" ]
[ 170, 4 ]
[ 175, 31 ]
python
en
['en', 'error', 'th']
False
WeekMixin.get_week
(self)
Return the week for which this view should display data.
Return the week for which this view should display data.
def get_week(self): """Return the week for which this view should display data.""" week = self.week if week is None: try: week = self.kwargs['week'] except KeyError: try: week = self.request.GET['week'] except KeyError: raise Http404(_("No week specified")) return week
[ "def", "get_week", "(", "self", ")", ":", "week", "=", "self", ".", "week", "if", "week", "is", "None", ":", "try", ":", "week", "=", "self", ".", "kwargs", "[", "'week'", "]", "except", "KeyError", ":", "try", ":", "week", "=", "self", ".", "request", ".", "GET", "[", "'week'", "]", "except", "KeyError", ":", "raise", "Http404", "(", "_", "(", "\"No week specified\"", ")", ")", "return", "week" ]
[ 177, 4 ]
[ 188, 19 ]
python
en
['en', 'en', 'en']
True
WeekMixin.get_next_week
(self, date)
Get the next valid week.
Get the next valid week.
def get_next_week(self, date): """Get the next valid week.""" return _get_next_prev(self, date, is_previous=False, period='week')
[ "def", "get_next_week", "(", "self", ",", "date", ")", ":", "return", "_get_next_prev", "(", "self", ",", "date", ",", "is_previous", "=", "False", ",", "period", "=", "'week'", ")" ]
[ 190, 4 ]
[ 192, 75 ]
python
en
['en', 'af', 'en']
True