Search is not available for this dataset
identifier
stringlengths
1
155
parameters
stringlengths
2
6.09k
docstring
stringlengths
11
63.4k
docstring_summary
stringlengths
0
63.4k
function
stringlengths
29
99.8k
function_tokens
sequence
start_point
sequence
end_point
sequence
language
stringclasses
1 value
docstring_language
stringlengths
2
7
docstring_language_predictions
stringlengths
18
23
is_langid_reliable
stringclasses
2 values
get_current_url
( environ, root_only=False, strip_querystring=False, host_only=False, trusted_hosts=None, )
A handy helper function that recreates the full URL as IRI for the current request or parts of it. Here's an example: >>> from werkzeug.test import create_environ >>> env = create_environ("/?param=foo", "http://localhost/script") >>> get_current_url(env) 'http://localhost/script/?param=foo' >>> get_current_url(env, root_only=True) 'http://localhost/script/' >>> get_current_url(env, host_only=True) 'http://localhost/' >>> get_current_url(env, strip_querystring=True) 'http://localhost/script/' This optionally it verifies that the host is in a list of trusted hosts. If the host is not in there it will raise a :exc:`~werkzeug.exceptions.SecurityError`. Note that the string returned might contain unicode characters as the representation is an IRI not an URI. If you need an ASCII only representation you can use the :func:`~werkzeug.urls.iri_to_uri` function: >>> from werkzeug.urls import iri_to_uri >>> iri_to_uri(get_current_url(env)) 'http://localhost/script/?param=foo' :param environ: the WSGI environment to get the current URL from. :param root_only: set `True` if you only want the root URL. :param strip_querystring: set to `True` if you don't want the querystring. :param host_only: set to `True` if the host URL should be returned. :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted` for more information.
A handy helper function that recreates the full URL as IRI for the current request or parts of it. Here's an example:
def get_current_url(
    environ,
    root_only=False,
    strip_querystring=False,
    host_only=False,
    trusted_hosts=None,
):
    """Recreate the full URL of the current request as an IRI, or just
    parts of it.  Here's an example:

    >>> from werkzeug.test import create_environ
    >>> env = create_environ("/?param=foo", "http://localhost/script")
    >>> get_current_url(env)
    'http://localhost/script/?param=foo'
    >>> get_current_url(env, root_only=True)
    'http://localhost/script/'
    >>> get_current_url(env, host_only=True)
    'http://localhost/'
    >>> get_current_url(env, strip_querystring=True)
    'http://localhost/script/'

    When *trusted_hosts* is given the host is checked against that list
    and a :exc:`~werkzeug.exceptions.SecurityError` is raised if it is
    not trusted.

    The returned string is an IRI, not a URI, so it may contain unicode
    characters.  Pass it through :func:`~werkzeug.urls.iri_to_uri` if an
    ASCII-only representation is needed:

    >>> from werkzeug.urls import iri_to_uri
    >>> iri_to_uri(get_current_url(env))
    'http://localhost/script/?param=foo'

    :param environ: the WSGI environment to get the current URL from.
    :param root_only: set `True` if you only want the root URL.
    :param strip_querystring: set to `True` if you don't want the querystring.
    :param host_only: set to `True` if the host URL should be returned.
    :param trusted_hosts: a list of trusted hosts, see :func:`host_is_trusted`
                          for more information.
    """
    parts = [environ["wsgi.url_scheme"], "://", get_host(environ, trusted_hosts)]

    if host_only:
        return uri_to_iri("".join(parts) + "/")

    parts.append(url_quote(wsgi_get_bytes(environ.get("SCRIPT_NAME", ""))).rstrip("/"))
    parts.append("/")

    if not root_only:
        parts.append(url_quote(wsgi_get_bytes(environ.get("PATH_INFO", "")).lstrip(b"/")))

        if not strip_querystring:
            query = get_query_string(environ)

            if query:
                parts.append("?" + query)

    return uri_to_iri("".join(parts))
[ "def", "get_current_url", "(", "environ", ",", "root_only", "=", "False", ",", "strip_querystring", "=", "False", ",", "host_only", "=", "False", ",", "trusted_hosts", "=", "None", ",", ")", ":", "tmp", "=", "[", "environ", "[", "\"wsgi.url_scheme\"", "]", ",", "\"://\"", ",", "get_host", "(", "environ", ",", "trusted_hosts", ")", "]", "cat", "=", "tmp", ".", "append", "if", "host_only", ":", "return", "uri_to_iri", "(", "\"\"", ".", "join", "(", "tmp", ")", "+", "\"/\"", ")", "cat", "(", "url_quote", "(", "wsgi_get_bytes", "(", "environ", ".", "get", "(", "\"SCRIPT_NAME\"", ",", "\"\"", ")", ")", ")", ".", "rstrip", "(", "\"/\"", ")", ")", "cat", "(", "\"/\"", ")", "if", "not", "root_only", ":", "cat", "(", "url_quote", "(", "wsgi_get_bytes", "(", "environ", ".", "get", "(", "\"PATH_INFO\"", ",", "\"\"", ")", ")", ".", "lstrip", "(", "b\"/\"", ")", ")", ")", "if", "not", "strip_querystring", ":", "qs", "=", "get_query_string", "(", "environ", ")", "if", "qs", ":", "cat", "(", "\"?\"", "+", "qs", ")", "return", "uri_to_iri", "(", "\"\"", ".", "join", "(", "tmp", ")", ")" ]
[ 45, 0 ]
[ 98, 35 ]
python
en
['en', 'en', 'en']
True
host_is_trusted
(hostname, trusted_list)
Checks if a host is trusted against a list. This also takes care of port normalization. .. versionadded:: 0.9 :param hostname: the hostname to check :param trusted_list: a list of hostnames to check against. If a hostname starts with a dot it will match against all subdomains as well.
Checks if a host is trusted against a list. This also takes care of port normalization.
def host_is_trusted(hostname, trusted_list):
    """Check whether *hostname* matches an entry in *trusted_list*,
    normalizing ports and IDNA encoding on both sides first.

    .. versionadded:: 0.9

    :param hostname: the hostname to check
    :param trusted_list: a list of hostnames to check against. If a
                         hostname starts with a dot it will match against
                         all subdomains as well.
    """
    if not hostname:
        return False

    if isinstance(trusted_list, string_types):
        trusted_list = [trusted_list]

    def _normalize(name):
        # Drop an optional port before IDNA-encoding the host.
        if ":" in name:
            name = name.rsplit(":", 1)[0]

        return _encode_idna(name)

    try:
        hostname = _normalize(hostname)
    except UnicodeError:
        return False

    for candidate in trusted_list:
        # A leading dot means the entry also matches any subdomain.
        suffix_match = candidate.startswith(".")

        if suffix_match:
            candidate = candidate[1:]

        try:
            candidate = _normalize(candidate)
        except UnicodeError:
            return False

        if candidate == hostname:
            return True

        if suffix_match and hostname.endswith(b"." + candidate):
            return True

    return False
[ "def", "host_is_trusted", "(", "hostname", ",", "trusted_list", ")", ":", "if", "not", "hostname", ":", "return", "False", "if", "isinstance", "(", "trusted_list", ",", "string_types", ")", ":", "trusted_list", "=", "[", "trusted_list", "]", "def", "_normalize", "(", "hostname", ")", ":", "if", "\":\"", "in", "hostname", ":", "hostname", "=", "hostname", ".", "rsplit", "(", "\":\"", ",", "1", ")", "[", "0", "]", "return", "_encode_idna", "(", "hostname", ")", "try", ":", "hostname", "=", "_normalize", "(", "hostname", ")", "except", "UnicodeError", ":", "return", "False", "for", "ref", "in", "trusted_list", ":", "if", "ref", ".", "startswith", "(", "\".\"", ")", ":", "ref", "=", "ref", "[", "1", ":", "]", "suffix_match", "=", "True", "else", ":", "suffix_match", "=", "False", "try", ":", "ref", "=", "_normalize", "(", "ref", ")", "except", "UnicodeError", ":", "return", "False", "if", "ref", "==", "hostname", ":", "return", "True", "if", "suffix_match", "and", "hostname", ".", "endswith", "(", "b\".\"", "+", "ref", ")", ":", "return", "True", "return", "False" ]
[ 101, 0 ]
[ 141, 16 ]
python
en
['en', 'en', 'en']
True
get_host
(environ, trusted_hosts=None)
Return the host for the given WSGI environment. This first checks the ``Host`` header. If it's not present, then ``SERVER_NAME`` and ``SERVER_PORT`` are used. The host will only contain the port if it is different than the standard port for the protocol. Optionally, verify that the host is trusted using :func:`host_is_trusted` and raise a :exc:`~werkzeug.exceptions.SecurityError` if it is not. :param environ: The WSGI environment to get the host from. :param trusted_hosts: A list of trusted hosts. :return: Host, with port if necessary. :raise ~werkzeug.exceptions.SecurityError: If the host is not trusted.
Return the host for the given WSGI environment. This first checks the ``Host`` header. If it's not present, then ``SERVER_NAME`` and ``SERVER_PORT`` are used. The host will only contain the port if it is different than the standard port for the protocol.
def get_host(environ, trusted_hosts=None):
    """Return the host for the given WSGI environment.

    The ``Host`` header is preferred; when it is absent, ``SERVER_NAME``
    and ``SERVER_PORT`` are combined instead.  A port is only included
    when it differs from the scheme's standard port.

    Optionally, verify that the host is trusted using
    :func:`host_is_trusted` and raise a
    :exc:`~werkzeug.exceptions.SecurityError` if it is not.

    :param environ: The WSGI environment to get the host from.
    :param trusted_hosts: A list of trusted hosts.
    :return: Host, with port if necessary.
    :raise ~werkzeug.exceptions.SecurityError: If the host is not trusted.
    """
    scheme = environ["wsgi.url_scheme"]

    if "HTTP_HOST" in environ:
        host = environ["HTTP_HOST"]

        # Drop a redundant default port from the Host header.
        if scheme == "http" and host.endswith(":80"):
            host = host[:-3]
        elif scheme == "https" and host.endswith(":443"):
            host = host[:-4]
    else:
        host = environ["SERVER_NAME"]
        port = environ["SERVER_PORT"]

        # Append the port only when it is non-standard for the scheme.
        if (scheme, port) not in (("https", "443"), ("http", "80")):
            host += ":" + port

    if trusted_hosts is not None and not host_is_trusted(host, trusted_hosts):
        from .exceptions import SecurityError

        raise SecurityError('Host "%s" is not trusted' % host)

    return host
[ "def", "get_host", "(", "environ", ",", "trusted_hosts", "=", "None", ")", ":", "if", "\"HTTP_HOST\"", "in", "environ", ":", "rv", "=", "environ", "[", "\"HTTP_HOST\"", "]", "if", "environ", "[", "\"wsgi.url_scheme\"", "]", "==", "\"http\"", "and", "rv", ".", "endswith", "(", "\":80\"", ")", ":", "rv", "=", "rv", "[", ":", "-", "3", "]", "elif", "environ", "[", "\"wsgi.url_scheme\"", "]", "==", "\"https\"", "and", "rv", ".", "endswith", "(", "\":443\"", ")", ":", "rv", "=", "rv", "[", ":", "-", "4", "]", "else", ":", "rv", "=", "environ", "[", "\"SERVER_NAME\"", "]", "if", "(", "environ", "[", "\"wsgi.url_scheme\"", "]", ",", "environ", "[", "\"SERVER_PORT\"", "]", ")", "not", "in", "(", "(", "\"https\"", ",", "\"443\"", ")", ",", "(", "\"http\"", ",", "\"80\"", ")", ",", ")", ":", "rv", "+=", "\":\"", "+", "environ", "[", "\"SERVER_PORT\"", "]", "if", "trusted_hosts", "is", "not", "None", ":", "if", "not", "host_is_trusted", "(", "rv", ",", "trusted_hosts", ")", ":", "from", ".", "exceptions", "import", "SecurityError", "raise", "SecurityError", "(", "'Host \"%s\" is not trusted'", "%", "rv", ")", "return", "rv" ]
[ 144, 0 ]
[ 178, 13 ]
python
en
['en', 'en', 'en']
True
get_content_length
(environ)
Returns the content length from the WSGI environment as integer. If it's not available or chunked transfer encoding is used, ``None`` is returned. .. versionadded:: 0.9 :param environ: the WSGI environ to fetch the content length from.
Returns the content length from the WSGI environment as integer. If it's not available or chunked transfer encoding is used, ``None`` is returned.
def get_content_length(environ):
    """Return the ``CONTENT_LENGTH`` value from the WSGI environment as
    a non-negative integer.

    ``None`` is returned when the header is absent, cannot be parsed as
    an integer, or when chunked transfer encoding is used (the length is
    unknown up front in that case).

    .. versionadded:: 0.9

    :param environ: the WSGI environ to fetch the content length from.
    """
    if environ.get("HTTP_TRANSFER_ENCODING", "") == "chunked":
        return None

    raw = environ.get("CONTENT_LENGTH")

    if raw is None:
        return None

    try:
        length = int(raw)
    except (ValueError, TypeError):
        return None

    # Clamp negative values to zero.
    return max(0, length)
[ "def", "get_content_length", "(", "environ", ")", ":", "if", "environ", ".", "get", "(", "\"HTTP_TRANSFER_ENCODING\"", ",", "\"\"", ")", "==", "\"chunked\"", ":", "return", "None", "content_length", "=", "environ", ".", "get", "(", "\"CONTENT_LENGTH\"", ")", "if", "content_length", "is", "not", "None", ":", "try", ":", "return", "max", "(", "0", ",", "int", "(", "content_length", ")", ")", "except", "(", "ValueError", ",", "TypeError", ")", ":", "pass" ]
[ 181, 0 ]
[ 198, 16 ]
python
en
['en', 'en', 'en']
True
get_input_stream
(environ, safe_fallback=True)
Returns the input stream from the WSGI environment and wraps it in the most sensible way possible. The stream returned is not the raw WSGI stream in most cases but one that is safe to read from without taking into account the content length. If content length is not set, the stream will be empty for safety reasons. If the WSGI server supports chunked or infinite streams, it should set the ``wsgi.input_terminated`` value in the WSGI environ to indicate that. .. versionadded:: 0.9 :param environ: the WSGI environ to fetch the stream from. :param safe_fallback: use an empty stream as a safe fallback when the content length is not set. Disabling this allows infinite streams, which can be a denial-of-service risk.
Returns the input stream from the WSGI environment and wraps it in the most sensible way possible. The stream returned is not the raw WSGI stream in most cases but one that is safe to read from without taking into account the content length.
def get_input_stream(environ, safe_fallback=True):
    """Return the input stream from the WSGI environment, wrapped so it
    can be read safely without consulting the content length yourself.

    If no content length is set the stream will be empty for safety
    reasons.  A WSGI server that supports chunked or infinite streams
    should set ``wsgi.input_terminated`` in the environ to indicate the
    stream may be read to the end as-is.

    .. versionadded:: 0.9

    :param environ: the WSGI environ to fetch the stream from.
    :param safe_fallback: use an empty stream as a safe fallback when the
        content length is not set. Disabling this allows infinite streams,
        which can be a denial-of-service risk.
    """
    stream = environ["wsgi.input"]
    content_length = get_content_length(environ)

    # The server guarantees a terminated stream, so return it unchanged:
    # it can safely be read until the end.
    if environ.get("wsgi.input_terminated"):
        return stream

    # Without a content length the raw stream is potentially infinite or
    # malicious; hand back an empty stream unless the caller opts out.
    if content_length is None:
        if safe_fallback:
            return BytesIO()

        return stream

    # Otherwise cap reads at the declared content length.
    return LimitedStream(stream, content_length)
[ "def", "get_input_stream", "(", "environ", ",", "safe_fallback", "=", "True", ")", ":", "stream", "=", "environ", "[", "\"wsgi.input\"", "]", "content_length", "=", "get_content_length", "(", "environ", ")", "# A wsgi extension that tells us if the input is terminated. In", "# that case we return the stream unchanged as we know we can safely", "# read it until the end.", "if", "environ", ".", "get", "(", "\"wsgi.input_terminated\"", ")", ":", "return", "stream", "# If the request doesn't specify a content length, returning the stream is", "# potentially dangerous because it could be infinite, malicious or not. If", "# safe_fallback is true, return an empty stream instead for safety.", "if", "content_length", "is", "None", ":", "return", "BytesIO", "(", ")", "if", "safe_fallback", "else", "stream", "# Otherwise limit the stream to the content length", "return", "LimitedStream", "(", "stream", ",", "content_length", ")" ]
[ 201, 0 ]
[ 234, 48 ]
python
en
['en', 'en', 'en']
True
get_query_string
(environ)
Returns the `QUERY_STRING` from the WSGI environment. This also takes care about the WSGI decoding dance on Python 3 environments as a native string. The string returned will be restricted to ASCII characters. .. versionadded:: 0.9 :param environ: the WSGI environment object to get the query string from.
Returns the `QUERY_STRING` from the WSGI environment. This also takes care about the WSGI decoding dance on Python 3 environments as a native string. The string returned will be restricted to ASCII characters.
def get_query_string(environ):
    """Return the ``QUERY_STRING`` from the WSGI environment as a native
    string restricted to ASCII characters, handling the WSGI decoding
    dance on Python 3.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the query string from.
    """
    raw = wsgi_get_bytes(environ.get("QUERY_STRING", ""))
    # QUERY_STRING should already be ASCII safe, but some browsers
    # (notably IE) send unicode; quote anything outside the safe set.
    return try_coerce_native(url_quote(raw, safe=":&%=+$!*'(),"))
[ "def", "get_query_string", "(", "environ", ")", ":", "qs", "=", "wsgi_get_bytes", "(", "environ", ".", "get", "(", "\"QUERY_STRING\"", ",", "\"\"", ")", ")", "# QUERY_STRING really should be ascii safe but some browsers", "# will send us some unicode stuff (I am looking at you IE).", "# In that case we want to urllib quote it badly.", "return", "try_coerce_native", "(", "url_quote", "(", "qs", ",", "safe", "=", "\":&%=+$!*'(),\"", ")", ")" ]
[ 237, 0 ]
[ 251, 64 ]
python
en
['en', 'en', 'en']
True
get_path_info
(environ, charset="utf-8", errors="replace")
Returns the `PATH_INFO` from the WSGI environment and properly decodes it. This also takes care about the WSGI decoding dance on Python 3 environments. if the `charset` is set to `None` a bytestring is returned. .. versionadded:: 0.9 :param environ: the WSGI environment object to get the path from. :param charset: the charset for the path info, or `None` if no decoding should be performed. :param errors: the decoding error handling.
Returns the `PATH_INFO` from the WSGI environment and properly decodes it. This also takes care about the WSGI decoding dance on Python 3 environments. if the `charset` is set to `None` a bytestring is returned.
def get_path_info(environ, charset="utf-8", errors="replace"):
    """Return the decoded ``PATH_INFO`` from the WSGI environment,
    handling the WSGI decoding dance on Python 3.  When *charset* is
    ``None`` the raw bytestring is returned instead.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the path from.
    :param charset: the charset for the path info, or `None` if no
                    decoding should be performed.
    :param errors: the decoding error handling.
    """
    raw_path = wsgi_get_bytes(environ.get("PATH_INFO", ""))
    return to_unicode(raw_path, charset, errors, allow_none_charset=True)
[ "def", "get_path_info", "(", "environ", ",", "charset", "=", "\"utf-8\"", ",", "errors", "=", "\"replace\"", ")", ":", "path", "=", "wsgi_get_bytes", "(", "environ", ".", "get", "(", "\"PATH_INFO\"", ",", "\"\"", ")", ")", "return", "to_unicode", "(", "path", ",", "charset", ",", "errors", ",", "allow_none_charset", "=", "True", ")" ]
[ 254, 0 ]
[ 268, 69 ]
python
en
['en', 'en', 'en']
True
get_script_name
(environ, charset="utf-8", errors="replace")
Returns the `SCRIPT_NAME` from the WSGI environment and properly decodes it. This also takes care about the WSGI decoding dance on Python 3 environments. if the `charset` is set to `None` a bytestring is returned. .. versionadded:: 0.9 :param environ: the WSGI environment object to get the path from. :param charset: the charset for the path, or `None` if no decoding should be performed. :param errors: the decoding error handling.
Returns the `SCRIPT_NAME` from the WSGI environment and properly decodes it. This also takes care about the WSGI decoding dance on Python 3 environments. if the `charset` is set to `None` a bytestring is returned.
def get_script_name(environ, charset="utf-8", errors="replace"):
    """Return the decoded ``SCRIPT_NAME`` from the WSGI environment,
    handling the WSGI decoding dance on Python 3.  When *charset* is
    ``None`` the raw bytestring is returned instead.

    .. versionadded:: 0.9

    :param environ: the WSGI environment object to get the path from.
    :param charset: the charset for the path, or `None` if no decoding
                    should be performed.
    :param errors: the decoding error handling.
    """
    raw_name = wsgi_get_bytes(environ.get("SCRIPT_NAME", ""))
    return to_unicode(raw_name, charset, errors, allow_none_charset=True)
[ "def", "get_script_name", "(", "environ", ",", "charset", "=", "\"utf-8\"", ",", "errors", "=", "\"replace\"", ")", ":", "path", "=", "wsgi_get_bytes", "(", "environ", ".", "get", "(", "\"SCRIPT_NAME\"", ",", "\"\"", ")", ")", "return", "to_unicode", "(", "path", ",", "charset", ",", "errors", ",", "allow_none_charset", "=", "True", ")" ]
[ 271, 0 ]
[ 285, 69 ]
python
en
['en', 'en', 'en']
True
pop_path_info
(environ, charset="utf-8", errors="replace")
Removes and returns the next segment of `PATH_INFO`, pushing it onto `SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`. If the `charset` is set to `None` a bytestring is returned. If there are empty segments (``'/foo//bar``) these are ignored but properly pushed to the `SCRIPT_NAME`: >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} >>> pop_path_info(env) 'a' >>> env['SCRIPT_NAME'] '/foo/a' >>> pop_path_info(env) 'b' >>> env['SCRIPT_NAME'] '/foo/a/b' .. versionadded:: 0.5 .. versionchanged:: 0.9 The path is now decoded and a charset and encoding parameter can be provided. :param environ: the WSGI environment that is modified.
Removes and returns the next segment of `PATH_INFO`, pushing it onto `SCRIPT_NAME`. Returns `None` if there is nothing left on `PATH_INFO`.
def pop_path_info(environ, charset="utf-8", errors="replace"):
    """Remove and return the next segment of `PATH_INFO`, pushing it
    onto `SCRIPT_NAME`.  Returns `None` if there is nothing left on
    `PATH_INFO`.

    If the `charset` is set to `None` a bytestring is returned.

    If there are empty segments (``'/foo//bar``) these are ignored but
    properly pushed to the `SCRIPT_NAME`:

    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
    >>> pop_path_info(env)
    'a'
    >>> env['SCRIPT_NAME']
    '/foo/a'
    >>> pop_path_info(env)
    'b'
    >>> env['SCRIPT_NAME']
    '/foo/a/b'

    .. versionadded:: 0.5

    .. versionchanged:: 0.9
       The path is now decoded and a charset and encoding
       parameter can be provided.

    :param environ: the WSGI environment that is modified.
    """
    path = environ.get("PATH_INFO")

    if not path:
        return None

    script_name = environ.get("SCRIPT_NAME", "")

    # Move any run of leading slashes over to SCRIPT_NAME so empty
    # segments are preserved there.
    stripped = path.lstrip("/")

    if stripped != path:
        script_name += "/" * (len(path) - len(stripped))

    path = stripped

    if "/" in path:
        segment, remainder = path.split("/", 1)
        environ["PATH_INFO"] = "/" + remainder
        environ["SCRIPT_NAME"] = script_name + segment
    else:
        # Last segment: PATH_INFO is exhausted.
        segment = path
        environ["PATH_INFO"] = ""
        environ["SCRIPT_NAME"] = script_name + segment

    return to_unicode(wsgi_get_bytes(segment), charset, errors, allow_none_charset=True)
[ "def", "pop_path_info", "(", "environ", ",", "charset", "=", "\"utf-8\"", ",", "errors", "=", "\"replace\"", ")", ":", "path", "=", "environ", ".", "get", "(", "\"PATH_INFO\"", ")", "if", "not", "path", ":", "return", "None", "script_name", "=", "environ", ".", "get", "(", "\"SCRIPT_NAME\"", ",", "\"\"", ")", "# shift multiple leading slashes over", "old_path", "=", "path", "path", "=", "path", ".", "lstrip", "(", "\"/\"", ")", "if", "path", "!=", "old_path", ":", "script_name", "+=", "\"/\"", "*", "(", "len", "(", "old_path", ")", "-", "len", "(", "path", ")", ")", "if", "\"/\"", "not", "in", "path", ":", "environ", "[", "\"PATH_INFO\"", "]", "=", "\"\"", "environ", "[", "\"SCRIPT_NAME\"", "]", "=", "script_name", "+", "path", "rv", "=", "wsgi_get_bytes", "(", "path", ")", "else", ":", "segment", ",", "path", "=", "path", ".", "split", "(", "\"/\"", ",", "1", ")", "environ", "[", "\"PATH_INFO\"", "]", "=", "\"/\"", "+", "path", "environ", "[", "\"SCRIPT_NAME\"", "]", "=", "script_name", "+", "segment", "rv", "=", "wsgi_get_bytes", "(", "segment", ")", "return", "to_unicode", "(", "rv", ",", "charset", ",", "errors", ",", "allow_none_charset", "=", "True", ")" ]
[ 288, 0 ]
[ 337, 67 ]
python
en
['en', 'en', 'en']
True
peek_path_info
(environ, charset="utf-8", errors="replace")
Returns the next segment on the `PATH_INFO` or `None` if there is none. Works like :func:`pop_path_info` without modifying the environment: >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'} >>> peek_path_info(env) 'a' >>> peek_path_info(env) 'a' If the `charset` is set to `None` a bytestring is returned. .. versionadded:: 0.5 .. versionchanged:: 0.9 The path is now decoded and a charset and encoding parameter can be provided. :param environ: the WSGI environment that is checked.
Returns the next segment on the `PATH_INFO` or `None` if there is none. Works like :func:`pop_path_info` without modifying the environment:
def peek_path_info(environ, charset="utf-8", errors="replace"):
    """Return the next segment on the `PATH_INFO` or `None` if there
    is none.  Works like :func:`pop_path_info` without modifying the
    environment:

    >>> env = {'SCRIPT_NAME': '/foo', 'PATH_INFO': '/a/b'}
    >>> peek_path_info(env)
    'a'
    >>> peek_path_info(env)
    'a'

    If the `charset` is set to `None` a bytestring is returned.

    .. versionadded:: 0.5

    .. versionchanged:: 0.9
       The path is now decoded and a charset and encoding
       parameter can be provided.

    :param environ: the WSGI environment that is checked.
    """
    parts = environ.get("PATH_INFO", "").lstrip("/").split("/", 1)

    if not parts:
        return None

    return to_unicode(
        wsgi_get_bytes(parts[0]), charset, errors, allow_none_charset=True
    )
[ "def", "peek_path_info", "(", "environ", ",", "charset", "=", "\"utf-8\"", ",", "errors", "=", "\"replace\"", ")", ":", "segments", "=", "environ", ".", "get", "(", "\"PATH_INFO\"", ",", "\"\"", ")", ".", "lstrip", "(", "\"/\"", ")", ".", "split", "(", "\"/\"", ",", "1", ")", "if", "segments", ":", "return", "to_unicode", "(", "wsgi_get_bytes", "(", "segments", "[", "0", "]", ")", ",", "charset", ",", "errors", ",", "allow_none_charset", "=", "True", ")" ]
[ 340, 0 ]
[ 365, 9 ]
python
en
['en', 'en', 'en']
True
extract_path_info
( environ_or_baseurl, path_or_url, charset="utf-8", errors="werkzeug.url_quote", collapse_http_schemes=True, )
Extracts the path info from the given URL (or WSGI environment) and path. The path info returned is a unicode string, not a bytestring suitable for a WSGI environment. The URLs might also be IRIs. If the path info could not be determined, `None` is returned. Some examples: >>> extract_path_info('http://example.com/app', '/app/hello') u'/hello' >>> extract_path_info('http://example.com/app', ... 'https://example.com/app/hello') u'/hello' >>> extract_path_info('http://example.com/app', ... 'https://example.com/app/hello', ... collapse_http_schemes=False) is None True Instead of providing a base URL you can also pass a WSGI environment. :param environ_or_baseurl: a WSGI environment dict, a base URL or base IRI. This is the root of the application. :param path_or_url: an absolute path from the server root, a relative path (in which case it's the path info) or a full URL. Also accepts IRIs and unicode parameters. :param charset: the charset for byte data in URLs :param errors: the error handling on decode :param collapse_http_schemes: if set to `False` the algorithm does not assume that http and https on the same server point to the same resource. .. versionchanged:: 0.15 The ``errors`` parameter defaults to leaving invalid bytes quoted instead of replacing them. .. versionadded:: 0.6
Extracts the path info from the given URL (or WSGI environment) and path. The path info returned is a unicode string, not a bytestring suitable for a WSGI environment. The URLs might also be IRIs.
def extract_path_info( environ_or_baseurl, path_or_url, charset="utf-8", errors="werkzeug.url_quote", collapse_http_schemes=True, ): """Extracts the path info from the given URL (or WSGI environment) and path. The path info returned is a unicode string, not a bytestring suitable for a WSGI environment. The URLs might also be IRIs. If the path info could not be determined, `None` is returned. Some examples: >>> extract_path_info('http://example.com/app', '/app/hello') u'/hello' >>> extract_path_info('http://example.com/app', ... 'https://example.com/app/hello') u'/hello' >>> extract_path_info('http://example.com/app', ... 'https://example.com/app/hello', ... collapse_http_schemes=False) is None True Instead of providing a base URL you can also pass a WSGI environment. :param environ_or_baseurl: a WSGI environment dict, a base URL or base IRI. This is the root of the application. :param path_or_url: an absolute path from the server root, a relative path (in which case it's the path info) or a full URL. Also accepts IRIs and unicode parameters. :param charset: the charset for byte data in URLs :param errors: the error handling on decode :param collapse_http_schemes: if set to `False` the algorithm does not assume that http and https on the same server point to the same resource. .. versionchanged:: 0.15 The ``errors`` parameter defaults to leaving invalid bytes quoted instead of replacing them. .. 
versionadded:: 0.6 """ def _normalize_netloc(scheme, netloc): parts = netloc.split(u"@", 1)[-1].split(u":", 1) if len(parts) == 2: netloc, port = parts if (scheme == u"http" and port == u"80") or ( scheme == u"https" and port == u"443" ): port = None else: netloc = parts[0] port = None if port is not None: netloc += u":" + port return netloc # make sure whatever we are working on is a IRI and parse it path = uri_to_iri(path_or_url, charset, errors) if isinstance(environ_or_baseurl, dict): environ_or_baseurl = get_current_url(environ_or_baseurl, root_only=True) base_iri = uri_to_iri(environ_or_baseurl, charset, errors) base_scheme, base_netloc, base_path = url_parse(base_iri)[:3] cur_scheme, cur_netloc, cur_path, = url_parse(url_join(base_iri, path))[:3] # normalize the network location base_netloc = _normalize_netloc(base_scheme, base_netloc) cur_netloc = _normalize_netloc(cur_scheme, cur_netloc) # is that IRI even on a known HTTP scheme? if collapse_http_schemes: for scheme in base_scheme, cur_scheme: if scheme not in (u"http", u"https"): return None else: if not (base_scheme in (u"http", u"https") and base_scheme == cur_scheme): return None # are the netlocs compatible? if base_netloc != cur_netloc: return None # are we below the application path? base_path = base_path.rstrip(u"/") if not cur_path.startswith(base_path): return None return u"/" + cur_path[len(base_path) :].lstrip(u"/")
[ "def", "extract_path_info", "(", "environ_or_baseurl", ",", "path_or_url", ",", "charset", "=", "\"utf-8\"", ",", "errors", "=", "\"werkzeug.url_quote\"", ",", "collapse_http_schemes", "=", "True", ",", ")", ":", "def", "_normalize_netloc", "(", "scheme", ",", "netloc", ")", ":", "parts", "=", "netloc", ".", "split", "(", "u\"@\"", ",", "1", ")", "[", "-", "1", "]", ".", "split", "(", "u\":\"", ",", "1", ")", "if", "len", "(", "parts", ")", "==", "2", ":", "netloc", ",", "port", "=", "parts", "if", "(", "scheme", "==", "u\"http\"", "and", "port", "==", "u\"80\"", ")", "or", "(", "scheme", "==", "u\"https\"", "and", "port", "==", "u\"443\"", ")", ":", "port", "=", "None", "else", ":", "netloc", "=", "parts", "[", "0", "]", "port", "=", "None", "if", "port", "is", "not", "None", ":", "netloc", "+=", "u\":\"", "+", "port", "return", "netloc", "# make sure whatever we are working on is a IRI and parse it", "path", "=", "uri_to_iri", "(", "path_or_url", ",", "charset", ",", "errors", ")", "if", "isinstance", "(", "environ_or_baseurl", ",", "dict", ")", ":", "environ_or_baseurl", "=", "get_current_url", "(", "environ_or_baseurl", ",", "root_only", "=", "True", ")", "base_iri", "=", "uri_to_iri", "(", "environ_or_baseurl", ",", "charset", ",", "errors", ")", "base_scheme", ",", "base_netloc", ",", "base_path", "=", "url_parse", "(", "base_iri", ")", "[", ":", "3", "]", "cur_scheme", ",", "cur_netloc", ",", "cur_path", ",", "=", "url_parse", "(", "url_join", "(", "base_iri", ",", "path", ")", ")", "[", ":", "3", "]", "# normalize the network location", "base_netloc", "=", "_normalize_netloc", "(", "base_scheme", ",", "base_netloc", ")", "cur_netloc", "=", "_normalize_netloc", "(", "cur_scheme", ",", "cur_netloc", ")", "# is that IRI even on a known HTTP scheme?", "if", "collapse_http_schemes", ":", "for", "scheme", "in", "base_scheme", ",", "cur_scheme", ":", "if", "scheme", "not", "in", "(", "u\"http\"", ",", "u\"https\"", ")", ":", "return", "None", "else", ":", "if", 
"not", "(", "base_scheme", "in", "(", "u\"http\"", ",", "u\"https\"", ")", "and", "base_scheme", "==", "cur_scheme", ")", ":", "return", "None", "# are the netlocs compatible?", "if", "base_netloc", "!=", "cur_netloc", ":", "return", "None", "# are we below the application path?", "base_path", "=", "base_path", ".", "rstrip", "(", "u\"/\"", ")", "if", "not", "cur_path", ".", "startswith", "(", "base_path", ")", ":", "return", "None", "return", "u\"/\"", "+", "cur_path", "[", "len", "(", "base_path", ")", ":", "]", ".", "lstrip", "(", "u\"/\"", ")" ]
[ 368, 0 ]
[ 461, 57 ]
python
en
['en', 'en', 'en']
True
wrap_file
(environ, file, buffer_size=8192)
Wraps a file. This uses the WSGI server's file wrapper if available or otherwise the generic :class:`FileWrapper`. .. versionadded:: 0.5 If the file wrapper from the WSGI server is used it's important to not iterate over it from inside the application but to pass it through unchanged. If you want to pass out a file wrapper inside a response object you have to set :attr:`~BaseResponse.direct_passthrough` to `True`. More information about file wrappers are available in :pep:`333`. :param file: a :class:`file`-like object with a :meth:`~file.read` method. :param buffer_size: number of bytes for one iteration.
Wraps a file. This uses the WSGI server's file wrapper if available or otherwise the generic :class:`FileWrapper`.
def wrap_file(environ, file, buffer_size=8192): """Wraps a file. This uses the WSGI server's file wrapper if available or otherwise the generic :class:`FileWrapper`. .. versionadded:: 0.5 If the file wrapper from the WSGI server is used it's important to not iterate over it from inside the application but to pass it through unchanged. If you want to pass out a file wrapper inside a response object you have to set :attr:`~BaseResponse.direct_passthrough` to `True`. More information about file wrappers are available in :pep:`333`. :param file: a :class:`file`-like object with a :meth:`~file.read` method. :param buffer_size: number of bytes for one iteration. """ return environ.get("wsgi.file_wrapper", FileWrapper)(file, buffer_size)
[ "def", "wrap_file", "(", "environ", ",", "file", ",", "buffer_size", "=", "8192", ")", ":", "return", "environ", ".", "get", "(", "\"wsgi.file_wrapper\"", ",", "FileWrapper", ")", "(", "file", ",", "buffer_size", ")" ]
[ 512, 0 ]
[ 528, 75 ]
python
en
['en', 'en', 'en']
True
_make_chunk_iter
(stream, limit, buffer_size)
Helper for the line and chunk iter functions.
Helper for the line and chunk iter functions.
def _make_chunk_iter(stream, limit, buffer_size): """Helper for the line and chunk iter functions.""" if isinstance(stream, (bytes, bytearray, text_type)): raise TypeError( "Passed a string or byte object instead of true iterator or stream." ) if not hasattr(stream, "read"): for item in stream: if item: yield item return if not isinstance(stream, LimitedStream) and limit is not None: stream = LimitedStream(stream, limit) _read = stream.read while 1: item = _read(buffer_size) if not item: break yield item
[ "def", "_make_chunk_iter", "(", "stream", ",", "limit", ",", "buffer_size", ")", ":", "if", "isinstance", "(", "stream", ",", "(", "bytes", ",", "bytearray", ",", "text_type", ")", ")", ":", "raise", "TypeError", "(", "\"Passed a string or byte object instead of true iterator or stream.\"", ")", "if", "not", "hasattr", "(", "stream", ",", "\"read\"", ")", ":", "for", "item", "in", "stream", ":", "if", "item", ":", "yield", "item", "return", "if", "not", "isinstance", "(", "stream", ",", "LimitedStream", ")", "and", "limit", "is", "not", "None", ":", "stream", "=", "LimitedStream", "(", "stream", ",", "limit", ")", "_read", "=", "stream", ".", "read", "while", "1", ":", "item", "=", "_read", "(", "buffer_size", ")", "if", "not", "item", ":", "break", "yield", "item" ]
[ 665, 0 ]
[ 683, 18 ]
python
en
['en', 'en', 'en']
True
make_line_iter
(stream, limit=None, buffer_size=10 * 1024, cap_at_buffer=False)
Safely iterates line-based over an input stream. If the input stream is not a :class:`LimitedStream` the `limit` parameter is mandatory. This uses the stream's :meth:`~file.read` method internally as opposite to the :meth:`~file.readline` method that is unsafe and can only be used in violation of the WSGI specification. The same problem applies to the `__iter__` function of the input stream which calls :meth:`~file.readline` without arguments. If you need line-by-line processing it's strongly recommended to iterate over the input stream using this helper function. .. versionchanged:: 0.8 This function now ensures that the limit was reached. .. versionadded:: 0.9 added support for iterators as input stream. .. versionadded:: 0.11.10 added support for the `cap_at_buffer` parameter. :param stream: the stream or iterate to iterate over. :param limit: the limit in bytes for the stream. (Usually content length. Not necessary if the `stream` is a :class:`LimitedStream`. :param buffer_size: The optional buffer size. :param cap_at_buffer: if this is set chunks are split if they are longer than the buffer size. Internally this is implemented that the buffer size might be exhausted by a factor of two however.
Safely iterates line-based over an input stream. If the input stream is not a :class:`LimitedStream` the `limit` parameter is mandatory.
def make_line_iter(stream, limit=None, buffer_size=10 * 1024, cap_at_buffer=False): """Safely iterates line-based over an input stream. If the input stream is not a :class:`LimitedStream` the `limit` parameter is mandatory. This uses the stream's :meth:`~file.read` method internally as opposite to the :meth:`~file.readline` method that is unsafe and can only be used in violation of the WSGI specification. The same problem applies to the `__iter__` function of the input stream which calls :meth:`~file.readline` without arguments. If you need line-by-line processing it's strongly recommended to iterate over the input stream using this helper function. .. versionchanged:: 0.8 This function now ensures that the limit was reached. .. versionadded:: 0.9 added support for iterators as input stream. .. versionadded:: 0.11.10 added support for the `cap_at_buffer` parameter. :param stream: the stream or iterate to iterate over. :param limit: the limit in bytes for the stream. (Usually content length. Not necessary if the `stream` is a :class:`LimitedStream`. :param buffer_size: The optional buffer size. :param cap_at_buffer: if this is set chunks are split if they are longer than the buffer size. Internally this is implemented that the buffer size might be exhausted by a factor of two however. 
""" _iter = _make_chunk_iter(stream, limit, buffer_size) first_item = next(_iter, "") if not first_item: return s = make_literal_wrapper(first_item) empty = s("") cr = s("\r") lf = s("\n") crlf = s("\r\n") _iter = chain((first_item,), _iter) def _iter_basic_lines(): _join = empty.join buffer = [] while 1: new_data = next(_iter, "") if not new_data: break new_buf = [] buf_size = 0 for item in chain(buffer, new_data.splitlines(True)): new_buf.append(item) buf_size += len(item) if item and item[-1:] in crlf: yield _join(new_buf) new_buf = [] elif cap_at_buffer and buf_size >= buffer_size: rv = _join(new_buf) while len(rv) >= buffer_size: yield rv[:buffer_size] rv = rv[buffer_size:] new_buf = [rv] buffer = new_buf if buffer: yield _join(buffer) # This hackery is necessary to merge 'foo\r' and '\n' into one item # of 'foo\r\n' if we were unlucky and we hit a chunk boundary. previous = empty for item in _iter_basic_lines(): if item == lf and previous[-1:] == cr: previous += item item = empty if previous: yield previous previous = item if previous: yield previous
[ "def", "make_line_iter", "(", "stream", ",", "limit", "=", "None", ",", "buffer_size", "=", "10", "*", "1024", ",", "cap_at_buffer", "=", "False", ")", ":", "_iter", "=", "_make_chunk_iter", "(", "stream", ",", "limit", ",", "buffer_size", ")", "first_item", "=", "next", "(", "_iter", ",", "\"\"", ")", "if", "not", "first_item", ":", "return", "s", "=", "make_literal_wrapper", "(", "first_item", ")", "empty", "=", "s", "(", "\"\"", ")", "cr", "=", "s", "(", "\"\\r\"", ")", "lf", "=", "s", "(", "\"\\n\"", ")", "crlf", "=", "s", "(", "\"\\r\\n\"", ")", "_iter", "=", "chain", "(", "(", "first_item", ",", ")", ",", "_iter", ")", "def", "_iter_basic_lines", "(", ")", ":", "_join", "=", "empty", ".", "join", "buffer", "=", "[", "]", "while", "1", ":", "new_data", "=", "next", "(", "_iter", ",", "\"\"", ")", "if", "not", "new_data", ":", "break", "new_buf", "=", "[", "]", "buf_size", "=", "0", "for", "item", "in", "chain", "(", "buffer", ",", "new_data", ".", "splitlines", "(", "True", ")", ")", ":", "new_buf", ".", "append", "(", "item", ")", "buf_size", "+=", "len", "(", "item", ")", "if", "item", "and", "item", "[", "-", "1", ":", "]", "in", "crlf", ":", "yield", "_join", "(", "new_buf", ")", "new_buf", "=", "[", "]", "elif", "cap_at_buffer", "and", "buf_size", ">=", "buffer_size", ":", "rv", "=", "_join", "(", "new_buf", ")", "while", "len", "(", "rv", ")", ">=", "buffer_size", ":", "yield", "rv", "[", ":", "buffer_size", "]", "rv", "=", "rv", "[", "buffer_size", ":", "]", "new_buf", "=", "[", "rv", "]", "buffer", "=", "new_buf", "if", "buffer", ":", "yield", "_join", "(", "buffer", ")", "# This hackery is necessary to merge 'foo\\r' and '\\n' into one item", "# of 'foo\\r\\n' if we were unlucky and we hit a chunk boundary.", "previous", "=", "empty", "for", "item", "in", "_iter_basic_lines", "(", ")", ":", "if", "item", "==", "lf", "and", "previous", "[", "-", "1", ":", "]", "==", "cr", ":", "previous", "+=", "item", "item", "=", "empty", "if", "previous", ":", 
"yield", "previous", "previous", "=", "item", "if", "previous", ":", "yield", "previous" ]
[ 686, 0 ]
[ 768, 22 ]
python
en
['en', 'en', 'en']
True
make_chunk_iter
( stream, separator, limit=None, buffer_size=10 * 1024, cap_at_buffer=False )
Works like :func:`make_line_iter` but accepts a separator which divides chunks. If you want newline based processing you should use :func:`make_line_iter` instead as it supports arbitrary newline markers. .. versionadded:: 0.8 .. versionadded:: 0.9 added support for iterators as input stream. .. versionadded:: 0.11.10 added support for the `cap_at_buffer` parameter. :param stream: the stream or iterate to iterate over. :param separator: the separator that divides chunks. :param limit: the limit in bytes for the stream. (Usually content length. Not necessary if the `stream` is otherwise already limited). :param buffer_size: The optional buffer size. :param cap_at_buffer: if this is set chunks are split if they are longer than the buffer size. Internally this is implemented that the buffer size might be exhausted by a factor of two however.
Works like :func:`make_line_iter` but accepts a separator which divides chunks. If you want newline based processing you should use :func:`make_line_iter` instead as it supports arbitrary newline markers.
def make_chunk_iter( stream, separator, limit=None, buffer_size=10 * 1024, cap_at_buffer=False ): """Works like :func:`make_line_iter` but accepts a separator which divides chunks. If you want newline based processing you should use :func:`make_line_iter` instead as it supports arbitrary newline markers. .. versionadded:: 0.8 .. versionadded:: 0.9 added support for iterators as input stream. .. versionadded:: 0.11.10 added support for the `cap_at_buffer` parameter. :param stream: the stream or iterate to iterate over. :param separator: the separator that divides chunks. :param limit: the limit in bytes for the stream. (Usually content length. Not necessary if the `stream` is otherwise already limited). :param buffer_size: The optional buffer size. :param cap_at_buffer: if this is set chunks are split if they are longer than the buffer size. Internally this is implemented that the buffer size might be exhausted by a factor of two however. """ _iter = _make_chunk_iter(stream, limit, buffer_size) first_item = next(_iter, "") if not first_item: return _iter = chain((first_item,), _iter) if isinstance(first_item, text_type): separator = to_unicode(separator) _split = re.compile(r"(%s)" % re.escape(separator)).split _join = u"".join else: separator = to_bytes(separator) _split = re.compile(b"(" + re.escape(separator) + b")").split _join = b"".join buffer = [] while 1: new_data = next(_iter, "") if not new_data: break chunks = _split(new_data) new_buf = [] buf_size = 0 for item in chain(buffer, chunks): if item == separator: yield _join(new_buf) new_buf = [] buf_size = 0 else: buf_size += len(item) new_buf.append(item) if cap_at_buffer and buf_size >= buffer_size: rv = _join(new_buf) while len(rv) >= buffer_size: yield rv[:buffer_size] rv = rv[buffer_size:] new_buf = [rv] buf_size = len(rv) buffer = new_buf if buffer: yield _join(buffer)
[ "def", "make_chunk_iter", "(", "stream", ",", "separator", ",", "limit", "=", "None", ",", "buffer_size", "=", "10", "*", "1024", ",", "cap_at_buffer", "=", "False", ")", ":", "_iter", "=", "_make_chunk_iter", "(", "stream", ",", "limit", ",", "buffer_size", ")", "first_item", "=", "next", "(", "_iter", ",", "\"\"", ")", "if", "not", "first_item", ":", "return", "_iter", "=", "chain", "(", "(", "first_item", ",", ")", ",", "_iter", ")", "if", "isinstance", "(", "first_item", ",", "text_type", ")", ":", "separator", "=", "to_unicode", "(", "separator", ")", "_split", "=", "re", ".", "compile", "(", "r\"(%s)\"", "%", "re", ".", "escape", "(", "separator", ")", ")", ".", "split", "_join", "=", "u\"\"", ".", "join", "else", ":", "separator", "=", "to_bytes", "(", "separator", ")", "_split", "=", "re", ".", "compile", "(", "b\"(\"", "+", "re", ".", "escape", "(", "separator", ")", "+", "b\")\"", ")", ".", "split", "_join", "=", "b\"\"", ".", "join", "buffer", "=", "[", "]", "while", "1", ":", "new_data", "=", "next", "(", "_iter", ",", "\"\"", ")", "if", "not", "new_data", ":", "break", "chunks", "=", "_split", "(", "new_data", ")", "new_buf", "=", "[", "]", "buf_size", "=", "0", "for", "item", "in", "chain", "(", "buffer", ",", "chunks", ")", ":", "if", "item", "==", "separator", ":", "yield", "_join", "(", "new_buf", ")", "new_buf", "=", "[", "]", "buf_size", "=", "0", "else", ":", "buf_size", "+=", "len", "(", "item", ")", "new_buf", ".", "append", "(", "item", ")", "if", "cap_at_buffer", "and", "buf_size", ">=", "buffer_size", ":", "rv", "=", "_join", "(", "new_buf", ")", "while", "len", "(", "rv", ")", ">=", "buffer_size", ":", "yield", "rv", "[", ":", "buffer_size", "]", "rv", "=", "rv", "[", "buffer_size", ":", "]", "new_buf", "=", "[", "rv", "]", "buf_size", "=", "len", "(", "rv", ")", "buffer", "=", "new_buf", "if", "buffer", ":", "yield", "_join", "(", "buffer", ")" ]
[ 771, 0 ]
[ 841, 27 ]
python
en
['en', 'en', 'en']
True
find_undeclared_variables
(ast)
Returns a set of all variables in the AST that will be looked up from the context at runtime. Because at compile time it's not known which variables will be used depending on the path the execution takes at runtime, all variables are returned. >>> from jinja2 import Environment, meta >>> env = Environment() >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}') >>> meta.find_undeclared_variables(ast) == set(['bar']) True .. admonition:: Implementation Internally the code generator is used for finding undeclared variables. This is good to know because the code generator might raise a :exc:`TemplateAssertionError` during compilation and as a matter of fact this function can currently raise that exception as well.
Returns a set of all variables in the AST that will be looked up from the context at runtime. Because at compile time it's not known which variables will be used depending on the path the execution takes at runtime, all variables are returned.
def find_undeclared_variables(ast): """Returns a set of all variables in the AST that will be looked up from the context at runtime. Because at compile time it's not known which variables will be used depending on the path the execution takes at runtime, all variables are returned. >>> from jinja2 import Environment, meta >>> env = Environment() >>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}') >>> meta.find_undeclared_variables(ast) == set(['bar']) True .. admonition:: Implementation Internally the code generator is used for finding undeclared variables. This is good to know because the code generator might raise a :exc:`TemplateAssertionError` during compilation and as a matter of fact this function can currently raise that exception as well. """ codegen = TrackingCodeGenerator(ast.environment) codegen.visit(ast) return codegen.undeclared_identifiers
[ "def", "find_undeclared_variables", "(", "ast", ")", ":", "codegen", "=", "TrackingCodeGenerator", "(", "ast", ".", "environment", ")", "codegen", ".", "visit", "(", "ast", ")", "return", "codegen", ".", "undeclared_identifiers" ]
[ 35, 0 ]
[ 56, 41 ]
python
en
['en', 'en', 'en']
True
find_referenced_templates
(ast)
Finds all the referenced templates from the AST. This will return an iterator over all the hardcoded template extensions, inclusions and imports. If dynamic inheritance or inclusion is used, `None` will be yielded. >>> from jinja2 import Environment, meta >>> env = Environment() >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}') >>> list(meta.find_referenced_templates(ast)) ['layout.html', None] This function is useful for dependency tracking. For example if you want to rebuild parts of the website after a layout template has changed.
Finds all the referenced templates from the AST. This will return an iterator over all the hardcoded template extensions, inclusions and imports. If dynamic inheritance or inclusion is used, `None` will be yielded.
def find_referenced_templates(ast): """Finds all the referenced templates from the AST. This will return an iterator over all the hardcoded template extensions, inclusions and imports. If dynamic inheritance or inclusion is used, `None` will be yielded. >>> from jinja2 import Environment, meta >>> env = Environment() >>> ast = env.parse('{% extends "layout.html" %}{% include helper %}') >>> list(meta.find_referenced_templates(ast)) ['layout.html', None] This function is useful for dependency tracking. For example if you want to rebuild parts of the website after a layout template has changed. """ for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import, nodes.Include)): if not isinstance(node.template, nodes.Const): # a tuple with some non consts in there if isinstance(node.template, (nodes.Tuple, nodes.List)): for template_name in node.template.items: # something const, only yield the strings and ignore # non-string consts that really just make no sense if isinstance(template_name, nodes.Const): if isinstance(template_name.value, string_types): yield template_name.value # something dynamic in there else: yield None # something dynamic we don't know about here else: yield None continue # constant is a basestring, direct template name if isinstance(node.template.value, string_types): yield node.template.value # a tuple or list (latter *should* not happen) made of consts, # yield the consts that are strings. We could warn here for # non string values elif isinstance(node, nodes.Include) and \ isinstance(node.template.value, (tuple, list)): for template_name in node.template.value: if isinstance(template_name, string_types): yield template_name # something else we don't care about, we could warn here else: yield None
[ "def", "find_referenced_templates", "(", "ast", ")", ":", "for", "node", "in", "ast", ".", "find_all", "(", "(", "nodes", ".", "Extends", ",", "nodes", ".", "FromImport", ",", "nodes", ".", "Import", ",", "nodes", ".", "Include", ")", ")", ":", "if", "not", "isinstance", "(", "node", ".", "template", ",", "nodes", ".", "Const", ")", ":", "# a tuple with some non consts in there", "if", "isinstance", "(", "node", ".", "template", ",", "(", "nodes", ".", "Tuple", ",", "nodes", ".", "List", ")", ")", ":", "for", "template_name", "in", "node", ".", "template", ".", "items", ":", "# something const, only yield the strings and ignore", "# non-string consts that really just make no sense", "if", "isinstance", "(", "template_name", ",", "nodes", ".", "Const", ")", ":", "if", "isinstance", "(", "template_name", ".", "value", ",", "string_types", ")", ":", "yield", "template_name", ".", "value", "# something dynamic in there", "else", ":", "yield", "None", "# something dynamic we don't know about here", "else", ":", "yield", "None", "continue", "# constant is a basestring, direct template name", "if", "isinstance", "(", "node", ".", "template", ".", "value", ",", "string_types", ")", ":", "yield", "node", ".", "template", ".", "value", "# a tuple or list (latter *should* not happen) made of consts,", "# yield the consts that are strings. We could warn here for", "# non string values", "elif", "isinstance", "(", "node", ",", "nodes", ".", "Include", ")", "and", "isinstance", "(", "node", ".", "template", ".", "value", ",", "(", "tuple", ",", "list", ")", ")", ":", "for", "template_name", "in", "node", ".", "template", ".", "value", ":", "if", "isinstance", "(", "template_name", ",", "string_types", ")", ":", "yield", "template_name", "# something else we don't care about, we could warn here", "else", ":", "yield", "None" ]
[ 59, 0 ]
[ 105, 22 ]
python
en
['en', 'en', 'en']
True
TrackingCodeGenerator.write
(self, x)
Don't write.
Don't write.
def write(self, x): """Don't write."""
[ "def", "write", "(", "self", ",", "x", ")", ":" ]
[ 24, 4 ]
[ 25, 26 ]
python
en
['en', 'ht', 'en']
False
TrackingCodeGenerator.enter_frame
(self, frame)
Remember all undeclared identifiers.
Remember all undeclared identifiers.
def enter_frame(self, frame): """Remember all undeclared identifiers.""" CodeGenerator.enter_frame(self, frame) for _, (action, param) in iteritems(frame.symbols.loads): if action == 'resolve': self.undeclared_identifiers.add(param)
[ "def", "enter_frame", "(", "self", ",", "frame", ")", ":", "CodeGenerator", ".", "enter_frame", "(", "self", ",", "frame", ")", "for", "_", ",", "(", "action", ",", "param", ")", "in", "iteritems", "(", "frame", ".", "symbols", ".", "loads", ")", ":", "if", "action", "==", "'resolve'", ":", "self", ".", "undeclared_identifiers", ".", "add", "(", "param", ")" ]
[ 27, 4 ]
[ 32, 54 ]
python
en
['en', 'en', 'en']
True
PSDraw.begin_document
(self, id=None)
Set up printing of a document. (Write PostScript DSC header.)
Set up printing of a document. (Write PostScript DSC header.)
def begin_document(self, id=None): """Set up printing of a document. (Write PostScript DSC header.)""" # FIXME: incomplete self.fp.write( b"%!PS-Adobe-3.0\n" b"save\n" b"/showpage { } def\n" b"%%EndComments\n" b"%%BeginDocument\n" ) # self.fp.write(ERROR_PS) # debugging! self.fp.write(EDROFF_PS) self.fp.write(VDI_PS) self.fp.write(b"%%EndProlog\n") self.isofont = {}
[ "def", "begin_document", "(", "self", ",", "id", "=", "None", ")", ":", "# FIXME: incomplete", "self", ".", "fp", ".", "write", "(", "b\"%!PS-Adobe-3.0\\n\"", "b\"save\\n\"", "b\"/showpage { } def\\n\"", "b\"%%EndComments\\n\"", "b\"%%BeginDocument\\n\"", ")", "# self.fp.write(ERROR_PS) # debugging!", "self", ".", "fp", ".", "write", "(", "EDROFF_PS", ")", "self", ".", "fp", ".", "write", "(", "VDI_PS", ")", "self", ".", "fp", ".", "write", "(", "b\"%%EndProlog\\n\"", ")", "self", ".", "isofont", "=", "{", "}" ]
[ 39, 4 ]
[ 53, 25 ]
python
en
['en', 'lb', 'en']
True
PSDraw.end_document
(self)
Ends printing. (Write PostScript DSC footer.)
Ends printing. (Write PostScript DSC footer.)
def end_document(self): """Ends printing. (Write PostScript DSC footer.)""" self.fp.write(b"%%EndDocument\nrestore showpage\n%%End\n") if hasattr(self.fp, "flush"): self.fp.flush()
[ "def", "end_document", "(", "self", ")", ":", "self", ".", "fp", ".", "write", "(", "b\"%%EndDocument\\nrestore showpage\\n%%End\\n\"", ")", "if", "hasattr", "(", "self", ".", "fp", ",", "\"flush\"", ")", ":", "self", ".", "fp", ".", "flush", "(", ")" ]
[ 55, 4 ]
[ 59, 27 ]
python
en
['en', 'en', 'en']
True
PSDraw.setfont
(self, font, size)
Selects which font to use. :param font: A PostScript font name :param size: Size in points.
Selects which font to use.
def setfont(self, font, size): """ Selects which font to use. :param font: A PostScript font name :param size: Size in points. """ font = bytes(font, "UTF-8") if font not in self.isofont: # reencode font self.fp.write(b"/PSDraw-%s ISOLatin1Encoding /%s E\n" % (font, font)) self.isofont[font] = 1 # rough self.fp.write(b"/F0 %d /PSDraw-%s F\n" % (size, font))
[ "def", "setfont", "(", "self", ",", "font", ",", "size", ")", ":", "font", "=", "bytes", "(", "font", ",", "\"UTF-8\"", ")", "if", "font", "not", "in", "self", ".", "isofont", ":", "# reencode font", "self", ".", "fp", ".", "write", "(", "b\"/PSDraw-%s ISOLatin1Encoding /%s E\\n\"", "%", "(", "font", ",", "font", ")", ")", "self", ".", "isofont", "[", "font", "]", "=", "1", "# rough", "self", ".", "fp", ".", "write", "(", "b\"/F0 %d /PSDraw-%s F\\n\"", "%", "(", "size", ",", "font", ")", ")" ]
[ 61, 4 ]
[ 74, 62 ]
python
en
['en', 'error', 'th']
False
PSDraw.line
(self, xy0, xy1)
Draws a line between the two points. Coordinates are given in PostScript point coordinates (72 points per inch, (0, 0) is the lower left corner of the page).
Draws a line between the two points. Coordinates are given in PostScript point coordinates (72 points per inch, (0, 0) is the lower left corner of the page).
def line(self, xy0, xy1): """ Draws a line between the two points. Coordinates are given in PostScript point coordinates (72 points per inch, (0, 0) is the lower left corner of the page). """ self.fp.write(b"%d %d %d %d Vl\n" % (*xy0, *xy1))
[ "def", "line", "(", "self", ",", "xy0", ",", "xy1", ")", ":", "self", ".", "fp", ".", "write", "(", "b\"%d %d %d %d Vl\\n\"", "%", "(", "*", "xy0", ",", "*", "xy1", ")", ")" ]
[ 76, 4 ]
[ 82, 57 ]
python
en
['en', 'error', 'th']
False
PSDraw.rectangle
(self, box)
Draws a rectangle. :param box: A 4-tuple of integers whose order and function is currently undocumented. Hint: the tuple is passed into this format string: .. code-block:: python %d %d M %d %d 0 Vr\n
Draws a rectangle.
def rectangle(self, box): """ Draws a rectangle. :param box: A 4-tuple of integers whose order and function is currently undocumented. Hint: the tuple is passed into this format string: .. code-block:: python %d %d M %d %d 0 Vr\n """ self.fp.write(b"%d %d M %d %d 0 Vr\n" % box)
[ "def", "rectangle", "(", "self", ",", "box", ")", ":", "self", ".", "fp", ".", "write", "(", "b\"%d %d M %d %d 0 Vr\\n\"", "%", "box", ")" ]
[ 84, 4 ]
[ 97, 52 ]
python
en
['en', 'error', 'th']
False
PSDraw.text
(self, xy, text)
Draws text at the given position. You must use :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method.
Draws text at the given position. You must use :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method.
def text(self, xy, text): """ Draws text at the given position. You must use :py:meth:`~PIL.PSDraw.PSDraw.setfont` before calling this method. """ text = bytes(text, "UTF-8") text = b"\\(".join(text.split(b"(")) text = b"\\)".join(text.split(b")")) xy += (text,) self.fp.write(b"%d %d M (%s) S\n" % xy)
[ "def", "text", "(", "self", ",", "xy", ",", "text", ")", ":", "text", "=", "bytes", "(", "text", ",", "\"UTF-8\"", ")", "text", "=", "b\"\\\\(\"", ".", "join", "(", "text", ".", "split", "(", "b\"(\"", ")", ")", "text", "=", "b\"\\\\)\"", ".", "join", "(", "text", ".", "split", "(", "b\")\"", ")", ")", "xy", "+=", "(", "text", ",", ")", "self", ".", "fp", ".", "write", "(", "b\"%d %d M (%s) S\\n\"", "%", "xy", ")" ]
[ 99, 4 ]
[ 108, 47 ]
python
en
['en', 'error', 'th']
False
PSDraw.image
(self, box, im, dpi=None)
Draw a PIL image, centered in the given box.
Draw a PIL image, centered in the given box.
def image(self, box, im, dpi=None): """Draw a PIL image, centered in the given box.""" # default resolution depends on mode if not dpi: if im.mode == "1": dpi = 200 # fax else: dpi = 100 # greyscale # image size (on paper) x = im.size[0] * 72 / dpi y = im.size[1] * 72 / dpi # max allowed size xmax = float(box[2] - box[0]) ymax = float(box[3] - box[1]) if x > xmax: y = y * xmax / x x = xmax if y > ymax: x = x * ymax / y y = ymax dx = (xmax - x) / 2 + box[0] dy = (ymax - y) / 2 + box[1] self.fp.write(b"gsave\n%f %f translate\n" % (dx, dy)) if (x, y) != im.size: # EpsImagePlugin._save prints the image at (0,0,xsize,ysize) sx = x / im.size[0] sy = y / im.size[1] self.fp.write(b"%f %f scale\n" % (sx, sy)) EpsImagePlugin._save(im, self.fp, None, 0) self.fp.write(b"\ngrestore\n")
[ "def", "image", "(", "self", ",", "box", ",", "im", ",", "dpi", "=", "None", ")", ":", "# default resolution depends on mode", "if", "not", "dpi", ":", "if", "im", ".", "mode", "==", "\"1\"", ":", "dpi", "=", "200", "# fax", "else", ":", "dpi", "=", "100", "# greyscale", "# image size (on paper)", "x", "=", "im", ".", "size", "[", "0", "]", "*", "72", "/", "dpi", "y", "=", "im", ".", "size", "[", "1", "]", "*", "72", "/", "dpi", "# max allowed size", "xmax", "=", "float", "(", "box", "[", "2", "]", "-", "box", "[", "0", "]", ")", "ymax", "=", "float", "(", "box", "[", "3", "]", "-", "box", "[", "1", "]", ")", "if", "x", ">", "xmax", ":", "y", "=", "y", "*", "xmax", "/", "x", "x", "=", "xmax", "if", "y", ">", "ymax", ":", "x", "=", "x", "*", "ymax", "/", "y", "y", "=", "ymax", "dx", "=", "(", "xmax", "-", "x", ")", "/", "2", "+", "box", "[", "0", "]", "dy", "=", "(", "ymax", "-", "y", ")", "/", "2", "+", "box", "[", "1", "]", "self", ".", "fp", ".", "write", "(", "b\"gsave\\n%f %f translate\\n\"", "%", "(", "dx", ",", "dy", ")", ")", "if", "(", "x", ",", "y", ")", "!=", "im", ".", "size", ":", "# EpsImagePlugin._save prints the image at (0,0,xsize,ysize)", "sx", "=", "x", "/", "im", ".", "size", "[", "0", "]", "sy", "=", "y", "/", "im", ".", "size", "[", "1", "]", "self", ".", "fp", ".", "write", "(", "b\"%f %f scale\\n\"", "%", "(", "sx", ",", "sy", ")", ")", "EpsImagePlugin", ".", "_save", "(", "im", ",", "self", ".", "fp", ",", "None", ",", "0", ")", "self", ".", "fp", ".", "write", "(", "b\"\\ngrestore\\n\"", ")" ]
[ 110, 4 ]
[ 139, 38 ]
python
en
['en', 'en', 'en']
True
_implementation
()
Return a dict with the Python implementation and version. Provide both the name and the version of the Python implementation currently running. For example, on CPython 2.7.5 it will return {'name': 'CPython', 'version': '2.7.5'}. This function works best on CPython and PyPy: in particular, it probably doesn't work for Jython or IronPython. Future investigation should be done to work out the correct shape of the code for those platforms.
Return a dict with the Python implementation and version.
def _implementation(): """Return a dict with the Python implementation and version. Provide both the name and the version of the Python implementation currently running. For example, on CPython 2.7.5 it will return {'name': 'CPython', 'version': '2.7.5'}. This function works best on CPython and PyPy: in particular, it probably doesn't work for Jython or IronPython. Future investigation should be done to work out the correct shape of the code for those platforms. """ implementation = platform.python_implementation() if implementation == 'CPython': implementation_version = platform.python_version() elif implementation == 'PyPy': implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major, sys.pypy_version_info.minor, sys.pypy_version_info.micro) if sys.pypy_version_info.releaselevel != 'final': implementation_version = ''.join([ implementation_version, sys.pypy_version_info.releaselevel ]) elif implementation == 'Jython': implementation_version = platform.python_version() # Complete Guess elif implementation == 'IronPython': implementation_version = platform.python_version() # Complete Guess else: implementation_version = 'Unknown' return {'name': implementation, 'version': implementation_version}
[ "def", "_implementation", "(", ")", ":", "implementation", "=", "platform", ".", "python_implementation", "(", ")", "if", "implementation", "==", "'CPython'", ":", "implementation_version", "=", "platform", ".", "python_version", "(", ")", "elif", "implementation", "==", "'PyPy'", ":", "implementation_version", "=", "'%s.%s.%s'", "%", "(", "sys", ".", "pypy_version_info", ".", "major", ",", "sys", ".", "pypy_version_info", ".", "minor", ",", "sys", ".", "pypy_version_info", ".", "micro", ")", "if", "sys", ".", "pypy_version_info", ".", "releaselevel", "!=", "'final'", ":", "implementation_version", "=", "''", ".", "join", "(", "[", "implementation_version", ",", "sys", ".", "pypy_version_info", ".", "releaselevel", "]", ")", "elif", "implementation", "==", "'Jython'", ":", "implementation_version", "=", "platform", ".", "python_version", "(", ")", "# Complete Guess", "elif", "implementation", "==", "'IronPython'", ":", "implementation_version", "=", "platform", ".", "python_version", "(", ")", "# Complete Guess", "else", ":", "implementation_version", "=", "'Unknown'", "return", "{", "'name'", ":", "implementation", ",", "'version'", ":", "implementation_version", "}" ]
[ 31, 0 ]
[ 61, 70 ]
python
en
['en', 'en', 'en']
True
info
()
Generate information for a bug report.
Generate information for a bug report.
def info(): """Generate information for a bug report.""" try: platform_info = { 'system': platform.system(), 'release': platform.release(), } except IOError: platform_info = { 'system': 'Unknown', 'release': 'Unknown', } implementation_info = _implementation() urllib3_info = {'version': urllib3.__version__} charset_normalizer_info = {'version': None} chardet_info = {'version': None} if charset_normalizer: charset_normalizer_info = {'version': charset_normalizer.__version__} if chardet: chardet_info = {'version': chardet.__version__} pyopenssl_info = { 'version': None, 'openssl_version': '', } if OpenSSL: pyopenssl_info = { 'version': OpenSSL.__version__, 'openssl_version': '%x' % OpenSSL.SSL.OPENSSL_VERSION_NUMBER, } cryptography_info = { 'version': getattr(cryptography, '__version__', ''), } idna_info = { 'version': getattr(idna, '__version__', ''), } system_ssl = ssl.OPENSSL_VERSION_NUMBER system_ssl_info = { 'version': '%x' % system_ssl if system_ssl is not None else '' } return { 'platform': platform_info, 'implementation': implementation_info, 'system_ssl': system_ssl_info, 'using_pyopenssl': pyopenssl is not None, 'using_charset_normalizer': chardet is None, 'pyOpenSSL': pyopenssl_info, 'urllib3': urllib3_info, 'chardet': chardet_info, 'charset_normalizer': charset_normalizer_info, 'cryptography': cryptography_info, 'idna': idna_info, 'requests': { 'version': requests_version, }, }
[ "def", "info", "(", ")", ":", "try", ":", "platform_info", "=", "{", "'system'", ":", "platform", ".", "system", "(", ")", ",", "'release'", ":", "platform", ".", "release", "(", ")", ",", "}", "except", "IOError", ":", "platform_info", "=", "{", "'system'", ":", "'Unknown'", ",", "'release'", ":", "'Unknown'", ",", "}", "implementation_info", "=", "_implementation", "(", ")", "urllib3_info", "=", "{", "'version'", ":", "urllib3", ".", "__version__", "}", "charset_normalizer_info", "=", "{", "'version'", ":", "None", "}", "chardet_info", "=", "{", "'version'", ":", "None", "}", "if", "charset_normalizer", ":", "charset_normalizer_info", "=", "{", "'version'", ":", "charset_normalizer", ".", "__version__", "}", "if", "chardet", ":", "chardet_info", "=", "{", "'version'", ":", "chardet", ".", "__version__", "}", "pyopenssl_info", "=", "{", "'version'", ":", "None", ",", "'openssl_version'", ":", "''", ",", "}", "if", "OpenSSL", ":", "pyopenssl_info", "=", "{", "'version'", ":", "OpenSSL", ".", "__version__", ",", "'openssl_version'", ":", "'%x'", "%", "OpenSSL", ".", "SSL", ".", "OPENSSL_VERSION_NUMBER", ",", "}", "cryptography_info", "=", "{", "'version'", ":", "getattr", "(", "cryptography", ",", "'__version__'", ",", "''", ")", ",", "}", "idna_info", "=", "{", "'version'", ":", "getattr", "(", "idna", ",", "'__version__'", ",", "''", ")", ",", "}", "system_ssl", "=", "ssl", ".", "OPENSSL_VERSION_NUMBER", "system_ssl_info", "=", "{", "'version'", ":", "'%x'", "%", "system_ssl", "if", "system_ssl", "is", "not", "None", "else", "''", "}", "return", "{", "'platform'", ":", "platform_info", ",", "'implementation'", ":", "implementation_info", ",", "'system_ssl'", ":", "system_ssl_info", ",", "'using_pyopenssl'", ":", "pyopenssl", "is", "not", "None", ",", "'using_charset_normalizer'", ":", "chardet", "is", "None", ",", "'pyOpenSSL'", ":", "pyopenssl_info", ",", "'urllib3'", ":", "urllib3_info", ",", "'chardet'", ":", "chardet_info", ",", "'charset_normalizer'", ":", 
"charset_normalizer_info", ",", "'cryptography'", ":", "cryptography_info", ",", "'idna'", ":", "idna_info", ",", "'requests'", ":", "{", "'version'", ":", "requests_version", ",", "}", ",", "}" ]
[ 64, 0 ]
[ 122, 5 ]
python
en
['en', 'en', 'en']
True
main
()
Pretty-print the bug information as JSON.
Pretty-print the bug information as JSON.
def main(): """Pretty-print the bug information as JSON.""" print(json.dumps(info(), sort_keys=True, indent=2))
[ "def", "main", "(", ")", ":", "print", "(", "json", ".", "dumps", "(", "info", "(", ")", ",", "sort_keys", "=", "True", ",", "indent", "=", "2", ")", ")" ]
[ 125, 0 ]
[ 127, 55 ]
python
en
['en', 'en', 'en']
True
get_dataset_filter
(expr: Expression, expected_to_file_map: dict)
Given an Iceberg Expression and a mapping of names in the iceberg schema to the file schema, convert to an equivalent dataset filter using the file column names. Recursively iterate through the expressions to convert each portion one predicate at a time Parameters ---------- expr : iceberg.api.expressions.Expression An Iceberg Expression to be converted expected_to_file_map : dict A dict that maps the iceberg schema names to the names from the file schema Returns ------- pyarrow._dataset.Expression An equivalent dataset expression
Given an Iceberg Expression and a mapping of names in the iceberg schema to the file schema, convert to an equivalent dataset filter using the file column names. Recursively iterate through the expressions to convert each portion one predicate at a time
def get_dataset_filter(expr: Expression, expected_to_file_map: dict) -> ds.Expression: """ Given an Iceberg Expression and a mapping of names in the iceberg schema to the file schema, convert to an equivalent dataset filter using the file column names. Recursively iterate through the expressions to convert each portion one predicate at a time Parameters ---------- expr : iceberg.api.expressions.Expression An Iceberg Expression to be converted expected_to_file_map : dict A dict that maps the iceberg schema names to the names from the file schema Returns ------- pyarrow._dataset.Expression An equivalent dataset expression """ if expr is None: return None if isinstance(expr, Predicate): return predicate(expr, expected_to_file_map) if expr.op() == Operation.TRUE: return None elif expr.op() == Operation.FALSE: return False elif expr.op() == Operation.NOT: return not_(get_dataset_filter(expr.child, expected_to_file_map)) elif expr.op() == Operation.AND: return and_(get_dataset_filter(expr.left, expected_to_file_map), get_dataset_filter(expr.right, expected_to_file_map)) elif expr.op() == Operation.OR: return or_(get_dataset_filter(expr.left, expected_to_file_map), get_dataset_filter(expr.right, expected_to_file_map)) else: raise RuntimeError("Unknown operation: {}".format(expr.op()))
[ "def", "get_dataset_filter", "(", "expr", ":", "Expression", ",", "expected_to_file_map", ":", "dict", ")", "->", "ds", ".", "Expression", ":", "if", "expr", "is", "None", ":", "return", "None", "if", "isinstance", "(", "expr", ",", "Predicate", ")", ":", "return", "predicate", "(", "expr", ",", "expected_to_file_map", ")", "if", "expr", ".", "op", "(", ")", "==", "Operation", ".", "TRUE", ":", "return", "None", "elif", "expr", ".", "op", "(", ")", "==", "Operation", ".", "FALSE", ":", "return", "False", "elif", "expr", ".", "op", "(", ")", "==", "Operation", ".", "NOT", ":", "return", "not_", "(", "get_dataset_filter", "(", "expr", ".", "child", ",", "expected_to_file_map", ")", ")", "elif", "expr", ".", "op", "(", ")", "==", "Operation", ".", "AND", ":", "return", "and_", "(", "get_dataset_filter", "(", "expr", ".", "left", ",", "expected_to_file_map", ")", ",", "get_dataset_filter", "(", "expr", ".", "right", ",", "expected_to_file_map", ")", ")", "elif", "expr", ".", "op", "(", ")", "==", "Operation", ".", "OR", ":", "return", "or_", "(", "get_dataset_filter", "(", "expr", ".", "left", ",", "expected_to_file_map", ")", ",", "get_dataset_filter", "(", "expr", ".", "right", ",", "expected_to_file_map", ")", ")", "else", ":", "raise", "RuntimeError", "(", "\"Unknown operation: {}\"", ".", "format", "(", "expr", ".", "op", "(", ")", ")", ")" ]
[ 22, 0 ]
[ 58, 69 ]
python
en
['en', 'error', 'th']
False
predicate
(pred: Predicate, field_map: dict)
Given an Iceberg Predicate and a mapping of names in the iceberg schema to the file schema, convert to an equivalent dataset expression using the file column names. Parameters ---------- pred : iceberg.api.expressions.Predicate An Iceberg Predicate to be converted field_map : dict A dict that maps the iceberg schema names to the names from the file schema Returns ------- pyarrow._dataset.Expression An equivalent dataset expression
Given an Iceberg Predicate and a mapping of names in the iceberg schema to the file schema, convert to an equivalent dataset expression using the file column names.
def predicate(pred: Predicate, field_map: dict) -> ds.Expression: # noqa: ignore=C901 """ Given an Iceberg Predicate and a mapping of names in the iceberg schema to the file schema, convert to an equivalent dataset expression using the file column names. Parameters ---------- pred : iceberg.api.expressions.Predicate An Iceberg Predicate to be converted field_map : dict A dict that maps the iceberg schema names to the names from the file schema Returns ------- pyarrow._dataset.Expression An equivalent dataset expression """ # get column name in the file schema so we can apply the predicate col_name = field_map.get(pred.ref.name) if col_name is None: if pred.op == Operation.IS_NULL: return ds.scalar(True) == ds.scalar(True) return ds.scalar(True) == ds.scalar(False) if pred.op == Operation.IS_NULL: return ~ds.field(col_name).is_valid() elif pred.op == Operation.NOT_NULL: return ds.field(col_name).is_valid() elif pred.op == Operation.LT: return ds.field(col_name) < pred.lit.value elif pred.op == Operation.LT_EQ: return ds.field(col_name) <= pred.lit.value elif pred.op == Operation.GT: return ds.field(col_name) > pred.lit.value elif pred.op == Operation.GT_EQ: return ds.field(col_name) >= pred.lit.value elif pred.op == Operation.EQ: return ds.field(col_name) == pred.lit.value elif pred.op == Operation.NOT_EQ: return ds.field(col_name) != pred.lit.value elif pred.op == Operation.IN: return ds.field(col_name).isin(pred.lit.value) elif pred.op == Operation.NOT_IN: return ds.field(col_name).isin(pred.lit.value)
[ "def", "predicate", "(", "pred", ":", "Predicate", ",", "field_map", ":", "dict", ")", "->", "ds", ".", "Expression", ":", "# noqa: ignore=C901", "# get column name in the file schema so we can apply the predicate", "col_name", "=", "field_map", ".", "get", "(", "pred", ".", "ref", ".", "name", ")", "if", "col_name", "is", "None", ":", "if", "pred", ".", "op", "==", "Operation", ".", "IS_NULL", ":", "return", "ds", ".", "scalar", "(", "True", ")", "==", "ds", ".", "scalar", "(", "True", ")", "return", "ds", ".", "scalar", "(", "True", ")", "==", "ds", ".", "scalar", "(", "False", ")", "if", "pred", ".", "op", "==", "Operation", ".", "IS_NULL", ":", "return", "~", "ds", ".", "field", "(", "col_name", ")", ".", "is_valid", "(", ")", "elif", "pred", ".", "op", "==", "Operation", ".", "NOT_NULL", ":", "return", "ds", ".", "field", "(", "col_name", ")", ".", "is_valid", "(", ")", "elif", "pred", ".", "op", "==", "Operation", ".", "LT", ":", "return", "ds", ".", "field", "(", "col_name", ")", "<", "pred", ".", "lit", ".", "value", "elif", "pred", ".", "op", "==", "Operation", ".", "LT_EQ", ":", "return", "ds", ".", "field", "(", "col_name", ")", "<=", "pred", ".", "lit", ".", "value", "elif", "pred", ".", "op", "==", "Operation", ".", "GT", ":", "return", "ds", ".", "field", "(", "col_name", ")", ">", "pred", ".", "lit", ".", "value", "elif", "pred", ".", "op", "==", "Operation", ".", "GT_EQ", ":", "return", "ds", ".", "field", "(", "col_name", ")", ">=", "pred", ".", "lit", ".", "value", "elif", "pred", ".", "op", "==", "Operation", ".", "EQ", ":", "return", "ds", ".", "field", "(", "col_name", ")", "==", "pred", ".", "lit", ".", "value", "elif", "pred", ".", "op", "==", "Operation", ".", "NOT_EQ", ":", "return", "ds", ".", "field", "(", "col_name", ")", "!=", "pred", ".", "lit", ".", "value", "elif", "pred", ".", "op", "==", "Operation", ".", "IN", ":", "return", "ds", ".", "field", "(", "col_name", ")", ".", "isin", "(", "pred", ".", "lit", ".", "value", ")", 
"elif", "pred", ".", "op", "==", "Operation", ".", "NOT_IN", ":", "return", "ds", ".", "field", "(", "col_name", ")", ".", "isin", "(", "pred", ".", "lit", ".", "value", ")" ]
[ 61, 0 ]
[ 105, 54 ]
python
en
['en', 'error', 'th']
False
and_
(left: ds.Expression, right: ds.Expression)
Given a left and right expression combined them using the `AND` logical operator Parameters ---------- left : pyarrow._dataset.Expression A Dataset `Expression` to logically `AND` right : pyarrow._dataset.Expression A Dataset `Expression` to logically `AND` Returns ------- pyarrow._dataset.Expression The left and right `Expression` combined with `AND`
Given a left and right expression combined them using the `AND` logical operator
def and_(left: ds.Expression, right: ds.Expression) -> ds.Expression: """ Given a left and right expression combined them using the `AND` logical operator Parameters ---------- left : pyarrow._dataset.Expression A Dataset `Expression` to logically `AND` right : pyarrow._dataset.Expression A Dataset `Expression` to logically `AND` Returns ------- pyarrow._dataset.Expression The left and right `Expression` combined with `AND` """ return left & right
[ "def", "and_", "(", "left", ":", "ds", ".", "Expression", ",", "right", ":", "ds", ".", "Expression", ")", "->", "ds", ".", "Expression", ":", "return", "left", "&", "right" ]
[ 108, 0 ]
[ 123, 23 ]
python
en
['en', 'error', 'th']
False
or_
(left: ds.Expression, right: ds.Expression)
Given a left and right expression combined them using the `OR` logical operator Parameters ---------- left : pyarrow._dataset.Expression A Dataset `Expression` to logically `OR` right : pyarrow._dataset.Expression A Dataset `Expression` to logically `OR` Returns ------- pyarrow._dataset.Expression The left and right `Expression` combined with `OR`
Given a left and right expression combined them using the `OR` logical operator
def or_(left: ds.Expression, right: ds.Expression) -> ds.Expression: """ Given a left and right expression combined them using the `OR` logical operator Parameters ---------- left : pyarrow._dataset.Expression A Dataset `Expression` to logically `OR` right : pyarrow._dataset.Expression A Dataset `Expression` to logically `OR` Returns ------- pyarrow._dataset.Expression The left and right `Expression` combined with `OR` """ return left | right
[ "def", "or_", "(", "left", ":", "ds", ".", "Expression", ",", "right", ":", "ds", ".", "Expression", ")", "->", "ds", ".", "Expression", ":", "return", "left", "|", "right" ]
[ 126, 0 ]
[ 141, 23 ]
python
en
['en', 'error', 'th']
False
not_
(child: ds.Expression)
Given a child expression create the logical negation Parameters ---------- child : pyarrow._dataset.Expression A Dataset `Expression` to logically `OR` Returns ------- pyarrow._dataset.Expression The negation of the input `Expression`
Given a child expression create the logical negation
def not_(child: ds.Expression) -> ds.Expression: """ Given a child expression create the logical negation Parameters ---------- child : pyarrow._dataset.Expression A Dataset `Expression` to logically `OR` Returns ------- pyarrow._dataset.Expression The negation of the input `Expression` """ return ~child
[ "def", "not_", "(", "child", ":", "ds", ".", "Expression", ")", "->", "ds", ".", "Expression", ":", "return", "~", "child" ]
[ 144, 0 ]
[ 157, 17 ]
python
en
['en', 'error', 'th']
False
build_model
(output_dir, hub_handle)
Compiles keras model for image classification.
Compiles keras model for image classification.
def build_model(output_dir, hub_handle): """Compiles keras model for image classification.""" model = models.Sequential([ hub.KerasLayer(hub_handle, trainable=False), layers.Dropout(rate=DROPOUT), layers.Dense( NCLASSES, activation='softmax', kernel_regularizer=tf.keras.regularizers.l2(LEARNING_RATE)) ]) model.build((None,)+(util.IMG_HEIGHT, util.IMG_WIDTH, util.IMG_CHANNELS)) model.compile( optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) return model
[ "def", "build_model", "(", "output_dir", ",", "hub_handle", ")", ":", "model", "=", "models", ".", "Sequential", "(", "[", "hub", ".", "KerasLayer", "(", "hub_handle", ",", "trainable", "=", "False", ")", ",", "layers", ".", "Dropout", "(", "rate", "=", "DROPOUT", ")", ",", "layers", ".", "Dense", "(", "NCLASSES", ",", "activation", "=", "'softmax'", ",", "kernel_regularizer", "=", "tf", ".", "keras", ".", "regularizers", ".", "l2", "(", "LEARNING_RATE", ")", ")", "]", ")", "model", ".", "build", "(", "(", "None", ",", ")", "+", "(", "util", ".", "IMG_HEIGHT", ",", "util", ".", "IMG_WIDTH", ",", "util", ".", "IMG_CHANNELS", ")", ")", "model", ".", "compile", "(", "optimizer", "=", "'adam'", ",", "loss", "=", "'categorical_crossentropy'", ",", "metrics", "=", "[", "'accuracy'", "]", ")", "return", "model" ]
[ 17, 0 ]
[ 32, 16 ]
python
en
['es', 'en', 'en']
True
train_and_evaluate
( model, num_epochs, steps_per_epoch, train_data, eval_data, output_dir)
Compiles keras model and loads data into it for training.
Compiles keras model and loads data into it for training.
def train_and_evaluate( model, num_epochs, steps_per_epoch, train_data, eval_data, output_dir): """Compiles keras model and loads data into it for training.""" model_callbacks = [] if output_dir: tensorboard_callback = callbacks.TensorBoard(log_dir=output_dir) model_callbacks = [tensorboard_callback] history = model.fit( train_data, validation_data=eval_data, validation_steps=util.VALIDATION_STEPS, epochs=num_epochs, steps_per_epoch=steps_per_epoch, callbacks=model_callbacks) if output_dir: export_path = os.path.join(output_dir, 'keras_export') model.save(export_path, save_format='tf') return history
[ "def", "train_and_evaluate", "(", "model", ",", "num_epochs", ",", "steps_per_epoch", ",", "train_data", ",", "eval_data", ",", "output_dir", ")", ":", "model_callbacks", "=", "[", "]", "if", "output_dir", ":", "tensorboard_callback", "=", "callbacks", ".", "TensorBoard", "(", "log_dir", "=", "output_dir", ")", "model_callbacks", "=", "[", "tensorboard_callback", "]", "history", "=", "model", ".", "fit", "(", "train_data", ",", "validation_data", "=", "eval_data", ",", "validation_steps", "=", "util", ".", "VALIDATION_STEPS", ",", "epochs", "=", "num_epochs", ",", "steps_per_epoch", "=", "steps_per_epoch", ",", "callbacks", "=", "model_callbacks", ")", "if", "output_dir", ":", "export_path", "=", "os", ".", "path", ".", "join", "(", "output_dir", ",", "'keras_export'", ")", "model", ".", "save", "(", "export_path", ",", "save_format", "=", "'tf'", ")", "return", "history" ]
[ 35, 0 ]
[ 55, 18 ]
python
en
['en', 'en', 'en']
True
get_keyring_auth
(url: Optional[str], username: Optional[str])
Return the tuple auth for a given url from keyring.
Return the tuple auth for a given url from keyring.
def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]: """Return the tuple auth for a given url from keyring.""" global keyring if not url or not keyring: return None try: try: get_credential = keyring.get_credential except AttributeError: pass else: logger.debug("Getting credentials from keyring for %s", url) cred = get_credential(url, username) if cred is not None: return cred.username, cred.password return None if username: logger.debug("Getting password from keyring for %s", url) password = keyring.get_password(url, username) if password: return username, password except Exception as exc: logger.warning( "Keyring is skipped due to an exception: %s", str(exc), ) keyring = None return None
[ "def", "get_keyring_auth", "(", "url", ":", "Optional", "[", "str", "]", ",", "username", ":", "Optional", "[", "str", "]", ")", "->", "Optional", "[", "AuthInfo", "]", ":", "global", "keyring", "if", "not", "url", "or", "not", "keyring", ":", "return", "None", "try", ":", "try", ":", "get_credential", "=", "keyring", ".", "get_credential", "except", "AttributeError", ":", "pass", "else", ":", "logger", ".", "debug", "(", "\"Getting credentials from keyring for %s\"", ",", "url", ")", "cred", "=", "get_credential", "(", "url", ",", "username", ")", "if", "cred", "is", "not", "None", ":", "return", "cred", ".", "username", ",", "cred", ".", "password", "return", "None", "if", "username", ":", "logger", ".", "debug", "(", "\"Getting password from keyring for %s\"", ",", "url", ")", "password", "=", "keyring", ".", "get_password", "(", "url", ",", "username", ")", "if", "password", ":", "return", "username", ",", "password", "except", "Exception", "as", "exc", ":", "logger", ".", "warning", "(", "\"Keyring is skipped due to an exception: %s\"", ",", "str", "(", "exc", ")", ",", ")", "keyring", "=", "None", "return", "None" ]
[ 39, 0 ]
[ 69, 15 ]
python
en
['en', 'en', 'en']
True
MultiDomainBasicAuth._get_index_url
(self, url: str)
Return the original index URL matching the requested URL. Cached or dynamically generated credentials may work against the original index URL rather than just the netloc. The provided url should have had its username and password removed already. If the original index url had credentials then they will be included in the return value. Returns None if no matching index was found, or if --no-index was specified by the user.
Return the original index URL matching the requested URL.
def _get_index_url(self, url: str) -> Optional[str]: """Return the original index URL matching the requested URL. Cached or dynamically generated credentials may work against the original index URL rather than just the netloc. The provided url should have had its username and password removed already. If the original index url had credentials then they will be included in the return value. Returns None if no matching index was found, or if --no-index was specified by the user. """ if not url or not self.index_urls: return None for u in self.index_urls: prefix = remove_auth_from_url(u).rstrip("/") + "/" if url.startswith(prefix): return u return None
[ "def", "_get_index_url", "(", "self", ",", "url", ":", "str", ")", "->", "Optional", "[", "str", "]", ":", "if", "not", "url", "or", "not", "self", ".", "index_urls", ":", "return", "None", "for", "u", "in", "self", ".", "index_urls", ":", "prefix", "=", "remove_auth_from_url", "(", "u", ")", ".", "rstrip", "(", "\"/\"", ")", "+", "\"/\"", "if", "url", ".", "startswith", "(", "prefix", ")", ":", "return", "u", "return", "None" ]
[ 86, 4 ]
[ 106, 19 ]
python
en
['en', 'en', 'en']
True
MultiDomainBasicAuth._get_new_credentials
( self, original_url: str, allow_netrc: bool = True, allow_keyring: bool = False, )
Find and return credentials for the specified URL.
Find and return credentials for the specified URL.
def _get_new_credentials( self, original_url: str, allow_netrc: bool = True, allow_keyring: bool = False, ) -> AuthInfo: """Find and return credentials for the specified URL.""" # Split the credentials and netloc from the url. url, netloc, url_user_password = split_auth_netloc_from_url( original_url, ) # Start with the credentials embedded in the url username, password = url_user_password if username is not None and password is not None: logger.debug("Found credentials in url for %s", netloc) return url_user_password # Find a matching index url for this request index_url = self._get_index_url(url) if index_url: # Split the credentials from the url. index_info = split_auth_netloc_from_url(index_url) if index_info: index_url, _, index_url_user_password = index_info logger.debug("Found index url %s", index_url) # If an index URL was found, try its embedded credentials if index_url and index_url_user_password[0] is not None: username, password = index_url_user_password if username is not None and password is not None: logger.debug("Found credentials in index url for %s", netloc) return index_url_user_password # Get creds from netrc if we still don't have them if allow_netrc: netrc_auth = get_netrc_auth(original_url) if netrc_auth: logger.debug("Found credentials in netrc for %s", netloc) return netrc_auth # If we don't have a password and keyring is available, use it. if allow_keyring: # The index url is more specific than the netloc, so try it first # fmt: off kr_auth = ( get_keyring_auth(index_url, username) or get_keyring_auth(netloc, username) ) # fmt: on if kr_auth: logger.debug("Found credentials in keyring for %s", netloc) return kr_auth return username, password
[ "def", "_get_new_credentials", "(", "self", ",", "original_url", ":", "str", ",", "allow_netrc", ":", "bool", "=", "True", ",", "allow_keyring", ":", "bool", "=", "False", ",", ")", "->", "AuthInfo", ":", "# Split the credentials and netloc from the url.", "url", ",", "netloc", ",", "url_user_password", "=", "split_auth_netloc_from_url", "(", "original_url", ",", ")", "# Start with the credentials embedded in the url", "username", ",", "password", "=", "url_user_password", "if", "username", "is", "not", "None", "and", "password", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"Found credentials in url for %s\"", ",", "netloc", ")", "return", "url_user_password", "# Find a matching index url for this request", "index_url", "=", "self", ".", "_get_index_url", "(", "url", ")", "if", "index_url", ":", "# Split the credentials from the url.", "index_info", "=", "split_auth_netloc_from_url", "(", "index_url", ")", "if", "index_info", ":", "index_url", ",", "_", ",", "index_url_user_password", "=", "index_info", "logger", ".", "debug", "(", "\"Found index url %s\"", ",", "index_url", ")", "# If an index URL was found, try its embedded credentials", "if", "index_url", "and", "index_url_user_password", "[", "0", "]", "is", "not", "None", ":", "username", ",", "password", "=", "index_url_user_password", "if", "username", "is", "not", "None", "and", "password", "is", "not", "None", ":", "logger", ".", "debug", "(", "\"Found credentials in index url for %s\"", ",", "netloc", ")", "return", "index_url_user_password", "# Get creds from netrc if we still don't have them", "if", "allow_netrc", ":", "netrc_auth", "=", "get_netrc_auth", "(", "original_url", ")", "if", "netrc_auth", ":", "logger", ".", "debug", "(", "\"Found credentials in netrc for %s\"", ",", "netloc", ")", "return", "netrc_auth", "# If we don't have a password and keyring is available, use it.", "if", "allow_keyring", ":", "# The index url is more specific than the netloc, so try it first", "# fmt: 
off", "kr_auth", "=", "(", "get_keyring_auth", "(", "index_url", ",", "username", ")", "or", "get_keyring_auth", "(", "netloc", ",", "username", ")", ")", "# fmt: on", "if", "kr_auth", ":", "logger", ".", "debug", "(", "\"Found credentials in keyring for %s\"", ",", "netloc", ")", "return", "kr_auth", "return", "username", ",", "password" ]
[ 108, 4 ]
[ 162, 33 ]
python
en
['en', 'en', 'en']
True
MultiDomainBasicAuth._get_url_and_credentials
( self, original_url: str )
Return the credentials to use for the provided URL. If allowed, netrc and keyring may be used to obtain the correct credentials. Returns (url_without_credentials, username, password). Note that even if the original URL contains credentials, this function may return a different username and password.
Return the credentials to use for the provided URL.
def _get_url_and_credentials( self, original_url: str ) -> Tuple[str, Optional[str], Optional[str]]: """Return the credentials to use for the provided URL. If allowed, netrc and keyring may be used to obtain the correct credentials. Returns (url_without_credentials, username, password). Note that even if the original URL contains credentials, this function may return a different username and password. """ url, netloc, _ = split_auth_netloc_from_url(original_url) # Try to get credentials from original url username, password = self._get_new_credentials(original_url) # If credentials not found, use any stored credentials for this netloc if username is None and password is None: username, password = self.passwords.get(netloc, (None, None)) if username is not None or password is not None: # Convert the username and password if they're None, so that # this netloc will show up as "cached" in the conditional above. # Further, HTTPBasicAuth doesn't accept None, so it makes sense to # cache the value that is going to be used. username = username or "" password = password or "" # Store any acquired credentials. self.passwords[netloc] = (username, password) assert ( # Credentials were found (username is not None and password is not None) # Credentials were not found or (username is None and password is None) ), f"Could not load credentials from url: {original_url}" return url, username, password
[ "def", "_get_url_and_credentials", "(", "self", ",", "original_url", ":", "str", ")", "->", "Tuple", "[", "str", ",", "Optional", "[", "str", "]", ",", "Optional", "[", "str", "]", "]", ":", "url", ",", "netloc", ",", "_", "=", "split_auth_netloc_from_url", "(", "original_url", ")", "# Try to get credentials from original url", "username", ",", "password", "=", "self", ".", "_get_new_credentials", "(", "original_url", ")", "# If credentials not found, use any stored credentials for this netloc", "if", "username", "is", "None", "and", "password", "is", "None", ":", "username", ",", "password", "=", "self", ".", "passwords", ".", "get", "(", "netloc", ",", "(", "None", ",", "None", ")", ")", "if", "username", "is", "not", "None", "or", "password", "is", "not", "None", ":", "# Convert the username and password if they're None, so that", "# this netloc will show up as \"cached\" in the conditional above.", "# Further, HTTPBasicAuth doesn't accept None, so it makes sense to", "# cache the value that is going to be used.", "username", "=", "username", "or", "\"\"", "password", "=", "password", "or", "\"\"", "# Store any acquired credentials.", "self", ".", "passwords", "[", "netloc", "]", "=", "(", "username", ",", "password", ")", "assert", "(", "# Credentials were found", "(", "username", "is", "not", "None", "and", "password", "is", "not", "None", ")", "# Credentials were not found", "or", "(", "username", "is", "None", "and", "password", "is", "None", ")", ")", ",", "f\"Could not load credentials from url: {original_url}\"", "return", "url", ",", "username", ",", "password" ]
[ 164, 4 ]
[ 203, 38 ]
python
en
['en', 'en', 'en']
True
MultiDomainBasicAuth.warn_on_401
(self, resp: Response, **kwargs: Any)
Response callback to warn about incorrect credentials.
Response callback to warn about incorrect credentials.
def warn_on_401(self, resp: Response, **kwargs: Any) -> None: """Response callback to warn about incorrect credentials.""" if resp.status_code == 401: logger.warning( "401 Error, Credentials not correct for %s", resp.request.url, )
[ "def", "warn_on_401", "(", "self", ",", "resp", ":", "Response", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "if", "resp", ".", "status_code", "==", "401", ":", "logger", ".", "warning", "(", "\"401 Error, Credentials not correct for %s\"", ",", "resp", ".", "request", ".", "url", ",", ")" ]
[ 294, 4 ]
[ 300, 13 ]
python
en
['en', 'en', 'en']
True
MultiDomainBasicAuth.save_credentials
(self, resp: Response, **kwargs: Any)
Response callback to save credentials on success.
Response callback to save credentials on success.
def save_credentials(self, resp: Response, **kwargs: Any) -> None: """Response callback to save credentials on success.""" assert keyring is not None, "should never reach here without keyring" if not keyring: return creds = self._credentials_to_save self._credentials_to_save = None if creds and resp.status_code < 400: try: logger.info("Saving credentials to keyring") keyring.set_password(*creds) except Exception: logger.exception("Failed to save credentials")
[ "def", "save_credentials", "(", "self", ",", "resp", ":", "Response", ",", "*", "*", "kwargs", ":", "Any", ")", "->", "None", ":", "assert", "keyring", "is", "not", "None", ",", "\"should never reach here without keyring\"", "if", "not", "keyring", ":", "return", "creds", "=", "self", ".", "_credentials_to_save", "self", ".", "_credentials_to_save", "=", "None", "if", "creds", "and", "resp", ".", "status_code", "<", "400", ":", "try", ":", "logger", ".", "info", "(", "\"Saving credentials to keyring\"", ")", "keyring", ".", "set_password", "(", "*", "creds", ")", "except", "Exception", ":", "logger", ".", "exception", "(", "\"Failed to save credentials\"", ")" ]
[ 302, 4 ]
[ 315, 62 ]
python
en
['en', 'en', 'en']
True
TimeMixIn.asDateTime
(self)
Create :py:class:`datetime.datetime` object from a |ASN.1| object. Returns ------- : new instance of :py:class:`datetime.datetime` object
Create :py:class:`datetime.datetime` object from a |ASN.1| object.
def asDateTime(self): """Create :py:class:`datetime.datetime` object from a |ASN.1| object. Returns ------- : new instance of :py:class:`datetime.datetime` object """ text = str(self) if text.endswith('Z'): tzinfo = TimeMixIn.UTC text = text[:-1] elif '-' in text or '+' in text: if '+' in text: text, plusminus, tz = string.partition(text, '+') else: text, plusminus, tz = string.partition(text, '-') if self._shortTZ and len(tz) == 2: tz += '00' if len(tz) != 4: raise error.PyAsn1Error('malformed time zone offset %s' % tz) try: minutes = int(tz[:2]) * 60 + int(tz[2:]) if plusminus == '-': minutes *= -1 except ValueError: raise error.PyAsn1Error('unknown time specification %s' % self) tzinfo = TimeMixIn.FixedOffset(minutes, '?') else: tzinfo = None if '.' in text or ',' in text: if '.' in text: text, _, ms = string.partition(text, '.') else: text, _, ms = string.partition(text, ',') try: ms = int(ms) * 1000 except ValueError: raise error.PyAsn1Error('bad sub-second time specification %s' % self) else: ms = 0 if self._optionalMinutes and len(text) - self._yearsDigits == 6: text += '0000' elif len(text) - self._yearsDigits == 8: text += '00' try: dt = dateandtime.strptime(text, self._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S') except ValueError: raise error.PyAsn1Error('malformed datetime format %s' % self) return dt.replace(microsecond=ms, tzinfo=tzinfo)
[ "def", "asDateTime", "(", "self", ")", ":", "text", "=", "str", "(", "self", ")", "if", "text", ".", "endswith", "(", "'Z'", ")", ":", "tzinfo", "=", "TimeMixIn", ".", "UTC", "text", "=", "text", "[", ":", "-", "1", "]", "elif", "'-'", "in", "text", "or", "'+'", "in", "text", ":", "if", "'+'", "in", "text", ":", "text", ",", "plusminus", ",", "tz", "=", "string", ".", "partition", "(", "text", ",", "'+'", ")", "else", ":", "text", ",", "plusminus", ",", "tz", "=", "string", ".", "partition", "(", "text", ",", "'-'", ")", "if", "self", ".", "_shortTZ", "and", "len", "(", "tz", ")", "==", "2", ":", "tz", "+=", "'00'", "if", "len", "(", "tz", ")", "!=", "4", ":", "raise", "error", ".", "PyAsn1Error", "(", "'malformed time zone offset %s'", "%", "tz", ")", "try", ":", "minutes", "=", "int", "(", "tz", "[", ":", "2", "]", ")", "*", "60", "+", "int", "(", "tz", "[", "2", ":", "]", ")", "if", "plusminus", "==", "'-'", ":", "minutes", "*=", "-", "1", "except", "ValueError", ":", "raise", "error", ".", "PyAsn1Error", "(", "'unknown time specification %s'", "%", "self", ")", "tzinfo", "=", "TimeMixIn", ".", "FixedOffset", "(", "minutes", ",", "'?'", ")", "else", ":", "tzinfo", "=", "None", "if", "'.'", "in", "text", "or", "','", "in", "text", ":", "if", "'.'", "in", "text", ":", "text", ",", "_", ",", "ms", "=", "string", ".", "partition", "(", "text", ",", "'.'", ")", "else", ":", "text", ",", "_", ",", "ms", "=", "string", ".", "partition", "(", "text", ",", "','", ")", "try", ":", "ms", "=", "int", "(", "ms", ")", "*", "1000", "except", "ValueError", ":", "raise", "error", ".", "PyAsn1Error", "(", "'bad sub-second time specification %s'", "%", "self", ")", "else", ":", "ms", "=", "0", "if", "self", ".", "_optionalMinutes", "and", "len", "(", "text", ")", "-", "self", ".", "_yearsDigits", "==", "6", ":", "text", "+=", "'0000'", "elif", "len", "(", "text", ")", "-", "self", ".", "_yearsDigits", "==", "8", ":", "text", "+=", "'00'", "try", ":", "dt", "=", "dateandtime", 
".", "strptime", "(", "text", ",", "self", ".", "_yearsDigits", "==", "4", "and", "'%Y%m%d%H%M%S'", "or", "'%y%m%d%H%M%S'", ")", "except", "ValueError", ":", "raise", "error", ".", "PyAsn1Error", "(", "'malformed datetime format %s'", "%", "self", ")", "return", "dt", ".", "replace", "(", "microsecond", "=", "ms", ",", "tzinfo", "=", "tzinfo", ")" ]
[ 61, 4 ]
[ 125, 56 ]
python
en
['en', 'en', 'en']
True
TimeMixIn.fromDateTime
(cls, dt)
Create |ASN.1| object from a :py:class:`datetime.datetime` object. Parameters ---------- dt: :py:class:`datetime.datetime` object The `datetime.datetime` object to initialize the |ASN.1| object from Returns ------- : new instance of |ASN.1| value
Create |ASN.1| object from a :py:class:`datetime.datetime` object.
def fromDateTime(cls, dt): """Create |ASN.1| object from a :py:class:`datetime.datetime` object. Parameters ---------- dt: :py:class:`datetime.datetime` object The `datetime.datetime` object to initialize the |ASN.1| object from Returns ------- : new instance of |ASN.1| value """ text = dt.strftime(cls._yearsDigits == 4 and '%Y%m%d%H%M%S' or '%y%m%d%H%M%S') if cls._hasSubsecond: text += '.%d' % (dt.microsecond // 1000) if dt.utcoffset(): seconds = dt.utcoffset().seconds if seconds < 0: text += '-' else: text += '+' text += '%.2d%.2d' % (seconds // 3600, seconds % 3600) else: text += 'Z' return cls(text)
[ "def", "fromDateTime", "(", "cls", ",", "dt", ")", ":", "text", "=", "dt", ".", "strftime", "(", "cls", ".", "_yearsDigits", "==", "4", "and", "'%Y%m%d%H%M%S'", "or", "'%y%m%d%H%M%S'", ")", "if", "cls", ".", "_hasSubsecond", ":", "text", "+=", "'.%d'", "%", "(", "dt", ".", "microsecond", "//", "1000", ")", "if", "dt", ".", "utcoffset", "(", ")", ":", "seconds", "=", "dt", ".", "utcoffset", "(", ")", ".", "seconds", "if", "seconds", "<", "0", ":", "text", "+=", "'-'", "else", ":", "text", "+=", "'+'", "text", "+=", "'%.2d%.2d'", "%", "(", "seconds", "//", "3600", ",", "seconds", "%", "3600", ")", "else", ":", "text", "+=", "'Z'", "return", "cls", "(", "text", ")" ]
[ 128, 4 ]
[ 156, 24 ]
python
en
['en', 'en', 'en']
True
ordinal
(value)
Convert an integer to its ordinal as a string. 1 is '1st', 2 is '2nd', 3 is '3rd', etc. Works for any integer.
Convert an integer to its ordinal as a string. 1 is '1st', 2 is '2nd', 3 is '3rd', etc. Works for any integer.
def ordinal(value): """ Convert an integer to its ordinal as a string. 1 is '1st', 2 is '2nd', 3 is '3rd', etc. Works for any integer. """ try: value = int(value) except (TypeError, ValueError): return value if value % 100 in (11, 12, 13): # Translators: Ordinal format for 11 (11th), 12 (12th), and 13 (13th). value = pgettext('ordinal 11, 12, 13', '{}th').format(value) else: templates = ( # Translators: Ordinal format when value ends with 0, e.g. 80th. pgettext('ordinal 0', '{}th'), # Translators: Ordinal format when value ends with 1, e.g. 81st, except 11. pgettext('ordinal 1', '{}st'), # Translators: Ordinal format when value ends with 2, e.g. 82nd, except 12. pgettext('ordinal 2', '{}nd'), # Translators: Ordinal format when value ends with 3, e.g. 83th, except 13. pgettext('ordinal 3', '{}rd'), # Translators: Ordinal format when value ends with 4, e.g. 84th. pgettext('ordinal 4', '{}th'), # Translators: Ordinal format when value ends with 5, e.g. 85th. pgettext('ordinal 5', '{}th'), # Translators: Ordinal format when value ends with 6, e.g. 86th. pgettext('ordinal 6', '{}th'), # Translators: Ordinal format when value ends with 7, e.g. 87th. pgettext('ordinal 7', '{}th'), # Translators: Ordinal format when value ends with 8, e.g. 88th. pgettext('ordinal 8', '{}th'), # Translators: Ordinal format when value ends with 9, e.g. 89th. pgettext('ordinal 9', '{}th'), ) value = templates[value % 10].format(value) # Mark value safe so i18n does not break with <sup> or <sub> see #19988 return mark_safe(value)
[ "def", "ordinal", "(", "value", ")", ":", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "value", "if", "value", "%", "100", "in", "(", "11", ",", "12", ",", "13", ")", ":", "# Translators: Ordinal format for 11 (11th), 12 (12th), and 13 (13th).", "value", "=", "pgettext", "(", "'ordinal 11, 12, 13'", ",", "'{}th'", ")", ".", "format", "(", "value", ")", "else", ":", "templates", "=", "(", "# Translators: Ordinal format when value ends with 0, e.g. 80th.", "pgettext", "(", "'ordinal 0'", ",", "'{}th'", ")", ",", "# Translators: Ordinal format when value ends with 1, e.g. 81st, except 11.", "pgettext", "(", "'ordinal 1'", ",", "'{}st'", ")", ",", "# Translators: Ordinal format when value ends with 2, e.g. 82nd, except 12.", "pgettext", "(", "'ordinal 2'", ",", "'{}nd'", ")", ",", "# Translators: Ordinal format when value ends with 3, e.g. 83th, except 13.", "pgettext", "(", "'ordinal 3'", ",", "'{}rd'", ")", ",", "# Translators: Ordinal format when value ends with 4, e.g. 84th.", "pgettext", "(", "'ordinal 4'", ",", "'{}th'", ")", ",", "# Translators: Ordinal format when value ends with 5, e.g. 85th.", "pgettext", "(", "'ordinal 5'", ",", "'{}th'", ")", ",", "# Translators: Ordinal format when value ends with 6, e.g. 86th.", "pgettext", "(", "'ordinal 6'", ",", "'{}th'", ")", ",", "# Translators: Ordinal format when value ends with 7, e.g. 87th.", "pgettext", "(", "'ordinal 7'", ",", "'{}th'", ")", ",", "# Translators: Ordinal format when value ends with 8, e.g. 88th.", "pgettext", "(", "'ordinal 8'", ",", "'{}th'", ")", ",", "# Translators: Ordinal format when value ends with 9, e.g. 89th.", "pgettext", "(", "'ordinal 9'", ",", "'{}th'", ")", ",", ")", "value", "=", "templates", "[", "value", "%", "10", "]", ".", "format", "(", "value", ")", "# Mark value safe so i18n does not break with <sup> or <sub> see #19988", "return", "mark_safe", "(", "value", ")" ]
[ 18, 0 ]
[ 55, 27 ]
python
en
['en', 'error', 'th']
False
intcomma
(value, use_l10n=True)
Convert an integer to a string containing commas every three digits. For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
Convert an integer to a string containing commas every three digits. For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
def intcomma(value, use_l10n=True): """ Convert an integer to a string containing commas every three digits. For example, 3000 becomes '3,000' and 45000 becomes '45,000'. """ if use_l10n: try: if not isinstance(value, (float, Decimal)): value = int(value) except (TypeError, ValueError): return intcomma(value, False) else: return number_format(value, use_l10n=True, force_grouping=True) orig = str(value) new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig) if orig == new: return new else: return intcomma(new, use_l10n)
[ "def", "intcomma", "(", "value", ",", "use_l10n", "=", "True", ")", ":", "if", "use_l10n", ":", "try", ":", "if", "not", "isinstance", "(", "value", ",", "(", "float", ",", "Decimal", ")", ")", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "intcomma", "(", "value", ",", "False", ")", "else", ":", "return", "number_format", "(", "value", ",", "use_l10n", "=", "True", ",", "force_grouping", "=", "True", ")", "orig", "=", "str", "(", "value", ")", "new", "=", "re", ".", "sub", "(", "r\"^(-?\\d+)(\\d{3})\"", ",", "r'\\g<1>,\\g<2>'", ",", "orig", ")", "if", "orig", "==", "new", ":", "return", "new", "else", ":", "return", "intcomma", "(", "new", ",", "use_l10n", ")" ]
[ 59, 0 ]
[ 77, 38 ]
python
en
['en', 'error', 'th']
False
intword
(value)
Convert a large integer to a friendly text representation. Works best for numbers over 1 million. For example, 1000000 becomes '1.0 million', 1200000 becomes '1.2 million' and '1200000000' becomes '1.2 billion'.
Convert a large integer to a friendly text representation. Works best for numbers over 1 million. For example, 1000000 becomes '1.0 million', 1200000 becomes '1.2 million' and '1200000000' becomes '1.2 billion'.
def intword(value): """ Convert a large integer to a friendly text representation. Works best for numbers over 1 million. For example, 1000000 becomes '1.0 million', 1200000 becomes '1.2 million' and '1200000000' becomes '1.2 billion'. """ try: value = int(value) except (TypeError, ValueError): return value abs_value = abs(value) if abs_value < 1000000: return value for exponent, converter in intword_converters: large_number = 10 ** exponent if abs_value < large_number * 1000: new_value = value / large_number rounded_value = round_away_from_one(new_value) return converter(abs(rounded_value)) % { 'value': defaultfilters.floatformat(new_value, 1), } return value
[ "def", "intword", "(", "value", ")", ":", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "value", "abs_value", "=", "abs", "(", "value", ")", "if", "abs_value", "<", "1000000", ":", "return", "value", "for", "exponent", ",", "converter", "in", "intword_converters", ":", "large_number", "=", "10", "**", "exponent", "if", "abs_value", "<", "large_number", "*", "1000", ":", "new_value", "=", "value", "/", "large_number", "rounded_value", "=", "round_away_from_one", "(", "new_value", ")", "return", "converter", "(", "abs", "(", "rounded_value", ")", ")", "%", "{", "'value'", ":", "defaultfilters", ".", "floatformat", "(", "new_value", ",", "1", ")", ",", "}", "return", "value" ]
[ 97, 0 ]
[ 120, 16 ]
python
en
['en', 'error', 'th']
False
apnumber
(value)
For numbers 1-9, return the number spelled out. Otherwise, return the number. This follows Associated Press style.
For numbers 1-9, return the number spelled out. Otherwise, return the number. This follows Associated Press style.
def apnumber(value): """ For numbers 1-9, return the number spelled out. Otherwise, return the number. This follows Associated Press style. """ try: value = int(value) except (TypeError, ValueError): return value if not 0 < value < 10: return value return (_('one'), _('two'), _('three'), _('four'), _('five'), _('six'), _('seven'), _('eight'), _('nine'))[value - 1]
[ "def", "apnumber", "(", "value", ")", ":", "try", ":", "value", "=", "int", "(", "value", ")", "except", "(", "TypeError", ",", "ValueError", ")", ":", "return", "value", "if", "not", "0", "<", "value", "<", "10", ":", "return", "value", "return", "(", "_", "(", "'one'", ")", ",", "_", "(", "'two'", ")", ",", "_", "(", "'three'", ")", ",", "_", "(", "'four'", ")", ",", "_", "(", "'five'", ")", ",", "_", "(", "'six'", ")", ",", "_", "(", "'seven'", ")", ",", "_", "(", "'eight'", ")", ",", "_", "(", "'nine'", ")", ")", "[", "value", "-", "1", "]" ]
[ 124, 0 ]
[ 136, 67 ]
python
en
['en', 'error', 'th']
False
naturalday
(value, arg=None)
For date values that are tomorrow, today or yesterday compared to present day return representing string. Otherwise, return a string formatted according to settings.DATE_FORMAT.
For date values that are tomorrow, today or yesterday compared to present day return representing string. Otherwise, return a string formatted according to settings.DATE_FORMAT.
def naturalday(value, arg=None): """ For date values that are tomorrow, today or yesterday compared to present day return representing string. Otherwise, return a string formatted according to settings.DATE_FORMAT. """ tzinfo = getattr(value, 'tzinfo', None) try: value = date(value.year, value.month, value.day) except AttributeError: # Passed value wasn't a date object return value today = datetime.now(tzinfo).date() delta = value - today if delta.days == 0: return _('today') elif delta.days == 1: return _('tomorrow') elif delta.days == -1: return _('yesterday') return defaultfilters.date(value, arg)
[ "def", "naturalday", "(", "value", ",", "arg", "=", "None", ")", ":", "tzinfo", "=", "getattr", "(", "value", ",", "'tzinfo'", ",", "None", ")", "try", ":", "value", "=", "date", "(", "value", ".", "year", ",", "value", ".", "month", ",", "value", ".", "day", ")", "except", "AttributeError", ":", "# Passed value wasn't a date object", "return", "value", "today", "=", "datetime", ".", "now", "(", "tzinfo", ")", ".", "date", "(", ")", "delta", "=", "value", "-", "today", "if", "delta", ".", "days", "==", "0", ":", "return", "_", "(", "'today'", ")", "elif", "delta", ".", "days", "==", "1", ":", "return", "_", "(", "'tomorrow'", ")", "elif", "delta", ".", "days", "==", "-", "1", ":", "return", "_", "(", "'yesterday'", ")", "return", "defaultfilters", ".", "date", "(", "value", ",", "arg", ")" ]
[ 142, 0 ]
[ 162, 42 ]
python
en
['en', 'error', 'th']
False
naturaltime
(value)
For date and time values show how many seconds, minutes, or hours ago compared to current timestamp return representing string.
For date and time values show how many seconds, minutes, or hours ago compared to current timestamp return representing string.
def naturaltime(value): """ For date and time values show how many seconds, minutes, or hours ago compared to current timestamp return representing string. """ return NaturalTimeFormatter.string_for(value)
[ "def", "naturaltime", "(", "value", ")", ":", "return", "NaturalTimeFormatter", ".", "string_for", "(", "value", ")" ]
[ 168, 0 ]
[ 173, 49 ]
python
en
['en', 'error', 'th']
False
gaussian_kernel
(d, scale=0.05, use_mp=False, return_mp=False)
Gaussian kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale
Gaussian kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale
def gaussian_kernel(d, scale=0.05, use_mp=False, return_mp=False): """ Gaussian kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale """ if not use_mp: if return_mp: print("To return an mpf object, set use_mp=True, returning numpy object...") norm = np.sqrt(2 * np.pi) * scale chi = -0.5 * (d / scale) ** 2 return np.maximum(np.exp(chi) / (norm), min_val) else: if isinstance(d, np.ndarray): is_array = True shape = d.shape d = d.ravel().astype(np.float64) else: is_array = False d = np.array([d]).astype(np.float64) res = [] for dd in d: norm = mp.sqrt(2 * mp.pi) * scale chi = -0.5 * (mp.mpf(dd) / scale) ** 2 res.append(mp.exp(chi) / norm) if is_array: if return_mp: return np.array(res).reshape(shape) else: return np.array(res, dtype=np.float64).reshape(shape) else: if return_mp: return res[0] else: return np.float64(res[0])
[ "def", "gaussian_kernel", "(", "d", ",", "scale", "=", "0.05", ",", "use_mp", "=", "False", ",", "return_mp", "=", "False", ")", ":", "if", "not", "use_mp", ":", "if", "return_mp", ":", "print", "(", "\"To return an mpf object, set use_mp=True, returning numpy object...\"", ")", "norm", "=", "np", ".", "sqrt", "(", "2", "*", "np", ".", "pi", ")", "*", "scale", "chi", "=", "-", "0.5", "*", "(", "d", "/", "scale", ")", "**", "2", "return", "np", ".", "maximum", "(", "np", ".", "exp", "(", "chi", ")", "/", "(", "norm", ")", ",", "min_val", ")", "else", ":", "if", "isinstance", "(", "d", ",", "np", ".", "ndarray", ")", ":", "is_array", "=", "True", "shape", "=", "d", ".", "shape", "d", "=", "d", ".", "ravel", "(", ")", ".", "astype", "(", "np", ".", "float64", ")", "else", ":", "is_array", "=", "False", "d", "=", "np", ".", "array", "(", "[", "d", "]", ")", ".", "astype", "(", "np", ".", "float64", ")", "res", "=", "[", "]", "for", "dd", "in", "d", ":", "norm", "=", "mp", ".", "sqrt", "(", "2", "*", "mp", ".", "pi", ")", "*", "scale", "chi", "=", "-", "0.5", "*", "(", "mp", ".", "mpf", "(", "dd", ")", "/", "scale", ")", "**", "2", "res", ".", "append", "(", "mp", ".", "exp", "(", "chi", ")", "/", "norm", ")", "if", "is_array", ":", "if", "return_mp", ":", "return", "np", ".", "array", "(", "res", ")", ".", "reshape", "(", "shape", ")", "else", ":", "return", "np", ".", "array", "(", "res", ",", "dtype", "=", "np", ".", "float64", ")", ".", "reshape", "(", "shape", ")", "else", ":", "if", "return_mp", ":", "return", "res", "[", "0", "]", "else", ":", "return", "np", ".", "float64", "(", "res", "[", "0", "]", ")" ]
[ 11, 0 ]
[ 51, 41 ]
python
en
['en', 'error', 'th']
False
logistic_kernel
(d, scale=0.05, use_mp=False, return_mp=False)
logistic kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale
logistic kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale
def logistic_kernel(d, scale=0.05, use_mp=False, return_mp=False): """ logistic kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale """ if not use_mp: if return_mp: print("To return an mpf object, set use_mp=True, returning numpy object...") pos_exp = np.exp(d / scale) neg_exp = np.exp(-d / scale) return np.maximum(1.0 / (scale * (pos_exp + neg_exp + 2)), min_val) else: if isinstance(d, np.ndarray): is_array = True shape = d.shape d = d.ravel().astype(np.float64) else: is_array = False d = np.array([d]).astype(np.float64) res = [] for dd in d: pos_exp = mp.exp(mp.mpf(dd) / scale) neg_exp = mp.exp(-mp.mpf(dd) / scale) res.append(1.0 / (scale * (pos_exp + neg_exp + 2.0))) if is_array: if return_mp: return np.array(res).reshape(shape) else: return np.array(res, dtype=np.float64).reshape(shape) else: if return_mp: return res[0] else: return np.float64(res[0])
[ "def", "logistic_kernel", "(", "d", ",", "scale", "=", "0.05", ",", "use_mp", "=", "False", ",", "return_mp", "=", "False", ")", ":", "if", "not", "use_mp", ":", "if", "return_mp", ":", "print", "(", "\"To return an mpf object, set use_mp=True, returning numpy object...\"", ")", "pos_exp", "=", "np", ".", "exp", "(", "d", "/", "scale", ")", "neg_exp", "=", "np", ".", "exp", "(", "-", "d", "/", "scale", ")", "return", "np", ".", "maximum", "(", "1.0", "/", "(", "scale", "*", "(", "pos_exp", "+", "neg_exp", "+", "2", ")", ")", ",", "min_val", ")", "else", ":", "if", "isinstance", "(", "d", ",", "np", ".", "ndarray", ")", ":", "is_array", "=", "True", "shape", "=", "d", ".", "shape", "d", "=", "d", ".", "ravel", "(", ")", ".", "astype", "(", "np", ".", "float64", ")", "else", ":", "is_array", "=", "False", "d", "=", "np", ".", "array", "(", "[", "d", "]", ")", ".", "astype", "(", "np", ".", "float64", ")", "res", "=", "[", "]", "for", "dd", "in", "d", ":", "pos_exp", "=", "mp", ".", "exp", "(", "mp", ".", "mpf", "(", "dd", ")", "/", "scale", ")", "neg_exp", "=", "mp", ".", "exp", "(", "-", "mp", ".", "mpf", "(", "dd", ")", "/", "scale", ")", "res", ".", "append", "(", "1.0", "/", "(", "scale", "*", "(", "pos_exp", "+", "neg_exp", "+", "2.0", ")", ")", ")", "if", "is_array", ":", "if", "return_mp", ":", "return", "np", ".", "array", "(", "res", ")", ".", "reshape", "(", "shape", ")", "else", ":", "return", "np", ".", "array", "(", "res", ",", "dtype", "=", "np", ".", "float64", ")", ".", "reshape", "(", "shape", ")", "else", ":", "if", "return_mp", ":", "return", "res", "[", "0", "]", "else", ":", "return", "np", ".", "float64", "(", "res", "[", "0", "]", ")" ]
[ 57, 0 ]
[ 97, 41 ]
python
en
['en', 'error', 'th']
False
sigmoid_kernel
(d, scale=0.05, use_mp=False, return_mp=False)
Sigmoid kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale
Sigmoid kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale
def sigmoid_kernel(d, scale=0.05, use_mp=False, return_mp=False): """ Sigmoid kernel with scale :param d: the input of the kernel, might be numpy array but not mp matrix :param scale: the scale :param use_mp: if True use mpmath :param return_mp: if True return mpf object :return: the kernel evaluated at d and scale """ if not use_mp: if return_mp: print("To return an mpf object, set use_mp=True, returning numpy object...") pos_exp = np.exp(d/scale) neg_exp = np.exp(-d/scale) return np.maximum(2.0/(np.pi*scale*(pos_exp + neg_exp)), min_val) else: if isinstance(d, np.ndarray): is_array = True shape = d.shape d = d.ravel().astype(np.float64) else: is_array = False d = np.array([d]).astype(np.float64) res = [] for dd in d: pos_exp = mp.exp(mp.mpf(dd) / scale) neg_exp = mp.exp(-mp.mpf(dd) / scale) res.append(2.0 / (mp.pi * scale * (pos_exp + neg_exp))) if is_array: if return_mp: return np.array(res).reshape(shape) else: return np.array(res, dtype=np.float64).reshape(shape) else: if return_mp: return res[0] else: return np.float64(res[0])
[ "def", "sigmoid_kernel", "(", "d", ",", "scale", "=", "0.05", ",", "use_mp", "=", "False", ",", "return_mp", "=", "False", ")", ":", "if", "not", "use_mp", ":", "if", "return_mp", ":", "print", "(", "\"To return an mpf object, set use_mp=True, returning numpy object...\"", ")", "pos_exp", "=", "np", ".", "exp", "(", "d", "/", "scale", ")", "neg_exp", "=", "np", ".", "exp", "(", "-", "d", "/", "scale", ")", "return", "np", ".", "maximum", "(", "2.0", "/", "(", "np", ".", "pi", "*", "scale", "*", "(", "pos_exp", "+", "neg_exp", ")", ")", ",", "min_val", ")", "else", ":", "if", "isinstance", "(", "d", ",", "np", ".", "ndarray", ")", ":", "is_array", "=", "True", "shape", "=", "d", ".", "shape", "d", "=", "d", ".", "ravel", "(", ")", ".", "astype", "(", "np", ".", "float64", ")", "else", ":", "is_array", "=", "False", "d", "=", "np", ".", "array", "(", "[", "d", "]", ")", ".", "astype", "(", "np", ".", "float64", ")", "res", "=", "[", "]", "for", "dd", "in", "d", ":", "pos_exp", "=", "mp", ".", "exp", "(", "mp", ".", "mpf", "(", "dd", ")", "/", "scale", ")", "neg_exp", "=", "mp", ".", "exp", "(", "-", "mp", ".", "mpf", "(", "dd", ")", "/", "scale", ")", "res", ".", "append", "(", "2.0", "/", "(", "mp", ".", "pi", "*", "scale", "*", "(", "pos_exp", "+", "neg_exp", ")", ")", ")", "if", "is_array", ":", "if", "return_mp", ":", "return", "np", ".", "array", "(", "res", ")", ".", "reshape", "(", "shape", ")", "else", ":", "return", "np", ".", "array", "(", "res", ",", "dtype", "=", "np", ".", "float64", ")", ".", "reshape", "(", "shape", ")", "else", ":", "if", "return_mp", ":", "return", "res", "[", "0", "]", "else", ":", "return", "np", ".", "float64", "(", "res", "[", "0", "]", ")" ]
[ 101, 0 ]
[ 141, 41 ]
python
en
['en', 'error', 'th']
False
mp_mean
(arr)
Calculates the mean of the array of mpf values :param arr: array of mp.mpf floats :return: the mean as mpf
Calculates the mean of the array of mpf values :param arr: array of mp.mpf floats :return: the mean as mpf
def mp_mean(arr): """ Calculates the mean of the array of mpf values :param arr: array of mp.mpf floats :return: the mean as mpf """ arr = arr.ravel() N = arr.size res = mp.mpf(0.0) for a in arr: res = res + a return res/N
[ "def", "mp_mean", "(", "arr", ")", ":", "arr", "=", "arr", ".", "ravel", "(", ")", "N", "=", "arr", ".", "size", "res", "=", "mp", ".", "mpf", "(", "0.0", ")", "for", "a", "in", "arr", ":", "res", "=", "res", "+", "a", "return", "res", "/", "N" ]
[ 146, 0 ]
[ 159, 16 ]
python
en
['en', 'error', 'th']
False
mp_std
(arr, ddof=0)
Calculates the standard deviation of the array of mpf values :param arr: array of mp.mpf floats :param ddof: Means Delta Degrees of Freedom. The divisor used in calculations is N - ddof, where N represents the number of elements. By default ddof is zero. (np.std convention) :return: the mean as mpf
Calculates the standard deviation of the array of mpf values :param arr: array of mp.mpf floats :param ddof: Means Delta Degrees of Freedom. The divisor used in calculations is N - ddof, where N represents the number of elements. By default ddof is zero. (np.std convention) :return: the mean as mpf
def mp_std(arr, ddof=0): """ Calculates the standard deviation of the array of mpf values :param arr: array of mp.mpf floats :param ddof: Means Delta Degrees of Freedom. The divisor used in calculations is N - ddof, where N represents the number of elements. By default ddof is zero. (np.std convention) :return: the mean as mpf """ arr = arr.ravel() N = arr.size # get the mean mean = mp_mean(arr) res = mp.mpf(0.0) for a in arr: res = res + (a - mean)**2 return mp.sqrt(res / (N - ddof))
[ "def", "mp_std", "(", "arr", ",", "ddof", "=", "0", ")", ":", "arr", "=", "arr", ".", "ravel", "(", ")", "N", "=", "arr", ".", "size", "# get the mean", "mean", "=", "mp_mean", "(", "arr", ")", "res", "=", "mp", ".", "mpf", "(", "0.0", ")", "for", "a", "in", "arr", ":", "res", "=", "res", "+", "(", "a", "-", "mean", ")", "**", "2", "return", "mp", ".", "sqrt", "(", "res", "/", "(", "N", "-", "ddof", ")", ")" ]
[ 163, 0 ]
[ 181, 36 ]
python
en
['en', 'error', 'th']
False
format_command_result
( command_args, # type: List[str] command_output, # type: str )
Format command information for logging.
Format command information for logging.
def format_command_result( command_args, # type: List[str] command_output, # type: str ): # type: (...) -> str """Format command information for logging.""" command_desc = format_command_args(command_args) text = f'Command arguments: {command_desc}\n' if not command_output: text += 'Command output: None' elif logger.getEffectiveLevel() > logging.DEBUG: text += 'Command output: [use --verbose to show]' else: if not command_output.endswith('\n'): command_output += '\n' text += f'Command output:\n{command_output}{LOG_DIVIDER}' return text
[ "def", "format_command_result", "(", "command_args", ",", "# type: List[str]", "command_output", ",", "# type: str", ")", ":", "# type: (...) -> str", "command_desc", "=", "format_command_args", "(", "command_args", ")", "text", "=", "f'Command arguments: {command_desc}\\n'", "if", "not", "command_output", ":", "text", "+=", "'Command output: None'", "elif", "logger", ".", "getEffectiveLevel", "(", ")", ">", "logging", ".", "DEBUG", ":", "text", "+=", "'Command output: [use --verbose to show]'", "else", ":", "if", "not", "command_output", ".", "endswith", "(", "'\\n'", ")", ":", "command_output", "+=", "'\\n'", "text", "+=", "f'Command output:\\n{command_output}{LOG_DIVIDER}'", "return", "text" ]
[ 15, 0 ]
[ 33, 15 ]
python
en
['en', 'da', 'en']
True
get_legacy_build_wheel_path
( names, # type: List[str] temp_dir, # type: str name, # type: str command_args, # type: List[str] command_output, # type: str )
Return the path to the wheel in the temporary build directory.
Return the path to the wheel in the temporary build directory.
def get_legacy_build_wheel_path( names, # type: List[str] temp_dir, # type: str name, # type: str command_args, # type: List[str] command_output, # type: str ): # type: (...) -> Optional[str] """Return the path to the wheel in the temporary build directory.""" # Sort for determinism. names = sorted(names) if not names: msg = ( 'Legacy build of wheel for {!r} created no files.\n' ).format(name) msg += format_command_result(command_args, command_output) logger.warning(msg) return None if len(names) > 1: msg = ( 'Legacy build of wheel for {!r} created more than one file.\n' 'Filenames (choosing first): {}\n' ).format(name, names) msg += format_command_result(command_args, command_output) logger.warning(msg) return os.path.join(temp_dir, names[0])
[ "def", "get_legacy_build_wheel_path", "(", "names", ",", "# type: List[str]", "temp_dir", ",", "# type: str", "name", ",", "# type: str", "command_args", ",", "# type: List[str]", "command_output", ",", "# type: str", ")", ":", "# type: (...) -> Optional[str]", "# Sort for determinism.", "names", "=", "sorted", "(", "names", ")", "if", "not", "names", ":", "msg", "=", "(", "'Legacy build of wheel for {!r} created no files.\\n'", ")", ".", "format", "(", "name", ")", "msg", "+=", "format_command_result", "(", "command_args", ",", "command_output", ")", "logger", ".", "warning", "(", "msg", ")", "return", "None", "if", "len", "(", "names", ")", ">", "1", ":", "msg", "=", "(", "'Legacy build of wheel for {!r} created more than one file.\\n'", "'Filenames (choosing first): {}\\n'", ")", ".", "format", "(", "name", ",", "names", ")", "msg", "+=", "format_command_result", "(", "command_args", ",", "command_output", ")", "logger", ".", "warning", "(", "msg", ")", "return", "os", ".", "path", ".", "join", "(", "temp_dir", ",", "names", "[", "0", "]", ")" ]
[ 36, 0 ]
[ 63, 43 ]
python
en
['en', 'en', 'en']
True
build_wheel_legacy
( name, # type: str setup_py_path, # type: str source_dir, # type: str global_options, # type: List[str] build_options, # type: List[str] tempd, # type: str )
Build one unpacked package using the "legacy" build process. Returns path to wheel if successfully built. Otherwise, returns None.
Build one unpacked package using the "legacy" build process.
def build_wheel_legacy( name, # type: str setup_py_path, # type: str source_dir, # type: str global_options, # type: List[str] build_options, # type: List[str] tempd, # type: str ): # type: (...) -> Optional[str] """Build one unpacked package using the "legacy" build process. Returns path to wheel if successfully built. Otherwise, returns None. """ wheel_args = make_setuptools_bdist_wheel_args( setup_py_path, global_options=global_options, build_options=build_options, destination_dir=tempd, ) spin_message = f'Building wheel for {name} (setup.py)' with open_spinner(spin_message) as spinner: logger.debug('Destination directory: %s', tempd) try: output = call_subprocess( wheel_args, cwd=source_dir, spinner=spinner, ) except Exception: spinner.finish("error") logger.error('Failed building wheel for %s', name) return None names = os.listdir(tempd) wheel_path = get_legacy_build_wheel_path( names=names, temp_dir=tempd, name=name, command_args=wheel_args, command_output=output, ) return wheel_path
[ "def", "build_wheel_legacy", "(", "name", ",", "# type: str", "setup_py_path", ",", "# type: str", "source_dir", ",", "# type: str", "global_options", ",", "# type: List[str]", "build_options", ",", "# type: List[str]", "tempd", ",", "# type: str", ")", ":", "# type: (...) -> Optional[str]", "wheel_args", "=", "make_setuptools_bdist_wheel_args", "(", "setup_py_path", ",", "global_options", "=", "global_options", ",", "build_options", "=", "build_options", ",", "destination_dir", "=", "tempd", ",", ")", "spin_message", "=", "f'Building wheel for {name} (setup.py)'", "with", "open_spinner", "(", "spin_message", ")", "as", "spinner", ":", "logger", ".", "debug", "(", "'Destination directory: %s'", ",", "tempd", ")", "try", ":", "output", "=", "call_subprocess", "(", "wheel_args", ",", "cwd", "=", "source_dir", ",", "spinner", "=", "spinner", ",", ")", "except", "Exception", ":", "spinner", ".", "finish", "(", "\"error\"", ")", "logger", ".", "error", "(", "'Failed building wheel for %s'", ",", "name", ")", "return", "None", "names", "=", "os", ".", "listdir", "(", "tempd", ")", "wheel_path", "=", "get_legacy_build_wheel_path", "(", "names", "=", "names", ",", "temp_dir", "=", "tempd", ",", "name", "=", "name", ",", "command_args", "=", "wheel_args", ",", "command_output", "=", "output", ",", ")", "return", "wheel_path" ]
[ 66, 0 ]
[ 109, 25 ]
python
en
['en', 'en', 'en']
True
to_native_string
(string, encoding='ascii')
Given a string object, regardless of type, returns a representation of that string in the native string type, encoding and decoding where necessary. This assumes ASCII unless told otherwise.
Given a string object, regardless of type, returns a representation of that string in the native string type, encoding and decoding where necessary. This assumes ASCII unless told otherwise.
def to_native_string(string, encoding='ascii'): """Given a string object, regardless of type, returns a representation of that string in the native string type, encoding and decoding where necessary. This assumes ASCII unless told otherwise. """ if isinstance(string, builtin_str): out = string else: if is_py2: out = string.encode(encoding) else: out = string.decode(encoding) return out
[ "def", "to_native_string", "(", "string", ",", "encoding", "=", "'ascii'", ")", ":", "if", "isinstance", "(", "string", ",", "builtin_str", ")", ":", "out", "=", "string", "else", ":", "if", "is_py2", ":", "out", "=", "string", ".", "encode", "(", "encoding", ")", "else", ":", "out", "=", "string", ".", "decode", "(", "encoding", ")", "return", "out" ]
[ 13, 0 ]
[ 26, 14 ]
python
en
['en', 'en', 'en']
True
unicode_is_ascii
(u_string)
Determine if unicode string only contains ASCII characters. :param str u_string: unicode string to check. Must be unicode and not Python 2 `str`. :rtype: bool
Determine if unicode string only contains ASCII characters.
def unicode_is_ascii(u_string): """Determine if unicode string only contains ASCII characters. :param str u_string: unicode string to check. Must be unicode and not Python 2 `str`. :rtype: bool """ assert isinstance(u_string, str) try: u_string.encode('ascii') return True except UnicodeEncodeError: return False
[ "def", "unicode_is_ascii", "(", "u_string", ")", ":", "assert", "isinstance", "(", "u_string", ",", "str", ")", "try", ":", "u_string", ".", "encode", "(", "'ascii'", ")", "return", "True", "except", "UnicodeEncodeError", ":", "return", "False" ]
[ 29, 0 ]
[ 41, 20 ]
python
en
['en', 'en', 'en']
True
precision_at_k
(ranking, k)
Score is precision @ k Relevance is binary (nonzero is relevant). :param ranking: Relevance scores (list or numpy) in rank order (first element is the first item) :type ranking: list, np.array :param k: length of ranking :type k: int :return: Precision @ k :rtype: float
Score is precision @ k Relevance is binary (nonzero is relevant).
def precision_at_k(ranking, k): """ Score is precision @ k Relevance is binary (nonzero is relevant). :param ranking: Relevance scores (list or numpy) in rank order (first element is the first item) :type ranking: list, np.array :param k: length of ranking :type k: int :return: Precision @ k :rtype: float """ assert k >= 1 ranking = np.asarray(ranking)[:k] != 0 if ranking.size != k: raise ValueError('Relevance score length < k') return np.mean(ranking)
[ "def", "precision_at_k", "(", "ranking", ",", "k", ")", ":", "assert", "k", ">=", "1", "ranking", "=", "np", ".", "asarray", "(", "ranking", ")", "[", ":", "k", "]", "!=", "0", "if", "ranking", ".", "size", "!=", "k", ":", "raise", "ValueError", "(", "'Relevance score length < k'", ")", "return", "np", ".", "mean", "(", "ranking", ")" ]
[ 15, 0 ]
[ 35, 27 ]
python
en
['en', 'error', 'th']
False
average_precision
(ranking)
Score is average precision (area under PR curve). Relevance is binary (nonzero is relevant). :param ranking: Relevance scores (list or numpy) in rank order (first element is the first item) :type ranking: list, np.array :return: Average precision :rtype: float
Score is average precision (area under PR curve). Relevance is binary (nonzero is relevant).
def average_precision(ranking): """ Score is average precision (area under PR curve). Relevance is binary (nonzero is relevant). :param ranking: Relevance scores (list or numpy) in rank order (first element is the first item) :type ranking: list, np.array :return: Average precision :rtype: float """ ranking = np.asarray(ranking) != 0 out = [precision_at_k(ranking, k + 1) for k in range(ranking.size) if ranking[k]] if not out: return 0. return np.mean(out)
[ "def", "average_precision", "(", "ranking", ")", ":", "ranking", "=", "np", ".", "asarray", "(", "ranking", ")", "!=", "0", "out", "=", "[", "precision_at_k", "(", "ranking", ",", "k", "+", "1", ")", "for", "k", "in", "range", "(", "ranking", ".", "size", ")", "if", "ranking", "[", "k", "]", "]", "if", "not", "out", ":", "return", "0.", "return", "np", ".", "mean", "(", "out", ")" ]
[ 38, 0 ]
[ 54, 23 ]
python
en
['en', 'error', 'th']
False
mean_average_precision
(ranking)
Score is mean average precision. Relevance is binary (nonzero is relevant). :param ranking: Relevance scores (list or numpy) in rank order (first element is the first item) :type ranking: list, np.array :return: Mean average precision :rtype: float
Score is mean average precision. Relevance is binary (nonzero is relevant).
def mean_average_precision(ranking): """ Score is mean average precision. Relevance is binary (nonzero is relevant). :param ranking: Relevance scores (list or numpy) in rank order (first element is the first item) :type ranking: list, np.array :return: Mean average precision :rtype: float """ return np.mean([average_precision(r) for r in ranking])
[ "def", "mean_average_precision", "(", "ranking", ")", ":", "return", "np", ".", "mean", "(", "[", "average_precision", "(", "r", ")", "for", "r", "in", "ranking", "]", ")" ]
[ 57, 0 ]
[ 68, 59 ]
python
en
['en', 'error', 'th']
False
ndcg_at_k
(ranking)
Score is normalized discounted cumulative gain (ndcg). Relevance is positive real values. Can use binary as the previous methods. :param ranking: ranking to evaluate in dcg format [0, 0, 1], where 1 is correct info :type ranking: list :return: Normalized discounted cumulative gain :rtype: float
Score is normalized discounted cumulative gain (ndcg). Relevance is positive real values. Can use binary as the previous methods.
def ndcg_at_k(ranking): """ Score is normalized discounted cumulative gain (ndcg). Relevance is positive real values. Can use binary as the previous methods. :param ranking: ranking to evaluate in dcg format [0, 0, 1], where 1 is correct info :type ranking: list :return: Normalized discounted cumulative gain :rtype: float """ ranking = np.asfarray(ranking) r_ideal = np.asfarray(sorted(ranking, reverse=True)) dcg_ideal = r_ideal[0] + np.sum(r_ideal[1:] / np.log2(np.arange(2, r_ideal.size + 1))) dcg_ranking = ranking[0] + np.sum(ranking[1:] / np.log2(np.arange(2, ranking.size + 1))) return dcg_ranking / dcg_ideal
[ "def", "ndcg_at_k", "(", "ranking", ")", ":", "ranking", "=", "np", ".", "asfarray", "(", "ranking", ")", "r_ideal", "=", "np", ".", "asfarray", "(", "sorted", "(", "ranking", ",", "reverse", "=", "True", ")", ")", "dcg_ideal", "=", "r_ideal", "[", "0", "]", "+", "np", ".", "sum", "(", "r_ideal", "[", "1", ":", "]", "/", "np", ".", "log2", "(", "np", ".", "arange", "(", "2", ",", "r_ideal", ".", "size", "+", "1", ")", ")", ")", "dcg_ranking", "=", "ranking", "[", "0", "]", "+", "np", ".", "sum", "(", "ranking", "[", "1", ":", "]", "/", "np", ".", "log2", "(", "np", ".", "arange", "(", "2", ",", "ranking", ".", "size", "+", "1", ")", ")", ")", "return", "dcg_ranking", "/", "dcg_ideal" ]
[ 71, 0 ]
[ 89, 34 ]
python
en
['en', 'error', 'th']
False
unet_weight_map
(batch, wo=10, sigma=5, max_background_ratio=0, set_contours_to_zero=False, dtype=np.float32)
Implementation of Unet weight map as in Ronneberger, O., Fischer, P., & Brox, T. (2015, October). U-net: Convolutional networks for biomedical image segmentation. Parameters ---------- batch : type ND array of shape (batch, Y, X, nchan) of labeld images if nchan>1 function is applied separately on each channel wo : float cf Unet paper sigma : float cf Unet paper max_background_ratio : bool limits the ratio (background volume / foreground volume). useful when foreground is rare, in which case the weight of forground will be: max_background_ratio / (1 + max_background_ratio) if 0, not limit set_contours_to_zero : bool if true, weight of object contours is set to zero dtype : numpy.dtype weight map data type Returns ------- type numpy nd array of same shape as batch
Implementation of Unet weight map as in Ronneberger, O., Fischer, P., & Brox, T. (2015, October). U-net: Convolutional networks for biomedical image segmentation.
def unet_weight_map(batch, wo=10, sigma=5, max_background_ratio=0, set_contours_to_zero=False, dtype=np.float32): """Implementation of Unet weight map as in Ronneberger, O., Fischer, P., & Brox, T. (2015, October). U-net: Convolutional networks for biomedical image segmentation. Parameters ---------- batch : type ND array of shape (batch, Y, X, nchan) of labeld images if nchan>1 function is applied separately on each channel wo : float cf Unet paper sigma : float cf Unet paper max_background_ratio : bool limits the ratio (background volume / foreground volume). useful when foreground is rare, in which case the weight of forground will be: max_background_ratio / (1 + max_background_ratio) if 0, not limit set_contours_to_zero : bool if true, weight of object contours is set to zero dtype : numpy.dtype weight map data type Returns ------- type numpy nd array of same shape as batch """ if batch.shape[-1]>1: wms = [unet_weight_map(batch[...,i:i+1], wo, sigma, max_background_ratio, True, dtype) for i in range(batch.shape[-1])] return np.concatenate(wms, axis=-1) else: s2 = sigma * sigma * 2 wm = weight_map_mask_class_balance(batch, max_background_ratio, True, dtype) if wo>0 or set_contours_to_zero: for i in range(batch.shape[0]): im = batch[i] labels = np.unique(im) labels = labels[labels!=0] if labels.shape[0]>1 and wo>0: edms=[distance_transform_edt(np.invert(im==l)) for l in labels] edm = np.concatenate(edms, axis=-1) edm = np.partition(edm, 1)[...,:2] # get the 2 min values edm = np.sum(edm, axis=-1, keepdims=True) bckg_wm = 1 + wo * np.exp(- edm * edm / s2) bckg_subset = im==0 wm[i][bckg_subset] = bckg_wm[bckg_subset] if labels.shape[0]>0 and set_contours_to_zero: contours = get_contour_mask(im[...,0], fun=_get_contours_binary_2d) wm[i,...,0][contours] = 0 return wm
[ "def", "unet_weight_map", "(", "batch", ",", "wo", "=", "10", ",", "sigma", "=", "5", ",", "max_background_ratio", "=", "0", ",", "set_contours_to_zero", "=", "False", ",", "dtype", "=", "np", ".", "float32", ")", ":", "if", "batch", ".", "shape", "[", "-", "1", "]", ">", "1", ":", "wms", "=", "[", "unet_weight_map", "(", "batch", "[", "...", ",", "i", ":", "i", "+", "1", "]", ",", "wo", ",", "sigma", ",", "max_background_ratio", ",", "True", ",", "dtype", ")", "for", "i", "in", "range", "(", "batch", ".", "shape", "[", "-", "1", "]", ")", "]", "return", "np", ".", "concatenate", "(", "wms", ",", "axis", "=", "-", "1", ")", "else", ":", "s2", "=", "sigma", "*", "sigma", "*", "2", "wm", "=", "weight_map_mask_class_balance", "(", "batch", ",", "max_background_ratio", ",", "True", ",", "dtype", ")", "if", "wo", ">", "0", "or", "set_contours_to_zero", ":", "for", "i", "in", "range", "(", "batch", ".", "shape", "[", "0", "]", ")", ":", "im", "=", "batch", "[", "i", "]", "labels", "=", "np", ".", "unique", "(", "im", ")", "labels", "=", "labels", "[", "labels", "!=", "0", "]", "if", "labels", ".", "shape", "[", "0", "]", ">", "1", "and", "wo", ">", "0", ":", "edms", "=", "[", "distance_transform_edt", "(", "np", ".", "invert", "(", "im", "==", "l", ")", ")", "for", "l", "in", "labels", "]", "edm", "=", "np", ".", "concatenate", "(", "edms", ",", "axis", "=", "-", "1", ")", "edm", "=", "np", ".", "partition", "(", "edm", ",", "1", ")", "[", "...", ",", ":", "2", "]", "# get the 2 min values", "edm", "=", "np", ".", "sum", "(", "edm", ",", "axis", "=", "-", "1", ",", "keepdims", "=", "True", ")", "bckg_wm", "=", "1", "+", "wo", "*", "np", ".", "exp", "(", "-", "edm", "*", "edm", "/", "s2", ")", "bckg_subset", "=", "im", "==", "0", "wm", "[", "i", "]", "[", "bckg_subset", "]", "=", "bckg_wm", "[", "bckg_subset", "]", "if", "labels", ".", "shape", "[", "0", "]", ">", "0", "and", "set_contours_to_zero", ":", "contours", "=", "get_contour_mask", "(", "im", "[", "...", ",", 
"0", "]", ",", "fun", "=", "_get_contours_binary_2d", ")", "wm", "[", "i", ",", "...", ",", "0", "]", "[", "contours", "]", "=", "0", "return", "wm" ]
[ 47, 0 ]
[ 95, 11 ]
python
en
['en', 'en', 'en']
True
multilabel_edt
(label_img, closed_end=True)
multilabel edt requires edt package. along y-axis (1st axis) : out-of-bound is considered as foreground of upper and lower ends if closed_end=False else only for lower end
multilabel edt requires edt package. along y-axis (1st axis) : out-of-bound is considered as foreground of upper and lower ends if closed_end=False else only for lower end
def multilabel_edt(label_img, closed_end=True): ''' multilabel edt requires edt package. along y-axis (1st axis) : out-of-bound is considered as foreground of upper and lower ends if closed_end=False else only for lower end ''' y_up = 1 if closed_end else 0 if len(label_img.shape)==3: squeeze = True label_img = np.squeeze(label_img, -1) else: squeeze=False label_img = edt.edt(np.pad(label_img, pad_width=((y_up, 0),(1, 1)), mode='constant', constant_values=0), black_border=False)[y_up:,1:-1] if squeeze: label_img = np.expand_dims(label_img, -1) return label_img
[ "def", "multilabel_edt", "(", "label_img", ",", "closed_end", "=", "True", ")", ":", "y_up", "=", "1", "if", "closed_end", "else", "0", "if", "len", "(", "label_img", ".", "shape", ")", "==", "3", ":", "squeeze", "=", "True", "label_img", "=", "np", ".", "squeeze", "(", "label_img", ",", "-", "1", ")", "else", ":", "squeeze", "=", "False", "label_img", "=", "edt", ".", "edt", "(", "np", ".", "pad", "(", "label_img", ",", "pad_width", "=", "(", "(", "y_up", ",", "0", ")", ",", "(", "1", ",", "1", ")", ")", ",", "mode", "=", "'constant'", ",", "constant_values", "=", "0", ")", ",", "black_border", "=", "False", ")", "[", "y_up", ":", ",", "1", ":", "-", "1", "]", "if", "squeeze", ":", "label_img", "=", "np", ".", "expand_dims", "(", "label_img", ",", "-", "1", ")", "return", "label_img" ]
[ 115, 0 ]
[ 129, 20 ]
python
en
['en', 'error', 'th']
False
binary_erode_labelwise
(label_img)
in-place erosion of square 8-connectivity, label by label, with border value = 1
in-place erosion of square 8-connectivity, label by label, with border value = 1
def binary_erode_labelwise(label_img): ''' in-place erosion of square 8-connectivity, label by label, with border value = 1 ''' # todo: set structure as argument, but adapt region dilatation to this parameter regDilSize = 1 regions = find_objects(label_img) shape = label_img.shape for val, region in enumerate(regions, start=1): if region is not None: # extend region in order to avoid border effect when set border_value = 1 region = list(region) for i, s in enumerate(region): region[i] = slice(max(0, s.start-regDilSize), min(s.stop+regDilSize, shape[i]), None) region = tuple(region) subregion = label_img[region] eroded = binary_erosion(subregion == val, border_value = 1) subregion[(subregion == val) *np.logical_not(eroded)] = 0
[ "def", "binary_erode_labelwise", "(", "label_img", ")", ":", "# todo: set structure as argument, but adapt region dilatation to this parameter", "regDilSize", "=", "1", "regions", "=", "find_objects", "(", "label_img", ")", "shape", "=", "label_img", ".", "shape", "for", "val", ",", "region", "in", "enumerate", "(", "regions", ",", "start", "=", "1", ")", ":", "if", "region", "is", "not", "None", ":", "# extend region in order to avoid border effect when set border_value = 1", "region", "=", "list", "(", "region", ")", "for", "i", ",", "s", "in", "enumerate", "(", "region", ")", ":", "region", "[", "i", "]", "=", "slice", "(", "max", "(", "0", ",", "s", ".", "start", "-", "regDilSize", ")", ",", "min", "(", "s", ".", "stop", "+", "regDilSize", ",", "shape", "[", "i", "]", ")", ",", "None", ")", "region", "=", "tuple", "(", "region", ")", "subregion", "=", "label_img", "[", "region", "]", "eroded", "=", "binary_erosion", "(", "subregion", "==", "val", ",", "border_value", "=", "1", ")", "subregion", "[", "(", "subregion", "==", "val", ")", "*", "np", ".", "logical_not", "(", "eroded", ")", "]", "=", "0" ]
[ 134, 0 ]
[ 151, 69 ]
python
en
['en', 'error', 'th']
False
random_rotate90_fun
(axes=(0, 1), other_fun=None)
Augmentation function that applied randomly a 90° rotation with a probability of 50% Parameters ---------- axes : type defines the rotation plane. If input is a batch, set (1, 2) other_fun : type other function applied to the input Returns ------- type a function that input a numpy array
Augmentation function that applied randomly a 90° rotation with a probability of 50%
def random_rotate90_fun(axes=(0, 1), other_fun=None): """Augmentation function that applied randomly a 90° rotation with a probability of 50% Parameters ---------- axes : type defines the rotation plane. If input is a batch, set (1, 2) other_fun : type other function applied to the input Returns ------- type a function that input a numpy array """ def func(img): if not not getrandbits(1): img = np.rot90(img, axes=axes) if other_fun is not None: return other_fun(img) else: return img return func
[ "def", "random_rotate90_fun", "(", "axes", "=", "(", "0", ",", "1", ")", ",", "other_fun", "=", "None", ")", ":", "def", "func", "(", "img", ")", ":", "if", "not", "not", "getrandbits", "(", "1", ")", ":", "img", "=", "np", ".", "rot90", "(", "img", ",", "axes", "=", "axes", ")", "if", "other_fun", "is", "not", "None", ":", "return", "other_fun", "(", "img", ")", "else", ":", "return", "img", "return", "func" ]
[ 213, 0 ]
[ 236, 15 ]
python
en
['en', 'en', 'en']
True
random_scaling
(img, center=None, scale=None, alpha_range=[-0.3, 0.17], beta_range=0.07)
Scales the image by this formlua: I' = ( I - ( μ + ( β * std ) ) ) / (std * 10**α). α, β randomly chosen Parameters ---------- img : numpy array center : float default center value, if center, mean is computed on the array scale : float default standard deviation value, if none, std is computed on the array alpha_range : type range in which α is uniformly chosen (if scalar: range is [-alpha_range, alpha_range]) beta_range : type range in which β is uniformly chosen (if scalar: range is [-beta_range, beta_range]) Returns ------- type scaled array
Scales the image by this formlua: I' = ( I - ( μ + ( β * std ) ) ) / (std * 10**α). α, β randomly chosen
def random_scaling(img, center=None, scale=None, alpha_range=[-0.3, 0.17], beta_range=0.07): """Scales the image by this formlua: I' = ( I - ( μ + ( β * std ) ) ) / (std * 10**α). α, β randomly chosen Parameters ---------- img : numpy array center : float default center value, if center, mean is computed on the array scale : float default standard deviation value, if none, std is computed on the array alpha_range : type range in which α is uniformly chosen (if scalar: range is [-alpha_range, alpha_range]) beta_range : type range in which β is uniformly chosen (if scalar: range is [-beta_range, beta_range]) Returns ------- type scaled array """ if center is None: center = img.mean() if scale is None: scale = img.std() if np.isscalar(alpha_range): alpha_range = [-alpha_range, alpha_range] if np.isscalar(beta_range): beta_range = [-beta_range, beta_range] factor = 1. / (scale * 10**uniform(alpha_range[0], alpha_range[1])) center = center + scale * uniform(beta_range[0], beta_range[1]) return (img - center) * factor
[ "def", "random_scaling", "(", "img", ",", "center", "=", "None", ",", "scale", "=", "None", ",", "alpha_range", "=", "[", "-", "0.3", ",", "0.17", "]", ",", "beta_range", "=", "0.07", ")", ":", "if", "center", "is", "None", ":", "center", "=", "img", ".", "mean", "(", ")", "if", "scale", "is", "None", ":", "scale", "=", "img", ".", "std", "(", ")", "if", "np", ".", "isscalar", "(", "alpha_range", ")", ":", "alpha_range", "=", "[", "-", "alpha_range", ",", "alpha_range", "]", "if", "np", ".", "isscalar", "(", "beta_range", ")", ":", "beta_range", "=", "[", "-", "beta_range", ",", "beta_range", "]", "factor", "=", "1.", "/", "(", "scale", "*", "10", "**", "uniform", "(", "alpha_range", "[", "0", "]", ",", "alpha_range", "[", "1", "]", ")", ")", "center", "=", "center", "+", "scale", "*", "uniform", "(", "beta_range", "[", "0", "]", ",", "beta_range", "[", "1", "]", ")", "return", "(", "img", "-", "center", ")", "*", "factor" ]
[ 264, 0 ]
[ 295, 34 ]
python
en
['en', 'ja', 'en']
True
histogram_voodoo
(image, num_control_points=5, intensity=0.5, target_points = None, return_mapping = False)
Adapted from delta software: https://gitlab.com/dunloplab/delta/blob/master/data.py It performs an elastic deformation on the image histogram to simulate changes in illumination
Adapted from delta software: https://gitlab.com/dunloplab/delta/blob/master/data.py It performs an elastic deformation on the image histogram to simulate changes in illumination
def histogram_voodoo(image, num_control_points=5, intensity=0.5, target_points = None, return_mapping = False): ''' Adapted from delta software: https://gitlab.com/dunloplab/delta/blob/master/data.py It performs an elastic deformation on the image histogram to simulate changes in illumination ''' if target_points is not None and len(target_points)!=num_control_points+2: raise ValueError("invalid target_point number") if target_points is None and intensity<=0 or intensity>=1: raise ValueError("Intensity should be in range ]0, 1[") min = image.min() max = image.max() control_points = np.linspace(min, max, num=num_control_points + 2) if target_points is None: target_points = get_histogram_voodoo_target_points(control_points, intensity) elif target_points[0] != min or target_points[-1] != max: #print("target points borders differs: [{};{}] tp: {}".format(min, max, target_points)) target_points[0] = min target_points[-1] = max mapping = interpolate.PchipInterpolator(control_points, target_points) newimage = mapping(image) if return_mapping: return newimage, mapping else: return newimage
[ "def", "histogram_voodoo", "(", "image", ",", "num_control_points", "=", "5", ",", "intensity", "=", "0.5", ",", "target_points", "=", "None", ",", "return_mapping", "=", "False", ")", ":", "if", "target_points", "is", "not", "None", "and", "len", "(", "target_points", ")", "!=", "num_control_points", "+", "2", ":", "raise", "ValueError", "(", "\"invalid target_point number\"", ")", "if", "target_points", "is", "None", "and", "intensity", "<=", "0", "or", "intensity", ">=", "1", ":", "raise", "ValueError", "(", "\"Intensity should be in range ]0, 1[\"", ")", "min", "=", "image", ".", "min", "(", ")", "max", "=", "image", ".", "max", "(", ")", "control_points", "=", "np", ".", "linspace", "(", "min", ",", "max", ",", "num", "=", "num_control_points", "+", "2", ")", "if", "target_points", "is", "None", ":", "target_points", "=", "get_histogram_voodoo_target_points", "(", "control_points", ",", "intensity", ")", "elif", "target_points", "[", "0", "]", "!=", "min", "or", "target_points", "[", "-", "1", "]", "!=", "max", ":", "#print(\"target points borders differs: [{};{}] tp: {}\".format(min, max, target_points))", "target_points", "[", "0", "]", "=", "min", "target_points", "[", "-", "1", "]", "=", "max", "mapping", "=", "interpolate", ".", "PchipInterpolator", "(", "control_points", ",", "target_points", ")", "newimage", "=", "mapping", "(", "image", ")", "if", "return_mapping", ":", "return", "newimage", ",", "mapping", "else", ":", "return", "newimage" ]
[ 374, 0 ]
[ 400, 23 ]
python
en
['en', 'error', 'th']
False
illumination_voodoo
(image, num_control_points=5, intensity=0.8, target_points = None)
Adapted from delta software: https://gitlab.com/dunloplab/delta/blob/master/data.py It simulates a variation in illumination along the length of the chamber
Adapted from delta software: https://gitlab.com/dunloplab/delta/blob/master/data.py It simulates a variation in illumination along the length of the chamber
def illumination_voodoo(image, num_control_points=5, intensity=0.8, target_points = None): ''' Adapted from delta software: https://gitlab.com/dunloplab/delta/blob/master/data.py It simulates a variation in illumination along the length of the chamber ''' if intensity>=1 or intensity<=0: raise ValueError("Intensity should be in range ]0, 1[") if target_points is not None and len(target_points)!=num_control_points: raise ValueError("invalid target_point number") # Create a random curve along the length of the chamber: control_points = np.linspace(0, image.shape[0]-1, num=num_control_points) if target_points is None: target_points = get_illumination_voodoo_target_points(num_control_points, intensity) mapping = interpolate.PchipInterpolator(control_points, target_points) curve = mapping(np.linspace(0,image.shape[0]-1,image.shape[0])) curveIm = np.reshape( np.tile( np.reshape(curve, curve.shape + (1,)), (1, image.shape[1])) ,image.shape ) # Apply this curve to the image intensity along the length of the chamebr: min = image.min() max = image.max() newimage = np.multiply(image-min, curveIm) # Rescale values to original range: newimage = np.interp(newimage, (newimage.min(), newimage.max()), (min, max)) return newimage
[ "def", "illumination_voodoo", "(", "image", ",", "num_control_points", "=", "5", ",", "intensity", "=", "0.8", ",", "target_points", "=", "None", ")", ":", "if", "intensity", ">=", "1", "or", "intensity", "<=", "0", ":", "raise", "ValueError", "(", "\"Intensity should be in range ]0, 1[\"", ")", "if", "target_points", "is", "not", "None", "and", "len", "(", "target_points", ")", "!=", "num_control_points", ":", "raise", "ValueError", "(", "\"invalid target_point number\"", ")", "# Create a random curve along the length of the chamber:", "control_points", "=", "np", ".", "linspace", "(", "0", ",", "image", ".", "shape", "[", "0", "]", "-", "1", ",", "num", "=", "num_control_points", ")", "if", "target_points", "is", "None", ":", "target_points", "=", "get_illumination_voodoo_target_points", "(", "num_control_points", ",", "intensity", ")", "mapping", "=", "interpolate", ".", "PchipInterpolator", "(", "control_points", ",", "target_points", ")", "curve", "=", "mapping", "(", "np", ".", "linspace", "(", "0", ",", "image", ".", "shape", "[", "0", "]", "-", "1", ",", "image", ".", "shape", "[", "0", "]", ")", ")", "curveIm", "=", "np", ".", "reshape", "(", "np", ".", "tile", "(", "np", ".", "reshape", "(", "curve", ",", "curve", ".", "shape", "+", "(", "1", ",", ")", ")", ",", "(", "1", ",", "image", ".", "shape", "[", "1", "]", ")", ")", ",", "image", ".", "shape", ")", "# Apply this curve to the image intensity along the length of the chamebr:", "min", "=", "image", ".", "min", "(", ")", "max", "=", "image", ".", "max", "(", ")", "newimage", "=", "np", ".", "multiply", "(", "image", "-", "min", ",", "curveIm", ")", "# Rescale values to original range:", "newimage", "=", "np", ".", "interp", "(", "newimage", ",", "(", "newimage", ".", "min", "(", ")", ",", "newimage", ".", "max", "(", ")", ")", ",", "(", "min", ",", "max", ")", ")", "return", "newimage" ]
[ 413, 0 ]
[ 436, 19 ]
python
en
['en', 'error', 'th']
False
_detect_gce_environment
()
Determine if the current environment is Compute Engine. Returns: Boolean indicating whether or not the current environment is Google Compute Engine.
Determine if the current environment is Compute Engine.
def _detect_gce_environment(): """Determine if the current environment is Compute Engine. Returns: Boolean indicating whether or not the current environment is Google Compute Engine. """ # NOTE: The explicit ``timeout`` is a workaround. The underlying # issue is that resolving an unknown host on some networks will take # 20-30 seconds; making this timeout short fixes the issue, but # could lead to false negatives in the event that we are on GCE, but # the metadata resolution was particularly slow. The latter case is # "unlikely". http = transport.get_http_object(timeout=GCE_METADATA_TIMEOUT) try: response, _ = transport.request( http, _GCE_METADATA_URI, headers=_GCE_HEADERS) return ( response.status == http_client.OK and response.get(_METADATA_FLAVOR_HEADER) == _DESIRED_METADATA_FLAVOR) except socket.error: # socket.timeout or socket.error(64, 'Host is down') logger.info('Timeout attempting to reach GCE metadata service.') return False
[ "def", "_detect_gce_environment", "(", ")", ":", "# NOTE: The explicit ``timeout`` is a workaround. The underlying", "# issue is that resolving an unknown host on some networks will take", "# 20-30 seconds; making this timeout short fixes the issue, but", "# could lead to false negatives in the event that we are on GCE, but", "# the metadata resolution was particularly slow. The latter case is", "# \"unlikely\".", "http", "=", "transport", ".", "get_http_object", "(", "timeout", "=", "GCE_METADATA_TIMEOUT", ")", "try", ":", "response", ",", "_", "=", "transport", ".", "request", "(", "http", ",", "_GCE_METADATA_URI", ",", "headers", "=", "_GCE_HEADERS", ")", "return", "(", "response", ".", "status", "==", "http_client", ".", "OK", "and", "response", ".", "get", "(", "_METADATA_FLAVOR_HEADER", ")", "==", "_DESIRED_METADATA_FLAVOR", ")", "except", "socket", ".", "error", ":", "# socket.timeout or socket.error(64, 'Host is down')", "logger", ".", "info", "(", "'Timeout attempting to reach GCE metadata service.'", ")", "return", "False" ]
[ 982, 0 ]
[ 1004, 20 ]
python
en
['en', 'en', 'en']
True
_in_gae_environment
()
Detects if the code is running in the App Engine environment. Returns: True if running in the GAE environment, False otherwise.
Detects if the code is running in the App Engine environment.
def _in_gae_environment(): """Detects if the code is running in the App Engine environment. Returns: True if running in the GAE environment, False otherwise. """ if SETTINGS.env_name is not None: return SETTINGS.env_name in ('GAE_PRODUCTION', 'GAE_LOCAL') try: import google.appengine # noqa: unused import except ImportError: pass else: server_software = os.environ.get(_SERVER_SOFTWARE, '') if server_software.startswith('Google App Engine/'): SETTINGS.env_name = 'GAE_PRODUCTION' return True elif server_software.startswith('Development/'): SETTINGS.env_name = 'GAE_LOCAL' return True return False
[ "def", "_in_gae_environment", "(", ")", ":", "if", "SETTINGS", ".", "env_name", "is", "not", "None", ":", "return", "SETTINGS", ".", "env_name", "in", "(", "'GAE_PRODUCTION'", ",", "'GAE_LOCAL'", ")", "try", ":", "import", "google", ".", "appengine", "# noqa: unused import", "except", "ImportError", ":", "pass", "else", ":", "server_software", "=", "os", ".", "environ", ".", "get", "(", "_SERVER_SOFTWARE", ",", "''", ")", "if", "server_software", ".", "startswith", "(", "'Google App Engine/'", ")", ":", "SETTINGS", ".", "env_name", "=", "'GAE_PRODUCTION'", "return", "True", "elif", "server_software", ".", "startswith", "(", "'Development/'", ")", ":", "SETTINGS", ".", "env_name", "=", "'GAE_LOCAL'", "return", "True", "return", "False" ]
[ 1007, 0 ]
[ 1029, 16 ]
python
en
['en', 'en', 'en']
True
_in_gce_environment
()
Detect if the code is running in the Compute Engine environment. Returns: True if running in the GCE environment, False otherwise.
Detect if the code is running in the Compute Engine environment.
def _in_gce_environment(): """Detect if the code is running in the Compute Engine environment. Returns: True if running in the GCE environment, False otherwise. """ if SETTINGS.env_name is not None: return SETTINGS.env_name == 'GCE_PRODUCTION' if NO_GCE_CHECK != 'True' and _detect_gce_environment(): SETTINGS.env_name = 'GCE_PRODUCTION' return True return False
[ "def", "_in_gce_environment", "(", ")", ":", "if", "SETTINGS", ".", "env_name", "is", "not", "None", ":", "return", "SETTINGS", ".", "env_name", "==", "'GCE_PRODUCTION'", "if", "NO_GCE_CHECK", "!=", "'True'", "and", "_detect_gce_environment", "(", ")", ":", "SETTINGS", ".", "env_name", "=", "'GCE_PRODUCTION'", "return", "True", "return", "False" ]
[ 1032, 0 ]
[ 1044, 16 ]
python
en
['en', 'en', 'en']
True
_save_private_file
(filename, json_contents)
Saves a file with read-write permissions on for the owner. Args: filename: String. Absolute path to file. json_contents: JSON serializable object to be saved.
Saves a file with read-write permissions on for the owner.
def _save_private_file(filename, json_contents): """Saves a file with read-write permissions on for the owner. Args: filename: String. Absolute path to file. json_contents: JSON serializable object to be saved. """ temp_filename = tempfile.mktemp() file_desc = os.open(temp_filename, os.O_WRONLY | os.O_CREAT, 0o600) with os.fdopen(file_desc, 'w') as file_handle: json.dump(json_contents, file_handle, sort_keys=True, indent=2, separators=(',', ': ')) shutil.move(temp_filename, filename)
[ "def", "_save_private_file", "(", "filename", ",", "json_contents", ")", ":", "temp_filename", "=", "tempfile", ".", "mktemp", "(", ")", "file_desc", "=", "os", ".", "open", "(", "temp_filename", ",", "os", ".", "O_WRONLY", "|", "os", ".", "O_CREAT", ",", "0o600", ")", "with", "os", ".", "fdopen", "(", "file_desc", ",", "'w'", ")", "as", "file_handle", ":", "json", ".", "dump", "(", "json_contents", ",", "file_handle", ",", "sort_keys", "=", "True", ",", "indent", "=", "2", ",", "separators", "=", "(", "','", ",", "': '", ")", ")", "shutil", ".", "move", "(", "temp_filename", ",", "filename", ")" ]
[ 1302, 0 ]
[ 1314, 40 ]
python
en
['en', 'en', 'en']
True
save_to_well_known_file
(credentials, well_known_file=None)
Save the provided GoogleCredentials to the well known file. Args: credentials: the credentials to be saved to the well known file; it should be an instance of GoogleCredentials well_known_file: the name of the file where the credentials are to be saved; this parameter is supposed to be used for testing only
Save the provided GoogleCredentials to the well known file.
def save_to_well_known_file(credentials, well_known_file=None): """Save the provided GoogleCredentials to the well known file. Args: credentials: the credentials to be saved to the well known file; it should be an instance of GoogleCredentials well_known_file: the name of the file where the credentials are to be saved; this parameter is supposed to be used for testing only """ # TODO(orestica): move this method to tools.py # once the argparse import gets fixed (it is not present in Python 2.6) if well_known_file is None: well_known_file = _get_well_known_file() config_dir = os.path.dirname(well_known_file) if not os.path.isdir(config_dir): raise OSError( 'Config directory does not exist: {0}'.format(config_dir)) credentials_data = credentials.serialization_data _save_private_file(well_known_file, credentials_data)
[ "def", "save_to_well_known_file", "(", "credentials", ",", "well_known_file", "=", "None", ")", ":", "# TODO(orestica): move this method to tools.py", "# once the argparse import gets fixed (it is not present in Python 2.6)", "if", "well_known_file", "is", "None", ":", "well_known_file", "=", "_get_well_known_file", "(", ")", "config_dir", "=", "os", ".", "path", ".", "dirname", "(", "well_known_file", ")", "if", "not", "os", ".", "path", ".", "isdir", "(", "config_dir", ")", ":", "raise", "OSError", "(", "'Config directory does not exist: {0}'", ".", "format", "(", "config_dir", ")", ")", "credentials_data", "=", "credentials", ".", "serialization_data", "_save_private_file", "(", "well_known_file", ",", "credentials_data", ")" ]
[ 1317, 0 ]
[ 1339, 57 ]
python
en
['en', 'en', 'en']
True
_get_well_known_file
()
Get the well known file produced by command 'gcloud auth login'.
Get the well known file produced by command 'gcloud auth login'.
def _get_well_known_file(): """Get the well known file produced by command 'gcloud auth login'.""" # TODO(orestica): Revisit this method once gcloud provides a better way # of pinpointing the exact location of the file. default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR) if default_config_dir is None: if os.name == 'nt': try: default_config_dir = os.path.join(os.environ['APPDATA'], _CLOUDSDK_CONFIG_DIRECTORY) except KeyError: # This should never happen unless someone is really # messing with things. drive = os.environ.get('SystemDrive', 'C:') default_config_dir = os.path.join(drive, '\\', _CLOUDSDK_CONFIG_DIRECTORY) else: default_config_dir = os.path.join(os.path.expanduser('~'), '.config', _CLOUDSDK_CONFIG_DIRECTORY) return os.path.join(default_config_dir, _WELL_KNOWN_CREDENTIALS_FILE)
[ "def", "_get_well_known_file", "(", ")", ":", "# TODO(orestica): Revisit this method once gcloud provides a better way", "# of pinpointing the exact location of the file.", "default_config_dir", "=", "os", ".", "getenv", "(", "_CLOUDSDK_CONFIG_ENV_VAR", ")", "if", "default_config_dir", "is", "None", ":", "if", "os", ".", "name", "==", "'nt'", ":", "try", ":", "default_config_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "environ", "[", "'APPDATA'", "]", ",", "_CLOUDSDK_CONFIG_DIRECTORY", ")", "except", "KeyError", ":", "# This should never happen unless someone is really", "# messing with things.", "drive", "=", "os", ".", "environ", ".", "get", "(", "'SystemDrive'", ",", "'C:'", ")", "default_config_dir", "=", "os", ".", "path", ".", "join", "(", "drive", ",", "'\\\\'", ",", "_CLOUDSDK_CONFIG_DIRECTORY", ")", "else", ":", "default_config_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "expanduser", "(", "'~'", ")", ",", "'.config'", ",", "_CLOUDSDK_CONFIG_DIRECTORY", ")", "return", "os", ".", "path", ".", "join", "(", "default_config_dir", ",", "_WELL_KNOWN_CREDENTIALS_FILE", ")" ]
[ 1357, 0 ]
[ 1378, 73 ]
python
en
['en', 'en', 'en']
True
_get_application_default_credential_from_file
(filename)
Build the Application Default Credentials from file.
Build the Application Default Credentials from file.
def _get_application_default_credential_from_file(filename): """Build the Application Default Credentials from file.""" # read the credentials from the file with open(filename) as file_obj: client_credentials = json.load(file_obj) credentials_type = client_credentials.get('type') if credentials_type == AUTHORIZED_USER: required_fields = set(['client_id', 'client_secret', 'refresh_token']) elif credentials_type == SERVICE_ACCOUNT: required_fields = set(['client_id', 'client_email', 'private_key_id', 'private_key']) else: raise ApplicationDefaultCredentialsError( "'type' field should be defined (and have one of the '" + AUTHORIZED_USER + "' or '" + SERVICE_ACCOUNT + "' values)") missing_fields = required_fields.difference(client_credentials.keys()) if missing_fields: _raise_exception_for_missing_fields(missing_fields) if client_credentials['type'] == AUTHORIZED_USER: return GoogleCredentials( access_token=None, client_id=client_credentials['client_id'], client_secret=client_credentials['client_secret'], refresh_token=client_credentials['refresh_token'], token_expiry=None, token_uri=oauth2client.GOOGLE_TOKEN_URI, user_agent='Python client library') else: # client_credentials['type'] == SERVICE_ACCOUNT from oauth2client import service_account return service_account._JWTAccessCredentials.from_json_keyfile_dict( client_credentials)
[ "def", "_get_application_default_credential_from_file", "(", "filename", ")", ":", "# read the credentials from the file", "with", "open", "(", "filename", ")", "as", "file_obj", ":", "client_credentials", "=", "json", ".", "load", "(", "file_obj", ")", "credentials_type", "=", "client_credentials", ".", "get", "(", "'type'", ")", "if", "credentials_type", "==", "AUTHORIZED_USER", ":", "required_fields", "=", "set", "(", "[", "'client_id'", ",", "'client_secret'", ",", "'refresh_token'", "]", ")", "elif", "credentials_type", "==", "SERVICE_ACCOUNT", ":", "required_fields", "=", "set", "(", "[", "'client_id'", ",", "'client_email'", ",", "'private_key_id'", ",", "'private_key'", "]", ")", "else", ":", "raise", "ApplicationDefaultCredentialsError", "(", "\"'type' field should be defined (and have one of the '\"", "+", "AUTHORIZED_USER", "+", "\"' or '\"", "+", "SERVICE_ACCOUNT", "+", "\"' values)\"", ")", "missing_fields", "=", "required_fields", ".", "difference", "(", "client_credentials", ".", "keys", "(", ")", ")", "if", "missing_fields", ":", "_raise_exception_for_missing_fields", "(", "missing_fields", ")", "if", "client_credentials", "[", "'type'", "]", "==", "AUTHORIZED_USER", ":", "return", "GoogleCredentials", "(", "access_token", "=", "None", ",", "client_id", "=", "client_credentials", "[", "'client_id'", "]", ",", "client_secret", "=", "client_credentials", "[", "'client_secret'", "]", ",", "refresh_token", "=", "client_credentials", "[", "'refresh_token'", "]", ",", "token_expiry", "=", "None", ",", "token_uri", "=", "oauth2client", ".", "GOOGLE_TOKEN_URI", ",", "user_agent", "=", "'Python client library'", ")", "else", ":", "# client_credentials['type'] == SERVICE_ACCOUNT", "from", "oauth2client", "import", "service_account", "return", "service_account", ".", "_JWTAccessCredentials", ".", "from_json_keyfile_dict", "(", "client_credentials", ")" ]
[ 1381, 0 ]
[ 1415, 31 ]
python
en
['en', 'en', 'en']
True
_require_crypto_or_die
()
Ensure we have a crypto library, or throw CryptoUnavailableError. The oauth2client.crypt module requires either PyCrypto or PyOpenSSL to be available in order to function, but these are optional dependencies.
Ensure we have a crypto library, or throw CryptoUnavailableError.
def _require_crypto_or_die(): """Ensure we have a crypto library, or throw CryptoUnavailableError. The oauth2client.crypt module requires either PyCrypto or PyOpenSSL to be available in order to function, but these are optional dependencies. """ if not HAS_CRYPTO: raise CryptoUnavailableError('No crypto library available')
[ "def", "_require_crypto_or_die", "(", ")", ":", "if", "not", "HAS_CRYPTO", ":", "raise", "CryptoUnavailableError", "(", "'No crypto library available'", ")" ]
[ 1517, 0 ]
[ 1525, 67 ]
python
en
['en', 'en', 'en']
True
verify_id_token
(id_token, audience, http=None, cert_uri=ID_TOKEN_VERIFICATION_CERTS)
Verifies a signed JWT id_token. This function requires PyOpenSSL and because of that it does not work on App Engine. Args: id_token: string, A Signed JWT. audience: string, The audience 'aud' that the token should be for. http: httplib2.Http, instance to use to make the HTTP request. Callers should supply an instance that has caching enabled. cert_uri: string, URI of the certificates in JSON format to verify the JWT against. Returns: The deserialized JSON in the JWT. Raises: oauth2client.crypt.AppIdentityError: if the JWT fails to verify. CryptoUnavailableError: if no crypto library is available.
Verifies a signed JWT id_token.
def verify_id_token(id_token, audience, http=None, cert_uri=ID_TOKEN_VERIFICATION_CERTS): """Verifies a signed JWT id_token. This function requires PyOpenSSL and because of that it does not work on App Engine. Args: id_token: string, A Signed JWT. audience: string, The audience 'aud' that the token should be for. http: httplib2.Http, instance to use to make the HTTP request. Callers should supply an instance that has caching enabled. cert_uri: string, URI of the certificates in JSON format to verify the JWT against. Returns: The deserialized JSON in the JWT. Raises: oauth2client.crypt.AppIdentityError: if the JWT fails to verify. CryptoUnavailableError: if no crypto library is available. """ _require_crypto_or_die() if http is None: http = transport.get_cached_http() resp, content = transport.request(http, cert_uri) if resp.status == http_client.OK: certs = json.loads(_helpers._from_bytes(content)) return crypt.verify_signed_jwt_with_certs(id_token, certs, audience) else: raise VerifyJwtTokenError('Status code: {0}'.format(resp.status))
[ "def", "verify_id_token", "(", "id_token", ",", "audience", ",", "http", "=", "None", ",", "cert_uri", "=", "ID_TOKEN_VERIFICATION_CERTS", ")", ":", "_require_crypto_or_die", "(", ")", "if", "http", "is", "None", ":", "http", "=", "transport", ".", "get_cached_http", "(", ")", "resp", ",", "content", "=", "transport", ".", "request", "(", "http", ",", "cert_uri", ")", "if", "resp", ".", "status", "==", "http_client", ".", "OK", ":", "certs", "=", "json", ".", "loads", "(", "_helpers", ".", "_from_bytes", "(", "content", ")", ")", "return", "crypt", ".", "verify_signed_jwt_with_certs", "(", "id_token", ",", "certs", ",", "audience", ")", "else", ":", "raise", "VerifyJwtTokenError", "(", "'Status code: {0}'", ".", "format", "(", "resp", ".", "status", ")", ")" ]
[ 1529, 0 ]
[ 1560, 73 ]
python
en
['en', 'en', 'en']
True
_extract_id_token
(id_token)
Extract the JSON payload from a JWT. Does the extraction w/o checking the signature. Args: id_token: string or bytestring, OAuth 2.0 id_token. Returns: object, The deserialized JSON payload.
Extract the JSON payload from a JWT.
def _extract_id_token(id_token): """Extract the JSON payload from a JWT. Does the extraction w/o checking the signature. Args: id_token: string or bytestring, OAuth 2.0 id_token. Returns: object, The deserialized JSON payload. """ if type(id_token) == bytes: segments = id_token.split(b'.') else: segments = id_token.split(u'.') if len(segments) != 3: raise VerifyJwtTokenError( 'Wrong number of segments in token: {0}'.format(id_token)) return json.loads( _helpers._from_bytes(_helpers._urlsafe_b64decode(segments[1])))
[ "def", "_extract_id_token", "(", "id_token", ")", ":", "if", "type", "(", "id_token", ")", "==", "bytes", ":", "segments", "=", "id_token", ".", "split", "(", "b'.'", ")", "else", ":", "segments", "=", "id_token", ".", "split", "(", "u'.'", ")", "if", "len", "(", "segments", ")", "!=", "3", ":", "raise", "VerifyJwtTokenError", "(", "'Wrong number of segments in token: {0}'", ".", "format", "(", "id_token", ")", ")", "return", "json", ".", "loads", "(", "_helpers", ".", "_from_bytes", "(", "_helpers", ".", "_urlsafe_b64decode", "(", "segments", "[", "1", "]", ")", ")", ")" ]
[ 1563, 0 ]
[ 1584, 71 ]
python
en
['en', 'en', 'en']
True
_parse_exchange_token_response
(content)
Parses response of an exchange token request. Most providers return JSON but some (e.g. Facebook) return a url-encoded string. Args: content: The body of a response Returns: Content as a dictionary object. Note that the dict could be empty, i.e. {}. That basically indicates a failure.
Parses response of an exchange token request.
def _parse_exchange_token_response(content): """Parses response of an exchange token request. Most providers return JSON but some (e.g. Facebook) return a url-encoded string. Args: content: The body of a response Returns: Content as a dictionary object. Note that the dict could be empty, i.e. {}. That basically indicates a failure. """ resp = {} content = _helpers._from_bytes(content) try: resp = json.loads(content) except Exception: # different JSON libs raise different exceptions, # so we just do a catch-all here resp = _helpers.parse_unique_urlencoded(content) # some providers respond with 'expires', others with 'expires_in' if resp and 'expires' in resp: resp['expires_in'] = resp.pop('expires') return resp
[ "def", "_parse_exchange_token_response", "(", "content", ")", ":", "resp", "=", "{", "}", "content", "=", "_helpers", ".", "_from_bytes", "(", "content", ")", "try", ":", "resp", "=", "json", ".", "loads", "(", "content", ")", "except", "Exception", ":", "# different JSON libs raise different exceptions,", "# so we just do a catch-all here", "resp", "=", "_helpers", ".", "parse_unique_urlencoded", "(", "content", ")", "# some providers respond with 'expires', others with 'expires_in'", "if", "resp", "and", "'expires'", "in", "resp", ":", "resp", "[", "'expires_in'", "]", "=", "resp", ".", "pop", "(", "'expires'", ")", "return", "resp" ]
[ 1587, 0 ]
[ 1613, 15 ]
python
en
['en', 'en', 'en']
True
credentials_from_code
(client_id, client_secret, scope, code, redirect_uri='postmessage', http=None, user_agent=None, token_uri=oauth2client.GOOGLE_TOKEN_URI, auth_uri=oauth2client.GOOGLE_AUTH_URI, revoke_uri=oauth2client.GOOGLE_REVOKE_URI, device_uri=oauth2client.GOOGLE_DEVICE_URI, token_info_uri=oauth2client.GOOGLE_TOKEN_INFO_URI, pkce=False, code_verifier=None)
Exchanges an authorization code for an OAuth2Credentials object. Args: client_id: string, client identifier. client_secret: string, client secret. scope: string or iterable of strings, scope(s) to request. code: string, An authorization code, most likely passed down from the client redirect_uri: string, this is generally set to 'postmessage' to match the redirect_uri that the client specified http: httplib2.Http, optional http instance to use to do the fetch token_uri: string, URI for token endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. auth_uri: string, URI for authorization endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. revoke_uri: string, URI for revoke endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. device_uri: string, URI for device authorization endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. pkce: boolean, default: False, Generate and include a "Proof Key for Code Exchange" (PKCE) with your authorization and token requests. This adds security for installed applications that cannot protect a client_secret. See RFC 7636 for details. code_verifier: bytestring or None, default: None, parameter passed as part of the code exchange when pkce=True. If None, a code_verifier will automatically be generated as part of step1_get_authorize_url(). See RFC 7636 for details. Returns: An OAuth2Credentials object. Raises: FlowExchangeError if the authorization code cannot be exchanged for an access token
Exchanges an authorization code for an OAuth2Credentials object.
def credentials_from_code(client_id, client_secret, scope, code, redirect_uri='postmessage', http=None, user_agent=None, token_uri=oauth2client.GOOGLE_TOKEN_URI, auth_uri=oauth2client.GOOGLE_AUTH_URI, revoke_uri=oauth2client.GOOGLE_REVOKE_URI, device_uri=oauth2client.GOOGLE_DEVICE_URI, token_info_uri=oauth2client.GOOGLE_TOKEN_INFO_URI, pkce=False, code_verifier=None): """Exchanges an authorization code for an OAuth2Credentials object. Args: client_id: string, client identifier. client_secret: string, client secret. scope: string or iterable of strings, scope(s) to request. code: string, An authorization code, most likely passed down from the client redirect_uri: string, this is generally set to 'postmessage' to match the redirect_uri that the client specified http: httplib2.Http, optional http instance to use to do the fetch token_uri: string, URI for token endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. auth_uri: string, URI for authorization endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. revoke_uri: string, URI for revoke endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. device_uri: string, URI for device authorization endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. pkce: boolean, default: False, Generate and include a "Proof Key for Code Exchange" (PKCE) with your authorization and token requests. This adds security for installed applications that cannot protect a client_secret. See RFC 7636 for details. code_verifier: bytestring or None, default: None, parameter passed as part of the code exchange when pkce=True. If None, a code_verifier will automatically be generated as part of step1_get_authorize_url(). See RFC 7636 for details. Returns: An OAuth2Credentials object. 
Raises: FlowExchangeError if the authorization code cannot be exchanged for an access token """ flow = OAuth2WebServerFlow(client_id, client_secret, scope, redirect_uri=redirect_uri, user_agent=user_agent, auth_uri=auth_uri, token_uri=token_uri, revoke_uri=revoke_uri, device_uri=device_uri, token_info_uri=token_info_uri, pkce=pkce, code_verifier=code_verifier) credentials = flow.step2_exchange(code, http=http) return credentials
[ "def", "credentials_from_code", "(", "client_id", ",", "client_secret", ",", "scope", ",", "code", ",", "redirect_uri", "=", "'postmessage'", ",", "http", "=", "None", ",", "user_agent", "=", "None", ",", "token_uri", "=", "oauth2client", ".", "GOOGLE_TOKEN_URI", ",", "auth_uri", "=", "oauth2client", ".", "GOOGLE_AUTH_URI", ",", "revoke_uri", "=", "oauth2client", ".", "GOOGLE_REVOKE_URI", ",", "device_uri", "=", "oauth2client", ".", "GOOGLE_DEVICE_URI", ",", "token_info_uri", "=", "oauth2client", ".", "GOOGLE_TOKEN_INFO_URI", ",", "pkce", "=", "False", ",", "code_verifier", "=", "None", ")", ":", "flow", "=", "OAuth2WebServerFlow", "(", "client_id", ",", "client_secret", ",", "scope", ",", "redirect_uri", "=", "redirect_uri", ",", "user_agent", "=", "user_agent", ",", "auth_uri", "=", "auth_uri", ",", "token_uri", "=", "token_uri", ",", "revoke_uri", "=", "revoke_uri", ",", "device_uri", "=", "device_uri", ",", "token_info_uri", "=", "token_info_uri", ",", "pkce", "=", "pkce", ",", "code_verifier", "=", "code_verifier", ")", "credentials", "=", "flow", ".", "step2_exchange", "(", "code", ",", "http", "=", "http", ")", "return", "credentials" ]
[ 1617, 0 ]
[ 1679, 22 ]
python
en
['en', 'en', 'en']
True
credentials_from_clientsecrets_and_code
(filename, scope, code, message=None, redirect_uri='postmessage', http=None, cache=None, device_uri=None)
Returns OAuth2Credentials from a clientsecrets file and an auth code. Will create the right kind of Flow based on the contents of the clientsecrets file or will raise InvalidClientSecretsError for unknown types of Flows. Args: filename: string, File name of clientsecrets. scope: string or iterable of strings, scope(s) to request. code: string, An authorization code, most likely passed down from the client message: string, A friendly string to display to the user if the clientsecrets file is missing or invalid. If message is provided then sys.exit will be called in the case of an error. If message in not provided then clientsecrets.InvalidClientSecretsError will be raised. redirect_uri: string, this is generally set to 'postmessage' to match the redirect_uri that the client specified http: httplib2.Http, optional http instance to use to do the fetch cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. device_uri: string, OAuth 2.0 device authorization endpoint pkce: boolean, default: False, Generate and include a "Proof Key for Code Exchange" (PKCE) with your authorization and token requests. This adds security for installed applications that cannot protect a client_secret. See RFC 7636 for details. code_verifier: bytestring or None, default: None, parameter passed as part of the code exchange when pkce=True. If None, a code_verifier will automatically be generated as part of step1_get_authorize_url(). See RFC 7636 for details. Returns: An OAuth2Credentials object. Raises: FlowExchangeError: if the authorization code cannot be exchanged for an access token UnknownClientSecretsFlowError: if the file describes an unknown kind of Flow. clientsecrets.InvalidClientSecretsError: if the clientsecrets file is invalid.
Returns OAuth2Credentials from a clientsecrets file and an auth code.
def credentials_from_clientsecrets_and_code(filename, scope, code, message=None, redirect_uri='postmessage', http=None, cache=None, device_uri=None): """Returns OAuth2Credentials from a clientsecrets file and an auth code. Will create the right kind of Flow based on the contents of the clientsecrets file or will raise InvalidClientSecretsError for unknown types of Flows. Args: filename: string, File name of clientsecrets. scope: string or iterable of strings, scope(s) to request. code: string, An authorization code, most likely passed down from the client message: string, A friendly string to display to the user if the clientsecrets file is missing or invalid. If message is provided then sys.exit will be called in the case of an error. If message in not provided then clientsecrets.InvalidClientSecretsError will be raised. redirect_uri: string, this is generally set to 'postmessage' to match the redirect_uri that the client specified http: httplib2.Http, optional http instance to use to do the fetch cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. device_uri: string, OAuth 2.0 device authorization endpoint pkce: boolean, default: False, Generate and include a "Proof Key for Code Exchange" (PKCE) with your authorization and token requests. This adds security for installed applications that cannot protect a client_secret. See RFC 7636 for details. code_verifier: bytestring or None, default: None, parameter passed as part of the code exchange when pkce=True. If None, a code_verifier will automatically be generated as part of step1_get_authorize_url(). See RFC 7636 for details. Returns: An OAuth2Credentials object. Raises: FlowExchangeError: if the authorization code cannot be exchanged for an access token UnknownClientSecretsFlowError: if the file describes an unknown kind of Flow. clientsecrets.InvalidClientSecretsError: if the clientsecrets file is invalid. 
""" flow = flow_from_clientsecrets(filename, scope, message=message, cache=cache, redirect_uri=redirect_uri, device_uri=device_uri) credentials = flow.step2_exchange(code, http=http) return credentials
[ "def", "credentials_from_clientsecrets_and_code", "(", "filename", ",", "scope", ",", "code", ",", "message", "=", "None", ",", "redirect_uri", "=", "'postmessage'", ",", "http", "=", "None", ",", "cache", "=", "None", ",", "device_uri", "=", "None", ")", ":", "flow", "=", "flow_from_clientsecrets", "(", "filename", ",", "scope", ",", "message", "=", "message", ",", "cache", "=", "cache", ",", "redirect_uri", "=", "redirect_uri", ",", "device_uri", "=", "device_uri", ")", "credentials", "=", "flow", ".", "step2_exchange", "(", "code", ",", "http", "=", "http", ")", "return", "credentials" ]
[ 1683, 0 ]
[ 1736, 22 ]
python
en
['en', 'en', 'en']
True
_oauth2_web_server_flow_params
(kwargs)
Configures redirect URI parameters for OAuth2WebServerFlow.
Configures redirect URI parameters for OAuth2WebServerFlow.
def _oauth2_web_server_flow_params(kwargs): """Configures redirect URI parameters for OAuth2WebServerFlow.""" params = { 'access_type': 'offline', 'response_type': 'code', } params.update(kwargs) # Check for the presence of the deprecated approval_prompt param and # warn appropriately. approval_prompt = params.get('approval_prompt') if approval_prompt is not None: logger.warning( 'The approval_prompt parameter for OAuth2WebServerFlow is ' 'deprecated. Please use the prompt parameter instead.') if approval_prompt == 'force': logger.warning( 'approval_prompt="force" has been adjusted to ' 'prompt="consent"') params['prompt'] = 'consent' del params['approval_prompt'] return params
[ "def", "_oauth2_web_server_flow_params", "(", "kwargs", ")", ":", "params", "=", "{", "'access_type'", ":", "'offline'", ",", "'response_type'", ":", "'code'", ",", "}", "params", ".", "update", "(", "kwargs", ")", "# Check for the presence of the deprecated approval_prompt param and", "# warn appropriately.", "approval_prompt", "=", "params", ".", "get", "(", "'approval_prompt'", ")", "if", "approval_prompt", "is", "not", "None", ":", "logger", ".", "warning", "(", "'The approval_prompt parameter for OAuth2WebServerFlow is '", "'deprecated. Please use the prompt parameter instead.'", ")", "if", "approval_prompt", "==", "'force'", ":", "logger", ".", "warning", "(", "'approval_prompt=\"force\" has been adjusted to '", "'prompt=\"consent\"'", ")", "params", "[", "'prompt'", "]", "=", "'consent'", "del", "params", "[", "'approval_prompt'", "]", "return", "params" ]
[ 1777, 0 ]
[ 1801, 17 ]
python
en
['en', 'nl', 'en']
True
flow_from_clientsecrets
(filename, scope, redirect_uri=None, message=None, cache=None, login_hint=None, device_uri=None, pkce=None, code_verifier=None, prompt=None)
Create a Flow from a clientsecrets file. Will create the right kind of Flow based on the contents of the clientsecrets file or will raise InvalidClientSecretsError for unknown types of Flows. Args: filename: string, File name of client secrets. scope: string or iterable of strings, scope(s) to request. redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for a non-web-based application, or a URI that handles the callback from the authorization server. message: string, A friendly string to display to the user if the clientsecrets file is missing or invalid. If message is provided then sys.exit will be called in the case of an error. If message in not provided then clientsecrets.InvalidClientSecretsError will be raised. cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. login_hint: string, Either an email address or domain. Passing this hint will either pre-fill the email box on the sign-in form or select the proper multi-login session, thereby simplifying the login flow. device_uri: string, URI for device authorization endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. Returns: A Flow object. Raises: UnknownClientSecretsFlowError: if the file describes an unknown kind of Flow. clientsecrets.InvalidClientSecretsError: if the clientsecrets file is invalid.
Create a Flow from a clientsecrets file.
def flow_from_clientsecrets(filename, scope, redirect_uri=None, message=None, cache=None, login_hint=None, device_uri=None, pkce=None, code_verifier=None, prompt=None): """Create a Flow from a clientsecrets file. Will create the right kind of Flow based on the contents of the clientsecrets file or will raise InvalidClientSecretsError for unknown types of Flows. Args: filename: string, File name of client secrets. scope: string or iterable of strings, scope(s) to request. redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for a non-web-based application, or a URI that handles the callback from the authorization server. message: string, A friendly string to display to the user if the clientsecrets file is missing or invalid. If message is provided then sys.exit will be called in the case of an error. If message in not provided then clientsecrets.InvalidClientSecretsError will be raised. cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. login_hint: string, Either an email address or domain. Passing this hint will either pre-fill the email box on the sign-in form or select the proper multi-login session, thereby simplifying the login flow. device_uri: string, URI for device authorization endpoint. For convenience defaults to Google's endpoints but any OAuth 2.0 provider can be used. Returns: A Flow object. Raises: UnknownClientSecretsFlowError: if the file describes an unknown kind of Flow. clientsecrets.InvalidClientSecretsError: if the clientsecrets file is invalid. 
""" try: client_type, client_info = clientsecrets.loadfile(filename, cache=cache) if client_type in (clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED): constructor_kwargs = { 'redirect_uri': redirect_uri, 'auth_uri': client_info['auth_uri'], 'token_uri': client_info['token_uri'], 'login_hint': login_hint, } revoke_uri = client_info.get('revoke_uri') optional = ( 'revoke_uri', 'device_uri', 'pkce', 'code_verifier', 'prompt' ) for param in optional: if locals()[param] is not None: constructor_kwargs[param] = locals()[param] return OAuth2WebServerFlow( client_info['client_id'], client_info['client_secret'], scope, **constructor_kwargs) except clientsecrets.InvalidClientSecretsError as e: if message is not None: if e.args: message = ('The client secrets were invalid: ' '\n{0}\n{1}'.format(e, message)) sys.exit(message) else: raise else: raise UnknownClientSecretsFlowError( 'This OAuth 2.0 flow is unsupported: {0!r}'.format(client_type))
[ "def", "flow_from_clientsecrets", "(", "filename", ",", "scope", ",", "redirect_uri", "=", "None", ",", "message", "=", "None", ",", "cache", "=", "None", ",", "login_hint", "=", "None", ",", "device_uri", "=", "None", ",", "pkce", "=", "None", ",", "code_verifier", "=", "None", ",", "prompt", "=", "None", ")", ":", "try", ":", "client_type", ",", "client_info", "=", "clientsecrets", ".", "loadfile", "(", "filename", ",", "cache", "=", "cache", ")", "if", "client_type", "in", "(", "clientsecrets", ".", "TYPE_WEB", ",", "clientsecrets", ".", "TYPE_INSTALLED", ")", ":", "constructor_kwargs", "=", "{", "'redirect_uri'", ":", "redirect_uri", ",", "'auth_uri'", ":", "client_info", "[", "'auth_uri'", "]", ",", "'token_uri'", ":", "client_info", "[", "'token_uri'", "]", ",", "'login_hint'", ":", "login_hint", ",", "}", "revoke_uri", "=", "client_info", ".", "get", "(", "'revoke_uri'", ")", "optional", "=", "(", "'revoke_uri'", ",", "'device_uri'", ",", "'pkce'", ",", "'code_verifier'", ",", "'prompt'", ")", "for", "param", "in", "optional", ":", "if", "locals", "(", ")", "[", "param", "]", "is", "not", "None", ":", "constructor_kwargs", "[", "param", "]", "=", "locals", "(", ")", "[", "param", "]", "return", "OAuth2WebServerFlow", "(", "client_info", "[", "'client_id'", "]", ",", "client_info", "[", "'client_secret'", "]", ",", "scope", ",", "*", "*", "constructor_kwargs", ")", "except", "clientsecrets", ".", "InvalidClientSecretsError", "as", "e", ":", "if", "message", "is", "not", "None", ":", "if", "e", ".", "args", ":", "message", "=", "(", "'The client secrets were invalid: '", "'\\n{0}\\n{1}'", ".", "format", "(", "e", ",", "message", ")", ")", "sys", ".", "exit", "(", "message", ")", "else", ":", "raise", "else", ":", "raise", "UnknownClientSecretsFlowError", "(", "'This OAuth 2.0 flow is unsupported: {0!r}'", ".", "format", "(", "client_type", ")", ")" ]
[ 2092, 0 ]
[ 2169, 76 ]
python
en
['en', 'en', 'en']
True
Credentials.authorize
(self, http)
Take an httplib2.Http instance (or equivalent) and authorizes it. Authorizes it for the set of credentials, usually by replacing http.request() with a method that adds in the appropriate headers and then delegates to the original Http.request() method. Args: http: httplib2.Http, an http object to be used to make the refresh request.
Take an httplib2.Http instance (or equivalent) and authorizes it.
def authorize(self, http): """Take an httplib2.Http instance (or equivalent) and authorizes it. Authorizes it for the set of credentials, usually by replacing http.request() with a method that adds in the appropriate headers and then delegates to the original Http.request() method. Args: http: httplib2.Http, an http object to be used to make the refresh request. """ raise NotImplementedError
[ "def", "authorize", "(", "self", ",", "http", ")", ":", "raise", "NotImplementedError" ]
[ 201, 4 ]
[ 212, 33 ]
python
en
['en', 'en', 'en']
True
Credentials.refresh
(self, http)
Forces a refresh of the access_token. Args: http: httplib2.Http, an http object to be used to make the refresh request.
Forces a refresh of the access_token.
def refresh(self, http): """Forces a refresh of the access_token. Args: http: httplib2.Http, an http object to be used to make the refresh request. """ raise NotImplementedError
[ "def", "refresh", "(", "self", ",", "http", ")", ":", "raise", "NotImplementedError" ]
[ 214, 4 ]
[ 221, 33 ]
python
en
['en', 'en', 'en']
True
Credentials.revoke
(self, http)
Revokes a refresh_token and makes the credentials void. Args: http: httplib2.Http, an http object to be used to make the revoke request.
Revokes a refresh_token and makes the credentials void.
def revoke(self, http): """Revokes a refresh_token and makes the credentials void. Args: http: httplib2.Http, an http object to be used to make the revoke request. """ raise NotImplementedError
[ "def", "revoke", "(", "self", ",", "http", ")", ":", "raise", "NotImplementedError" ]
[ 223, 4 ]
[ 230, 33 ]
python
en
['en', 'en', 'en']
True
Credentials.apply
(self, headers)
Add the authorization to the headers. Args: headers: dict, the headers to add the Authorization header to.
Add the authorization to the headers.
def apply(self, headers): """Add the authorization to the headers. Args: headers: dict, the headers to add the Authorization header to. """ raise NotImplementedError
[ "def", "apply", "(", "self", ",", "headers", ")", ":", "raise", "NotImplementedError" ]
[ 232, 4 ]
[ 238, 33 ]
python
en
['en', 'en', 'en']
True
Credentials._to_json
(self, strip, to_serialize=None)
Utility function that creates JSON repr. of a Credentials object. Args: strip: array, An array of names of members to exclude from the JSON. to_serialize: dict, (Optional) The properties for this object that will be serialized. This allows callers to modify before serializing. Returns: string, a JSON representation of this instance, suitable to pass to from_json().
Utility function that creates JSON repr. of a Credentials object.
def _to_json(self, strip, to_serialize=None): """Utility function that creates JSON repr. of a Credentials object. Args: strip: array, An array of names of members to exclude from the JSON. to_serialize: dict, (Optional) The properties for this object that will be serialized. This allows callers to modify before serializing. Returns: string, a JSON representation of this instance, suitable to pass to from_json(). """ curr_type = self.__class__ if to_serialize is None: to_serialize = copy.copy(self.__dict__) else: # Assumes it is a str->str dictionary, so we don't deep copy. to_serialize = copy.copy(to_serialize) for member in strip: if member in to_serialize: del to_serialize[member] to_serialize['token_expiry'] = _parse_expiry( to_serialize.get('token_expiry')) # Add in information we will need later to reconstitute this instance. to_serialize['_class'] = curr_type.__name__ to_serialize['_module'] = curr_type.__module__ for key, val in to_serialize.items(): if isinstance(val, bytes): to_serialize[key] = val.decode('utf-8') if isinstance(val, set): to_serialize[key] = list(val) return json.dumps(to_serialize)
[ "def", "_to_json", "(", "self", ",", "strip", ",", "to_serialize", "=", "None", ")", ":", "curr_type", "=", "self", ".", "__class__", "if", "to_serialize", "is", "None", ":", "to_serialize", "=", "copy", ".", "copy", "(", "self", ".", "__dict__", ")", "else", ":", "# Assumes it is a str->str dictionary, so we don't deep copy.", "to_serialize", "=", "copy", ".", "copy", "(", "to_serialize", ")", "for", "member", "in", "strip", ":", "if", "member", "in", "to_serialize", ":", "del", "to_serialize", "[", "member", "]", "to_serialize", "[", "'token_expiry'", "]", "=", "_parse_expiry", "(", "to_serialize", ".", "get", "(", "'token_expiry'", ")", ")", "# Add in information we will need later to reconstitute this instance.", "to_serialize", "[", "'_class'", "]", "=", "curr_type", ".", "__name__", "to_serialize", "[", "'_module'", "]", "=", "curr_type", ".", "__module__", "for", "key", ",", "val", "in", "to_serialize", ".", "items", "(", ")", ":", "if", "isinstance", "(", "val", ",", "bytes", ")", ":", "to_serialize", "[", "key", "]", "=", "val", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "val", ",", "set", ")", ":", "to_serialize", "[", "key", "]", "=", "list", "(", "val", ")", "return", "json", ".", "dumps", "(", "to_serialize", ")" ]
[ 240, 4 ]
[ 273, 39 ]
python
en
['en', 'en', 'en']
True
Credentials.to_json
(self)
Creating a JSON representation of an instance of Credentials. Returns: string, a JSON representation of this instance, suitable to pass to from_json().
Creating a JSON representation of an instance of Credentials.
def to_json(self): """Creating a JSON representation of an instance of Credentials. Returns: string, a JSON representation of this instance, suitable to pass to from_json(). """ return self._to_json(self.NON_SERIALIZED_MEMBERS)
[ "def", "to_json", "(", "self", ")", ":", "return", "self", ".", "_to_json", "(", "self", ".", "NON_SERIALIZED_MEMBERS", ")" ]
[ 275, 4 ]
[ 282, 57 ]
python
en
['en', 'en', 'en']
True
Credentials.new_from_json
(cls, json_data)
Utility class method to instantiate a Credentials subclass from JSON. Expects the JSON string to have been produced by to_json(). Args: json_data: string or bytes, JSON from to_json(). Returns: An instance of the subclass of Credentials that was serialized with to_json().
Utility class method to instantiate a Credentials subclass from JSON.
def new_from_json(cls, json_data): """Utility class method to instantiate a Credentials subclass from JSON. Expects the JSON string to have been produced by to_json(). Args: json_data: string or bytes, JSON from to_json(). Returns: An instance of the subclass of Credentials that was serialized with to_json(). """ json_data_as_unicode = _helpers._from_bytes(json_data) data = json.loads(json_data_as_unicode) # Find and call the right classmethod from_json() to restore # the object. module_name = data['_module'] try: module_obj = __import__(module_name) except ImportError: # In case there's an object from the old package structure, # update it module_name = module_name.replace('.googleapiclient', '') module_obj = __import__(module_name) module_obj = __import__(module_name, fromlist=module_name.split('.')[:-1]) kls = getattr(module_obj, data['_class']) return kls.from_json(json_data_as_unicode)
[ "def", "new_from_json", "(", "cls", ",", "json_data", ")", ":", "json_data_as_unicode", "=", "_helpers", ".", "_from_bytes", "(", "json_data", ")", "data", "=", "json", ".", "loads", "(", "json_data_as_unicode", ")", "# Find and call the right classmethod from_json() to restore", "# the object.", "module_name", "=", "data", "[", "'_module'", "]", "try", ":", "module_obj", "=", "__import__", "(", "module_name", ")", "except", "ImportError", ":", "# In case there's an object from the old package structure,", "# update it", "module_name", "=", "module_name", ".", "replace", "(", "'.googleapiclient'", ",", "''", ")", "module_obj", "=", "__import__", "(", "module_name", ")", "module_obj", "=", "__import__", "(", "module_name", ",", "fromlist", "=", "module_name", ".", "split", "(", "'.'", ")", "[", ":", "-", "1", "]", ")", "kls", "=", "getattr", "(", "module_obj", ",", "data", "[", "'_class'", "]", ")", "return", "kls", ".", "from_json", "(", "json_data_as_unicode", ")" ]
[ 285, 4 ]
[ 313, 50 ]
python
en
['en', 'en', 'en']
True
Credentials.from_json
(cls, unused_data)
Instantiate a Credentials object from a JSON description of it. The JSON should have been produced by calling .to_json() on the object. Args: unused_data: dict, A deserialized JSON object. Returns: An instance of a Credentials subclass.
Instantiate a Credentials object from a JSON description of it.
def from_json(cls, unused_data): """Instantiate a Credentials object from a JSON description of it. The JSON should have been produced by calling .to_json() on the object. Args: unused_data: dict, A deserialized JSON object. Returns: An instance of a Credentials subclass. """ return Credentials()
[ "def", "from_json", "(", "cls", ",", "unused_data", ")", ":", "return", "Credentials", "(", ")" ]
[ 316, 4 ]
[ 327, 28 ]
python
en
['en', 'en', 'en']
True
Storage.__init__
(self, lock=None)
Create a Storage instance. Args: lock: An optional threading.Lock-like object. Must implement at least acquire() and release(). Does not need to be re-entrant.
Create a Storage instance.
def __init__(self, lock=None): """Create a Storage instance. Args: lock: An optional threading.Lock-like object. Must implement at least acquire() and release(). Does not need to be re-entrant. """ self._lock = lock
[ "def", "__init__", "(", "self", ",", "lock", "=", "None", ")", ":", "self", ".", "_lock", "=", "lock" ]
[ 342, 4 ]
[ 350, 25 ]
python
en
['en', 'de', 'en']
True
Storage.acquire_lock
(self)
Acquires any lock necessary to access this Storage. This lock is not reentrant.
Acquires any lock necessary to access this Storage.
def acquire_lock(self): """Acquires any lock necessary to access this Storage. This lock is not reentrant. """ if self._lock is not None: self._lock.acquire()
[ "def", "acquire_lock", "(", "self", ")", ":", "if", "self", ".", "_lock", "is", "not", "None", ":", "self", ".", "_lock", ".", "acquire", "(", ")" ]
[ 352, 4 ]
[ 358, 32 ]
python
en
['en', 'en', 'en']
True
Storage.release_lock
(self)
Release the Storage lock. Trying to release a lock that isn't held will result in a RuntimeError in the case of a threading.Lock or multiprocessing.Lock.
Release the Storage lock.
def release_lock(self): """Release the Storage lock. Trying to release a lock that isn't held will result in a RuntimeError in the case of a threading.Lock or multiprocessing.Lock. """ if self._lock is not None: self._lock.release()
[ "def", "release_lock", "(", "self", ")", ":", "if", "self", ".", "_lock", "is", "not", "None", ":", "self", ".", "_lock", ".", "release", "(", ")" ]
[ 360, 4 ]
[ 367, 32 ]
python
en
['en', 'de', 'en']
True
Storage.locked_get
(self)
Retrieve credential. The Storage lock must be held when this is called. Returns: oauth2client.client.Credentials
Retrieve credential.
def locked_get(self): """Retrieve credential. The Storage lock must be held when this is called. Returns: oauth2client.client.Credentials """ raise NotImplementedError
[ "def", "locked_get", "(", "self", ")", ":", "raise", "NotImplementedError" ]
[ 369, 4 ]
[ 377, 33 ]
python
en
['en', 'pt', 'en']
False
Storage.locked_put
(self, credentials)
Write a credential. The Storage lock must be held when this is called. Args: credentials: Credentials, the credentials to store.
Write a credential.
def locked_put(self, credentials): """Write a credential. The Storage lock must be held when this is called. Args: credentials: Credentials, the credentials to store. """ raise NotImplementedError
[ "def", "locked_put", "(", "self", ",", "credentials", ")", ":", "raise", "NotImplementedError" ]
[ 379, 4 ]
[ 387, 33 ]
python
en
['es', 'pt', 'en']
False
Storage.locked_delete
(self)
Delete a credential. The Storage lock must be held when this is called.
Delete a credential.
def locked_delete(self): """Delete a credential. The Storage lock must be held when this is called. """ raise NotImplementedError
[ "def", "locked_delete", "(", "self", ")", ":", "raise", "NotImplementedError" ]
[ 389, 4 ]
[ 394, 33 ]
python
en
['en', 'it', 'en']
True