Dataset columns:

| column | type |
| --- | --- |
| identifier | string (length 1–155) |
| parameters | string (length 2–6.09k) |
| docstring | string (length 11–63.4k) |
| docstring_summary | string (length 0–63.4k) |
| function | string (length 29–99.8k) |
| function_tokens | sequence |
| start_point | sequence |
| end_point | sequence |
| language | string (1 class) |
| docstring_language | string (length 2–7) |
| docstring_language_predictions | string (length 18–23) |
| is_langid_reliable | string (2 classes) |
BaseDatabaseWrapper.check_constraints | (self, table_names=None)

def check_constraints(self, table_names=None):
    """
    Backends can override this method if they can apply constraint
    checking (e.g. via "SET CONSTRAINTS ALL IMMEDIATE"). Should raise an
    IntegrityError if any invalid foreign key references are encountered.
    """
    pass

start_point: [479, 4] | end_point: [485, 12] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
BaseDatabaseWrapper.is_usable | (self)

def is_usable(self):
    """
    Test if the database connection is usable.

    This method may assume that self.connection is not None.

    Actual implementations should take care not to raise exceptions
    as that may prevent Django from recycling unusable connections.
    """
    raise NotImplementedError(
        "subclasses of BaseDatabaseWrapper may require an is_usable() method")

start_point: [489, 4] | end_point: [499, 82] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
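Concrete backends supply this check themselves. As a hedged sketch (not the actual code of any particular backend), a DB-API based override could look roughly like this, where `Database` stands for the backend's driver module:

```python
# Illustrative sketch of a concrete backend override; `Database` is assumed
# to be the backend's DB-API 2.0 driver module.
class DatabaseWrapper(BaseDatabaseWrapper):
    def is_usable(self):
        try:
            # A trivial round-trip; any driver error means the connection is dead.
            self.connection.cursor().execute("SELECT 1")
        except Database.Error:
            return False
        else:
            return True
```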
BaseDatabaseWrapper.close_if_unusable_or_obsolete | (self)

def close_if_unusable_or_obsolete(self):
    """
    Close the current connection if unrecoverable errors have occurred
    or if it outlived its maximum age.
    """
    if self.connection is not None:
        # If the application didn't restore the original autocommit setting,
        # don't take chances, drop the connection.
        if self.get_autocommit() != self.settings_dict['AUTOCOMMIT']:
            self.close()
            return
        # If an exception other than DataError or IntegrityError occurred
        # since the last commit / rollback, check if the connection works.
        if self.errors_occurred:
            if self.is_usable():
                self.errors_occurred = False
            else:
                self.close()
                return
        if self.close_at is not None and time.monotonic() >= self.close_at:
            self.close()
            return

start_point: [501, 4] | end_point: [524, 22] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
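In Django this method is not usually called by hand; it is hooked to the request signals so stale connections are dropped at the request boundaries. A minimal sketch of that wiring:

```python
# Sketch: drop unusable or expired connections at request boundaries.
from django.core.signals import request_finished, request_started
from django.db import connections

def close_old_connections(**kwargs):
    for conn in connections.all():
        conn.close_if_unusable_or_obsolete()

request_started.connect(close_old_connections)
request_finished.connect(close_old_connections)
```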
BaseDatabaseWrapper.validate_thread_sharing | (self)

def validate_thread_sharing(self):
    """
    Validate that the connection isn't accessed by another thread than the
    one which originally created it, unless the connection was explicitly
    authorized to be shared between threads (via the `inc_thread_sharing()`
    method). Raise an exception if the validation fails.
    """
    if not (self.allow_thread_sharing or self._thread_ident == _thread.get_ident()):
        raise DatabaseError(
            "DatabaseWrapper objects created in a "
            "thread can only be used in that same thread. The object "
            "with alias '%s' was created in thread id %s and this is "
            "thread id %s."
            % (self.alias, self._thread_ident, _thread.get_ident())
        )

start_point: [543, 4] | end_point: [557, 13] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
BaseDatabaseWrapper.prepare_database | (self)

def prepare_database(self):
    """
    Hook to do any database check or preparation, generally called before
    migrating a project or an app.
    """
    pass

start_point: [561, 4] | end_point: [566, 12] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
BaseDatabaseWrapper.wrap_database_errors | (self)

def wrap_database_errors(self):
    """
    Context manager and decorator that re-throws backend-specific database
    exceptions using Django's common wrappers.
    """
    return DatabaseErrorWrapper(self)

start_point: [569, 4] | end_point: [574, 41] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
BaseDatabaseWrapper.chunked_cursor | (self)

def chunked_cursor(self):
    """
    Return a cursor that tries to avoid caching in the database (if
    supported by the database), otherwise return a regular cursor.
    """
    return self.cursor()

start_point: [576, 4] | end_point: [581, 28] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
BaseDatabaseWrapper.make_debug_cursor | (self, cursor)

def make_debug_cursor(self, cursor):
    """Create a cursor that logs all queries in self.queries_log."""
    return utils.CursorDebugWrapper(cursor, self)

start_point: [583, 4] | end_point: [585, 53] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'gd', 'en'] | is_langid_reliable: True
BaseDatabaseWrapper.make_cursor | (self, cursor)

def make_cursor(self, cursor):
    """Create a cursor without debug logging."""
    return utils.CursorWrapper(cursor, self)

start_point: [587, 4] | end_point: [589, 48] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
BaseDatabaseWrapper.temporary_connection | (self)

def temporary_connection(self):
    """
    Context manager that ensures that a connection is established, and
    if it opened one, closes it to avoid leaving a dangling connection.
    This is useful for operations outside of the request-response cycle.

    Provide a cursor: with self.temporary_connection() as cursor: ...
    """
    must_close = self.connection is None
    try:
        with self.cursor() as cursor:
            yield cursor
    finally:
        if must_close:
            self.close()

start_point: [592, 4] | end_point: [606, 28] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
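The body is a generator, so the method is meant to be consumed as a context manager (Django wraps it with @contextmanager). A hedged usage sketch for one-off work outside the request-response cycle:

```python
# Sketch: run a one-off query without leaving a dangling connection.
from django.db import connection

with connection.temporary_connection() as cursor:
    cursor.execute("SELECT 1")
    print(cursor.fetchone())
# If the call had to open the connection, it is closed again here.
```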
BaseDatabaseWrapper._nodb_cursor | (self)

def _nodb_cursor(self):
    """
    Return a cursor from an alternative connection to be used when there is
    no need to access the main database, specifically for test db
    creation/deletion. This also prevents the production database from
    being exposed to potential child threads while (or after) the test
    database is destroyed. Refs #10868, #17786, #16969.
    """
    conn = self.__class__({**self.settings_dict, 'NAME': None}, alias=NO_DB_ALIAS)
    try:
        with conn.cursor() as cursor:
            yield cursor
    finally:
        conn.close()

start_point: [609, 4] | end_point: [622, 24] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
BaseDatabaseWrapper.schema_editor | (self, *args, **kwargs)

def schema_editor(self, *args, **kwargs):
    """
    Return a new instance of this backend's SchemaEditor.
    """
    if self.SchemaEditorClass is None:
        raise NotImplementedError(
            'The SchemaEditorClass attribute of this database wrapper is still None')
    return self.SchemaEditorClass(self, *args, **kwargs)

start_point: [624, 4] | end_point: [631, 60] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
BaseDatabaseWrapper.execute_wrapper | (self, wrapper)

def execute_wrapper(self, wrapper):
    """
    Return a context manager under which the wrapper is applied to suitable
    database query executions.
    """
    self.execute_wrappers.append(wrapper)
    try:
        yield
    finally:
        self.execute_wrappers.pop()

start_point: [655, 4] | end_point: [664, 39] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
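Django documents the wrapper callable as receiving (execute, sql, params, many, context) and being responsible for calling execute itself. A minimal logging wrapper, as a sketch:

```python
# Sketch: log every query executed while the wrapper is installed.
import logging

logger = logging.getLogger("sql")

def log_queries(execute, sql, params, many, context):
    logger.debug("Executing: %s with %r", sql, params)
    return execute(sql, params, many, context)

# Usage, assuming `connection` is a Django database connection:
# with connection.execute_wrapper(log_queries):
#     do_queries()
```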
BaseDatabaseWrapper.copy | (self, alias=None)

def copy(self, alias=None):
    """
    Return a copy of this connection.

    For tests that require two connections to the same database.
    """
    settings_dict = copy.deepcopy(self.settings_dict)
    if alias is None:
        alias = self.alias
    return type(self)(settings_dict, alias)

start_point: [666, 4] | end_point: [675, 47] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
Bazaar.is_commit_id_equal | (cls, dest, name)

def is_commit_id_equal(cls, dest, name):
    # type: (str, Optional[str]) -> bool
    """Always assume the versions don't match"""
    return False

start_point: [89, 4] | end_point: [92, 20] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
pkg_resources_distribution_for_wheel | (wheel_zip, name, location)

def pkg_resources_distribution_for_wheel(wheel_zip, name, location):
    # type: (ZipFile, str, str) -> Distribution
    """Get a pkg_resources distribution given a wheel.

    :raises UnsupportedWheel: on any errors
    """
    info_dir, _ = parse_wheel(wheel_zip, name)

    metadata_files = [p for p in wheel_zip.namelist() if p.startswith(f"{info_dir}/")]

    metadata_text = {}  # type: Dict[str, bytes]
    for path in metadata_files:
        _, metadata_name = path.split("/", 1)

        try:
            metadata_text[metadata_name] = read_wheel_metadata_file(wheel_zip, path)
        except UnsupportedWheel as e:
            raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))

    metadata = WheelMetadata(metadata_text, location)

    return DistInfoDistribution(location=location, metadata=metadata, project_name=name)

start_point: [42, 0] | end_point: [63, 88] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
parse_wheel | (wheel_zip, name)

def parse_wheel(wheel_zip, name):
    # type: (ZipFile, str) -> Tuple[str, Message]
    """Extract information from the provided wheel, ensuring it meets basic
    standards.

    Returns the name of the .dist-info directory and the parsed WHEEL metadata.
    """
    try:
        info_dir = wheel_dist_info_dir(wheel_zip, name)
        metadata = wheel_metadata(wheel_zip, info_dir)
        version = wheel_version(metadata)
    except UnsupportedWheel as e:
        raise UnsupportedWheel("{} has an invalid wheel, {}".format(name, str(e)))

    check_compatibility(version, name)

    return info_dir, metadata

start_point: [66, 0] | end_point: [82, 29] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
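These are pip internals rather than public API, but the call pattern is simple. A hedged sketch of inspecting a wheel on disk (the filename and project name are hypothetical):

```python
# Sketch: open a wheel and validate/parse its metadata.
from zipfile import ZipFile

with ZipFile("example_pkg-1.0-py3-none-any.whl") as wheel_zip:
    info_dir, wheel_msg = parse_wheel(wheel_zip, "example-pkg")
    print(info_dir)                    # e.g. "example_pkg-1.0.dist-info"
    print(wheel_msg["Wheel-Version"])  # e.g. "1.0"
```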
wheel_dist_info_dir | (source, name)

def wheel_dist_info_dir(source, name):
    # type: (ZipFile, str) -> str
    """Returns the name of the contained .dist-info directory.

    Raises AssertionError or UnsupportedWheel if not found, >1 found, or
    it doesn't match the provided name.
    """
    # Zip file path separators must be /
    subdirs = {p.split("/", 1)[0] for p in source.namelist()}

    info_dirs = [s for s in subdirs if s.endswith(".dist-info")]

    if not info_dirs:
        raise UnsupportedWheel(".dist-info directory not found")

    if len(info_dirs) > 1:
        raise UnsupportedWheel(
            "multiple .dist-info directories found: {}".format(", ".join(info_dirs))
        )

    info_dir = info_dirs[0]

    info_dir_name = canonicalize_name(info_dir)
    canonical_name = canonicalize_name(name)
    if not info_dir_name.startswith(canonical_name):
        raise UnsupportedWheel(
            ".dist-info directory {!r} does not start with {!r}".format(
                info_dir, canonical_name
            )
        )

    return info_dir

start_point: [85, 0] | end_point: [116, 19] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
wheel_metadata | (source, dist_info_dir)

def wheel_metadata(source, dist_info_dir):
    # type: (ZipFile, str) -> Message
    """Return the WHEEL metadata of an extracted wheel, if possible.

    Otherwise, raise UnsupportedWheel.
    """
    path = f"{dist_info_dir}/WHEEL"
    # Zip file path separators must be /
    wheel_contents = read_wheel_metadata_file(source, path)

    try:
        wheel_text = wheel_contents.decode()
    except UnicodeDecodeError as e:
        raise UnsupportedWheel(f"error decoding {path!r}: {e!r}")

    # FeedParser (used by Parser) does not raise any exceptions. The returned
    # message may have .defects populated, but for backwards-compatibility we
    # currently ignore them.
    return Parser().parsestr(wheel_text)

start_point: [129, 0] | end_point: [146, 40] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
wheel_version | (wheel_data)

def wheel_version(wheel_data):
    # type: (Message) -> Tuple[int, ...]
    """Given WHEEL metadata, return the parsed Wheel-Version.

    Otherwise, raise UnsupportedWheel.
    """
    version_text = wheel_data["Wheel-Version"]
    if version_text is None:
        raise UnsupportedWheel("WHEEL is missing Wheel-Version")

    version = version_text.strip()

    try:
        return tuple(map(int, version.split(".")))
    except ValueError:
        raise UnsupportedWheel(f"invalid Wheel-Version: {version!r}")

start_point: [149, 0] | end_point: [163, 69] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'de', 'en'] | is_langid_reliable: True
check_compatibility | (version, name)

def check_compatibility(version, name):
    # type: (Tuple[int, ...], str) -> None
    """Raises errors or warns if called with an incompatible Wheel-Version.

    pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g 2.0 > 1.1); and warn when
    installing a version only minor version ahead (e.g 1.2 > 1.1).

    version: a 2-tuple representing a Wheel-Version (Major, Minor)
    name: name of wheel or package to raise exception about

    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
    """
    if version[0] > VERSION_COMPATIBLE[0]:
        raise UnsupportedWheel(
            "{}'s Wheel-Version ({}) is not compatible with this version "
            "of pip".format(name, ".".join(map(str, version)))
        )
    elif version > VERSION_COMPATIBLE:
        logger.warning(
            "Installing from a newer Wheel-Version (%s)",
            ".".join(map(str, version)),
        )

start_point: [166, 0] | end_point: [188, 9] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
create_model | (image_size, labels, model_type=XModelType.CENTERNET, backbone=XModelBackbone.EFFICIENTNETB0, mode=XModelMode.CONCAT, pretrained_backbone=True)

def create_model(
    image_size,
    labels,
    model_type=XModelType.CENTERNET,
    backbone=XModelBackbone.EFFICIENTNETB0,
    mode=XModelMode.CONCAT,
    pretrained_backbone=True,
):
    """
    Creates a new TensorFlow model.

    :param image_size: image height and width
    :param labels: number of labels
    :param backbone: backbone to be used for creating a new model (pre-trained if available)
    :return: new model (XCenternetModel)
    """
    input, features = _create_backbone(image_size, pretrained_backbone, backbone=backbone, mode=mode)
    return _finish_model(labels, input, features, model_type)

start_point: [17, 0] | end_point: [34, 61] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
load_and_update_model | (model_dir: str, labels: int, model_type: XModelType, feature_layer="features")

def load_and_update_model(model_dir: str, labels: int, model_type: XModelType, feature_layer="features"):
    """
    Loads a model from the given directory and updates it to the new number of labels.

    :param model_dir: directory with model in TensorFlow SavedModel format
    :param labels: number of labels in the new model
    :param model_type:
    :return: loaded model (XCenternetModel)
    """
    input, features = _load_backbone(model_dir, feature_layer=feature_layer)
    return _finish_model(labels, input, features, model_type)

start_point: [37, 0] | end_point: [47, 61] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
load_pretrained_weights | (model, weights_path, reset_heads=True)

def load_pretrained_weights(model, weights_path, reset_heads=True):
    """
    Loads pretrained weights for the given model by name. By default, the heads are reset to default values.
    The heads in a new model might have the same shape as in the pretrained one, but we should not keep them
    and should instead train them from scratch.

    :param model: Non-trained model.
    :param weights_path: Path to file with pretrained model weights.
    :param reset_heads: reset weights for the heatmap, size and offset heads if present
    :return: None
    """
    def load():
        model.load_weights(weights_path, by_name=True, skip_mismatch=True)

    if not reset_heads:
        load()
        return

    # I did not find a way to reinitialize weights with the proper initializer after they have been changed.
    # Remembering the weights from an untrained model and setting them after loading pretrained weights
    # will do the trick.
    layers_to_reset = _layers_to_reset(model)
    # Keyed by layer name so the lookup below works on the loaded model's layers.
    init_weights = {l.name: l.get_weights() for l in layers_to_reset}

    load()

    for layer in model.layers:
        if layer.name in init_weights:
            layer.set_weights(init_weights[layer.name])

start_point: [50, 0] | end_point: [79, 50] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
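Putting the three XCenternet helpers together, a heavily hedged end-to-end sketch (the label count and the weights filename are hypothetical):

```python
# Sketch: build a fresh detector, then warm-start its backbone by name.
model = create_model(image_size=512, labels=20)  # CENTERNET + EFFICIENTNETB0 defaults
load_pretrained_weights(model, "pretrained_centernet.h5", reset_heads=True)
# Backbone layers are loaded by name; the head layers keep their fresh
# initialization and are trained from scratch.
```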
tokenize | (sql, encoding=None)

def tokenize(sql, encoding=None):
    """Tokenize sql.

    Tokenize *sql* using the :class:`Lexer` and return a 2-tuple stream
    of ``(token type, value)`` items.
    """
    return Lexer().get_tokens(sql, encoding)

start_point: [75, 0] | end_point: [81, 44] | language: python | docstring_language: nl | docstring_language_predictions: ['nl', 'sl', 'tr'] | is_langid_reliable: False
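A hedged usage sketch, assuming the helper is importable from sqlparse's lexer module (the import path follows sqlparse's layout):

```python
# Sketch: stream (token type, value) pairs from a SQL string.
from sqlparse.lexer import tokenize

for ttype, value in tokenize("SELECT id FROM users WHERE id = 1"):
    print(ttype, repr(value))
```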
Lexer.get_tokens | (text, encoding=None)

def get_tokens(text, encoding=None):
    """
    Return an iterable of (tokentype, value) pairs generated from
    `text`. If `unfiltered` is set to `True`, the filtering mechanism
    is bypassed even if filters are defined.

    Also preprocess the text, i.e. expand tabs and strip it if
    wanted and applies registered filters.

    Split ``text`` into (tokentype, text) pairs.

    ``stack`` is the initial stack (default: ``['root']``)
    """
    if isinstance(text, TextIOBase):
        text = text.read()

    if isinstance(text, str):
        pass
    elif isinstance(text, bytes):
        if encoding:
            text = text.decode(encoding)
        else:
            try:
                text = text.decode('utf-8')
            except UnicodeDecodeError:
                text = text.decode('unicode-escape')
    else:
        raise TypeError("Expected text or file-like object, got {!r}".
                        format(type(text)))

    iterable = enumerate(text)
    for pos, char in iterable:
        for rexmatch, action in SQL_REGEX:
            m = rexmatch(text, pos)

            if not m:
                continue
            elif isinstance(action, tokens._TokenType):
                yield action, m.group()
            elif callable(action):
                yield action(m.group())

            consume(iterable, m.end() - pos - 1)
            break
        else:
            yield tokens.Error, char

start_point: [27, 4] | end_point: [72, 40] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
popen_wrapper | (args, stdout_encoding='utf-8')

def popen_wrapper(args, stdout_encoding='utf-8'):
    """
    Friendly wrapper around Popen.

    Return stdout output, stderr output, and OS status code.
    """
    try:
        p = run(args, stdout=PIPE, stderr=PIPE, close_fds=os.name != 'nt')
    except OSError as err:
        raise CommandError('Error executing %s' % args[0]) from err
    return (
        p.stdout.decode(stdout_encoding),
        p.stderr.decode(DEFAULT_LOCALE_ENCODING, errors='replace'),
        p.returncode
    )

start_point: [12, 0] | end_point: [26, 5] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
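A quick usage sketch; any argv-style command works, `git --version` is only an example:

```python
# Sketch: run a command and unpack decoded output, errors, and status code.
out, err, status = popen_wrapper(['git', '--version'])
if status == 0:
    print(out.strip())
else:
    print("command failed:", err)
```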
handle_extensions | (extensions)

def handle_extensions(extensions):
    """
    Organize multiple extensions that are separated with commas or passed by
    using --extension/-e multiple times.

    For example: running 'django-admin makemessages -e js,txt -e xhtml -a'
    would result in an extension list: ['.js', '.txt', '.xhtml']

    >>> handle_extensions(['.html', 'html,js,py,py,py,.py', 'py,.py'])
    {'.html', '.js', '.py'}
    >>> handle_extensions(['.html, txt,.tpl'])
    {'.html', '.tpl', '.txt'}
    """
    ext_list = []
    for ext in extensions:
        ext_list.extend(ext.replace(' ', '').split(','))
    for i, ext in enumerate(ext_list):
        if not ext.startswith('.'):
            ext_list[i] = '.%s' % ext_list[i]
    return set(ext_list)

start_point: [29, 0] | end_point: [48, 24] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
get_random_secret_key | ()

def get_random_secret_key():
    """
    Return a 50 character random string usable as a SECRET_KEY setting value.
    """
    chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
    return get_random_string(50, chars)

start_point: [76, 0] | end_point: [81, 39] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
parse_apps_and_model_labels | (labels)

def parse_apps_and_model_labels(labels):
    """
    Parse a list of "app_label.ModelName" or "app_label" strings into actual
    objects and return a two-element tuple:
        (set of model classes, set of app_configs).
    Raise a CommandError if some specified models or apps don't exist.
    """
    apps = set()
    models = set()

    for label in labels:
        if '.' in label:
            try:
                model = installed_apps.get_model(label)
            except LookupError:
                raise CommandError('Unknown model: %s' % label)
            models.add(model)
        else:
            try:
                app_config = installed_apps.get_app_config(label)
            except LookupError as e:
                raise CommandError(str(e))
            apps.add(app_config)

    return models, apps

start_point: [84, 0] | end_point: [108, 23] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
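Usage sketch; this needs a configured Django project, and the labels are illustrative:

```python
# Sketch: resolve dumpdata-style labels into model classes and app configs.
models, app_configs = parse_apps_and_model_labels(['auth.User', 'contenttypes'])
# `models` should now hold the User model class; `app_configs` the
# contenttypes AppConfig. Unknown labels raise CommandError instead.
```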
get_command_line_option | (argv, option)

def get_command_line_option(argv, option):
    """
    Return the value of a command line option (which should include leading
    dashes, e.g. '--testrunner') from an argument list. Return None if the
    option wasn't passed or if the argument list couldn't be parsed.
    """
    parser = CommandParser(add_help=False, allow_abbrev=False)
    parser.add_argument(option, dest='value')
    try:
        options, _ = parser.parse_known_args(argv[2:])
    except CommandError:
        return None
    else:
        return options.value

start_point: [111, 0] | end_point: [124, 28] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
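Parsing starts at argv[2:], i.e. after the program name and the subcommand. A small sketch (the runner path is made up):

```python
# Sketch: pull --testrunner out of a manage.py-style argument list.
argv = ['manage.py', 'test', '--testrunner', 'myproject.runners.FastRunner']
print(get_command_line_option(argv, '--testrunner'))
# -> 'myproject.runners.FastRunner'
print(get_command_line_option(['manage.py', 'test'], '--testrunner'))
# -> None
```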
normalize_path_patterns | (patterns)

def normalize_path_patterns(patterns):
    """Normalize an iterable of glob style patterns based on OS."""
    patterns = [os.path.normcase(p) for p in patterns]
    dir_suffixes = {'%s*' % path_sep for path_sep in {'/', os.sep}}
    norm_patterns = []
    for pattern in patterns:
        for dir_suffix in dir_suffixes:
            if pattern.endswith(dir_suffix):
                norm_patterns.append(pattern[:-len(dir_suffix)])
                break
        else:
            norm_patterns.append(pattern)
    return norm_patterns

start_point: [127, 0] | end_point: [139, 24] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
is_ignored_path | (path, ignore_patterns)

def is_ignored_path(path, ignore_patterns):
    """
    Check if the given path should be ignored or not based on matching
    one of the glob style `ignore_patterns`.
    """
    path = Path(path)

    def ignore(pattern):
        return fnmatch.fnmatchcase(path.name, pattern) or fnmatch.fnmatchcase(str(path), pattern)

    return any(ignore(pattern) for pattern in normalize_path_patterns(ignore_patterns))

start_point: [142, 0] | end_point: [152, 87] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
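Patterns are matched against both the basename and the full path (POSIX-style paths in this sketch):

```python
# Sketch: a pattern can hit either the file name or the whole path.
print(is_ignored_path('project/settings.py', ['*.py']))       # True  (name match)
print(is_ignored_path('project/settings.py', ['project/*']))  # True  (path match)
print(is_ignored_path('project/settings.py', ['*.html']))     # False
```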
_save | (im, fp, tile, bufsize=0)

def _save(im, fp, tile, bufsize=0):
    """Helper to save image based on tile list

    :param im: Image object.
    :param fp: File object.
    :param tile: Tile list.
    :param bufsize: Optional buffer size
    """
    im.load()
    if not hasattr(im, "encoderconfig"):
        im.encoderconfig = ()
    tile.sort(key=_tilesort)
    # FIXME: make MAXBLOCK a configuration parameter
    # It would be great if we could have the encoder specify what it needs
    # But, it would need at least the image size in most cases. RawEncode is
    # a tricky case.
    bufsize = max(MAXBLOCK, bufsize, im.size[0] * 4)  # see RawEncode.c
    try:
        stdout = fp == sys.stdout or fp == sys.stdout.buffer
    except (OSError, AttributeError):
        stdout = False
    if stdout:
        fp.flush()
        return
    try:
        fh = fp.fileno()
        fp.flush()
    except (AttributeError, io.UnsupportedOperation) as exc:
        # compress to Python file-compatible object
        for e, b, o, a in tile:
            e = Image._getencoder(im.mode, e, a, im.encoderconfig)
            if o > 0:
                fp.seek(o)
            e.setimage(im.im, b)
            if e.pushes_fd:
                e.setfd(fp)
                l, s = e.encode_to_pyfd()
            else:
                while True:
                    l, s, d = e.encode(bufsize)
                    fp.write(d)
                    if s:
                        break
            if s < 0:
                raise OSError(f"encoder error {s} when writing image file") from exc
            e.cleanup()
    else:
        # slight speedup: compress to real file object
        for e, b, o, a in tile:
            e = Image._getencoder(im.mode, e, a, im.encoderconfig)
            if o > 0:
                fp.seek(o)
            e.setimage(im.im, b)
            if e.pushes_fd:
                e.setfd(fp)
                l, s = e.encode_to_pyfd()
            else:
                s = e.encode_to_file(fh, bufsize)
            if s < 0:
                raise OSError(f"encoder error {s} when writing image file")
            e.cleanup()
    if hasattr(fp, "flush"):
        fp.flush()

start_point: [477, 0] | end_point: [540, 18] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'da', 'en'] | is_langid_reliable: True
_safe_read | (fp, size)

def _safe_read(fp, size):
    """
    Reads large blocks in a safe way. Unlike fp.read(n), this function
    doesn't trust the user. If the requested size is larger than
    SAFEBLOCK, the file is read block by block.

    :param fp: File handle. Must implement a <b>read</b> method.
    :param size: Number of bytes to read.
    :returns: A string containing <i>size</i> bytes of data.

    Raises an OSError if the file is truncated and the read cannot be completed
    """
    if size <= 0:
        return b""
    if size <= SAFEBLOCK:
        data = fp.read(size)
        if len(data) < size:
            raise OSError("Truncated File Read")
        return data
    data = []
    remaining_size = size
    while remaining_size > 0:
        block = fp.read(min(remaining_size, SAFEBLOCK))
        if not block:
            break
        data.append(block)
        remaining_size -= len(block)
    # Compare against the originally requested size; mutating `size` in the
    # loop would let a truncated read slip past this check.
    if sum(len(d) for d in data) < size:
        raise OSError("Truncated File Read")
    return b"".join(data)

start_point: [543, 0] | end_point: [572, 25] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
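A short sketch of the intended contract (the file name is hypothetical):

```python
# Sketch: read exactly the requested number of bytes or fail loudly.
with open("frame.bin", "rb") as fp:
    header = _safe_read(fp, 16)                  # OSError if under 16 bytes
    payload = _safe_read(fp, 10 * 1024 * 1024)   # large reads go block by block
```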
ImageFile.__init__ | (self, fp=None, filename=None)

def __init__(self, fp=None, filename=None):
    super().__init__()

    self._min_frame = 0

    self.custom_mimetype = None

    self.tile = None
    """ A list of tile descriptors, or ``None`` """

    self.readonly = 1  # until we know better

    self.decoderconfig = ()
    self.decodermaxblock = MAXBLOCK

    if isPath(fp):
        # filename
        self.fp = open(fp, "rb")
        self.filename = fp
        self._exclusive_fp = True
    else:
        # stream
        self.fp = fp
        self.filename = filename
        # can be overridden
        self._exclusive_fp = None

    try:
        try:
            self._open()
        except (
            IndexError,  # end of data
            TypeError,  # end of data (ord)
            KeyError,  # unsupported mode
            EOFError,  # got header but not the first frame
            struct.error,
        ) as v:
            raise SyntaxError(v) from v

        if not self.mode or self.size[0] <= 0:
            raise SyntaxError("not identified by this driver")
    except BaseException:
        # close the file only if we have opened it in this constructor
        if self._exclusive_fp:
            self.fp.close()
        raise

start_point: [91, 4] | end_point: [136, 17] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'fr', 'en'] | is_langid_reliable: True
ImageFile.verify | (self)

def verify(self):
    """Check file integrity"""

    # raise exception if something's wrong. must be called
    # directly after open, and closes file when finished.
    if self._exclusive_fp:
        self.fp.close()
    self.fp = None

start_point: [144, 4] | end_point: [151, 22] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
ImageFile.load | (self)

def load(self):
    """Load image data based on tile list"""

    if self.tile is None:
        raise OSError("cannot load this image")

    pixel = Image.Image.load(self)
    if not self.tile:
        return pixel

    self.map = None
    use_mmap = self.filename and len(self.tile) == 1
    # As of pypy 2.1.0, memory mapping was failing here.
    use_mmap = use_mmap and not hasattr(sys, "pypy_version_info")

    readonly = 0

    # look for read/seek overrides
    try:
        read = self.load_read
        # don't use mmap if there are custom read/seek functions
        use_mmap = False
    except AttributeError:
        read = self.fp.read

    try:
        seek = self.load_seek
        use_mmap = False
    except AttributeError:
        seek = self.fp.seek

    if use_mmap:
        # try memory mapping
        decoder_name, extents, offset, args = self.tile[0]
        if (
            decoder_name == "raw"
            and len(args) >= 3
            and args[0] == self.mode
            and args[0] in Image._MAPMODES
        ):
            try:
                # use mmap, if possible
                import mmap

                with open(self.filename) as fp:
                    self.map = mmap.mmap(fp.fileno(), 0, access=mmap.ACCESS_READ)
                self.im = Image.core.map_buffer(
                    self.map, self.size, decoder_name, offset, args
                )
                readonly = 1
                # After trashing self.im,
                # we might need to reload the palette data.
                if self.palette:
                    self.palette.dirty = 1
            except (AttributeError, OSError, ImportError):
                self.map = None

    self.load_prepare()
    err_code = -3  # initialize to unknown error
    if not self.map:
        # sort tiles in file order
        self.tile.sort(key=_tilesort)

        try:
            # FIXME: This is a hack to handle TIFF's JpegTables tag.
            prefix = self.tile_prefix
        except AttributeError:
            prefix = b""

        for decoder_name, extents, offset, args in self.tile:
            decoder = Image._getdecoder(
                self.mode, decoder_name, args, self.decoderconfig
            )
            try:
                seek(offset)
                decoder.setimage(self.im, extents)
                if decoder.pulls_fd:
                    decoder.setfd(self.fp)
                    status, err_code = decoder.decode(b"")
                else:
                    b = prefix
                    while True:
                        try:
                            s = read(self.decodermaxblock)
                        except (IndexError, struct.error) as e:
                            # truncated png/gif
                            if LOAD_TRUNCATED_IMAGES:
                                break
                            else:
                                raise OSError("image file is truncated") from e

                        if not s:  # truncated jpeg
                            if LOAD_TRUNCATED_IMAGES:
                                break
                            else:
                                raise OSError(
                                    "image file is truncated "
                                    f"({len(b)} bytes not processed)"
                                )

                        b = b + s
                        n, err_code = decoder.decode(b)
                        if n < 0:
                            break
                        b = b[n:]
            finally:
                # Need to cleanup here to prevent leaks
                decoder.cleanup()

    self.tile = []
    self.readonly = readonly

    self.load_end()

    if self._exclusive_fp and self._close_exclusive_fp_after_loading:
        self.fp.close()
    self.fp = None

    if not self.map and not LOAD_TRUNCATED_IMAGES and err_code < 0:
        # still raised if decoder fails to return anything
        raise_oserror(err_code)

    return Image.Image.load(self)
"Image",
".",
"_getdecoder",
"(",
"self",
".",
"mode",
",",
"decoder_name",
",",
"args",
",",
"self",
".",
"decoderconfig",
")",
"try",
":",
"seek",
"(",
"offset",
")",
"decoder",
".",
"setimage",
"(",
"self",
".",
"im",
",",
"extents",
")",
"if",
"decoder",
".",
"pulls_fd",
":",
"decoder",
".",
"setfd",
"(",
"self",
".",
"fp",
")",
"status",
",",
"err_code",
"=",
"decoder",
".",
"decode",
"(",
"b\"\"",
")",
"else",
":",
"b",
"=",
"prefix",
"while",
"True",
":",
"try",
":",
"s",
"=",
"read",
"(",
"self",
".",
"decodermaxblock",
")",
"except",
"(",
"IndexError",
",",
"struct",
".",
"error",
")",
"as",
"e",
":",
"# truncated png/gif",
"if",
"LOAD_TRUNCATED_IMAGES",
":",
"break",
"else",
":",
"raise",
"OSError",
"(",
"\"image file is truncated\"",
")",
"from",
"e",
"if",
"not",
"s",
":",
"# truncated jpeg",
"if",
"LOAD_TRUNCATED_IMAGES",
":",
"break",
"else",
":",
"raise",
"OSError",
"(",
"\"image file is truncated \"",
"f\"({len(b)} bytes not processed)\"",
")",
"b",
"=",
"b",
"+",
"s",
"n",
",",
"err_code",
"=",
"decoder",
".",
"decode",
"(",
"b",
")",
"if",
"n",
"<",
"0",
":",
"break",
"b",
"=",
"b",
"[",
"n",
":",
"]",
"finally",
":",
"# Need to cleanup here to prevent leaks",
"decoder",
".",
"cleanup",
"(",
")",
"self",
".",
"tile",
"=",
"[",
"]",
"self",
".",
"readonly",
"=",
"readonly",
"self",
".",
"load_end",
"(",
")",
"if",
"self",
".",
"_exclusive_fp",
"and",
"self",
".",
"_close_exclusive_fp_after_loading",
":",
"self",
".",
"fp",
".",
"close",
"(",
")",
"self",
".",
"fp",
"=",
"None",
"if",
"not",
"self",
".",
"map",
"and",
"not",
"LOAD_TRUNCATED_IMAGES",
"and",
"err_code",
"<",
"0",
":",
"# still raised if decoder fails to return anything",
"raise_oserror",
"(",
"err_code",
")",
"return",
"Image",
".",
"Image",
".",
"load",
"(",
"self",
")"
] | [
153,
4
] | [
275,
37
] | python | en | ['en', 'da', 'en'] | True |
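The truncated-image branches in load() above are gated by the module-level LOAD_TRUNCATED_IMAGES flag. A minimal opt-in sketch (the file name is an illustrative assumption):

from PIL import Image, ImageFile

ImageFile.LOAD_TRUNCATED_IMAGES = True  # tolerate truncated image data
with Image.open("maybe_truncated.jpg") as im:  # hypothetical file name
    im.load()  # stops at the truncation point instead of raising OSError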
StubImageFile._load | (self) | (Hook) Find actual image loader. | (Hook) Find actual image loader. | def _load(self):
"""(Hook) Find actual image loader."""
raise NotImplementedError("StubImageFile subclass must implement _load") | [
"def",
"_load",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"StubImageFile subclass must implement _load\"",
")"
] | [
333,
4
] | [
335,
80
] | python | en | ['en', 'da', 'en'] | True |
Parser.reset | (self) |
(Consumer) Reset the parser. Note that you can only call this
method immediately after you've created a parser; parser
instances cannot be reused.
|
(Consumer) Reset the parser. Note that you can only call this
method immediately after you've created a parser; parser
instances cannot be reused.
| def reset(self):
"""
(Consumer) Reset the parser. Note that you can only call this
method immediately after you've created a parser; parser
instances cannot be reused.
"""
assert self.data is None, "cannot reuse parsers" | [
"def",
"reset",
"(",
"self",
")",
":",
"assert",
"self",
".",
"data",
"is",
"None",
",",
"\"cannot reuse parsers\""
] | [
351,
4
] | [
357,
56
] | python | en | ['en', 'error', 'th'] | False |
Parser.feed | (self, data) |
(Consumer) Feed data to the parser.
:param data: A string buffer.
:exception OSError: If the parser failed to parse the image file.
|
(Consumer) Feed data to the parser. | def feed(self, data):
"""
(Consumer) Feed data to the parser.
:param data: A string buffer.
:exception OSError: If the parser failed to parse the image file.
"""
# collect data
if self.finished:
return
if self.data is None:
self.data = data
else:
self.data = self.data + data
# parse what we have
if self.decoder:
if self.offset > 0:
# skip header
skip = min(len(self.data), self.offset)
self.data = self.data[skip:]
self.offset = self.offset - skip
if self.offset > 0 or not self.data:
return
n, e = self.decoder.decode(self.data)
if n < 0:
# end of stream
self.data = None
self.finished = 1
if e < 0:
# decoding error
self.image = None
raise_oserror(e)
else:
# end of image
return
self.data = self.data[n:]
elif self.image:
# if we end up here with no decoder, this file cannot
# be incrementally parsed. wait until we've gotten all
# available data
pass
else:
# attempt to open this file
try:
with io.BytesIO(self.data) as fp:
im = Image.open(fp)
except OSError:
# traceback.print_exc()
pass # not enough data
else:
flag = hasattr(im, "load_seek") or hasattr(im, "load_read")
if flag or len(im.tile) != 1:
# custom load code, or multiple tiles
self.decoder = None
else:
# initialize decoder
im.load_prepare()
d, e, o, a = im.tile[0]
im.tile = []
self.decoder = Image._getdecoder(im.mode, d, a, im.decoderconfig)
self.decoder.setimage(im.im, e)
# calculate decoder offset
self.offset = o
if self.offset <= len(self.data):
self.data = self.data[self.offset :]
self.offset = 0
self.image = im | [
"def",
"feed",
"(",
"self",
",",
"data",
")",
":",
"# collect data",
"if",
"self",
".",
"finished",
":",
"return",
"if",
"self",
".",
"data",
"is",
"None",
":",
"self",
".",
"data",
"=",
"data",
"else",
":",
"self",
".",
"data",
"=",
"self",
".",
"data",
"+",
"data",
"# parse what we have",
"if",
"self",
".",
"decoder",
":",
"if",
"self",
".",
"offset",
">",
"0",
":",
"# skip header",
"skip",
"=",
"min",
"(",
"len",
"(",
"self",
".",
"data",
")",
",",
"self",
".",
"offset",
")",
"self",
".",
"data",
"=",
"self",
".",
"data",
"[",
"skip",
":",
"]",
"self",
".",
"offset",
"=",
"self",
".",
"offset",
"-",
"skip",
"if",
"self",
".",
"offset",
">",
"0",
"or",
"not",
"self",
".",
"data",
":",
"return",
"n",
",",
"e",
"=",
"self",
".",
"decoder",
".",
"decode",
"(",
"self",
".",
"data",
")",
"if",
"n",
"<",
"0",
":",
"# end of stream",
"self",
".",
"data",
"=",
"None",
"self",
".",
"finished",
"=",
"1",
"if",
"e",
"<",
"0",
":",
"# decoding error",
"self",
".",
"image",
"=",
"None",
"raise_oserror",
"(",
"e",
")",
"else",
":",
"# end of image",
"return",
"self",
".",
"data",
"=",
"self",
".",
"data",
"[",
"n",
":",
"]",
"elif",
"self",
".",
"image",
":",
"# if we end up here with no decoder, this file cannot",
"# be incrementally parsed. wait until we've gotten all",
"# available data",
"pass",
"else",
":",
"# attempt to open this file",
"try",
":",
"with",
"io",
".",
"BytesIO",
"(",
"self",
".",
"data",
")",
"as",
"fp",
":",
"im",
"=",
"Image",
".",
"open",
"(",
"fp",
")",
"except",
"OSError",
":",
"# traceback.print_exc()",
"pass",
"# not enough data",
"else",
":",
"flag",
"=",
"hasattr",
"(",
"im",
",",
"\"load_seek\"",
")",
"or",
"hasattr",
"(",
"im",
",",
"\"load_read\"",
")",
"if",
"flag",
"or",
"len",
"(",
"im",
".",
"tile",
")",
"!=",
"1",
":",
"# custom load code, or multiple tiles",
"self",
".",
"decode",
"=",
"None",
"else",
":",
"# initialize decoder",
"im",
".",
"load_prepare",
"(",
")",
"d",
",",
"e",
",",
"o",
",",
"a",
"=",
"im",
".",
"tile",
"[",
"0",
"]",
"im",
".",
"tile",
"=",
"[",
"]",
"self",
".",
"decoder",
"=",
"Image",
".",
"_getdecoder",
"(",
"im",
".",
"mode",
",",
"d",
",",
"a",
",",
"im",
".",
"decoderconfig",
")",
"self",
".",
"decoder",
".",
"setimage",
"(",
"im",
".",
"im",
",",
"e",
")",
"# calculate decoder offset",
"self",
".",
"offset",
"=",
"o",
"if",
"self",
".",
"offset",
"<=",
"len",
"(",
"self",
".",
"data",
")",
":",
"self",
".",
"data",
"=",
"self",
".",
"data",
"[",
"self",
".",
"offset",
":",
"]",
"self",
".",
"offset",
"=",
"0",
"self",
".",
"image",
"=",
"im"
] | [
359,
4
] | [
437,
31
] | python | en | ['en', 'error', 'th'] | False |
Parser.close | (self) |
(Consumer) Close the stream.
:returns: An image object.
:exception OSError: If the parser failed to parse the image file either
because it cannot be identified or cannot be
decoded.
|
(Consumer) Close the stream. | def close(self):
"""
(Consumer) Close the stream.
:returns: An image object.
:exception OSError: If the parser failed to parse the image file either
because it cannot be identified or cannot be
decoded.
"""
# finish decoding
if self.decoder:
# get rid of what's left in the buffers
self.feed(b"")
self.data = self.decoder = None
if not self.finished:
raise OSError("image was incomplete")
if not self.image:
raise OSError("cannot parse this image")
if self.data:
# incremental parsing not possible; reopen the file
# now that we have all data
with io.BytesIO(self.data) as fp:
try:
self.image = Image.open(fp)
finally:
self.image.load()
return self.image | [
"def",
"close",
"(",
"self",
")",
":",
"# finish decoding",
"if",
"self",
".",
"decoder",
":",
"# get rid of what's left in the buffers",
"self",
".",
"feed",
"(",
"b\"\"",
")",
"self",
".",
"data",
"=",
"self",
".",
"decoder",
"=",
"None",
"if",
"not",
"self",
".",
"finished",
":",
"raise",
"OSError",
"(",
"\"image was incomplete\"",
")",
"if",
"not",
"self",
".",
"image",
":",
"raise",
"OSError",
"(",
"\"cannot parse this image\"",
")",
"if",
"self",
".",
"data",
":",
"# incremental parsing not possible; reopen the file",
"# not that we have all data",
"with",
"io",
".",
"BytesIO",
"(",
"self",
".",
"data",
")",
"as",
"fp",
":",
"try",
":",
"self",
".",
"image",
"=",
"Image",
".",
"open",
"(",
"fp",
")",
"finally",
":",
"self",
".",
"image",
".",
"load",
"(",
")",
"return",
"self",
".",
"image"
] | [
445,
4
] | [
471,
25
] | python | en | ['en', 'error', 'th'] | False |
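Together, feed() and close() let an image be decoded incrementally from a stream of chunks. A minimal sketch, assuming a hypothetical file "photo.jpg" read in 1 KiB pieces:

from PIL import ImageFile

parser = ImageFile.Parser()
with open("photo.jpg", "rb") as f:  # file name is an assumption
    while True:
        chunk = f.read(1024)
        if not chunk:
            break
        parser.feed(chunk)  # buffers and, when possible, decodes incrementally
image = parser.close()  # raises OSError if the stream was incomplete
print(image.size)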
PyDecoder.init | (self, args) |
Override to perform decoder specific initialization
:param args: Array of args items from the tile entry
:returns: None
|
Override to perform decoder specific initialization | def init(self, args):
"""
Override to perform decoder specific initialization
:param args: Array of args items from the tile entry
:returns: None
"""
self.args = args | [
"def",
"init",
"(",
"self",
",",
"args",
")",
":",
"self",
".",
"args",
"=",
"args"
] | [
603,
4
] | [
610,
24
] | python | en | ['en', 'error', 'th'] | False |
PyDecoder.decode | (self, buffer) |
Override to perform the decoding process.
:param buffer: A bytes object with the data to be decoded.
:returns: A tuple of ``(bytes consumed, errcode)``.
If finished with decoding return <0 for the bytes consumed.
Err codes are from :data:`.ImageFile.ERRORS`.
|
Override to perform the decoding process. | def decode(self, buffer):
"""
Override to perform the decoding process.
:param buffer: A bytes object with the data to be decoded.
:returns: A tuple of ``(bytes consumed, errcode)``.
If finished with decoding return <0 for the bytes consumed.
Err codes are from :data:`.ImageFile.ERRORS`.
"""
raise NotImplementedError() | [
"def",
"decode",
"(",
"self",
",",
"buffer",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | [
616,
4
] | [
625,
35
] | python | en | ['en', 'error', 'th'] | False |
PyDecoder.cleanup | (self) |
Override to perform decoder specific cleanup
:returns: None
|
Override to perform decoder specific cleanup | def cleanup(self):
"""
Override to perform decoder specific cleanup
:returns: None
"""
pass | [
"def",
"cleanup",
"(",
"self",
")",
":",
"pass"
] | [
627,
4
] | [
633,
12
] | python | en | ['en', 'error', 'th'] | False |
PyDecoder.setfd | (self, fd) |
Called from ImageFile to set the python file-like object
:param fd: A python file-like object
:returns: None
|
Called from ImageFile to set the python file-like object | def setfd(self, fd):
"""
Called from ImageFile to set the python file-like object
:param fd: A python file-like object
:returns: None
"""
self.fd = fd | [
"def",
"setfd",
"(",
"self",
",",
"fd",
")",
":",
"self",
".",
"fd",
"=",
"fd"
] | [
635,
4
] | [
642,
20
] | python | en | ['en', 'error', 'th'] | False |
PyDecoder.setimage | (self, im, extents=None) |
Called from ImageFile to set the core output image for the decoder
:param im: A core image object
:param extents: a 4-tuple of (x0, y0, x1, y1) defining the rectangle
for this tile
:returns: None
|
Called from ImageFile to set the core output image for the decoder | def setimage(self, im, extents=None):
"""
Called from ImageFile to set the core output image for the decoder
:param im: A core image object
:param extents: a 4-tuple of (x0, y0, x1, y1) defining the rectangle
for this tile
:returns: None
"""
# following c code
self.im = im
if extents:
(x0, y0, x1, y1) = extents
else:
(x0, y0, x1, y1) = (0, 0, 0, 0)
if x0 == 0 and x1 == 0:
self.state.xsize, self.state.ysize = self.im.size
else:
self.state.xoff = x0
self.state.yoff = y0
self.state.xsize = x1 - x0
self.state.ysize = y1 - y0
if self.state.xsize <= 0 or self.state.ysize <= 0:
raise ValueError("Size cannot be negative")
if (
self.state.xsize + self.state.xoff > self.im.size[0]
or self.state.ysize + self.state.yoff > self.im.size[1]
):
raise ValueError("Tile cannot extend outside image") | [
"def",
"setimage",
"(",
"self",
",",
"im",
",",
"extents",
"=",
"None",
")",
":",
"# following c code",
"self",
".",
"im",
"=",
"im",
"if",
"extents",
":",
"(",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
")",
"=",
"extents",
"else",
":",
"(",
"x0",
",",
"y0",
",",
"x1",
",",
"y1",
")",
"=",
"(",
"0",
",",
"0",
",",
"0",
",",
"0",
")",
"if",
"x0",
"==",
"0",
"and",
"x1",
"==",
"0",
":",
"self",
".",
"state",
".",
"xsize",
",",
"self",
".",
"state",
".",
"ysize",
"=",
"self",
".",
"im",
".",
"size",
"else",
":",
"self",
".",
"state",
".",
"xoff",
"=",
"x0",
"self",
".",
"state",
".",
"yoff",
"=",
"y0",
"self",
".",
"state",
".",
"xsize",
"=",
"x1",
"-",
"x0",
"self",
".",
"state",
".",
"ysize",
"=",
"y1",
"-",
"y0",
"if",
"self",
".",
"state",
".",
"xsize",
"<=",
"0",
"or",
"self",
".",
"state",
".",
"ysize",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"\"Size cannot be negative\"",
")",
"if",
"(",
"self",
".",
"state",
".",
"xsize",
"+",
"self",
".",
"state",
".",
"xoff",
">",
"self",
".",
"im",
".",
"size",
"[",
"0",
"]",
"or",
"self",
".",
"state",
".",
"ysize",
"+",
"self",
".",
"state",
".",
"yoff",
">",
"self",
".",
"im",
".",
"size",
"[",
"1",
"]",
")",
":",
"raise",
"ValueError",
"(",
"\"Tile cannot extend outside image\"",
")"
] | [
644,
4
] | [
677,
64
] | python | en | ['en', 'error', 'th'] | False |
PyDecoder.set_as_raw | (self, data, rawmode=None) |
Convenience method to set the internal image from a stream of raw data
:param data: Bytes to be set
:param rawmode: The rawmode to be used for the decoder.
If not specified, it will default to the mode of the image
:returns: None
|
Convenience method to set the internal image from a stream of raw data | def set_as_raw(self, data, rawmode=None):
"""
Convenience method to set the internal image from a stream of raw data
:param data: Bytes to be set
:param rawmode: The rawmode to be used for the decoder.
If not specified, it will default to the mode of the image
:returns: None
"""
if not rawmode:
rawmode = self.mode
d = Image._getdecoder(self.mode, "raw", (rawmode))
d.setimage(self.im, self.state.extents())
s = d.decode(data)
if s[0] >= 0:
raise ValueError("not enough image data")
if s[1] != 0:
raise ValueError("cannot decode image data") | [
"def",
"set_as_raw",
"(",
"self",
",",
"data",
",",
"rawmode",
"=",
"None",
")",
":",
"if",
"not",
"rawmode",
":",
"rawmode",
"=",
"self",
".",
"mode",
"d",
"=",
"Image",
".",
"_getdecoder",
"(",
"self",
".",
"mode",
",",
"\"raw\"",
",",
"(",
"rawmode",
")",
")",
"d",
".",
"setimage",
"(",
"self",
".",
"im",
",",
"self",
".",
"state",
".",
"extents",
"(",
")",
")",
"s",
"=",
"d",
".",
"decode",
"(",
"data",
")",
"if",
"s",
"[",
"0",
"]",
">=",
"0",
":",
"raise",
"ValueError",
"(",
"\"not enough image data\"",
")",
"if",
"s",
"[",
"1",
"]",
"!=",
"0",
":",
"raise",
"ValueError",
"(",
"\"cannot decode image data\"",
")"
] | [
679,
4
] | [
698,
56
] | python | en | ['en', 'error', 'th'] | False |
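The PyDecoder hooks above combine into a custom decoder roughly as in the following hedged sketch; the format name "MYRAW" is hypothetical, and the body assumes the whole payload arrives in a single buffer:

from PIL import Image, ImageFile

class MyRawDecoder(ImageFile.PyDecoder):
    _pulls_fd = False  # consume buffers passed to decode() instead of the fd

    def decode(self, buffer):
        # Treat the buffer as raw pixel data in the image's own mode.
        self.set_as_raw(bytes(buffer))
        return -1, 0  # -1: all data consumed; 0: no error

Image.register_decoder("MYRAW", MyRawDecoder)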
Cutout.__call__ | (self, img) | Apply cutout augmentation: zero out n_holes square patches of side length in the image. | Apply cutout augmentation: zero out n_holes square patches of side length in the image. | def __call__(self, img):
"""Apply cutout augmentation: zero out n_holes square patches of side length in the image."""
h, w = img.shape[:2]
for n in range(self.n_holes):
y = np.random.randint(h)
x = np.random.randint(w)
y1 = np.clip(y - self.length // 2, 0, h)
y2 = np.clip(y + self.length // 2, 0, h)
x1 = np.clip(x - self.length // 2, 0, w)
x2 = np.clip(x + self.length // 2, 0, w)
img[y1:y2, x1:x2] = 0
return img | [
"def",
"__call__",
"(",
"self",
",",
"img",
")",
":",
"h",
",",
"w",
"=",
"img",
".",
"shape",
"[",
":",
"2",
"]",
"mask",
"=",
"np",
".",
"ones",
"(",
"(",
"h",
",",
"w",
")",
",",
"np",
".",
"float32",
")",
"for",
"n",
"in",
"range",
"(",
"self",
".",
"n_holes",
")",
":",
"y",
"=",
"np",
".",
"random",
".",
"randint",
"(",
"h",
")",
"x",
"=",
"np",
".",
"random",
".",
"randint",
"(",
"w",
")",
"y1",
"=",
"np",
".",
"clip",
"(",
"y",
"-",
"self",
".",
"length",
"//",
"2",
",",
"0",
",",
"h",
")",
"y2",
"=",
"np",
".",
"clip",
"(",
"y",
"+",
"self",
".",
"length",
"//",
"2",
",",
"0",
",",
"h",
")",
"x1",
"=",
"np",
".",
"clip",
"(",
"x",
"-",
"self",
".",
"length",
"//",
"2",
",",
"0",
",",
"w",
")",
"x2",
"=",
"np",
".",
"clip",
"(",
"x",
"+",
"self",
".",
"length",
"//",
"2",
",",
"0",
",",
"w",
")",
"img",
"[",
"y1",
":",
"y2",
",",
"x1",
":",
"x2",
"]",
"=",
"0",
"return",
"img"
] | [
25,
4
] | [
40,
18
] | python | en | ['fr', 'ny', 'en'] | False |
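Illustrative use of Cutout on a random image array; the constructor signature Cutout(n_holes=1, length=16) is inferred from the attributes used in __call__ and is an assumption:

import numpy as np

img = np.random.rand(32, 32, 3).astype(np.float32)
augmented = Cutout(n_holes=1, length=16)(img)  # zeroes one 16x16 patch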
get_user_agent | () |
Provides the `USER_AGENT` string that is passed to the Cloudinary servers.
Prepends `USER_PLATFORM` if it is defined.
:returns: the user agent
:rtype: str
|
Provides the `USER_AGENT` string that is passed to the Cloudinary servers.
Prepends `USER_PLATFORM` if it is defined. | def get_user_agent():
"""
Provides the `USER_AGENT` string that is passed to the Cloudinary servers.
Prepends `USER_PLATFORM` if it is defined.
:returns: the user agent
:rtype: str
"""
if USER_PLATFORM == "":
return USER_AGENT
else:
return USER_PLATFORM + " " + USER_AGENT | [
"def",
"get_user_agent",
"(",
")",
":",
"if",
"USER_PLATFORM",
"==",
"\"\"",
":",
"return",
"USER_AGENT",
"else",
":",
"return",
"USER_PLATFORM",
"+",
"\" \"",
"+",
"USER_AGENT"
] | [
57,
0
] | [
69,
47
] | python | en | ['en', 'error', 'th'] | False |
_run | (args) | TODO(praneetdutta): Formalize train sub-routine
| TODO(praneetdutta): Formalize train sub-routine
| def _run(args):
""" TODO(praneetdutta): Formalize train sub-routine
"""
with tf.Session() as sess:
env = gym.make(args[0].environment)
env = StackFrameEnv(env, args[0].frame_stack,
args[0].img_height, args[0].img_width)
state = env.reset()
num_actions = env.action_space.n
if args[0].mode == 'Train':
# Add timestamp; important for HP tuning so models don't clobber each
# other.
model_dir = hp_directory(args[0].model_dir)
else:
model_dir = args[0].model_dir
agent = Agent(
height=args[0].img_height,
width=args[0].img_width,
actions=env.action_space.n,
channels=args[0].frame_stack,
model_dir=model_dir,
discount=args[0].discount_factor)
agent.create_model(lr=args[0].learning_rate)
print ("STARTING...")
if not os.path.exists(model_dir) and args[0].save_model:
os.makedirs(model_dir)
print("MODEL WILL BE STORED AT: ", model_dir)
target_agent = Agent(height=args[0].img_height, width=args[0].img_width,
actions=env.action_space.n, channels=args[0].frame_stack)
target_agent.create_model(args[0].learning_rate)
target_network = target_agent.model
target_network.set_weights(agent.model.get_weights())
if args[0].mode != 'Train':
trained_model_path = args[0].load_model
try:
agent.model.load_weights(trained_model_path)
except Exception:
print('{} is not a valid .h5 model.'.format(trained_model_path))
eta = args[0].init_eta
Buffer = ExpBuffer(max_size=args[0].buffer_size,
min_size=args[0].start_train)
episode_reward, episode_number, done = 0, 0, False
episode_run = True
for curr_step in range(args[0].steps):
if (episode_number % DISPLAY_RESULTS == 0 and
episode_run) or args[0].mode != 'Train':
episode_reward = agent.play(env, model_dir, args[0].mode)
print('CURRENT STEP: {}, EPISODE_NUMBER: {}, EPISODE REWARD: {},'
'EPSILON: {}'.format(curr_step, episode_number, episode_reward,
eta))
episode_run = False
if args[0].mode == 'Train':
eta = anneal_exploration(eta, curr_step, args[0].steps / 10.0,
args[0].start_train, args[0].init_eta,
args[0].min_eta, 'linear')
if eta > np.random.rand() or curr_step < args[0].start_train:
action = env.action_space.sample()
else:
action = agent.predict_action(state)
next_state, reward, done, info = env.step(action)
Buffer.add_exp([state, next_state, reward, action, done])
ready_to_update_model = curr_step > args[0].start_train and len(
Buffer.buffer) > Buffer.min_size
if ready_to_update_model:
exp_state, exp_next_state, exp_reward, exp_action, exp_done = Buffer.sample_experiences(
args[0].batch_size)
agent.batch_train(exp_state, exp_next_state, exp_reward, exp_action,
exp_done, target_network, args[0].Q_learning)
if curr_step % args[0].update_target == 0:
target_network.set_weights(agent.model.get_weights())
if curr_step % (SAVE_STEPS_FACTOR *
args[0].update_target) == 0 and args[0].save_model:
print('SAVING MODEL AT STEP: ', curr_step)
models.save_model(
agent.model,
model_dir + 'model_' + str(episode_number) + '_.h5')
#Resets state
if done or args[0].mode != 'Train':
episode_number += 1
episode_run = True
state = env.reset()
else:
state = next_state | [
"def",
"_run",
"(",
"args",
")",
":",
"with",
"tf",
".",
"Session",
"(",
")",
"as",
"sess",
":",
"env",
"=",
"gym",
".",
"make",
"(",
"args",
"[",
"0",
"]",
".",
"environment",
")",
"env",
"=",
"StackFrameEnv",
"(",
"env",
",",
"args",
"[",
"0",
"]",
".",
"frame_stack",
",",
"args",
"[",
"0",
"]",
".",
"img_height",
",",
"args",
"[",
"0",
"]",
".",
"img_width",
")",
"state",
"=",
"env",
".",
"reset",
"(",
")",
"num_actions",
"=",
"env",
".",
"action_space",
".",
"n",
"if",
"args",
"[",
"0",
"]",
".",
"mode",
"==",
"'Train'",
":",
"# Add timestamp; important for HP tuning so models don't clobber each",
"# other.",
"model_dir",
"=",
"hp_directory",
"(",
"args",
"[",
"0",
"]",
".",
"model_dir",
")",
"else",
":",
"model_dir",
"=",
"args",
"[",
"0",
"]",
".",
"model_dir",
"agent",
"=",
"Agent",
"(",
"height",
"=",
"args",
"[",
"0",
"]",
".",
"img_height",
",",
"width",
"=",
"args",
"[",
"0",
"]",
".",
"img_width",
",",
"actions",
"=",
"env",
".",
"action_space",
".",
"n",
",",
"channels",
"=",
"args",
"[",
"0",
"]",
".",
"frame_stack",
",",
"model_dir",
"=",
"model_dir",
",",
"discount",
"=",
"args",
"[",
"0",
"]",
".",
"discount_factor",
")",
"agent",
".",
"create_model",
"(",
"lr",
"=",
"args",
"[",
"0",
"]",
".",
"learning_rate",
")",
"print",
"(",
"\"STARTING...\"",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"model_dir",
")",
"and",
"args",
"[",
"0",
"]",
".",
"save_model",
":",
"os",
".",
"makedirs",
"(",
"model_dir",
")",
"print",
"(",
"\"MODEL WILL BE STORED AT: \"",
",",
"model_dir",
")",
"target_agent",
"=",
"Agent",
"(",
"height",
"=",
"args",
"[",
"0",
"]",
".",
"img_height",
",",
"width",
"=",
"args",
"[",
"0",
"]",
".",
"img_width",
",",
"actions",
"=",
"env",
".",
"action_space",
".",
"n",
",",
"channels",
"=",
"args",
"[",
"0",
"]",
".",
"frame_stack",
")",
"target_agent",
".",
"create_model",
"(",
"args",
"[",
"0",
"]",
".",
"learning_rate",
")",
"target_network",
"=",
"target_agent",
".",
"model",
"target_network",
".",
"set_weights",
"(",
"agent",
".",
"model",
".",
"get_weights",
"(",
")",
")",
"if",
"args",
"[",
"0",
"]",
".",
"mode",
"!=",
"'Train'",
":",
"trained_model_path",
"=",
"args",
"[",
"0",
"]",
".",
"load_model",
"try",
":",
"agent",
".",
"model",
".",
"load_weights",
"(",
"trained_model_path",
")",
"except",
":",
"print",
"(",
"'{} is not a valid .h5 model.'",
".",
"format",
"(",
"trained_model_path",
")",
")",
"eta",
"=",
"args",
"[",
"0",
"]",
".",
"init_eta",
"Buffer",
"=",
"ExpBuffer",
"(",
"max_size",
"=",
"args",
"[",
"0",
"]",
".",
"buffer_size",
",",
"min_size",
"=",
"args",
"[",
"0",
"]",
".",
"start_train",
")",
"episode_reward",
",",
"episode_number",
",",
"done",
"=",
"0",
",",
"0",
",",
"False",
"episode_run",
"=",
"True",
"for",
"curr_step",
"in",
"range",
"(",
"args",
"[",
"0",
"]",
".",
"steps",
")",
":",
"if",
"(",
"episode_number",
"%",
"DISPLAY_RESULTS",
"==",
"0",
"and",
"episode_run",
")",
"or",
"args",
"[",
"0",
"]",
".",
"mode",
"!=",
"'Train'",
":",
"episode_reward",
"=",
"agent",
".",
"play",
"(",
"env",
",",
"model_dir",
",",
"args",
"[",
"0",
"]",
".",
"mode",
")",
"print",
"(",
"'CURRENT STEP: {}, EPISODE_NUMBER: {}, EPISODE REWARD: {},'",
"'EPSILON: {}'",
".",
"format",
"(",
"curr_step",
",",
"episode_number",
",",
"episode_reward",
",",
"eta",
")",
")",
"episode_run",
"=",
"False",
"if",
"args",
"[",
"0",
"]",
".",
"mode",
"==",
"'Train'",
":",
"eta",
"=",
"anneal_exploration",
"(",
"eta",
",",
"curr_step",
",",
"args",
"[",
"0",
"]",
".",
"steps",
"/",
"10.0",
",",
"args",
"[",
"0",
"]",
".",
"start_train",
",",
"args",
"[",
"0",
"]",
".",
"init_eta",
",",
"args",
"[",
"0",
"]",
".",
"min_eta",
",",
"'linear'",
")",
"if",
"eta",
">",
"np",
".",
"random",
".",
"rand",
"(",
")",
"or",
"curr_step",
"<",
"args",
"[",
"0",
"]",
".",
"start_train",
":",
"action",
"=",
"env",
".",
"action_space",
".",
"sample",
"(",
")",
"else",
":",
"action",
"=",
"agent",
".",
"predict_action",
"(",
"state",
")",
"next_state",
",",
"reward",
",",
"done",
",",
"info",
"=",
"env",
".",
"step",
"(",
"action",
")",
"Buffer",
".",
"add_exp",
"(",
"[",
"state",
",",
"next_state",
",",
"reward",
",",
"action",
",",
"done",
"]",
")",
"ready_to_update_model",
"=",
"curr_step",
">",
"args",
"[",
"0",
"]",
".",
"start_train",
"and",
"len",
"(",
"Buffer",
".",
"buffer",
")",
">",
"Buffer",
".",
"min_size",
"if",
"ready_to_update_model",
":",
"exp_state",
",",
"exp_next_state",
",",
"exp_reward",
",",
"exp_action",
",",
"exp_done",
"=",
"Buffer",
".",
"sample_experiences",
"(",
"args",
"[",
"0",
"]",
".",
"batch_size",
")",
"agent",
".",
"batch_train",
"(",
"exp_state",
",",
"exp_next_state",
",",
"exp_reward",
",",
"exp_action",
",",
"exp_done",
",",
"target_network",
",",
"args",
"[",
"0",
"]",
".",
"Q_learning",
")",
"if",
"curr_step",
"%",
"args",
"[",
"0",
"]",
".",
"update_target",
"==",
"0",
":",
"target_network",
".",
"set_weights",
"(",
"agent",
".",
"model",
".",
"get_weights",
"(",
")",
")",
"if",
"curr_step",
"%",
"(",
"SAVE_STEPS_FACTOR",
"*",
"args",
"[",
"0",
"]",
".",
"update_target",
")",
"==",
"0",
"and",
"args",
"[",
"0",
"]",
".",
"save_model",
":",
"print",
"(",
"'SAVING MODEL AT STEP: '",
",",
"curr_step",
")",
"models",
".",
"save_model",
"(",
"agent",
".",
"model",
",",
"model_dir",
"+",
"'model_'",
"+",
"str",
"(",
"episode_number",
")",
"+",
"'_.h5'",
")",
"#Resets state",
"if",
"done",
"or",
"args",
"[",
"0",
"]",
".",
"mode",
"!=",
"'Train'",
":",
"episode_number",
"+=",
"1",
"episode_run",
"=",
"True",
"state",
"=",
"env",
".",
"reset",
"(",
")",
"else",
":",
"state",
"=",
"next_state"
] | [
39,
0
] | [
127,
28
] | python | en | ['en', 'it', 'sw'] | False |
_parse_arguments | (argv) | Parse command-line arguments. | Parse command-line arguments. | def _parse_arguments(argv):
"""Parse command-line arguments."""
parser = argparse.ArgumentParser()
parser.add_argument(
'--img_width',
help='Width of desired image observation',
type=float,
default=128)
parser.add_argument(
'--img_height',
help='Height of desired image observation',
type=float,
default=128)
parser.add_argument(
'--environment',
help='Atari Game Environment to be used.',
type=str,
default='Breakout-v0')
parser.add_argument(
'--frame_stack',
help='Number of frames to stack ',
type=float,
default=4)
parser.add_argument(
'--steps',
help='Number of steps for the agent to play the game',
type=int,
default=5000000)
parser.add_argument(
'--start_train',
help='Number of steps after which to start training',
type=int,
default=5000)
parser.add_argument(
'--update_target',
help='Number of steps after which to update the target network',
type=int,
default=1000)
parser.add_argument(
'--buffer_size',
help='Size of the experience buffer',
type=int,
default=200000)
parser.add_argument(
'--mode',
help='Whether we are training the agent or playing the game',
type=str,
default="Train")
parser.add_argument(
'--init_eta', help='Epsilon for taking actions', type=float, default=0.95)
parser.add_argument(
'--model_dir',
help='Directory where to save the given model',
type=str,
default='models/')
parser.add_argument(
'--save_model',
help='Whether to save the model',
type=bool,
default= False)
parser.add_argument(
'--batch_size',
help='Batch size for sampling and training model',
type=int,
default=32)
parser.add_argument(
'--learning_rate',
help='Learning rate for agent and target network',
type=float,
default=0.00025)
parser.add_argument(
'--Q_learning',
help='Type of Q Learning to be implemented',
type=str,
default= "Double")
parser.add_argument(
'--min_eta', help='Lower bound of epsilon', type=float, default=0.1)
parser.add_argument(
'--discount_factor',
help='Discount Factor for TD Learning',
type=float,
default=0.95)
parser.add_argument(
'--load_model', help='Loads the model', type=str, default=None)
return parser.parse_known_args(argv) | [
"def",
"_parse_arguments",
"(",
"argv",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
")",
"parser",
".",
"add_argument",
"(",
"'--img_width'",
",",
"help",
"=",
"'Width of desired image observation'",
",",
"type",
"=",
"float",
",",
"default",
"=",
"128",
")",
"parser",
".",
"add_argument",
"(",
"'--img_height'",
",",
"help",
"=",
"'Height of desired image observation'",
",",
"type",
"=",
"float",
",",
"default",
"=",
"128",
")",
"parser",
".",
"add_argument",
"(",
"'--environment'",
",",
"help",
"=",
"'Atari Game Environment to be used.'",
",",
"type",
"=",
"str",
",",
"default",
"=",
"'Breakout-v0'",
")",
"parser",
".",
"add_argument",
"(",
"'--frame_stack'",
",",
"help",
"=",
"'Number of frames to stack '",
",",
"type",
"=",
"float",
",",
"default",
"=",
"4",
")",
"parser",
".",
"add_argument",
"(",
"'--steps'",
",",
"help",
"=",
"'Number of steps for the agent to play the game'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"5000000",
")",
"parser",
".",
"add_argument",
"(",
"'--start_train'",
",",
"help",
"=",
"'Number of steps after which to start training'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"5000",
")",
"parser",
".",
"add_argument",
"(",
"'--update_target'",
",",
"help",
"=",
"'Number of steps after which to update the target network'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"1000",
")",
"parser",
".",
"add_argument",
"(",
"'--buffer_size'",
",",
"help",
"=",
"'Size of the experience buffer'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"200000",
")",
"parser",
".",
"add_argument",
"(",
"'--mode'",
",",
"help",
"=",
"'Whether we are training the agent or playing the game'",
",",
"type",
"=",
"str",
",",
"default",
"=",
"\"Train\"",
")",
"parser",
".",
"add_argument",
"(",
"'--init_eta'",
",",
"help",
"=",
"'Epsilon for taking actions'",
",",
"type",
"=",
"float",
",",
"default",
"=",
"0.95",
")",
"parser",
".",
"add_argument",
"(",
"'--model_dir'",
",",
"help",
"=",
"'Directory where to save the given model'",
",",
"type",
"=",
"str",
",",
"default",
"=",
"'models/'",
")",
"parser",
".",
"add_argument",
"(",
"'--save_model'",
",",
"help",
"=",
"'Whether to save the model'",
",",
"type",
"=",
"bool",
",",
"default",
"=",
"False",
")",
"parser",
".",
"add_argument",
"(",
"'--batch_size'",
",",
"help",
"=",
"'Batch size for sampling and training model'",
",",
"type",
"=",
"int",
",",
"default",
"=",
"32",
")",
"parser",
".",
"add_argument",
"(",
"'--learning_rate'",
",",
"help",
"=",
"'Learning rate for for agent and target network'",
",",
"type",
"=",
"float",
",",
"default",
"=",
"0.00025",
")",
"parser",
".",
"add_argument",
"(",
"'--Q_learning'",
",",
"help",
"=",
"'Type of Q Learning to be implemented'",
",",
"type",
"=",
"str",
",",
"default",
"=",
"\"Double\"",
")",
"parser",
".",
"add_argument",
"(",
"'--min_eta'",
",",
"help",
"=",
"'Lower bound of epsilon'",
",",
"type",
"=",
"float",
",",
"default",
"=",
"0.1",
")",
"parser",
".",
"add_argument",
"(",
"'--discount_factor'",
",",
"help",
"=",
"'Discount Factor for TD Learning'",
",",
"type",
"=",
"float",
",",
"default",
"=",
"0.95",
")",
"parser",
".",
"add_argument",
"(",
"'--load_model'",
",",
"help",
"=",
"'Loads the model'",
",",
"type",
"=",
"str",
",",
"default",
"=",
"None",
")",
"return",
"parser",
".",
"parse_known_args",
"(",
"argv",
")"
] | [
130,
0
] | [
215,
38
] | python | en | ['en', 'fr', 'en'] | True |
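Driving _parse_arguments() directly shows why _run() indexes args[0]: parse_known_args returns a (namespace, leftover-args) pair.

args = _parse_arguments([
    "--environment", "Breakout-v0",
    "--mode", "Train",
    "--batch_size", "64",
])
print(args[0].environment, args[0].batch_size)  # Breakout-v0 64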
StaticFilesHandlerMixin._should_handle | (self, path) |
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
|
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
| def _should_handle(self, path):
"""
Check if the path should be handled. Ignore the path if:
* the host is provided as part of the base_url
* the request's path isn't under the media path (or equal)
"""
return path.startswith(self.base_url[2]) and not self.base_url[1] | [
"def",
"_should_handle",
"(",
"self",
",",
"path",
")",
":",
"return",
"path",
".",
"startswith",
"(",
"self",
".",
"base_url",
"[",
"2",
"]",
")",
"and",
"not",
"self",
".",
"base_url",
"[",
"1",
"]"
] | [
31,
4
] | [
37,
73
] | python | en | ['en', 'error', 'th'] | False |
StaticFilesHandlerMixin.file_path | (self, url) |
Return the relative path to the media file on disk for the given URL.
|
Return the relative path to the media file on disk for the given URL.
| def file_path(self, url):
"""
Return the relative path to the media file on disk for the given URL.
"""
relative_url = url[len(self.base_url[2]):]
return url2pathname(relative_url) | [
"def",
"file_path",
"(",
"self",
",",
"url",
")",
":",
"relative_url",
"=",
"url",
"[",
"len",
"(",
"self",
".",
"base_url",
"[",
"2",
"]",
")",
":",
"]",
"return",
"url2pathname",
"(",
"relative_url",
")"
] | [
39,
4
] | [
44,
41
] | python | en | ['en', 'error', 'th'] | False |
StaticFilesHandlerMixin.serve | (self, request) | Serve the request path. | Serve the request path. | def serve(self, request):
"""Serve the request path."""
return serve(request, self.file_path(request.path), insecure=True) | [
"def",
"serve",
"(",
"self",
",",
"request",
")",
":",
"return",
"serve",
"(",
"request",
",",
"self",
".",
"file_path",
"(",
"request",
".",
"path",
")",
",",
"insecure",
"=",
"True",
")"
] | [
46,
4
] | [
48,
74
] | python | en | ['en', 'en', 'en'] | True |
shquote | (arg) | Quote an argument for later parsing by shlex.split() | Quote an argument for later parsing by shlex.split() | def shquote(arg):
"""Quote an argument for later parsing by shlex.split()"""
for c in '"', "'", "\\", "#":
if c in arg:
return repr(arg)
if arg.split() != [arg]:
return repr(arg)
return arg | [
"def",
"shquote",
"(",
"arg",
")",
":",
"for",
"c",
"in",
"'\"'",
",",
"\"'\"",
",",
"\"\\\\\"",
",",
"\"#\"",
":",
"if",
"c",
"in",
"arg",
":",
"return",
"repr",
"(",
"arg",
")",
"if",
"arg",
".",
"split",
"(",
")",
"!=",
"[",
"arg",
"]",
":",
"return",
"repr",
"(",
"arg",
")",
"return",
"arg"
] | [
7,
0
] | [
14,
14
] | python | en | ['en', 'en', 'en'] | True |
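A quick round-trip check for shquote(): quoting and then splitting with shlex recovers the original arguments for the cases it handles:

import shlex

args = ["plain", "two words", 'has "quotes"', "hash#tag"]
quoted = " ".join(shquote(a) for a in args)
assert shlex.split(quoted) == args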
SessionStore.flush | (self) |
Remove the current session data from the database and regenerate the
key.
|
Remove the current session data from the database and regenerate the
key.
| def flush(self):
"""
Remove the current session data from the database and regenerate the
key.
"""
self.clear()
self.delete(self.session_key)
self._session_key = None | [
"def",
"flush",
"(",
"self",
")",
":",
"self",
".",
"clear",
"(",
")",
"self",
".",
"delete",
"(",
"self",
".",
"session_key",
")",
"self",
".",
"_session_key",
"=",
"None"
] | [
57,
4
] | [
64,
32
] | python | en | ['en', 'error', 'th'] | False |
split_and_convert_string | (string_tensor) | Splits and converts string tensor into dense float tensor.
Given string tensor, splits string by delimiter, converts to and returns
dense float tensor.
Args:
string_tensor: tf.string tensor.
Returns:
tf.float64 tensor split along delimiter.
| Splits and converts string tensor into dense float tensor. | def split_and_convert_string(string_tensor):
"""Splits and converts string tensor into dense float tensor.
Given string tensor, splits string by delimiter, converts to and returns
dense float tensor.
Args:
string_tensor: tf.string tensor.
Returns:
tf.float64 tensor split along delimiter.
"""
# Split string tensor into a sparse tensor based on delimiter
split_string = tf.string_split(source=tf.expand_dims(
input=string_tensor, axis=0), delimiter=";")
# Converts the values of the sparse tensor to floats
converted_tensor = tf.string_to_number(
string_tensor=split_string.values,
out_type=tf.float64)
# Create a new sparse tensor with the new converted values,
# because the original sparse tensor values are immutable
new_sparse_tensor = tf.SparseTensor(
indices=split_string.indices,
values=converted_tensor,
dense_shape=split_string.dense_shape)
# Create a dense tensor of the float values that were converted from text csv
dense_floats = tf.sparse_tensor_to_dense(
sp_input=new_sparse_tensor, default_value=0.0)
dense_floats_vector = tf.squeeze(input=dense_floats, axis=0)
return dense_floats_vector | [
"def",
"split_and_convert_string",
"(",
"string_tensor",
")",
":",
"# Split string tensor into a sparse tensor based on delimiter",
"split_string",
"=",
"tf",
".",
"string_split",
"(",
"source",
"=",
"tf",
".",
"expand_dims",
"(",
"input",
"=",
"string_tensor",
",",
"axis",
"=",
"0",
")",
",",
"delimiter",
"=",
"\";\"",
")",
"# Converts the values of the sparse tensor to floats",
"converted_tensor",
"=",
"tf",
".",
"string_to_number",
"(",
"string_tensor",
"=",
"split_string",
".",
"values",
",",
"out_type",
"=",
"tf",
".",
"float64",
")",
"# Create a new sparse tensor with the new converted values,",
"# because the original sparse tensor values are immutable",
"new_sparse_tensor",
"=",
"tf",
".",
"SparseTensor",
"(",
"indices",
"=",
"split_string",
".",
"indices",
",",
"values",
"=",
"converted_tensor",
",",
"dense_shape",
"=",
"split_string",
".",
"dense_shape",
")",
"# Create a dense tensor of the float values that were converted from text csv",
"dense_floats",
"=",
"tf",
".",
"sparse_tensor_to_dense",
"(",
"sp_input",
"=",
"new_sparse_tensor",
",",
"default_value",
"=",
"0.0",
")",
"dense_floats_vector",
"=",
"tf",
".",
"squeeze",
"(",
"input",
"=",
"dense_floats",
",",
"axis",
"=",
"0",
")",
"return",
"dense_floats_vector"
] | [
14,
0
] | [
48,
30
] | python | en | ['en', 'en', 'en'] | True |
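A small sketch of what split_and_convert_string() produces, run in the TF 1.x graph/session style used throughout this module:

import tensorflow as tf

floats = split_and_convert_string(tf.constant("1.5;2.0;3.25"))
with tf.Session() as sess:
    print(sess.run(floats))  # -> [1.5  2.   3.25]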
convert_sequences_from_strings_to_floats | (features, column_list, seq_len) | Converts sequences from single strings to a sequence of floats.
Given features dictionary and feature column names list, convert features
from strings to a sequence of floats.
Args:
features: Dictionary of tensors of our features as tf.strings.
column_list: List of column names of our features.
seq_len: Number of timesteps in sequence.
Returns:
Dictionary of tensors of our features as tf.float64s.
| Converts sequences from single strings to a sequence of floats. | def convert_sequences_from_strings_to_floats(features, column_list, seq_len):
"""Converts sequences from single strings to a sequence of floats.
Given features dictionary and feature column names list, convert features
from strings to a sequence of floats.
Args:
features: Dictionary of tensors of our features as tf.strings.
column_list: List of column names of our features.
seq_len: Number of timesteps in sequence.
Returns:
Dictionary of tensors of our features as tf.float64s.
"""
for column in column_list:
features[column] = split_and_convert_string(features[column])
# Since we know the sequence length, set the shape to remove the ambiguity
features[column].set_shape([seq_len])
return features | [
"def",
"convert_sequences_from_strings_to_floats",
"(",
"features",
",",
"column_list",
",",
"seq_len",
")",
":",
"for",
"column",
"in",
"column_list",
":",
"features",
"[",
"column",
"]",
"=",
"split_and_convert_string",
"(",
"features",
"[",
"column",
"]",
")",
"# Since we know the sequence length, set the shape to remove the ambiguity",
"features",
"[",
"column",
"]",
".",
"set_shape",
"(",
"[",
"seq_len",
"]",
")",
"return",
"features"
] | [
51,
0
] | [
70,
19
] | python | en | ['en', 'en', 'en'] | True |
decode_csv | (value_column, seq_len) | Decodes CSV file into tensors.
Given single string tensor and sequence length, returns features dictionary
of tensors and labels tensor.
Args:
value_column: tf.string tensor of shape () comprising entire line of
CSV file.
seq_len: Number of timesteps in sequence.
Returns:
Features dictionary of tensors and labels tensor.
| Decodes CSV file into tensors. | def decode_csv(value_column, seq_len):
"""Decodes CSV file into tensors.
Given single string tensor and sequence length, returns features dictionary
of tensors and labels tensor.
Args:
value_column: tf.string tensor of shape () comprising entire line of
CSV file.
seq_len: Number of timesteps in sequence.
Returns:
Features dictionary of tensors and labels tensor.
"""
columns = tf.decode_csv(
records=value_column,
record_defaults=DEFAULTS,
field_delim=",")
features = dict(zip(CSV_COLUMNS, columns))
labels = tf.cast(x=features.pop(LABEL_COLUMN), dtype=tf.float64)
features = convert_sequences_from_strings_to_floats(
features=features,
column_list=CSV_COLUMNS[:-1],
seq_len=seq_len)
return features, labels | [
"def",
"decode_csv",
"(",
"value_column",
",",
"seq_len",
")",
":",
"columns",
"=",
"tf",
".",
"decode_csv",
"(",
"records",
"=",
"value_column",
",",
"record_defaults",
"=",
"DEFAULTS",
",",
"field_delim",
"=",
"\",\"",
")",
"features",
"=",
"dict",
"(",
"zip",
"(",
"CSV_COLUMNS",
",",
"columns",
")",
")",
"labels",
"=",
"tf",
".",
"cast",
"(",
"x",
"=",
"features",
".",
"pop",
"(",
"LABEL_COLUMN",
")",
",",
"dtype",
"=",
"tf",
".",
"float64",
")",
"features",
"=",
"convert_sequences_from_strings_to_floats",
"(",
"features",
"=",
"features",
",",
"column_list",
"=",
"CSV_COLUMNS",
"[",
":",
"-",
"1",
"]",
",",
"seq_len",
"=",
"seq_len",
")",
"return",
"features",
",",
"labels"
] | [
73,
0
] | [
101,
27
] | python | en | ['en', 'en', 'pt'] | True |
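For reference, a hypothetical input row for decode_csv(): each of the first len(CSV_COLUMNS) - 1 columns is a ";"-joined float sequence, and the final column is the scalar label. With two feature columns and seq_len=3, a row could look like:

# row = "1.0;2.0;3.0,4.0;5.0;6.0,0.7"
#         feature 1   feature 2  label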
read_dataset | (filename, mode, batch_size, seq_len) | Reads CSV time series dataset using tf.data, doing necessary preprocessing.
Given filename, mode, batch size and other parameters, read CSV dataset using
Dataset API, apply necessary preprocessing, and return an input function to
the Estimator API.
Args:
filename: The file pattern that we want to read into our tf.data dataset.
mode: The estimator ModeKeys. Can be TRAIN or EVAL.
batch_size: Number of examples to read and combine into a single tensor.
seq_len: Number of timesteps for each example.
Returns:
An input function.
| Reads CSV time series dataset using tf.data, doing necessary preprocessing. | def read_dataset(filename, mode, batch_size, seq_len):
"""Reads CSV time series dataset using tf.data, doing necessary preprocessing.
Given filename, mode, batch size and other parameters, read CSV dataset using
Dataset API, apply necessary preprocessing, and return an input function to
the Estimator API.
Args:
filename: The file pattern that we want to read into our tf.data dataset.
mode: The estimator ModeKeys. Can be TRAIN or EVAL.
batch_size: Number of examples to read and combine into a single tensor.
seq_len: Number of timesteps for each example.
Returns:
An input function.
"""
def _input_fn():
"""Wrapper input function to be used by Estimator API to get data tensors.
Returns:
Batched dataset object of dictionary of feature tensors and label tensor.
"""
# Create list of files that match pattern
file_list = tf.gfile.Glob(filename=filename)
# Create dataset from file list
dataset = tf.data.TextLineDataset(filenames=file_list).skip(count=1) # Read text file
# Decode the CSV file into a features dictionary of tensors
dataset = dataset.map(
map_func=lambda x: decode_csv(
value_column=x,
seq_len=seq_len))
# Determine amount of times to repeat file if we are training or evaluating
if mode == tf.estimator.ModeKeys.TRAIN:
num_epochs = None # indefinitely
else:
num_epochs = 1 # end-of-input after this
# Repeat files num_epoch times
dataset = dataset.repeat(count=num_epochs)
# Group the data into batches
dataset = dataset.batch(batch_size=batch_size)
# Determine if we should shuffle based on if we are training or evaluating
if mode == tf.estimator.ModeKeys.TRAIN:
dataset = dataset.shuffle(buffer_size=10 * batch_size)
# Create an iterator, then pull a batch of features from the example queue
batched_dataset = dataset.make_one_shot_iterator().get_next()
return batched_dataset
return _input_fn | [
"def",
"read_dataset",
"(",
"filename",
",",
"mode",
",",
"batch_size",
",",
"seq_len",
")",
":",
"def",
"_input_fn",
"(",
")",
":",
"\"\"\"Wrapper input function to be used by Estimator API to get data tensors.\n\n Returns:\n Batched dataset object of dictionary of feature tensors and label tensor.\n \"\"\"",
"# Create list of files that match pattern",
"file_list",
"=",
"tf",
".",
"gfile",
".",
"Glob",
"(",
"filename",
"=",
"filename",
")",
"# Create dataset from file list",
"dataset",
"=",
"tf",
".",
"data",
".",
"TextLineDataset",
"(",
"filenames",
"=",
"file_list",
")",
".",
"skip",
"(",
"count",
"=",
"1",
")",
"# Read text file",
"# Decode the CSV file into a features dictionary of tensors",
"dataset",
"=",
"dataset",
".",
"map",
"(",
"map_func",
"=",
"lambda",
"x",
":",
"decode_csv",
"(",
"value_column",
"=",
"x",
",",
"seq_len",
"=",
"seq_len",
")",
")",
"# Determine amount of times to repeat file if we are training or evaluating",
"if",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"TRAIN",
":",
"num_epochs",
"=",
"None",
"# indefinitely",
"else",
":",
"num_epochs",
"=",
"1",
"# end-of-input after this",
"# Repeat files num_epoch times",
"dataset",
"=",
"dataset",
".",
"repeat",
"(",
"count",
"=",
"num_epochs",
")",
"# Group the data into batches",
"dataset",
"=",
"dataset",
".",
"batch",
"(",
"batch_size",
"=",
"batch_size",
")",
"# Determine if we should shuffle based on if we are training or evaluating",
"if",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"TRAIN",
":",
"dataset",
"=",
"dataset",
".",
"shuffle",
"(",
"buffer_size",
"=",
"10",
"*",
"batch_size",
")",
"# Create a iterator, then pull batch of features from the example queue",
"batched_dataset",
"=",
"dataset",
".",
"make_one_shot_iterator",
"(",
")",
".",
"get_next",
"(",
")",
"return",
"batched_dataset",
"return",
"_input_fn"
] | [
104,
0
] | [
160,
20
] | python | en | ['en', 'en', 'en'] | True |
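Illustrative wiring of read_dataset() into Estimator training; the file pattern and hyperparameter values are assumptions:

train_input_fn = read_dataset(
    filename="data/train*.csv",  # hypothetical file pattern
    mode=tf.estimator.ModeKeys.TRAIN,
    batch_size=32,
    seq_len=30)
train_spec = tf.estimator.TrainSpec(input_fn=train_input_fn, max_steps=1000)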
create_LSTM_stack | (lstm_hidden_units) | Create LSTM stacked cells.
Given list of LSTM hidden units return `MultiRNNCell`.
Args:
lstm_hidden_units: List of integers for the number of hidden units in each
layer.
Returns:
`MultiRNNCell` object of stacked LSTM layers.
| Create LSTM stacked cells. | def create_LSTM_stack(lstm_hidden_units):
"""Create LSTM stacked cells.
Given list of LSTM hidden units return `MultiRNNCell`.
Args:
lstm_hidden_units: List of integers for the number of hidden units in each
layer.
Returns:
`MultiRNNCell` object of stacked LSTM layers.
"""
# First create a list of LSTM cell objects using our list of lstm hidden
# unit sizes
lstm_cells = [tf.contrib.rnn.BasicLSTMCell(
num_units=units,
forget_bias=1.0,
state_is_tuple=True)
for units in lstm_hidden_units]
# Create a stack of layers of LSTM cells
# Combines list into MultiRNNCell object
stacked_lstm_cells = tf.contrib.rnn.MultiRNNCell(
cells=lstm_cells,
state_is_tuple=True)
return stacked_lstm_cells | [
"def",
"create_LSTM_stack",
"(",
"lstm_hidden_units",
")",
":",
"# First create a list of LSTM cell objects using our list of lstm hidden",
"# unit sizes",
"lstm_cells",
"=",
"[",
"tf",
".",
"contrib",
".",
"rnn",
".",
"BasicLSTMCell",
"(",
"num_units",
"=",
"units",
",",
"forget_bias",
"=",
"1.0",
",",
"state_is_tuple",
"=",
"True",
")",
"for",
"units",
"in",
"lstm_hidden_units",
"]",
"# Create a stack of layers of LSTM cells",
"# Combines list into MultiRNNCell object",
"stacked_lstm_cells",
"=",
"tf",
".",
"contrib",
".",
"rnn",
".",
"MultiRNNCell",
"(",
"cells",
"=",
"lstm_cells",
",",
"state_is_tuple",
"=",
"True",
")",
"return",
"stacked_lstm_cells"
] | [
163,
0
] | [
189,
27
] | python | en | ['en', 'en', 'en'] | True |
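Usage is a one-liner; the layer sizes below are arbitrary illustrations. MultiRNNCell feeds each layer's output into the next, so the list runs from the first (bottom) layer upward:

stacked_cells = create_LSTM_stack(lstm_hidden_units=[64, 32])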
sequence_to_one_model | (features, labels, mode, params) | Custom Estimator model function for sequence to one.
Given dictionary of feature tensors, labels tensor, Estimator mode, and
dictionary for parameters, return EstimatorSpec object for custom Estimator.
Args:
features: Dictionary of feature tensors.
labels: Labels tensor or None.
mode: Estimator ModeKeys. Can take values of TRAIN, EVAL, and PREDICT.
params: Dictionary of parameters.
Returns:
EstimatorSpec object.
| Custom Estimator model function for sequence to one. | def sequence_to_one_model(features, labels, mode, params):
"""Custom Estimator model function for sequence to one.
Given dictionary of feature tensors, labels tensor, Estimator mode, and
dictionary for parameters, return EstimatorSpec object for custom Estimator.
Args:
features: Dictionary of feature tensors.
labels: Labels tensor or None.
mode: Estimator ModeKeys. Can take values of TRAIN, EVAL, and PREDICT.
params: Dictionary of parameters.
Returns:
EstimatorSpec object.
"""
# Get input sequence tensor into correct shape
# Stack all of the features into a 3-D tensor
X = tf.stack(values=[features[key] for key in CSV_COLUMNS[:-1]], axis=2)
# Unstack 3-D features tensor into a sequence(list) of 2-D tensors
X_sequence = tf.unstack(value=X, num=params["seq_len"], axis=1)
# 1. Configure the RNN
stacked_lstm_cells = create_LSTM_stack(params["lstm_hidden_units"])
outputs, _ = tf.compat.v1.nn.static_rnn(
cell=stacked_lstm_cells, inputs=X_sequence, dtype=tf.float64)
# Slice to keep only the last cell of the RNN
output = outputs[-1]
# Output is result of linear activation of last layer of RNN
predictions = tf.layers.dense(inputs=output, units=1, activation=None)
# 2. Loss function, training/eval ops
if mode == tf.estimator.ModeKeys.TRAIN or mode == tf.estimator.ModeKeys.EVAL:
labels = tf.expand_dims(input=labels, axis=-1)
loss = tf.losses.mean_squared_error(labels=labels, predictions=predictions)
train_op = tf.contrib.layers.optimize_loss(
loss=loss,
global_step=tf.train.get_global_step(),
learning_rate=params["learning_rate"],
optimizer="SGD")
eval_metric_ops = {
"rmse": tf.metrics.root_mean_squared_error(labels=labels, predictions=predictions)
}
else:
loss = None
train_op = None
eval_metric_ops = None
# 3. Create predictions
predictions_dict = {"predicted": predictions}
# 4. Create export outputs
export_outputs = {"predict_export_outputs": tf.estimator.export.PredictOutput(outputs=predictions)}
# 5. Return EstimatorSpec
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions_dict,
loss=loss,
train_op=train_op,
eval_metric_ops=eval_metric_ops,
export_outputs=export_outputs) | [
"def",
"sequence_to_one_model",
"(",
"features",
",",
"labels",
",",
"mode",
",",
"params",
")",
":",
"# Get input sequence tensor into correct shape",
"# Stack all of the features into a 3-D tensor",
"X",
"=",
"tf",
".",
"stack",
"(",
"values",
"=",
"[",
"features",
"[",
"key",
"]",
"for",
"key",
"in",
"CSV_COLUMNS",
"[",
":",
"-",
"1",
"]",
"]",
",",
"axis",
"=",
"2",
")",
"# Unstack 3-D features tensor into a sequence(list) of 2-D tensors",
"X_sequence",
"=",
"tf",
".",
"unstack",
"(",
"value",
"=",
"X",
",",
"num",
"=",
"params",
"[",
"\"seq_len\"",
"]",
",",
"axis",
"=",
"1",
")",
"# 1. Configure the RNN",
"stacked_lstm_cells",
"=",
"create_LSTM_stack",
"(",
"params",
"[",
"\"lstm_hidden_units\"",
"]",
")",
"lstm_cell",
"=",
"tf",
".",
"compat",
".",
"v1",
".",
"nn",
".",
"rnn_cell",
".",
"LSTMCell",
"(",
"num_units",
"=",
"params",
"[",
"\"lstm_hidden_units\"",
"]",
",",
"forget_bias",
"=",
"1.0",
")",
"outputs",
",",
"_",
"=",
"tf",
".",
"compat",
".",
"v1",
".",
"nn",
".",
"static_rnn",
"(",
"cell",
"=",
"stacked_lstm_cells",
",",
"inputs",
"=",
"X_sequence",
",",
"dtype",
"=",
"tf",
".",
"float64",
")",
"# Slice to keep only the last cell of the RNN",
"output",
"=",
"outputs",
"[",
"-",
"1",
"]",
"# Output is result of linear activation of last layer of RNN",
"predictions",
"=",
"tf",
".",
"layers",
".",
"dense",
"(",
"inputs",
"=",
"output",
",",
"units",
"=",
"1",
",",
"activation",
"=",
"None",
")",
"# 2. Loss function, training/eval ops",
"if",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"TRAIN",
"or",
"mode",
"==",
"tf",
".",
"estimator",
".",
"ModeKeys",
".",
"EVAL",
":",
"labels",
"=",
"tf",
".",
"expand_dims",
"(",
"input",
"=",
"labels",
",",
"axis",
"=",
"-",
"1",
")",
"loss",
"=",
"tf",
".",
"losses",
".",
"mean_squared_error",
"(",
"labels",
"=",
"labels",
",",
"predictions",
"=",
"predictions",
")",
"train_op",
"=",
"tf",
".",
"contrib",
".",
"layers",
".",
"optimize_loss",
"(",
"loss",
"=",
"loss",
",",
"global_step",
"=",
"tf",
".",
"train",
".",
"get_global_step",
"(",
")",
",",
"learning_rate",
"=",
"params",
"[",
"\"learning_rate\"",
"]",
",",
"optimizer",
"=",
"\"SGD\"",
")",
"eval_metric_ops",
"=",
"{",
"\"rmse\"",
":",
"tf",
".",
"metrics",
".",
"root_mean_squared_error",
"(",
"labels",
"=",
"labels",
",",
"predictions",
"=",
"predictions",
")",
"}",
"else",
":",
"loss",
"=",
"None",
"train_op",
"=",
"None",
"eval_metric_ops",
"=",
"None",
"# 3. Create predictions",
"predictions_dict",
"=",
"{",
"\"predicted\"",
":",
"predictions",
"}",
"# 4. Create export outputs",
"export_outputs",
"=",
"{",
"\"predict_export_outputs\"",
":",
"tf",
".",
"estimator",
".",
"export",
".",
"PredictOutput",
"(",
"outputs",
"=",
"predictions",
")",
"}",
"# 5. Return EstimatorSpec",
"return",
"tf",
".",
"estimator",
".",
"EstimatorSpec",
"(",
"mode",
"=",
"mode",
",",
"predictions",
"=",
"predictions_dict",
",",
"loss",
"=",
"loss",
",",
"train_op",
"=",
"train_op",
",",
"eval_metric_ops",
"=",
"eval_metric_ops",
",",
"export_outputs",
"=",
"export_outputs",
")"
] | [
193,
0
] | [
261,
38
] | python | en | ['en', 'en', 'en'] | True |
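Pulled together, the row above documents a complete TF 1.x model_fn, so it can be handed straight to tf.estimator.Estimator. A minimal sketch; model_dir and every params value are illustrative assumptions, and only the key names (seq_len, lstm_hidden_units, learning_rate) are fixed by what the function reads:

import tensorflow as tf

estimator = tf.estimator.Estimator(
    model_fn=sequence_to_one_model,
    model_dir="trained_model",  # hypothetical checkpoint directory
    params={
        "seq_len": 30,                  # timesteps per input sequence
        "lstm_hidden_units": [64, 32],  # shape assumed from create_LSTM_stack; illustrative
        "learning_rate": 0.01,
    })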
fix_shape_and_type_for_serving | (placeholder) | Fixes the shape and type of serving input strings.
Given a placeholder tensor, return the parsed and processed feature tensor.
Args:
placeholder: Placeholder tensor holding raw data from serving input
function.
Returns:
Parsed and processed feature tensor.
| Fixes the shape and type of serving input strings. | def fix_shape_and_type_for_serving(placeholder):
"""Fixes the shape and type of serving input strings.
Given a placeholder tensor, return the parsed and processed feature tensor.
Args:
placeholder: Placeholder tensor holding raw data from serving input
function.
Returns:
Parsed and processed feature tensor.
"""
cur_batch_size = tf.shape(input=placeholder, out_type=tf.int64)[0]
# String split each string in batch and output values from the resulting
# SparseTensors
# shape = (batch_size, seq_len)
split_string = tf.stack(values=tf.map_fn(
fn=lambda x: tf.string_split(
source=[placeholder[x]], delimiter=";").values,
elems=tf.range(
start=0, limit=cur_batch_size, dtype=tf.int64),
dtype=tf.string), axis=0)
# Convert each string in the split tensor to float
# shape = (batch_size, seq_len)
feature_tensor = tf.string_to_number(
string_tensor=split_string, out_type=tf.float64)
return feature_tensor | [
"def",
"fix_shape_and_type_for_serving",
"(",
"placeholder",
")",
":",
"cur_batch_size",
"=",
"tf",
".",
"shape",
"(",
"input",
"=",
"placeholder",
",",
"out_type",
"=",
"tf",
".",
"int64",
")",
"[",
"0",
"]",
"# String split each string in batch and output values from the resulting",
"# SparseTensors",
"# shape = (batch_size, seq_len)",
"split_string",
"=",
"tf",
".",
"stack",
"(",
"values",
"=",
"tf",
".",
"map_fn",
"(",
"fn",
"=",
"lambda",
"x",
":",
"tf",
".",
"string_split",
"(",
"source",
"=",
"[",
"placeholder",
"[",
"x",
"]",
"]",
",",
"delimiter",
"=",
"\";\"",
")",
".",
"values",
",",
"elems",
"=",
"tf",
".",
"range",
"(",
"start",
"=",
"0",
",",
"limit",
"=",
"cur_batch_size",
",",
"dtype",
"=",
"tf",
".",
"int64",
")",
",",
"dtype",
"=",
"tf",
".",
"string",
")",
",",
"axis",
"=",
"0",
")",
"# Convert each string in the split tensor to float",
"# shape = (batch_size, seq_len)",
"feature_tensor",
"=",
"tf",
".",
"string_to_number",
"(",
"string_tensor",
"=",
"split_string",
",",
"out_type",
"=",
"tf",
".",
"float64",
")",
"return",
"feature_tensor"
] | [
265,
0
] | [
294,
25
] | python | en | ['en', 'en', 'en'] | True |
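For intuition, the helper turns a batch of semicolon-delimited strings into a dense float64 matrix. A small TF 1.x sketch with purely illustrative inputs:

import tensorflow as tf

placeholder = tf.placeholder(dtype=tf.string, shape=[None])
feature_tensor = fix_shape_and_type_for_serving(placeholder=placeholder)
with tf.Session() as sess:
    print(sess.run(
        feature_tensor,
        feed_dict={placeholder: ["1.0;2.0;3.0", "4.0;5.0;6.0"]}))
    # [[1. 2. 3.]
    #  [4. 5. 6.]]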
get_shape_and_set_modified_shape_2D | (tensor, additional_dimension_sizes) | Fixes dynamic shape ambiguity of a 2-D feature tensor.
Given a feature tensor and an additional dimension size (the sequence
length), fixes the dynamic shape ambiguity of the last dimension so that we
can use it in our DNN (since tf.layers.dense requires the last dimension to
be known).
Args:
tensor: tf.float64 vector feature tensor.
additional_dimension_sizes: Additional dimension size, namely sequence
length.
Returns:
Feature tensor with set static shape for sequence length.
| Fixes dynamic shape ambiguity of a 2-D feature tensor. | def get_shape_and_set_modified_shape_2D(tensor, additional_dimension_sizes):
"""Fixes dynamic shape ambiguity of a 2-D feature tensor.
Given a feature tensor and an additional dimension size (the sequence
length), fixes the dynamic shape ambiguity of the last dimension so that we
can use it in our DNN (since tf.layers.dense requires the last dimension to
be known).
Args:
tensor: tf.float64 vector feature tensor.
additional_dimension_sizes: Additional dimension size, namely sequence
length.
Returns:
Feature tensor with set static shape for sequence length.
"""
# Get static shape for tensor and convert it to list
shape = tensor.get_shape().as_list()
# Set the last dimension to additional_dimension_sizes[0] since we know this
# is the correct size
shape[1] = additional_dimension_sizes[0]
# Set the shape of tensor to our modified shape
# shape = (batch_size, additional_dimension_sizes[0])
tensor.set_shape(shape=shape)
return tensor | [
"def",
"get_shape_and_set_modified_shape_2D",
"(",
"tensor",
",",
"additional_dimension_sizes",
")",
":",
"# Get static shape for tensor and convert it to list",
"shape",
"=",
"tensor",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"# Set outer shape to additional_dimension_sizes[0] since know this is the",
"# correct size",
"shape",
"[",
"1",
"]",
"=",
"additional_dimension_sizes",
"[",
"0",
"]",
"# Set the shape of tensor to our modified shape",
"# shape = (batch_size, additional_dimension_sizes[0])",
"tensor",
".",
"set_shape",
"(",
"shape",
"=",
"shape",
")",
"return",
"tensor"
] | [
297,
0
] | [
322,
17
] | python | en | ['en', 'en', 'en'] | True |
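The effect is purely static: the runtime values are untouched, and only the inferred shape is re-annotated so that downstream tf.layers.dense calls can build their weights. A quick sketch, with the sequence length of 30 an arbitrary assumption:

import tensorflow as tf

tensor = tf.placeholder(dtype=tf.float64, shape=[None, None])
print(tensor.get_shape().as_list())  # [None, None]
tensor = get_shape_and_set_modified_shape_2D(
    tensor=tensor, additional_dimension_sizes=[30])
print(tensor.get_shape().as_list())  # [None, 30]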
serving_input_fn | (seq_len) | Serving input function.
Given the sequence length, return a ServingInputReceiver object.
Args:
seq_len: Number of timesteps in sequence.
Returns:
ServingInputReceiver object containing features and receiver tensors.
| Serving input function. | def serving_input_fn(seq_len):
"""Serving input function.
Given the sequence length, return a ServingInputReceiver object.
Args:
seq_len: Number of timesteps in sequence.
Returns:
ServingInputReceiver object containing features and receiver tensors.
"""
# Create placeholders to accept the data sent to the model at serving time
# All features come in as a batch of strings, shape = (batch_size,),
# because the arrays are passed as delimited strings to online ML Engine prediction
feature_placeholders = {
feature: tf.placeholder(
dtype=tf.string, shape=[None])
for feature in CSV_COLUMNS[:-1]
}
# Create feature tensors
features = {key: fix_shape_and_type_for_serving(placeholder=tensor)
for key, tensor in feature_placeholders.items()}
# Fix dynamic shape ambiguity of feature tensors for our dense layers
features = {key: get_shape_and_set_modified_shape_2D(
tensor=tensor, additional_dimension_sizes=[seq_len])
for key, tensor in features.items()}
return tf.estimator.export.ServingInputReceiver(
features=features, receiver_tensors=feature_placeholders) | [
"def",
"serving_input_fn",
"(",
"seq_len",
")",
":",
"# Create placeholders to accept the data sent to the model at serving time",
"# All features come in as a batch of strings, shape = (batch_size,),",
"# this was so because of passing the arrays to online ml-engine prediction",
"feature_placeholders",
"=",
"{",
"feature",
":",
"tf",
".",
"placeholder",
"(",
"dtype",
"=",
"tf",
".",
"string",
",",
"shape",
"=",
"[",
"None",
"]",
")",
"for",
"feature",
"in",
"CSV_COLUMNS",
"[",
":",
"-",
"1",
"]",
"}",
"# Create feature tensors",
"features",
"=",
"{",
"key",
":",
"fix_shape_and_type_for_serving",
"(",
"placeholder",
"=",
"tensor",
")",
"for",
"key",
",",
"tensor",
"in",
"feature_placeholders",
".",
"items",
"(",
")",
"}",
"# Fix dynamic shape ambiguity of feature tensors for our dense layers",
"features",
"=",
"{",
"key",
":",
"get_shape_and_set_modified_shape_2D",
"(",
"tensor",
"=",
"tensor",
",",
"additional_dimension_sizes",
"=",
"[",
"seq_len",
"]",
")",
"for",
"key",
",",
"tensor",
"in",
"features",
".",
"items",
"(",
")",
"}",
"return",
"tf",
".",
"estimator",
".",
"export",
".",
"ServingInputReceiver",
"(",
"features",
"=",
"features",
",",
"receiver_tensors",
"=",
"feature_placeholders",
")"
] | [
325,
0
] | [
355,
69
] | python | en | ['en', 'en', 'en'] | True |
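Since Estimator expects a zero-argument serving input function, seq_len has to be bound before the function is handed over. One hedged way to wire it into an exporter (the exporter name and seq_len value are arbitrary):

import functools
import tensorflow as tf

exporter = tf.estimator.LatestExporter(
    name="exporter",
    serving_input_receiver_fn=functools.partial(serving_input_fn, seq_len=30))
# The exporter would then be passed to tf.estimator.EvalSpec(exporters=exporter).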
connection_from_url | (url, **kw) |
Given a url, return an :class:`.ConnectionPool` instance of its host.
This is a shortcut for not having to parse out the scheme, host, and port
of the url before creating an :class:`.ConnectionPool` instance.
:param url:
Absolute URL string that must include the scheme. Port is optional.
:param \\**kw:
Passes additional parameters to the constructor of the appropriate
:class:`.ConnectionPool`. Useful for specifying things like
timeout, maxsize, headers, etc.
Example::
>>> conn = connection_from_url('http://google.com/')
>>> r = conn.request('GET', '/')
|
Given a url, return an :class:`.ConnectionPool` instance of its host. | def connection_from_url(url, **kw):
"""
Given a url, return an :class:`.ConnectionPool` instance of its host.
This is a shortcut for not having to parse out the scheme, host, and port
of the url before creating an :class:`.ConnectionPool` instance.
:param url:
Absolute URL string that must include the scheme. Port is optional.
:param \\**kw:
Passes additional parameters to the constructor of the appropriate
:class:`.ConnectionPool`. Useful for specifying things like
timeout, maxsize, headers, etc.
Example::
>>> conn = connection_from_url('http://google.com/')
>>> r = conn.request('GET', '/')
"""
scheme, host, port = get_host(url)
port = port or port_by_scheme.get(scheme, 80)
if scheme == "https":
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw) | [
"def",
"connection_from_url",
"(",
"url",
",",
"*",
"*",
"kw",
")",
":",
"scheme",
",",
"host",
",",
"port",
"=",
"get_host",
"(",
"url",
")",
"port",
"=",
"port",
"or",
"port_by_scheme",
".",
"get",
"(",
"scheme",
",",
"80",
")",
"if",
"scheme",
"==",
"\"https\"",
":",
"return",
"HTTPSConnectionPool",
"(",
"host",
",",
"port",
"=",
"port",
",",
"*",
"*",
"kw",
")",
"else",
":",
"return",
"HTTPConnectionPool",
"(",
"host",
",",
"port",
"=",
"port",
",",
"*",
"*",
"kw",
")"
] | [
1023,
0
] | [
1048,
56
] | python | en | ['en', 'error', 'th'] | False |
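Beyond the docstring's example, the **kw passthrough means pool options ride along; a variant with an explicit pool size and timeout (host is a placeholder):

import urllib3

pool = urllib3.connection_from_url("https://example.com/", maxsize=10, timeout=3.0)
r = pool.request("GET", "/")
print(r.status)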
_normalize_host | (host, scheme) |
Normalize hosts for comparisons and use with sockets.
|
Normalize hosts for comparisons and use with sockets.
| def _normalize_host(host, scheme):
"""
Normalize hosts for comparisons and use with sockets.
"""
host = normalize_host(host, scheme)
# httplib doesn't like it when we include brackets in IPv6 addresses
# Specifically, if we include brackets but also pass the port then
# httplib crazily doubles up the square brackets on the Host header.
# Instead, we need to make sure we never pass ``None`` as the port.
# However, for backward compatibility reasons we can't actually
# *assert* that. See http://bugs.python.org/issue28539
if host.startswith("[") and host.endswith("]"):
host = host[1:-1]
return host | [
"def",
"_normalize_host",
"(",
"host",
",",
"scheme",
")",
":",
"host",
"=",
"normalize_host",
"(",
"host",
",",
"scheme",
")",
"# httplib doesn't like it when we include brackets in IPv6 addresses",
"# Specifically, if we include brackets but also pass the port then",
"# httplib crazily doubles up the square brackets on the Host header.",
"# Instead, we need to make sure we never pass ``None`` as the port.",
"# However, for backward compatibility reasons we can't actually",
"# *assert* that. See http://bugs.python.org/issue28539",
"if",
"host",
".",
"startswith",
"(",
"\"[\"",
")",
"and",
"host",
".",
"endswith",
"(",
"\"]\"",
")",
":",
"host",
"=",
"host",
"[",
"1",
":",
"-",
"1",
"]",
"return",
"host"
] | [
1051,
0
] | [
1066,
15
] | python | en | ['en', 'error', 'th'] | False |
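Concretely, the bracket stripping is what lets an IPv6 literal be handed to the socket layer. A one-line illustration of the private helper, for intuition only and inferred from the code above:

print(_normalize_host("[2001:db8::1]", scheme="http"))  # 2001:db8::1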
ConnectionPool.close | (self) |
Close all pooled connections and disable the pool.
|
Close all pooled connections and disable the pool.
| def close(self):
"""
Close all pooled connections and disable the pool.
"""
pass | [
"def",
"close",
"(",
"self",
")",
":",
"pass"
] | [
92,
4
] | [
96,
12
] | python | en | ['en', 'error', 'th'] | False |
HTTPConnectionPool._new_conn | (self) |
Return a fresh :class:`HTTPConnection`.
|
Return a fresh :class:`HTTPConnection`.
| def _new_conn(self):
"""
Return a fresh :class:`HTTPConnection`.
"""
self.num_connections += 1
log.debug(
"Starting new HTTP connection (%d): %s:%s",
self.num_connections,
self.host,
self.port or "80",
)
conn = self.ConnectionCls(
host=self.host,
port=self.port,
timeout=self.timeout.connect_timeout,
strict=self.strict,
**self.conn_kw
)
return conn | [
"def",
"_new_conn",
"(",
"self",
")",
":",
"self",
".",
"num_connections",
"+=",
"1",
"log",
".",
"debug",
"(",
"\"Starting new HTTP connection (%d): %s:%s\"",
",",
"self",
".",
"num_connections",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
"or",
"\"80\"",
",",
")",
"conn",
"=",
"self",
".",
"ConnectionCls",
"(",
"host",
"=",
"self",
".",
"host",
",",
"port",
"=",
"self",
".",
"port",
",",
"timeout",
"=",
"self",
".",
"timeout",
".",
"connect_timeout",
",",
"strict",
"=",
"self",
".",
"strict",
",",
"*",
"*",
"self",
".",
"conn_kw",
")",
"return",
"conn"
] | [
221,
4
] | [
240,
19
] | python | en | ['en', 'error', 'th'] | False |
HTTPConnectionPool._get_conn | (self, timeout=None) |
Get a connection. Will return a pooled connection if one is available.
If no connections are available and :prop:`.block` is ``False``, then a
fresh connection is returned.
:param timeout:
Seconds to wait before giving up and raising
:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
:prop:`.block` is ``True``.
|
Get a connection. Will return a pooled connection if one is available. | def _get_conn(self, timeout=None):
"""
Get a connection. Will return a pooled connection if one is available.
If no connections are available and :prop:`.block` is ``False``, then a
fresh connection is returned.
:param timeout:
Seconds to wait before giving up and raising
:class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
:prop:`.block` is ``True``.
"""
conn = None
try:
conn = self.pool.get(block=self.block, timeout=timeout)
except AttributeError: # self.pool is None
raise ClosedPoolError(self, "Pool is closed.")
except queue.Empty:
if self.block:
raise EmptyPoolError(
self,
"Pool reached maximum size and no more connections are allowed.",
)
pass # Oh well, we'll create a new connection then
# If this is a persistent connection, check if it got disconnected
if conn and is_connection_dropped(conn):
log.debug("Resetting dropped connection: %s", self.host)
conn.close()
if getattr(conn, "auto_open", 1) == 0:
# This is a proxied connection that has been mutated by
# http.client._tunnel() and cannot be reused (since it would
# attempt to bypass the proxy)
conn = None
return conn or self._new_conn() | [
"def",
"_get_conn",
"(",
"self",
",",
"timeout",
"=",
"None",
")",
":",
"conn",
"=",
"None",
"try",
":",
"conn",
"=",
"self",
".",
"pool",
".",
"get",
"(",
"block",
"=",
"self",
".",
"block",
",",
"timeout",
"=",
"timeout",
")",
"except",
"AttributeError",
":",
"# self.pool is None",
"raise",
"ClosedPoolError",
"(",
"self",
",",
"\"Pool is closed.\"",
")",
"except",
"queue",
".",
"Empty",
":",
"if",
"self",
".",
"block",
":",
"raise",
"EmptyPoolError",
"(",
"self",
",",
"\"Pool reached maximum size and no more connections are allowed.\"",
",",
")",
"pass",
"# Oh well, we'll create a new connection then",
"# If this is a persistent connection, check if it got disconnected",
"if",
"conn",
"and",
"is_connection_dropped",
"(",
"conn",
")",
":",
"log",
".",
"debug",
"(",
"\"Resetting dropped connection: %s\"",
",",
"self",
".",
"host",
")",
"conn",
".",
"close",
"(",
")",
"if",
"getattr",
"(",
"conn",
",",
"\"auto_open\"",
",",
"1",
")",
"==",
"0",
":",
"# This is a proxied connection that has been mutated by",
"# http.client._tunnel() and cannot be reused (since it would",
"# attempt to bypass the proxy)",
"conn",
"=",
"None",
"return",
"conn",
"or",
"self",
".",
"_new_conn",
"(",
")"
] | [
242,
4
] | [
279,
39
] | python | en | ['en', 'error', 'th'] | False |
HTTPConnectionPool._put_conn | (self, conn) |
Put a connection back into the pool.
:param conn:
Connection object for the current host and port as returned by
:meth:`._new_conn` or :meth:`._get_conn`.
If the pool is already full, the connection is closed and discarded
because we exceeded maxsize. If connections are discarded frequently,
then maxsize should be increased.
If the pool is closed, then the connection will be closed and discarded.
|
Put a connection back into the pool. | def _put_conn(self, conn):
"""
Put a connection back into the pool.
:param conn:
Connection object for the current host and port as returned by
:meth:`._new_conn` or :meth:`._get_conn`.
If the pool is already full, the connection is closed and discarded
because we exceeded maxsize. If connections are discarded frequently,
then maxsize should be increased.
If the pool is closed, then the connection will be closed and discarded.
"""
try:
self.pool.put(conn, block=False)
return # Everything is dandy, done.
except AttributeError:
# self.pool is None.
pass
except queue.Full:
# This should never happen if self.block == True
log.warning("Connection pool is full, discarding connection: %s", self.host)
# Connection never got put back into the pool, close it.
if conn:
conn.close() | [
"def",
"_put_conn",
"(",
"self",
",",
"conn",
")",
":",
"try",
":",
"self",
".",
"pool",
".",
"put",
"(",
"conn",
",",
"block",
"=",
"False",
")",
"return",
"# Everything is dandy, done.",
"except",
"AttributeError",
":",
"# self.pool is None.",
"pass",
"except",
"queue",
".",
"Full",
":",
"# This should never happen if self.block == True",
"log",
".",
"warning",
"(",
"\"Connection pool is full, discarding connection: %s\"",
",",
"self",
".",
"host",
")",
"# Connection never got put back into the pool, close it.",
"if",
"conn",
":",
"conn",
".",
"close",
"(",
")"
] | [
281,
4
] | [
307,
24
] | python | en | ['en', 'error', 'th'] | False |
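Taken together, _get_conn and _put_conn form the checkout/checkin discipline that urlopen follows internally. A hedged sketch of that pattern; these are private APIs, shown only to make the lifecycle concrete:

from urllib3 import HTTPConnectionPool

pool = HTTPConnectionPool("example.com", maxsize=2)
conn = pool._get_conn()  # checkout: pooled conn if available, else a fresh one
try:
    pass  # ... issue a request on conn ...
finally:
    pool._put_conn(conn)  # checkin: a full or closed pool discards the conn instead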
HTTPConnectionPool._validate_conn | (self, conn) |
Called right before a request is made, after the socket is created.
|
Called right before a request is made, after the socket is created.
| def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
pass | [
"def",
"_validate_conn",
"(",
"self",
",",
"conn",
")",
":",
"pass"
] | [
309,
4
] | [
313,
12
] | python | en | ['en', 'error', 'th'] | False |
HTTPConnectionPool._get_timeout | (self, timeout) | Helper that always returns a :class:`urllib3.util.Timeout` | Helper that always returns a :class:`urllib3.util.Timeout` | def _get_timeout(self, timeout):
"""Helper that always returns a :class:`urllib3.util.Timeout`"""
if timeout is _Default:
return self.timeout.clone()
if isinstance(timeout, Timeout):
return timeout.clone()
else:
# User passed us an int/float. This is for backwards compatibility,
# can be removed later
return Timeout.from_float(timeout) | [
"def",
"_get_timeout",
"(",
"self",
",",
"timeout",
")",
":",
"if",
"timeout",
"is",
"_Default",
":",
"return",
"self",
".",
"timeout",
".",
"clone",
"(",
")",
"if",
"isinstance",
"(",
"timeout",
",",
"Timeout",
")",
":",
"return",
"timeout",
".",
"clone",
"(",
")",
"else",
":",
"# User passed us an int/float. This is for backwards compatibility,",
"# can be removed later",
"return",
"Timeout",
".",
"from_float",
"(",
"timeout",
")"
] | [
319,
4
] | [
329,
46
] | python | en | ['en', 'lb', 'en'] | True |
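This normalization is why callers can pass either a bare number or a Timeout object; both forms below end up as equivalent Timeout instances (the host is a placeholder):

from urllib3 import HTTPConnectionPool
from urllib3.util.timeout import Timeout

pool = HTTPConnectionPool("example.com")
t1 = pool._get_timeout(2.5)                             # wrapped via Timeout.from_float
t2 = pool._get_timeout(Timeout(connect=1.0, read=5.0))  # returned as a clone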
HTTPConnectionPool._raise_timeout | (self, err, url, timeout_value) | Is the error actually a timeout? Will raise a ReadTimeout or pass | Is the error actually a timeout? Will raise a ReadTimeout or pass | def _raise_timeout(self, err, url, timeout_value):
"""Is the error actually a timeout? Will raise a ReadTimeout or pass"""
if isinstance(err, SocketTimeout):
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
# See the above comment about EAGAIN in Python 3. In Python 2 we have
# to specifically catch it and throw the timeout error
if hasattr(err, "errno") and err.errno in _blocking_errnos:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
)
# Catch possible read timeouts thrown as SSL errors. If not the
# case, rethrow the original. We need to do this because of:
# http://bugs.python.org/issue10272
if "timed out" in str(err) or "did not complete (read)" in str(
err
): # Python < 2.7.4
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % timeout_value
) | [
"def",
"_raise_timeout",
"(",
"self",
",",
"err",
",",
"url",
",",
"timeout_value",
")",
":",
"if",
"isinstance",
"(",
"err",
",",
"SocketTimeout",
")",
":",
"raise",
"ReadTimeoutError",
"(",
"self",
",",
"url",
",",
"\"Read timed out. (read timeout=%s)\"",
"%",
"timeout_value",
")",
"# See the above comment about EAGAIN in Python 3. In Python 2 we have",
"# to specifically catch it and throw the timeout error",
"if",
"hasattr",
"(",
"err",
",",
"\"errno\"",
")",
"and",
"err",
".",
"errno",
"in",
"_blocking_errnos",
":",
"raise",
"ReadTimeoutError",
"(",
"self",
",",
"url",
",",
"\"Read timed out. (read timeout=%s)\"",
"%",
"timeout_value",
")",
"# Catch possible read timeouts thrown as SSL errors. If not the",
"# case, rethrow the original. We need to do this because of:",
"# http://bugs.python.org/issue10272",
"if",
"\"timed out\"",
"in",
"str",
"(",
"err",
")",
"or",
"\"did not complete (read)\"",
"in",
"str",
"(",
"err",
")",
":",
"# Python < 2.7.4",
"raise",
"ReadTimeoutError",
"(",
"self",
",",
"url",
",",
"\"Read timed out. (read timeout=%s)\"",
"%",
"timeout_value",
")"
] | [
331,
4
] | [
354,
13
] | python | en | ['en', 'en', 'en'] | True |
HTTPConnectionPool._make_request | (
self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
) |
Perform a request on a given urllib connection object taken from our
pool.
:param conn:
a connection from one of our connection pools
:param timeout:
Socket timeout in seconds for the request. This can be a
float or integer, which will set the same timeout value for
the socket connect and the socket read, or an instance of
:class:`urllib3.util.Timeout`, which gives you more fine-grained
control over your timeouts.
|
Perform a request on a given urllib connection object taken from our
pool. | def _make_request(
self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
):
"""
Perform a request on a given urllib connection object taken from our
pool.
:param conn:
a connection from one of our connection pools
:param timeout:
Socket timeout in seconds for the request. This can be a
float or integer, which will set the same timeout value for
the socket connect and the socket read, or an instance of
:class:`urllib3.util.Timeout`, which gives you more fine-grained
control over your timeouts.
"""
self.num_requests += 1
timeout_obj = self._get_timeout(timeout)
timeout_obj.start_connect()
conn.timeout = timeout_obj.connect_timeout
# Trigger any extra validation we need to do.
try:
self._validate_conn(conn)
except (SocketTimeout, BaseSSLError) as e:
# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
raise
# conn.request() calls http.client.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
try:
if chunked:
conn.request_chunked(method, url, **httplib_request_kw)
else:
conn.request(method, url, **httplib_request_kw)
# We are swallowing BrokenPipeError (errno.EPIPE) since the server is
# legitimately able to close the connection after sending a valid response.
# With this behaviour, the received response is still readable.
except BrokenPipeError:
# Python 3
pass
except IOError as e:
# Python 2 and macOS/Linux
# EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
# https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
if e.errno not in {
errno.EPIPE,
errno.ESHUTDOWN,
errno.EPROTOTYPE,
}:
raise
# Reset the timeout for the recv() on the socket
read_timeout = timeout_obj.read_timeout
# App Engine doesn't have a sock attr
if getattr(conn, "sock", None):
# In Python 3 socket.py will catch EAGAIN and return None when you
# try and read into the file pointer created by http.client, which
# instead raises a BadStatusLine exception. Instead of catching
# the exception and assuming all BadStatusLine exceptions are read
# timeouts, check for a zero timeout before making the request.
if read_timeout == 0:
raise ReadTimeoutError(
self, url, "Read timed out. (read timeout=%s)" % read_timeout
)
if read_timeout is Timeout.DEFAULT_TIMEOUT:
conn.sock.settimeout(socket.getdefaulttimeout())
else: # None or a value
conn.sock.settimeout(read_timeout)
# Receive the response from the server
try:
try:
# Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True)
except TypeError:
# Python 3
try:
httplib_response = conn.getresponse()
except BaseException as e:
# Remove the TypeError from the exception chain in
# Python 3 (including for exceptions like SystemExit).
# Otherwise it looks like a bug in the code.
six.raise_from(e, None)
except (SocketTimeout, BaseSSLError, SocketError) as e:
self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
log.debug(
'%s://%s:%s "%s %s %s" %s %s',
self.scheme,
self.host,
self.port,
method,
url,
http_version,
httplib_response.status,
httplib_response.length,
)
try:
assert_header_parsing(httplib_response.msg)
except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
log.warning(
"Failed to parse headers (url=%s): %s",
self._absolute_url(url),
hpe,
exc_info=True,
)
return httplib_response | [
"def",
"_make_request",
"(",
"self",
",",
"conn",
",",
"method",
",",
"url",
",",
"timeout",
"=",
"_Default",
",",
"chunked",
"=",
"False",
",",
"*",
"*",
"httplib_request_kw",
")",
":",
"self",
".",
"num_requests",
"+=",
"1",
"timeout_obj",
"=",
"self",
".",
"_get_timeout",
"(",
"timeout",
")",
"timeout_obj",
".",
"start_connect",
"(",
")",
"conn",
".",
"timeout",
"=",
"timeout_obj",
".",
"connect_timeout",
"# Trigger any extra validation we need to do.",
"try",
":",
"self",
".",
"_validate_conn",
"(",
"conn",
")",
"except",
"(",
"SocketTimeout",
",",
"BaseSSLError",
")",
"as",
"e",
":",
"# Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.",
"self",
".",
"_raise_timeout",
"(",
"err",
"=",
"e",
",",
"url",
"=",
"url",
",",
"timeout_value",
"=",
"conn",
".",
"timeout",
")",
"raise",
"# conn.request() calls http.client.*.request, not the method in",
"# urllib3.request. It also calls makefile (recv) on the socket.",
"try",
":",
"if",
"chunked",
":",
"conn",
".",
"request_chunked",
"(",
"method",
",",
"url",
",",
"*",
"*",
"httplib_request_kw",
")",
"else",
":",
"conn",
".",
"request",
"(",
"method",
",",
"url",
",",
"*",
"*",
"httplib_request_kw",
")",
"# We are swallowing BrokenPipeError (errno.EPIPE) since the server is",
"# legitimately able to close the connection after sending a valid response.",
"# With this behaviour, the received response is still readable.",
"except",
"BrokenPipeError",
":",
"# Python 3",
"pass",
"except",
"IOError",
"as",
"e",
":",
"# Python 2 and macOS/Linux",
"# EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS",
"# https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/",
"if",
"e",
".",
"errno",
"not",
"in",
"{",
"errno",
".",
"EPIPE",
",",
"errno",
".",
"ESHUTDOWN",
",",
"errno",
".",
"EPROTOTYPE",
",",
"}",
":",
"raise",
"# Reset the timeout for the recv() on the socket",
"read_timeout",
"=",
"timeout_obj",
".",
"read_timeout",
"# App Engine doesn't have a sock attr",
"if",
"getattr",
"(",
"conn",
",",
"\"sock\"",
",",
"None",
")",
":",
"# In Python 3 socket.py will catch EAGAIN and return None when you",
"# try and read into the file pointer created by http.client, which",
"# instead raises a BadStatusLine exception. Instead of catching",
"# the exception and assuming all BadStatusLine exceptions are read",
"# timeouts, check for a zero timeout before making the request.",
"if",
"read_timeout",
"==",
"0",
":",
"raise",
"ReadTimeoutError",
"(",
"self",
",",
"url",
",",
"\"Read timed out. (read timeout=%s)\"",
"%",
"read_timeout",
")",
"if",
"read_timeout",
"is",
"Timeout",
".",
"DEFAULT_TIMEOUT",
":",
"conn",
".",
"sock",
".",
"settimeout",
"(",
"socket",
".",
"getdefaulttimeout",
"(",
")",
")",
"else",
":",
"# None or a value",
"conn",
".",
"sock",
".",
"settimeout",
"(",
"read_timeout",
")",
"# Receive the response from the server",
"try",
":",
"try",
":",
"# Python 2.7, use buffering of HTTP responses",
"httplib_response",
"=",
"conn",
".",
"getresponse",
"(",
"buffering",
"=",
"True",
")",
"except",
"TypeError",
":",
"# Python 3",
"try",
":",
"httplib_response",
"=",
"conn",
".",
"getresponse",
"(",
")",
"except",
"BaseException",
"as",
"e",
":",
"# Remove the TypeError from the exception chain in",
"# Python 3 (including for exceptions like SystemExit).",
"# Otherwise it looks like a bug in the code.",
"six",
".",
"raise_from",
"(",
"e",
",",
"None",
")",
"except",
"(",
"SocketTimeout",
",",
"BaseSSLError",
",",
"SocketError",
")",
"as",
"e",
":",
"self",
".",
"_raise_timeout",
"(",
"err",
"=",
"e",
",",
"url",
"=",
"url",
",",
"timeout_value",
"=",
"read_timeout",
")",
"raise",
"# AppEngine doesn't have a version attr.",
"http_version",
"=",
"getattr",
"(",
"conn",
",",
"\"_http_vsn_str\"",
",",
"\"HTTP/?\"",
")",
"log",
".",
"debug",
"(",
"'%s://%s:%s \"%s %s %s\" %s %s'",
",",
"self",
".",
"scheme",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
",",
"method",
",",
"url",
",",
"http_version",
",",
"httplib_response",
".",
"status",
",",
"httplib_response",
".",
"length",
",",
")",
"try",
":",
"assert_header_parsing",
"(",
"httplib_response",
".",
"msg",
")",
"except",
"(",
"HeaderParsingError",
",",
"TypeError",
")",
"as",
"hpe",
":",
"# Platform-specific: Python 3",
"log",
".",
"warning",
"(",
"\"Failed to parse headers (url=%s): %s\"",
",",
"self",
".",
"_absolute_url",
"(",
"url",
")",
",",
"hpe",
",",
"exc_info",
"=",
"True",
",",
")",
"return",
"httplib_response"
] | [
356,
4
] | [
473,
31
] | python | en | ['en', 'error', 'th'] | False |
HTTPConnectionPool.close | (self) |
Close all pooled connections and disable the pool.
|
Close all pooled connections and disable the pool.
| def close(self):
"""
Close all pooled connections and disable the pool.
"""
if self.pool is None:
return
# Disable access to the pool
old_pool, self.pool = self.pool, None
try:
while True:
conn = old_pool.get(block=False)
if conn:
conn.close()
except queue.Empty:
pass | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"pool",
"is",
"None",
":",
"return",
"# Disable access to the pool",
"old_pool",
",",
"self",
".",
"pool",
"=",
"self",
".",
"pool",
",",
"None",
"try",
":",
"while",
"True",
":",
"conn",
"=",
"old_pool",
".",
"get",
"(",
"block",
"=",
"False",
")",
"if",
"conn",
":",
"conn",
".",
"close",
"(",
")",
"except",
"queue",
".",
"Empty",
":",
"pass"
] | [
478,
4
] | [
494,
16
] | python | en | ['en', 'error', 'th'] | False |
HTTPConnectionPool.is_same_host | (self, url) |
Check if the given ``url`` is a member of the same host as this
connection pool.
|
Check if the given ``url`` is a member of the same host as this
connection pool.
| def is_same_host(self, url):
"""
Check if the given ``url`` is a member of the same host as this
connection pool.
"""
if url.startswith("/"):
return True
# TODO: Add optional support for socket.gethostbyname checking.
scheme, host, port = get_host(url)
if host is not None:
host = _normalize_host(host, scheme=scheme)
# Use explicit default port for comparison when none is given
if self.port and not port:
port = port_by_scheme.get(scheme)
elif not self.port and port == port_by_scheme.get(scheme):
port = None
return (scheme, host, port) == (self.scheme, self.host, self.port) | [
"def",
"is_same_host",
"(",
"self",
",",
"url",
")",
":",
"if",
"url",
".",
"startswith",
"(",
"\"/\"",
")",
":",
"return",
"True",
"# TODO: Add optional support for socket.gethostbyname checking.",
"scheme",
",",
"host",
",",
"port",
"=",
"get_host",
"(",
"url",
")",
"if",
"host",
"is",
"not",
"None",
":",
"host",
"=",
"_normalize_host",
"(",
"host",
",",
"scheme",
"=",
"scheme",
")",
"# Use explicit default port for comparison when none is given",
"if",
"self",
".",
"port",
"and",
"not",
"port",
":",
"port",
"=",
"port_by_scheme",
".",
"get",
"(",
"scheme",
")",
"elif",
"not",
"self",
".",
"port",
"and",
"port",
"==",
"port_by_scheme",
".",
"get",
"(",
"scheme",
")",
":",
"port",
"=",
"None",
"return",
"(",
"scheme",
",",
"host",
",",
"port",
")",
"==",
"(",
"self",
".",
"scheme",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
")"
] | [
496,
4
] | [
515,
74
] | python | en | ['en', 'error', 'th'] | False |
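A few concrete comparisons, assuming a pool built for plain HTTP on example.com; relative URLs always match, and an explicit default port compares equal to an omitted one:

from urllib3 import HTTPConnectionPool

pool = HTTPConnectionPool("example.com")
print(pool.is_same_host("/index.html"))             # True: relative URL
print(pool.is_same_host("http://example.com:80/"))  # True: explicit default port
print(pool.is_same_host("http://other.org/"))       # False: different host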
HTTPConnectionPool.urlopen | (
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
assert_same_host=True,
timeout=_Default,
pool_timeout=None,
release_conn=None,
chunked=False,
body_pos=None,
**response_kw
) |
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
:param \\**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
|
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details. | def urlopen(
self,
method,
url,
body=None,
headers=None,
retries=None,
redirect=True,
assert_same_host=True,
timeout=_Default,
pool_timeout=None,
release_conn=None,
chunked=False,
body_pos=None,
**response_kw
):
"""
Get a connection from the pool and perform an HTTP request. This is the
lowest level call for making a request, so you'll need to specify all
the raw details.
.. note::
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
`release_conn` will only behave as expected if
`preload_content=False` because we want to make
`preload_content=False` the default behaviour someday soon without
breaking backwards compatibility.
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
:param url:
The URL to perform the request on.
:param body:
Data to send in the request body, either :class:`str`, :class:`bytes`,
an iterable of :class:`str`/:class:`bytes`, or a file-like object.
:param headers:
Dictionary of custom headers to send, such as User-Agent,
If-None-Match, etc. If None, pool headers are used. If provided,
these headers completely replace any pool-specific headers.
:param retries:
Configure the number of retries to allow before raising a
:class:`~urllib3.exceptions.MaxRetryError` exception.
Pass ``None`` to retry until you receive a response. Pass a
:class:`~urllib3.util.retry.Retry` object for fine-grained control
over different types of retries.
Pass an integer number to retry connection errors that many times,
but no other types of errors. Pass zero to never retry.
If ``False``, then retries are disabled and any exception is raised
immediately. Also, instead of raising a MaxRetryError on redirects,
the redirect response will be returned.
:type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
:param redirect:
If True, automatically handle redirects (status codes 301, 302,
303, 307, 308). Each redirect counts as a retry. Disabling retries
will disable redirect, too.
:param assert_same_host:
If ``True``, will make sure that the host of the pool requests is
consistent else will raise HostChangedError. When ``False``, you can
use the pool on an HTTP proxy and request foreign hosts.
:param timeout:
If specified, overrides the default timeout for this one
request. It may be a float (in seconds) or an instance of
:class:`urllib3.util.Timeout`.
:param pool_timeout:
If set and the pool is set to block=True, then this method will
block for ``pool_timeout`` seconds and raise EmptyPoolError if no
connection is available within the time period.
:param release_conn:
If False, then the urlopen call will not release the connection
back into the pool once a response is received (but will release if
you read the entire contents of the response such as when
`preload_content=True`). This is useful if you're not preloading
the response's content immediately. You will need to call
``r.release_conn()`` on the response ``r`` to return the connection
back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param chunked:
If True, urllib3 will send the body using chunked transfer
encoding. Otherwise, urllib3 will send the body using the standard
content-length form. Defaults to False.
:param int body_pos:
Position to seek to in file-like body in the event of a retry or
redirect. Typically this won't need to be set because urllib3 will
auto-populate the value when needed.
:param \\**response_kw:
Additional parameters are passed to
:meth:`urllib3.response.HTTPResponse.from_httplib`
"""
parsed_url = parse_url(url)
destination_scheme = parsed_url.scheme
if headers is None:
headers = self.headers
if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
if release_conn is None:
release_conn = response_kw.get("preload_content", True)
# Check host
if assert_same_host and not self.is_same_host(url):
raise HostChangedError(self, url, retries)
# Ensure that the URL we're connecting to is properly encoded
if url.startswith("/"):
url = six.ensure_str(_encode_target(url))
else:
url = six.ensure_str(parsed_url.url)
conn = None
# Track whether `conn` needs to be released before
# returning/raising/recursing. Update this variable if necessary, and
# leave `release_conn` constant throughout the function. That way, if
# the function recurses, the original value of `release_conn` will be
# passed down into the recursive call, and its value will be respected.
#
# See issue #651 [1] for details.
#
# [1] <https://github.com/urllib3/urllib3/issues/651>
release_this_conn = release_conn
http_tunnel_required = connection_requires_http_tunnel(
self.proxy, self.proxy_config, destination_scheme
)
# Merge the proxy headers. Only done when not using HTTP CONNECT. We
# have to copy the headers dict so we can safely change it without those
# changes being reflected in anyone else's copy.
if not http_tunnel_required:
headers = headers.copy()
headers.update(self.proxy_headers)
# Must keep the exception bound to a separate variable or else Python 3
# complains about UnboundLocalError.
err = None
# Keep track of whether we cleanly exited the except block. This
# ensures we do proper cleanup in finally.
clean_exit = False
# Rewind body position, if needed. Record current position
# for future rewinds in the event of a redirect/retry.
body_pos = set_file_position(body, body_pos)
try:
# Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(
conn, "sock", None
)
if is_new_proxy_conn and http_tunnel_required:
self._prepare_proxy(conn)
# Make the request on the httplib connection object.
httplib_response = self._make_request(
conn,
method,
url,
timeout=timeout_obj,
body=body,
headers=headers,
chunked=chunked,
)
# If we're going to release the connection in ``finally:``, then
# the response doesn't need to know about the connection. Otherwise
# it will also try to release it and we'll have a double-release
# mess.
response_conn = conn if not release_conn else None
# Pass method to Response for length checking
response_kw["request_method"] = method
# Import httplib's response into our own wrapper object
response = self.ResponseCls.from_httplib(
httplib_response,
pool=self,
connection=response_conn,
retries=retries,
**response_kw
)
# Everything went great!
clean_exit = True
except EmptyPoolError:
# Didn't get a connection from the pool, no need to clean up
clean_exit = True
release_this_conn = False
raise
except (
TimeoutError,
HTTPException,
SocketError,
ProtocolError,
BaseSSLError,
SSLError,
CertificateError,
) as e:
# Discard the connection for these exceptions. It will be
# replaced during the next _get_conn() call.
clean_exit = False
if isinstance(e, (BaseSSLError, CertificateError)):
e = SSLError(e)
elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
e = ProxyError("Cannot connect to proxy.", e)
elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError("Connection aborted.", e)
retries = retries.increment(
method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
)
retries.sleep()
# Keep track of the error for the retry warning.
err = e
finally:
if not clean_exit:
# We hit some kind of exception, handled or otherwise. We need
# to throw the connection away unless explicitly told not to.
# Close the connection, set the variable to None, and make sure
# we put the None back in the pool to avoid leaking it.
conn = conn and conn.close()
release_this_conn = True
if release_this_conn:
# Put the connection back to be reused. If the connection is
# expired then it will be None, which will get replaced with a
# fresh connection during _get_conn.
self._put_conn(conn)
if not conn:
# Try again
log.warning(
"Retrying (%r) after connection broken by '%r': %s", retries, err, url
)
return self.urlopen(
method,
url,
body,
headers,
retries,
redirect,
assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
# Handle redirect?
redirect_location = redirect and response.get_redirect_location()
if redirect_location:
if response.status == 303:
method = "GET"
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_redirect:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep_for_retry(response)
log.debug("Redirecting %s -> %s", url, redirect_location)
return self.urlopen(
method,
redirect_location,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
# Check if we should retry the HTTP response.
has_retry_after = bool(response.getheader("Retry-After"))
if retries.is_retry(method, response.status, has_retry_after):
try:
retries = retries.increment(method, url, response=response, _pool=self)
except MaxRetryError:
if retries.raise_on_status:
response.drain_conn()
raise
return response
response.drain_conn()
retries.sleep(response)
log.debug("Retry: %s", url)
return self.urlopen(
method,
url,
body,
headers,
retries=retries,
redirect=redirect,
assert_same_host=assert_same_host,
timeout=timeout,
pool_timeout=pool_timeout,
release_conn=release_conn,
chunked=chunked,
body_pos=body_pos,
**response_kw
)
return response | [
"def",
"urlopen",
"(",
"self",
",",
"method",
",",
"url",
",",
"body",
"=",
"None",
",",
"headers",
"=",
"None",
",",
"retries",
"=",
"None",
",",
"redirect",
"=",
"True",
",",
"assert_same_host",
"=",
"True",
",",
"timeout",
"=",
"_Default",
",",
"pool_timeout",
"=",
"None",
",",
"release_conn",
"=",
"None",
",",
"chunked",
"=",
"False",
",",
"body_pos",
"=",
"None",
",",
"*",
"*",
"response_kw",
")",
":",
"parsed_url",
"=",
"parse_url",
"(",
"url",
")",
"destination_scheme",
"=",
"parsed_url",
".",
"scheme",
"if",
"headers",
"is",
"None",
":",
"headers",
"=",
"self",
".",
"headers",
"if",
"not",
"isinstance",
"(",
"retries",
",",
"Retry",
")",
":",
"retries",
"=",
"Retry",
".",
"from_int",
"(",
"retries",
",",
"redirect",
"=",
"redirect",
",",
"default",
"=",
"self",
".",
"retries",
")",
"if",
"release_conn",
"is",
"None",
":",
"release_conn",
"=",
"response_kw",
".",
"get",
"(",
"\"preload_content\"",
",",
"True",
")",
"# Check host",
"if",
"assert_same_host",
"and",
"not",
"self",
".",
"is_same_host",
"(",
"url",
")",
":",
"raise",
"HostChangedError",
"(",
"self",
",",
"url",
",",
"retries",
")",
"# Ensure that the URL we're connecting to is properly encoded",
"if",
"url",
".",
"startswith",
"(",
"\"/\"",
")",
":",
"url",
"=",
"six",
".",
"ensure_str",
"(",
"_encode_target",
"(",
"url",
")",
")",
"else",
":",
"url",
"=",
"six",
".",
"ensure_str",
"(",
"parsed_url",
".",
"url",
")",
"conn",
"=",
"None",
"# Track whether `conn` needs to be released before",
"# returning/raising/recursing. Update this variable if necessary, and",
"# leave `release_conn` constant throughout the function. That way, if",
"# the function recurses, the original value of `release_conn` will be",
"# passed down into the recursive call, and its value will be respected.",
"#",
"# See issue #651 [1] for details.",
"#",
"# [1] <https://github.com/urllib3/urllib3/issues/651>",
"release_this_conn",
"=",
"release_conn",
"http_tunnel_required",
"=",
"connection_requires_http_tunnel",
"(",
"self",
".",
"proxy",
",",
"self",
".",
"proxy_config",
",",
"destination_scheme",
")",
"# Merge the proxy headers. Only done when not using HTTP CONNECT. We",
"# have to copy the headers dict so we can safely change it without those",
"# changes being reflected in anyone else's copy.",
"if",
"not",
"http_tunnel_required",
":",
"headers",
"=",
"headers",
".",
"copy",
"(",
")",
"headers",
".",
"update",
"(",
"self",
".",
"proxy_headers",
")",
"# Must keep the exception bound to a separate variable or else Python 3",
"# complains about UnboundLocalError.",
"err",
"=",
"None",
"# Keep track of whether we cleanly exited the except block. This",
"# ensures we do proper cleanup in finally.",
"clean_exit",
"=",
"False",
"# Rewind body position, if needed. Record current position",
"# for future rewinds in the event of a redirect/retry.",
"body_pos",
"=",
"set_file_position",
"(",
"body",
",",
"body_pos",
")",
"try",
":",
"# Request a connection from the queue.",
"timeout_obj",
"=",
"self",
".",
"_get_timeout",
"(",
"timeout",
")",
"conn",
"=",
"self",
".",
"_get_conn",
"(",
"timeout",
"=",
"pool_timeout",
")",
"conn",
".",
"timeout",
"=",
"timeout_obj",
".",
"connect_timeout",
"is_new_proxy_conn",
"=",
"self",
".",
"proxy",
"is",
"not",
"None",
"and",
"not",
"getattr",
"(",
"conn",
",",
"\"sock\"",
",",
"None",
")",
"if",
"is_new_proxy_conn",
"and",
"http_tunnel_required",
":",
"self",
".",
"_prepare_proxy",
"(",
"conn",
")",
"# Make the request on the httplib connection object.",
"httplib_response",
"=",
"self",
".",
"_make_request",
"(",
"conn",
",",
"method",
",",
"url",
",",
"timeout",
"=",
"timeout_obj",
",",
"body",
"=",
"body",
",",
"headers",
"=",
"headers",
",",
"chunked",
"=",
"chunked",
",",
")",
"# If we're going to release the connection in ``finally:``, then",
"# the response doesn't need to know about the connection. Otherwise",
"# it will also try to release it and we'll have a double-release",
"# mess.",
"response_conn",
"=",
"conn",
"if",
"not",
"release_conn",
"else",
"None",
"# Pass method to Response for length checking",
"response_kw",
"[",
"\"request_method\"",
"]",
"=",
"method",
"# Import httplib's response into our own wrapper object",
"response",
"=",
"self",
".",
"ResponseCls",
".",
"from_httplib",
"(",
"httplib_response",
",",
"pool",
"=",
"self",
",",
"connection",
"=",
"response_conn",
",",
"retries",
"=",
"retries",
",",
"*",
"*",
"response_kw",
")",
"# Everything went great!",
"clean_exit",
"=",
"True",
"except",
"EmptyPoolError",
":",
"# Didn't get a connection from the pool, no need to clean up",
"clean_exit",
"=",
"True",
"release_this_conn",
"=",
"False",
"raise",
"except",
"(",
"TimeoutError",
",",
"HTTPException",
",",
"SocketError",
",",
"ProtocolError",
",",
"BaseSSLError",
",",
"SSLError",
",",
"CertificateError",
",",
")",
"as",
"e",
":",
"# Discard the connection for these exceptions. It will be",
"# replaced during the next _get_conn() call.",
"clean_exit",
"=",
"False",
"if",
"isinstance",
"(",
"e",
",",
"(",
"BaseSSLError",
",",
"CertificateError",
")",
")",
":",
"e",
"=",
"SSLError",
"(",
"e",
")",
"elif",
"isinstance",
"(",
"e",
",",
"(",
"SocketError",
",",
"NewConnectionError",
")",
")",
"and",
"self",
".",
"proxy",
":",
"e",
"=",
"ProxyError",
"(",
"\"Cannot connect to proxy.\"",
",",
"e",
")",
"elif",
"isinstance",
"(",
"e",
",",
"(",
"SocketError",
",",
"HTTPException",
")",
")",
":",
"e",
"=",
"ProtocolError",
"(",
"\"Connection aborted.\"",
",",
"e",
")",
"retries",
"=",
"retries",
".",
"increment",
"(",
"method",
",",
"url",
",",
"error",
"=",
"e",
",",
"_pool",
"=",
"self",
",",
"_stacktrace",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
")",
"retries",
".",
"sleep",
"(",
")",
"# Keep track of the error for the retry warning.",
"err",
"=",
"e",
"finally",
":",
"if",
"not",
"clean_exit",
":",
"# We hit some kind of exception, handled or otherwise. We need",
"# to throw the connection away unless explicitly told not to.",
"# Close the connection, set the variable to None, and make sure",
"# we put the None back in the pool to avoid leaking it.",
"conn",
"=",
"conn",
"and",
"conn",
".",
"close",
"(",
")",
"release_this_conn",
"=",
"True",
"if",
"release_this_conn",
":",
"# Put the connection back to be reused. If the connection is",
"# expired then it will be None, which will get replaced with a",
"# fresh connection during _get_conn.",
"self",
".",
"_put_conn",
"(",
"conn",
")",
"if",
"not",
"conn",
":",
"# Try again",
"log",
".",
"warning",
"(",
"\"Retrying (%r) after connection broken by '%r': %s\"",
",",
"retries",
",",
"err",
",",
"url",
")",
"return",
"self",
".",
"urlopen",
"(",
"method",
",",
"url",
",",
"body",
",",
"headers",
",",
"retries",
",",
"redirect",
",",
"assert_same_host",
",",
"timeout",
"=",
"timeout",
",",
"pool_timeout",
"=",
"pool_timeout",
",",
"release_conn",
"=",
"release_conn",
",",
"chunked",
"=",
"chunked",
",",
"body_pos",
"=",
"body_pos",
",",
"*",
"*",
"response_kw",
")",
"# Handle redirect?",
"redirect_location",
"=",
"redirect",
"and",
"response",
".",
"get_redirect_location",
"(",
")",
"if",
"redirect_location",
":",
"if",
"response",
".",
"status",
"==",
"303",
":",
"method",
"=",
"\"GET\"",
"try",
":",
"retries",
"=",
"retries",
".",
"increment",
"(",
"method",
",",
"url",
",",
"response",
"=",
"response",
",",
"_pool",
"=",
"self",
")",
"except",
"MaxRetryError",
":",
"if",
"retries",
".",
"raise_on_redirect",
":",
"response",
".",
"drain_conn",
"(",
")",
"raise",
"return",
"response",
"response",
".",
"drain_conn",
"(",
")",
"retries",
".",
"sleep_for_retry",
"(",
"response",
")",
"log",
".",
"debug",
"(",
"\"Redirecting %s -> %s\"",
",",
"url",
",",
"redirect_location",
")",
"return",
"self",
".",
"urlopen",
"(",
"method",
",",
"redirect_location",
",",
"body",
",",
"headers",
",",
"retries",
"=",
"retries",
",",
"redirect",
"=",
"redirect",
",",
"assert_same_host",
"=",
"assert_same_host",
",",
"timeout",
"=",
"timeout",
",",
"pool_timeout",
"=",
"pool_timeout",
",",
"release_conn",
"=",
"release_conn",
",",
"chunked",
"=",
"chunked",
",",
"body_pos",
"=",
"body_pos",
",",
"*",
"*",
"response_kw",
")",
"# Check if we should retry the HTTP response.",
"has_retry_after",
"=",
"bool",
"(",
"response",
".",
"getheader",
"(",
"\"Retry-After\"",
")",
")",
"if",
"retries",
".",
"is_retry",
"(",
"method",
",",
"response",
".",
"status",
",",
"has_retry_after",
")",
":",
"try",
":",
"retries",
"=",
"retries",
".",
"increment",
"(",
"method",
",",
"url",
",",
"response",
"=",
"response",
",",
"_pool",
"=",
"self",
")",
"except",
"MaxRetryError",
":",
"if",
"retries",
".",
"raise_on_status",
":",
"response",
".",
"drain_conn",
"(",
")",
"raise",
"return",
"response",
"response",
".",
"drain_conn",
"(",
")",
"retries",
".",
"sleep",
"(",
"response",
")",
"log",
".",
"debug",
"(",
"\"Retry: %s\"",
",",
"url",
")",
"return",
"self",
".",
"urlopen",
"(",
"method",
",",
"url",
",",
"body",
",",
"headers",
",",
"retries",
"=",
"retries",
",",
"redirect",
"=",
"redirect",
",",
"assert_same_host",
"=",
"assert_same_host",
",",
"timeout",
"=",
"timeout",
",",
"pool_timeout",
"=",
"pool_timeout",
",",
"release_conn",
"=",
"release_conn",
",",
"chunked",
"=",
"chunked",
",",
"body_pos",
"=",
"body_pos",
",",
"*",
"*",
"response_kw",
")",
"return",
"response"
] | [
517,
4
] | [
861,
23
] | python | en | ['en', 'error', 'th'] | False |
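An end-to-end sketch of the knobs the docstring describes, combining a Retry policy with preload_content=False so the connection stays checked out until the body is consumed (host and policy values are illustrative):

from urllib3 import HTTPConnectionPool
from urllib3.util.retry import Retry

pool = HTTPConnectionPool("example.com", maxsize=4)
resp = pool.urlopen(
    "GET", "/",
    retries=Retry(total=3, redirect=2, status_forcelist=[502, 503]),
    preload_content=False)
print(resp.status)
resp.release_conn()  # hand the connection back to the pool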
HTTPSConnectionPool._prepare_conn | (self, conn) |
Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
and establish the tunnel if proxy is used.
|
Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
and establish the tunnel if proxy is used.
| def _prepare_conn(self, conn):
"""
Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
and establish the tunnel if proxy is used.
"""
if isinstance(conn, VerifiedHTTPSConnection):
conn.set_cert(
key_file=self.key_file,
key_password=self.key_password,
cert_file=self.cert_file,
cert_reqs=self.cert_reqs,
ca_certs=self.ca_certs,
ca_cert_dir=self.ca_cert_dir,
assert_hostname=self.assert_hostname,
assert_fingerprint=self.assert_fingerprint,
)
conn.ssl_version = self.ssl_version
return conn | [
"def",
"_prepare_conn",
"(",
"self",
",",
"conn",
")",
":",
"if",
"isinstance",
"(",
"conn",
",",
"VerifiedHTTPSConnection",
")",
":",
"conn",
".",
"set_cert",
"(",
"key_file",
"=",
"self",
".",
"key_file",
",",
"key_password",
"=",
"self",
".",
"key_password",
",",
"cert_file",
"=",
"self",
".",
"cert_file",
",",
"cert_reqs",
"=",
"self",
".",
"cert_reqs",
",",
"ca_certs",
"=",
"self",
".",
"ca_certs",
",",
"ca_cert_dir",
"=",
"self",
".",
"ca_cert_dir",
",",
"assert_hostname",
"=",
"self",
".",
"assert_hostname",
",",
"assert_fingerprint",
"=",
"self",
".",
"assert_fingerprint",
",",
")",
"conn",
".",
"ssl_version",
"=",
"self",
".",
"ssl_version",
"return",
"conn"
] | [
930,
4
] | [
948,
19
] | python | en | ['en', 'error', 'th'] | False |
HTTPSConnectionPool._prepare_proxy | (self, conn) |
Establishes a tunnel connection through HTTP CONNECT.
Tunnel connection is established early because otherwise httplib would
improperly set Host: header to proxy's IP:port.
|
Establishes a tunnel connection through HTTP CONNECT. | def _prepare_proxy(self, conn):
"""
Establishes a tunnel connection through HTTP CONNECT.
Tunnel connection is established early because otherwise httplib would
improperly set Host: header to proxy's IP:port.
"""
conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
if self.proxy.scheme == "https":
conn.tls_in_tls_required = True
conn.connect() | [
"def",
"_prepare_proxy",
"(",
"self",
",",
"conn",
")",
":",
"conn",
".",
"set_tunnel",
"(",
"self",
".",
"_proxy_host",
",",
"self",
".",
"port",
",",
"self",
".",
"proxy_headers",
")",
"if",
"self",
".",
"proxy",
".",
"scheme",
"==",
"\"https\"",
":",
"conn",
".",
"tls_in_tls_required",
"=",
"True",
"conn",
".",
"connect",
"(",
")"
] | [
950,
4
] | [
963,
22
] | python | en | ['en', 'error', 'th'] | False |
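`_prepare_proxy` is only reached when the pool sits behind an HTTP proxy; the CONNECT tunnel is established before any request so the Host header targets the origin rather than the proxy. A sketch of how it is typically triggered (the proxy URL is an assumption):

import urllib3

# ProxyManager hands out pools whose HTTPS connections are tunneled via
# CONNECT, which is exactly what _prepare_proxy configures.
proxy = urllib3.ProxyManager("http://127.0.0.1:3128/")
response = proxy.request("GET", "https://example.com/")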
HTTPSConnectionPool._new_conn | (self) |
Return a fresh :class:`http.client.HTTPSConnection`.
|
Return a fresh :class:`http.client.HTTPSConnection`.
| def _new_conn(self):
"""
Return a fresh :class:`http.client.HTTPSConnection`.
"""
self.num_connections += 1
log.debug(
"Starting new HTTPS connection (%d): %s:%s",
self.num_connections,
self.host,
self.port or "443",
)
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
raise SSLError(
"Can't connect to HTTPS URL because the SSL module is not available."
)
actual_host = self.host
actual_port = self.port
if self.proxy is not None:
actual_host = self.proxy.host
actual_port = self.proxy.port
conn = self.ConnectionCls(
host=actual_host,
port=actual_port,
timeout=self.timeout.connect_timeout,
strict=self.strict,
cert_file=self.cert_file,
key_file=self.key_file,
key_password=self.key_password,
**self.conn_kw
)
return self._prepare_conn(conn) | [
"def",
"_new_conn",
"(",
"self",
")",
":",
"self",
".",
"num_connections",
"+=",
"1",
"log",
".",
"debug",
"(",
"\"Starting new HTTPS connection (%d): %s:%s\"",
",",
"self",
".",
"num_connections",
",",
"self",
".",
"host",
",",
"self",
".",
"port",
"or",
"\"443\"",
",",
")",
"if",
"not",
"self",
".",
"ConnectionCls",
"or",
"self",
".",
"ConnectionCls",
"is",
"DummyConnection",
":",
"raise",
"SSLError",
"(",
"\"Can't connect to HTTPS URL because the SSL module is not available.\"",
")",
"actual_host",
"=",
"self",
".",
"host",
"actual_port",
"=",
"self",
".",
"port",
"if",
"self",
".",
"proxy",
"is",
"not",
"None",
":",
"actual_host",
"=",
"self",
".",
"proxy",
".",
"host",
"actual_port",
"=",
"self",
".",
"proxy",
".",
"port",
"conn",
"=",
"self",
".",
"ConnectionCls",
"(",
"host",
"=",
"actual_host",
",",
"port",
"=",
"actual_port",
",",
"timeout",
"=",
"self",
".",
"timeout",
".",
"connect_timeout",
",",
"strict",
"=",
"self",
".",
"strict",
",",
"cert_file",
"=",
"self",
".",
"cert_file",
",",
"key_file",
"=",
"self",
".",
"key_file",
",",
"key_password",
"=",
"self",
".",
"key_password",
",",
"*",
"*",
"self",
".",
"conn_kw",
")",
"return",
"self",
".",
"_prepare_conn",
"(",
"conn",
")"
] | [
965,
4
] | [
999,
39
] | python | en | ['en', 'error', 'th'] | False |
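A sketch of constructing the pool directly so `_new_conn` yields verified connections; the CA bundle path is an assumption about the local system:

import urllib3

pool = urllib3.HTTPSConnectionPool(
    "example.com",
    port=443,
    cert_reqs="CERT_REQUIRED",
    ca_certs="/etc/ssl/certs/ca-certificates.crt",  # assumed path
)
response = pool.request("GET", "/")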
HTTPSConnectionPool._validate_conn | (self, conn) |
Called right before a request is made, after the socket is created.
|
Called right before a request is made, after the socket is created.
| def _validate_conn(self, conn):
"""
Called right before a request is made, after the socket is created.
"""
super(HTTPSConnectionPool, self)._validate_conn(conn)
# Force connect early to allow us to validate the connection.
if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
conn.connect()
if not conn.is_verified:
warnings.warn(
(
"Unverified HTTPS request is being made to host '%s'. "
"Adding certificate verification is strongly advised. See: "
"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
"#ssl-warnings" % conn.host
),
InsecureRequestWarning,
) | [
"def",
"_validate_conn",
"(",
"self",
",",
"conn",
")",
":",
"super",
"(",
"HTTPSConnectionPool",
",",
"self",
")",
".",
"_validate_conn",
"(",
"conn",
")",
"# Force connect early to allow us to validate the connection.",
"if",
"not",
"getattr",
"(",
"conn",
",",
"\"sock\"",
",",
"None",
")",
":",
"# AppEngine might not have `.sock`",
"conn",
".",
"connect",
"(",
")",
"if",
"not",
"conn",
".",
"is_verified",
":",
"warnings",
".",
"warn",
"(",
"(",
"\"Unverified HTTPS request is being made to host '%s'. \"",
"\"Adding certificate verification is strongly advised. See: \"",
"\"https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html\"",
"\"#ssl-warnings\"",
"%",
"conn",
".",
"host",
")",
",",
"InsecureRequestWarning",
",",
")"
] | [
1001,
4
] | [
1020,
13
] | python | en | ['en', 'error', 'th'] | False |
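When verification is disabled, `conn.is_verified` stays false and `_validate_conn` emits `InsecureRequestWarning` on every request. A sketch (hostname illustrative; silencing the warning is possible but discouraged):

import urllib3

pool = urllib3.HTTPSConnectionPool("self-signed.example", cert_reqs="CERT_NONE")
# Each request through this pool would warn; this silences it globally.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)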
split_identifier | (identifier) |
Split an SQL identifier into a two element tuple of (namespace, name).
The identifier could be a table, column, or sequence name that might be
prefixed by a namespace.
|
Split an SQL identifier into a two element tuple of (namespace, name). | def split_identifier(identifier):
"""
Split an SQL identifier into a two element tuple of (namespace, name).
The identifier could be a table, column, or sequence name that might be
prefixed by a namespace.
"""
try:
namespace, name = identifier.split('"."')
except ValueError:
namespace, name = '', identifier
return namespace.strip('"'), name.strip('"') | [
"def",
"split_identifier",
"(",
"identifier",
")",
":",
"try",
":",
"namespace",
",",
"name",
"=",
"identifier",
".",
"split",
"(",
"'\".\"'",
")",
"except",
"ValueError",
":",
"namespace",
",",
"name",
"=",
"''",
",",
"identifier",
"return",
"namespace",
".",
"strip",
"(",
"'\"'",
")",
",",
"name",
".",
"strip",
"(",
"'\"'",
")"
] | [
181,
0
] | [
192,
48
] | python | en | ['en', 'error', 'th'] | False |
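Expected behavior of `split_identifier`, assuming Django's import path `django.db.backends.utils`:

from django.db.backends.utils import split_identifier

split_identifier('"ACCOUNTING"."EMPLOYEE"')  # -> ('ACCOUNTING', 'EMPLOYEE')
split_identifier('"EMPLOYEE"')               # -> ('', 'EMPLOYEE')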
truncate_name | (identifier, length=None, hash_len=4) |
Shorten an SQL identifier to a repeatable mangled version with the given
length.
If a quote-stripped name contains a namespace, e.g. USERNAME"."TABLE,
truncate the table portion only.
|
Shorten an SQL identifier to a repeatable mangled version with the given
length. | def truncate_name(identifier, length=None, hash_len=4):
"""
Shorten an SQL identifier to a repeatable mangled version with the given
length.
If a quote-stripped name contains a namespace, e.g. USERNAME"."TABLE,
truncate the table portion only.
"""
namespace, name = split_identifier(identifier)
if length is None or len(name) <= length:
return identifier
digest = names_digest(name, length=hash_len)
return '%s%s%s' % ('%s"."' % namespace if namespace else '', name[:length - hash_len], digest) | [
"def",
"truncate_name",
"(",
"identifier",
",",
"length",
"=",
"None",
",",
"hash_len",
"=",
"4",
")",
":",
"namespace",
",",
"name",
"=",
"split_identifier",
"(",
"identifier",
")",
"if",
"length",
"is",
"None",
"or",
"len",
"(",
"name",
")",
"<=",
"length",
":",
"return",
"identifier",
"digest",
"=",
"names_digest",
"(",
"name",
",",
"length",
"=",
"hash_len",
")",
"return",
"'%s%s%s'",
"%",
"(",
"'%s\".\"'",
"%",
"namespace",
"if",
"namespace",
"else",
"''",
",",
"name",
"[",
":",
"length",
"-",
"hash_len",
"]",
",",
"digest",
")"
] | [
195,
0
] | [
209,
98
] | python | en | ['en', 'error', 'th'] | False |
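A short sketch of `truncate_name` under the same assumed import path; the digest suffix comes from `names_digest`, defined in the next record:

from django.db.backends.utils import truncate_name

# 10-char budget: the first 6 chars of the name plus a 4-char hash suffix.
truncate_name('very_long_table_name', length=10)
# -> 'very_l' + names_digest('very_long_table_name', length=4)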
names_digest | (*args, length) |
Generate a 32-bit digest of a set of arguments that can be used to shorten
identifying names.
|
Generate a 32-bit digest of a set of arguments that can be used to shorten
identifying names.
| def names_digest(*args, length):
"""
Generate a 32-bit digest of a set of arguments that can be used to shorten
identifying names.
"""
h = hashlib.md5()
for arg in args:
h.update(arg.encode())
return h.hexdigest()[:length] | [
"def",
"names_digest",
"(",
"*",
"args",
",",
"length",
")",
":",
"h",
"=",
"hashlib",
".",
"md5",
"(",
")",
"for",
"arg",
"in",
"args",
":",
"h",
".",
"update",
"(",
"arg",
".",
"encode",
"(",
")",
")",
"return",
"h",
".",
"hexdigest",
"(",
")",
"[",
":",
"length",
"]"
] | [
212,
0
] | [
220,
33
] | python | en | ['en', 'error', 'th'] | False |
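`names_digest` is deterministic, so the shortened identifiers produced by `truncate_name` are stable across runs. A sketch under the same import assumption:

from django.db.backends.utils import names_digest

suffix = names_digest('app_author', 'name', length=8)
assert len(suffix) == 8  # hex prefix of an MD5 over the concatenated args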
format_number | (value, max_digits, decimal_places) |
Format a number into a string with the requisite number of digits and
decimal places.
|
Format a number into a string with the requisite number of digits and
decimal places.
| def format_number(value, max_digits, decimal_places):
"""
Format a number into a string with the requisite number of digits and
decimal places.
"""
if value is None:
return None
context = decimal.getcontext().copy()
if max_digits is not None:
context.prec = max_digits
if decimal_places is not None:
value = value.quantize(decimal.Decimal(1).scaleb(-decimal_places), context=context)
else:
context.traps[decimal.Rounded] = 1
value = context.create_decimal(value)
return "{:f}".format(value) | [
"def",
"format_number",
"(",
"value",
",",
"max_digits",
",",
"decimal_places",
")",
":",
"if",
"value",
"is",
"None",
":",
"return",
"None",
"context",
"=",
"decimal",
".",
"getcontext",
"(",
")",
".",
"copy",
"(",
")",
"if",
"max_digits",
"is",
"not",
"None",
":",
"context",
".",
"prec",
"=",
"max_digits",
"if",
"decimal_places",
"is",
"not",
"None",
":",
"value",
"=",
"value",
".",
"quantize",
"(",
"decimal",
".",
"Decimal",
"(",
"1",
")",
".",
"scaleb",
"(",
"-",
"decimal_places",
")",
",",
"context",
"=",
"context",
")",
"else",
":",
"context",
".",
"traps",
"[",
"decimal",
".",
"Rounded",
"]",
"=",
"1",
"value",
"=",
"context",
".",
"create_decimal",
"(",
"value",
")",
"return",
"\"{:f}\"",
".",
"format",
"(",
"value",
")"
] | [
223,
0
] | [
238,
31
] | python | en | ['en', 'error', 'th'] | False |
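Worked examples for `format_number` (same assumed import path):

import decimal
from django.db.backends.utils import format_number

format_number(decimal.Decimal('123.4567'), max_digits=5, decimal_places=2)
# -> '123.46': quantized to 2 places inside a 5-digit precision context
format_number(None, 5, 2)  # -> None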
strip_quotes | (table_name) |
Strip quotes off of quoted table names to make them safe for use in index
names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
scheme) becomes 'USER"."TABLE'.
|
Strip quotes off of quoted table names to make them safe for use in index
names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
scheme) becomes 'USER"."TABLE'.
| def strip_quotes(table_name):
"""
Strip quotes off of quoted table names to make them safe for use in index
names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
scheme) becomes 'USER"."TABLE'.
"""
has_quotes = table_name.startswith('"') and table_name.endswith('"')
return table_name[1:-1] if has_quotes else table_name | [
"def",
"strip_quotes",
"(",
"table_name",
")",
":",
"has_quotes",
"=",
"table_name",
".",
"startswith",
"(",
"'\"'",
")",
"and",
"table_name",
".",
"endswith",
"(",
"'\"'",
")",
"return",
"table_name",
"[",
"1",
":",
"-",
"1",
"]",
"if",
"has_quotes",
"else",
"table_name"
] | [
241,
0
] | [
248,
57
] | python | en | ['en', 'error', 'th'] | False |
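Expected behavior of `strip_quotes`, matching the docstring's Oracle example:

from django.db.backends.utils import strip_quotes

strip_quotes('"USER"."TABLE"')  # -> 'USER"."TABLE'
strip_quotes('plain_table')     # -> 'plain_table'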
find_module | (module, paths=None) | Just like 'imp.find_module()', but with package support | Just like 'imp.find_module()', but with package support | def find_module(module, paths=None):
"""Just like 'imp.find_module()', but with package support"""
parts = module.split('.')
while parts:
part = parts.pop(0)
f, path, (suffix, mode, kind) = info = imp.find_module(part, paths)
if kind == PKG_DIRECTORY:
parts = parts or ['__init__']
paths = [path]
elif parts:
raise ImportError("Can't find %r in %s" % (parts, module))
return info | [
"def",
"find_module",
"(",
"module",
",",
"paths",
"=",
"None",
")",
":",
"parts",
"=",
"module",
".",
"split",
"(",
"'.'",
")",
"while",
"parts",
":",
"part",
"=",
"parts",
".",
"pop",
"(",
"0",
")",
"f",
",",
"path",
",",
"(",
"suffix",
",",
"mode",
",",
"kind",
")",
"=",
"info",
"=",
"imp",
".",
"find_module",
"(",
"part",
",",
"paths",
")",
"if",
"kind",
"==",
"PKG_DIRECTORY",
":",
"parts",
"=",
"parts",
"or",
"[",
"'__init__'",
"]",
"paths",
"=",
"[",
"path",
"]",
"elif",
"parts",
":",
"raise",
"ImportError",
"(",
"\"Can't find %r in %s\"",
"%",
"(",
"parts",
",",
"module",
")",
")",
"return",
"info"
] | [
81,
0
] | [
97,
15
] | python | en | ['en', 'en', 'en'] | True |
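This `find_module` lives in `setuptools.depends` and wraps the long-deprecated `imp` module, so the sketch below only runs on interpreters where `imp` is still importable (CPython < 3.12):

from setuptools.depends import find_module

# Dotted names are resolved one package at a time via imp.find_module.
f, path, (suffix, mode, kind) = find_module('email.mime.text')
if f:
    f.close()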
get_module_constant | (module, symbol, default=-1, paths=None) | Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'. | Find 'module' by searching 'paths', and extract 'symbol' | def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'."""
try:
f, path, (suffix, mode, kind) = find_module(module, paths)
except ImportError:
# Module doesn't exist
return None
try:
if kind == PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind == PY_FROZEN:
code = imp.get_frozen_object(module)
elif kind == PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
if module not in sys.modules:
imp.load_module(module, f, path, (suffix, mode, kind))
return getattr(sys.modules[module], symbol, None)
finally:
if f:
f.close()
return extract_constant(code, symbol, default) | [
"def",
"get_module_constant",
"(",
"module",
",",
"symbol",
",",
"default",
"=",
"-",
"1",
",",
"paths",
"=",
"None",
")",
":",
"try",
":",
"f",
",",
"path",
",",
"(",
"suffix",
",",
"mode",
",",
"kind",
")",
"=",
"find_module",
"(",
"module",
",",
"paths",
")",
"except",
"ImportError",
":",
"# Module doesn't exist",
"return",
"None",
"try",
":",
"if",
"kind",
"==",
"PY_COMPILED",
":",
"f",
".",
"read",
"(",
"8",
")",
"# skip magic & date",
"code",
"=",
"marshal",
".",
"load",
"(",
"f",
")",
"elif",
"kind",
"==",
"PY_FROZEN",
":",
"code",
"=",
"imp",
".",
"get_frozen_object",
"(",
"module",
")",
"elif",
"kind",
"==",
"PY_SOURCE",
":",
"code",
"=",
"compile",
"(",
"f",
".",
"read",
"(",
")",
",",
"path",
",",
"'exec'",
")",
"else",
":",
"# Not something we can parse; we'll have to import it. :(",
"if",
"module",
"not",
"in",
"sys",
".",
"modules",
":",
"imp",
".",
"load_module",
"(",
"module",
",",
"f",
",",
"path",
",",
"(",
"suffix",
",",
"mode",
",",
"kind",
")",
")",
"return",
"getattr",
"(",
"sys",
".",
"modules",
"[",
"module",
"]",
",",
"symbol",
",",
"None",
")",
"finally",
":",
"if",
"f",
":",
"f",
".",
"close",
"(",
")",
"return",
"extract_constant",
"(",
"code",
",",
"symbol",
",",
"default",
")"
] | [
100,
0
] | [
131,
50
] | python | en | ['en', 'en', 'en'] | True |
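A hedged sketch of `get_module_constant`; the value shown is an assumption about CPython's stdlib `json`, which declares `__version__` as a plain string constant:

from setuptools.depends import get_module_constant

get_module_constant('json', '__version__')            # e.g. '2.0.9'
get_module_constant('no_such_module', '__version__')  # -> None (not found)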
extract_constant | (code, symbol, default=-1) | Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
| Extract the constant value of 'symbol' from 'code' | def extract_constant(code, symbol, default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
# name's not there, can't possibly be an assignment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for byte_code in Bytecode(code):
op = byte_code.opcode
arg = byte_code.arg
if op == LOAD_CONST:
const = code.co_consts[arg]
elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
return const
else:
const = default | [
"def",
"extract_constant",
"(",
"code",
",",
"symbol",
",",
"default",
"=",
"-",
"1",
")",
":",
"if",
"symbol",
"not",
"in",
"code",
".",
"co_names",
":",
"# name's not there, can't possibly be an assignment",
"return",
"None",
"name_idx",
"=",
"list",
"(",
"code",
".",
"co_names",
")",
".",
"index",
"(",
"symbol",
")",
"STORE_NAME",
"=",
"90",
"STORE_GLOBAL",
"=",
"97",
"LOAD_CONST",
"=",
"100",
"const",
"=",
"default",
"for",
"byte_code",
"in",
"Bytecode",
"(",
"code",
")",
":",
"op",
"=",
"byte_code",
".",
"opcode",
"arg",
"=",
"byte_code",
".",
"arg",
"if",
"op",
"==",
"LOAD_CONST",
":",
"const",
"=",
"code",
".",
"co_consts",
"[",
"arg",
"]",
"elif",
"arg",
"==",
"name_idx",
"and",
"(",
"op",
"==",
"STORE_NAME",
"or",
"op",
"==",
"STORE_GLOBAL",
")",
":",
"return",
"const",
"else",
":",
"const",
"=",
"default"
] | [
134,
0
] | [
167,
27
] | python | en | ['en', 'en', 'en'] | True |
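A self-contained sketch of `extract_constant` against a freshly compiled snippet; the variable names are illustrative:

from setuptools.depends import extract_constant

code = compile("VERSION = '1.2'\nALIAS = VERSION\n", '<sample>', 'exec')
extract_constant(code, 'VERSION')  # -> '1.2': bound to a constant
extract_constant(code, 'ALIAS')    # -> -1, the default: bound to a name
extract_constant(code, 'MISSING')  # -> None: not in code.co_names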
_update_globals | () |
Patch the globals to remove the objects not available on some platforms.
XXX it'd be better to test assertions about bytecode instead.
|
Patch the globals to remove the objects not available on some platforms. | def _update_globals():
"""
Patch the globals to remove the objects not available on some platforms.
XXX it'd be better to test assertions about bytecode instead.
"""
if not sys.platform.startswith('java') and sys.platform != 'cli':
return
incompatible = 'extract_constant', 'get_module_constant'
for name in incompatible:
del globals()[name]
__all__.remove(name) | [
"def",
"_update_globals",
"(",
")",
":",
"if",
"not",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'java'",
")",
"and",
"sys",
".",
"platform",
"!=",
"'cli'",
":",
"return",
"incompatible",
"=",
"'extract_constant'",
",",
"'get_module_constant'",
"for",
"name",
"in",
"incompatible",
":",
"del",
"globals",
"(",
")",
"[",
"name",
"]",
"__all__",
".",
"remove",
"(",
"name",
")"
] | [
170,
0
] | [
182,
28
] | python | en | ['en', 'error', 'th'] | False |
Require.full_name | (self) | Return full package/distribution name, w/version | Return full package/distribution name, w/version | def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name, self.requested_version)
return self.name | [
"def",
"full_name",
"(",
"self",
")",
":",
"if",
"self",
".",
"requested_version",
"is",
"not",
"None",
":",
"return",
"'%s-%s'",
"%",
"(",
"self",
".",
"name",
",",
"self",
".",
"requested_version",
")",
"return",
"self",
".",
"name"
] | [
31,
4
] | [
35,
24
] | python | en | ['en', 'en', 'en'] | True |
Require.version_ok | (self, version) | Is 'version' sufficiently up-to-date? | Is 'version' sufficiently up-to-date? | def version_ok(self, version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version) != "unknown" and version >= self.requested_version | [
"def",
"version_ok",
"(",
"self",
",",
"version",
")",
":",
"return",
"self",
".",
"attribute",
"is",
"None",
"or",
"self",
".",
"format",
"is",
"None",
"or",
"str",
"(",
"version",
")",
"!=",
"\"unknown\"",
"and",
"version",
">=",
"self",
".",
"requested_version"
] | [
37,
4
] | [
40,
75
] | python | en | ['en', 'en', 'en'] | True |
Require.get_version | (self, paths=None, default="unknown") | Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
| Get version number of installed module, 'None', or 'default' | def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
"""
if self.attribute is None:
try:
f, p, i = find_module(self.module, paths)
if f:
f.close()
return default
except ImportError:
return None
v = get_module_constant(self.module, self.attribute, default, paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v | [
"def",
"get_version",
"(",
"self",
",",
"paths",
"=",
"None",
",",
"default",
"=",
"\"unknown\"",
")",
":",
"if",
"self",
".",
"attribute",
"is",
"None",
":",
"try",
":",
"f",
",",
"p",
",",
"i",
"=",
"find_module",
"(",
"self",
".",
"module",
",",
"paths",
")",
"if",
"f",
":",
"f",
".",
"close",
"(",
")",
"return",
"default",
"except",
"ImportError",
":",
"return",
"None",
"v",
"=",
"get_module_constant",
"(",
"self",
".",
"module",
",",
"self",
".",
"attribute",
",",
"default",
",",
"paths",
")",
"if",
"v",
"is",
"not",
"None",
"and",
"v",
"is",
"not",
"default",
"and",
"self",
".",
"format",
"is",
"not",
"None",
":",
"return",
"self",
".",
"format",
"(",
"v",
")",
"return",
"v"
] | [
42,
4
] | [
67,
16
] | python | en | ['en', 'en', 'en'] | True |
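Putting the `Require` pieces together; the requirement values are illustrative, and `attribute` defaults to `'__version__'` when a version and format are supplied:

from setuptools.depends import Require

req = Require('JSON', '2.0', 'json')
req.full_name()   # -> 'JSON-2.0'
req.is_present()  # -> True if the json module can be located
req.is_current()  # -> True if the found version satisfies version_ok()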
Require.is_present | (self, paths=None) | Return true if dependency is present on 'paths' | Return true if dependency is present on 'paths' | def is_present(self, paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None | [
"def",
"is_present",
"(",
"self",
",",
"paths",
"=",
"None",
")",
":",
"return",
"self",
".",
"get_version",
"(",
"paths",
")",
"is",
"not",
"None"
] | [
69,
4
] | [
71,
50
] | python | en | ['en', 'af', 'en'] | True |
Require.is_current | (self, paths=None) | Return true if dependency is present and up-to-date on 'paths' | Return true if dependency is present and up-to-date on 'paths' | def is_current(self, paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version) | [
"def",
"is_current",
"(",
"self",
",",
"paths",
"=",
"None",
")",
":",
"version",
"=",
"self",
".",
"get_version",
"(",
"paths",
")",
"if",
"version",
"is",
"None",
":",
"return",
"False",
"return",
"self",
".",
"version_ok",
"(",
"version",
")"
] | [
73,
4
] | [
78,
39
] | python | en | ['en', 'en', 'en'] | True |
ContainerIO.__init__ | (self, file, offset, length) |
Create file object.
:param file: Existing file.
:param offset: Start of region, in bytes.
:param length: Size of region, in bytes.
|
Create file object. | def __init__(self, file, offset, length):
"""
Create file object.
:param file: Existing file.
:param offset: Start of region, in bytes.
:param length: Size of region, in bytes.
"""
self.fh = file
self.pos = 0
self.offset = offset
self.length = length
self.fh.seek(offset) | [
"def",
"__init__",
"(",
"self",
",",
"file",
",",
"offset",
",",
"length",
")",
":",
"self",
".",
"fh",
"=",
"file",
"self",
".",
"pos",
"=",
"0",
"self",
".",
"offset",
"=",
"offset",
"self",
".",
"length",
"=",
"length",
"self",
".",
"fh",
".",
"seek",
"(",
"offset",
")"
] | [
26,
4
] | [
38,
28
] | python | en | ['en', 'error', 'th'] | False |
ContainerIO.seek | (self, offset, mode=io.SEEK_SET) |
Move file pointer.
:param offset: Offset in bytes.
:param mode: Starting position. Use 0 for beginning of region, 1
for current offset, and 2 for end of region. You cannot move
the pointer outside the defined region.
|
Move file pointer. | def seek(self, offset, mode=io.SEEK_SET):
"""
Move file pointer.
:param offset: Offset in bytes.
:param mode: Starting position. Use 0 for beginning of region, 1
for current offset, and 2 for end of region. You cannot move
the pointer outside the defined region.
"""
if mode == 1:
self.pos = self.pos + offset
elif mode == 2:
self.pos = self.length + offset
else:
self.pos = offset
# clamp
self.pos = max(0, min(self.pos, self.length))
self.fh.seek(self.offset + self.pos) | [
"def",
"seek",
"(",
"self",
",",
"offset",
",",
"mode",
"=",
"io",
".",
"SEEK_SET",
")",
":",
"if",
"mode",
"==",
"1",
":",
"self",
".",
"pos",
"=",
"self",
".",
"pos",
"+",
"offset",
"elif",
"mode",
"==",
"2",
":",
"self",
".",
"pos",
"=",
"self",
".",
"length",
"+",
"offset",
"else",
":",
"self",
".",
"pos",
"=",
"offset",
"# clamp",
"self",
".",
"pos",
"=",
"max",
"(",
"0",
",",
"min",
"(",
"self",
".",
"pos",
",",
"self",
".",
"length",
")",
")",
"self",
".",
"fh",
".",
"seek",
"(",
"self",
".",
"offset",
"+",
"self",
".",
"pos",
")"
] | [
46,
4
] | [
63,
44
] | python | en | ['en', 'error', 'th'] | False |
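A sketch tying the `ContainerIO` methods together; the file name and offsets are illustrative:

import io
from PIL.ContainerIO import ContainerIO

with open('archive.bin', 'rb') as fh:
    region = ContainerIO(fh, offset=128, length=1024)
    region.seek(0, io.SEEK_END)  # clamped to the region's end
    assert region.tell() == 1024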
ContainerIO.tell | (self) |
Get current file pointer.
:returns: Offset from start of region, in bytes.
|
Get current file pointer. | def tell(self):
"""
Get current file pointer.
:returns: Offset from start of region, in bytes.
"""
return self.pos | [
"def",
"tell",
"(",
"self",
")",
":",
"return",
"self",
".",
"pos"
] | [
65,
4
] | [
71,
23
] | python | en | ['en', 'error', 'th'] | False |