Dataset columns:
- identifier: string (1–155 chars)
- parameters: string (2–6.09k chars)
- docstring: string (11–63.4k chars)
- docstring_summary: string (0–63.4k chars)
- function: string (29–99.8k chars)
- function_tokens: sequence
- start_point: sequence
- end_point: sequence
- language: string class (1 distinct value)
- docstring_language: string (2–7 chars)
- docstring_language_predictions: string (18–23 chars)
- is_langid_reliable: string class (2 distinct values)
---
identifier: make_docker_context
parameters: (get_steps_fn, github_project, opts=None, default_context_dir=None)
docstring:
    Returns a path to the Docker context directory. See parse_args.py.

    Helper for making a command-line utility that writes your project's
    Dockerfile and associated data into a (temporary) directory. Your main
    program might look something like this:

        print(make_docker_context(
            lambda builder: [builder.step(...), ...],
            'facebook/your_project',
        ))
docstring_summary: Returns a path to the Docker context directory. See parse_args.py.
function:

    def make_docker_context(
        get_steps_fn, github_project, opts=None, default_context_dir=None
    ):
        '''
        Returns a path to the Docker context directory. See parse_args.py.

        Helper for making a command-line utility that writes your project's
        Dockerfile and associated data into a (temporary) directory. Your main
        program might look something like this:

            print(make_docker_context(
                lambda builder: [builder.step(...), ...],
                'facebook/your_project',
            ))
        '''
        if opts is None:
            opts = {}

        valid_versions = (
            ('ubuntu:16.04', '5'),
        )

        def add_args(parser):
            parser.add_argument(
                '--docker-context-dir', metavar='DIR',
                default=default_context_dir,
                help='Write the Dockerfile and its context into this directory. '
                     'If empty, make a temporary directory. Default: %(default)s.',
            )
            parser.add_argument(
                '--user', metavar='NAME', default=opts.get('user', 'nobody'),
                help='Build and install as this user. Default: %(default)s.',
            )
            parser.add_argument(
                '--prefix', metavar='DIR',
                default=opts.get('prefix', '/home/install'),
                help='Install all libraries in this prefix. Default: %(default)s.',
            )
            parser.add_argument(
                '--projects-dir', metavar='DIR',
                default=opts.get('projects_dir', '/home'),
                help='Place project code directories here. Default: %(default)s.',
            )
            parser.add_argument(
                '--os-image', metavar='IMG', choices=zip(*valid_versions)[0],
                default=opts.get('os_image', valid_versions[0][0]),
                help='Docker OS image -- be sure to use only ones you trust (See '
                     'README.docker). Choices: %(choices)s. Default: %(default)s.',
            )
            parser.add_argument(
                '--gcc-version', metavar='VER',
                choices=set(zip(*valid_versions)[1]),
                default=opts.get('gcc_version', valid_versions[0][1]),
                help='Choices: %(choices)s. Default: %(default)s.',
            )
            parser.add_argument(
                '--make-parallelism', metavar='NUM', type=int,
                default=opts.get('make_parallelism', 1),
                help='Use `make -j` on multi-CPU systems with lots of RAM. '
                     'Default: %(default)s.',
            )
            parser.add_argument(
                '--local-repo-dir', metavar='DIR',
                help='If set, build {0} from a local directory instead of Github.'
                     .format(github_project),
            )
            parser.add_argument(
                '--ccache-tgz', metavar='PATH',
                help='If set, enable ccache for the build. To initialize the '
                     'cache, first try to hardlink, then to copy --cache-tgz '
                     'as ccache.tgz into the --docker-context-dir.'
            )

        opts = parse_args_to_fbcode_builder_opts(
            add_args,
            # These have add_argument() calls, others are set via --option.
            (
                'docker_context_dir',
                'user',
                'prefix',
                'projects_dir',
                'os_image',
                'gcc_version',
                'make_parallelism',
                'local_repo_dir',
                'ccache_tgz',
            ),
            opts,
            help=textwrap.dedent('''

            Reads `fbcode_builder_config.py` from the current directory, and
            prepares a Docker context directory to build {github_project} and
            its dependencies. Prints to stdout the path to the context
            directory.

            Pass --option {github_project}:git_hash SHA1 to build something
            other than the master branch from Github.

            Or, pass --option {github_project}:local_repo_dir LOCAL_PATH to
            build from a local repo instead of cloning from Github.

            Usage:
                (cd $(./make_docker_context.py) && docker build . 2>&1 | tee log)

            '''.format(github_project=github_project)),
        )

        # This allows travis_docker_build.sh not to know the main Github project.
        local_repo_dir = opts.pop('local_repo_dir', None)
        if local_repo_dir is not None:
            opts['{0}:local_repo_dir'.format(github_project)] = local_repo_dir

        if (opts.get('os_image'), opts.get('gcc_version')) not in valid_versions:
            raise Exception(
                'Due to 4/5 ABI changes (std::string), we can only use {0}'.format(
                    ' / '.join('GCC {1} on {0}'.format(*p) for p in valid_versions)
                )
            )

        if opts.get('docker_context_dir') is None:
            opts['docker_context_dir'] = tempfile.mkdtemp(prefix='docker-context-')
        elif not os.path.exists(opts.get('docker_context_dir')):
            os.makedirs(opts.get('docker_context_dir'))

        builder = DockerFBCodeBuilder(**opts)
        context_dir = builder.option('docker_context_dir')  # Mark option "in-use"
        # The renderer may also populate some files into the context_dir.
        dockerfile = builder.render(get_steps_fn(builder))

        with os.fdopen(os.open(
            os.path.join(context_dir, 'Dockerfile'),
            os.O_RDWR | os.O_CREAT | os.O_EXCL,  # Do not overwrite existing files
            0o644,
        ), 'w') as f:
            f.write(dockerfile)

        return context_dir

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [25, 0] | end_point: [162, 22] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: _route_to_regex
parameters: (route, is_endpoint=False)
docstring:
    Convert a path pattern into a regular expression. Return the regular
    expression and a dictionary mapping the capture names to the converters.
    For example, 'foo/<int:pk>' returns '^foo\\/(?P<pk>[0-9]+)'
    and {'pk': <django.urls.converters.IntConverter>}.
docstring_summary: (identical to docstring)
function:

    def _route_to_regex(route, is_endpoint=False):
        """
        Convert a path pattern into a regular expression. Return the regular
        expression and a dictionary mapping the capture names to the converters.
        For example, 'foo/<int:pk>' returns '^foo\\/(?P<pk>[0-9]+)'
        and {'pk': <django.urls.converters.IntConverter>}.
        """
        original_route = route
        parts = ['^']
        converters = {}
        while True:
            match = _PATH_PARAMETER_COMPONENT_RE.search(route)
            if not match:
                parts.append(re.escape(route))
                break
            elif not set(match.group()).isdisjoint(string.whitespace):
                raise ImproperlyConfigured(
                    "URL route '%s' cannot contain whitespace in angle brackets "
                    "<…>." % original_route
                )
            parts.append(re.escape(route[:match.start()]))
            route = route[match.end():]
            parameter = match['parameter']
            if not parameter.isidentifier():
                raise ImproperlyConfigured(
                    "URL route '%s' uses parameter name %r which isn't a valid "
                    "Python identifier." % (original_route, parameter)
                )
            raw_converter = match['converter']
            if raw_converter is None:
                # If a converter isn't specified, the default is `str`.
                raw_converter = 'str'
            try:
                converter = get_converter(raw_converter)
            except KeyError as e:
                raise ImproperlyConfigured(
                    'URL route %r uses invalid converter %r.'
                    % (original_route, raw_converter)
                ) from e
            converters[parameter] = converter
            parts.append('(?P<' + parameter + '>' + converter.regex + ')')
        if is_endpoint:
            parts.append('$')
        return ''.join(parts), converters

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [204, 0] | end_point: [247, 37] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
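As a quick illustration of the conversion this row's docstring describes, the sketch below calls the helper directly. This is a private Django API shown only for illustration, and the exact escaping of '/' in the result varies across Python versions, so the regex shown is indicative rather than exact.

    # Illustration only: _route_to_regex is a private helper, and
    # re.escape('/') differs between Python versions.
    from django.urls.resolvers import _route_to_regex

    regex, converters = _route_to_regex('articles/<int:year>/', is_endpoint=True)
    # regex      -> roughly '^articles/(?P<year>[0-9]+)/$'
    # converters -> {'year': <django.urls.converters.IntConverter object>}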
---
identifier: LocaleRegexDescriptor.__get__
parameters: (self, instance, cls=None)
docstring: Return a compiled regular expression based on the active language.
docstring_summary: (identical to docstring)
function:

    def __get__(self, instance, cls=None):
        """
        Return a compiled regular expression based on the active language.
        """
        if instance is None:
            return self
        # As a performance optimization, if the given regex string is a regular
        # string (not a lazily-translated string proxy), compile it once and
        # avoid per-language compilation.
        pattern = getattr(instance, self.attr)
        if isinstance(pattern, str):
            instance.__dict__['regex'] = instance._compile(pattern)
            return instance.__dict__['regex']
        language_code = get_language()
        if language_code not in instance._regex_dict:
            instance._regex_dict[language_code] = instance._compile(str(pattern))
        return instance._regex_dict[language_code]

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [93, 4] | end_point: [109, 50] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: CheckURLMixin.describe
parameters: (self)
docstring: Format the URL pattern for display in warning messages.
docstring_summary: (identical to docstring)
function:

    def describe(self):
        """
        Format the URL pattern for display in warning messages.
        """
        description = "'{}'".format(self)
        if self.name:
            description += " [name='{}']".format(self.name)
        return description

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [113, 4] | end_point: [120, 26] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: CheckURLMixin._check_pattern_startswith_slash
parameters: (self)
docstring: Check that the pattern does not begin with a forward slash.
docstring_summary: (identical to docstring)
function:

    def _check_pattern_startswith_slash(self):
        """
        Check that the pattern does not begin with a forward slash.
        """
        regex_pattern = self.regex.pattern
        if not settings.APPEND_SLASH:
            # Skip check as it can be useful to start a URL pattern with a slash
            # when APPEND_SLASH=False.
            return []
        if regex_pattern.startswith(('/', '^/', '^\\/')) and not regex_pattern.endswith('/'):
            warning = Warning(
                "Your URL pattern {} has a route beginning with a '/'. Remove this "
                "slash as it is unnecessary. If this pattern is targeted in an "
                "include(), ensure the include() pattern has a trailing '/'.".format(
                    self.describe()
                ),
                id="urls.W002",
            )
            return [warning]
        else:
            return []

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [122, 4] | end_point: [142, 21] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: RegexPattern._compile
parameters: (self, regex)
docstring: Compile and return the given regular expression.
docstring_summary: (identical to docstring)
function:

    def _compile(self, regex):
        """Compile and return the given regular expression."""
        try:
            return re.compile(regex)
        except re.error as e:
            raise ImproperlyConfigured(
                '"%s" is not a valid regular expression: %s' % (regex, e)
            ) from e

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [186, 4] | end_point: [193, 20] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
---
identifier: URLPattern._check_pattern_name
parameters: (self)
docstring: Check that the pattern name does not contain a colon.
docstring_summary: (identical to docstring)
function:

    def _check_pattern_name(self):
        """
        Check that the pattern name does not contain a colon.
        """
        if self.pattern.name is not None and ":" in self.pattern.name:
            warning = Warning(
                "Your URL pattern {} has a name including a ':'. Remove the colon, to "
                "avoid ambiguous namespace references.".format(self.pattern.describe()),
                id="urls.W003",
            )
            return [warning]
        else:
            return []

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [342, 4] | end_point: [354, 21] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: URLPattern.lookup_str
parameters: (self)
docstring:
    A string that identifies the view (e.g. 'path.to.view_function' or
    'path.to.ClassBasedView').
docstring_summary: (identical to docstring)
function:

    def lookup_str(self):
        """
        A string that identifies the view (e.g. 'path.to.view_function' or
        'path.to.ClassBasedView').
        """
        callback = self.callback
        if isinstance(callback, functools.partial):
            callback = callback.func
        if not hasattr(callback, '__name__'):
            return callback.__module__ + "." + callback.__class__.__name__
        return callback.__module__ + "." + callback.__qualname__

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [365, 4] | end_point: [375, 64] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: URLResolver._join_route
parameters: (route1, route2)
docstring: Join two routes, without the starting ^ in the second route.
docstring_summary: (identical to docstring)
function:

    def _join_route(route1, route2):
        """Join two routes, without the starting ^ in the second route."""
        if not route1:
            return route2
        if route2.startswith('^'):
            route2 = route2[1:]
        return route1 + route2

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [536, 4] | end_point: [542, 30] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
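Since _join_route is a staticmethod with no other state, its joining rule is easy to see directly; a minimal sketch, assuming it is called on the class the way Django uses it internally:

    # The leading '^' of the second route is dropped so anchors don't stack.
    URLResolver._join_route('^admin/', '^users/')  # -> '^admin/users/'
    URLResolver._join_route('', '^users/')         # -> '^users/' (empty first route)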
---
identifier: KohlschuetterFeatures.fit
parameters: (self, blocks, y=None)
docstring:
    This method returns the current instance unchanged, since no fitting is
    required for this ``Feature``. It's here only for API consistency.
docstring_summary: (identical to docstring)
function:

    def fit(self, blocks, y=None):
        """
        This method returns the current instance unchanged, since no fitting is
        required for this ``Feature``. It's here only for API consistency.
        """
        return self

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [19, 4] | end_point: [24, 19] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: KohlschuetterFeatures.transform
parameters: (self, blocks, y=None)
docstring:
    Transform an ordered sequence of blocks into a 2D features matrix with
    shape (num blocks, num features).

    Args:
        blocks (List[Block]): as output by :class:`Blockifier.blockify`
        y (None): This isn't used, it's only here for API consistency.

    Returns:
        `np.ndarray`: 2D array of shape (num blocks, 6), where values are
        floats corresponding to the link and text densities of
        a block and its immediate neighbors in the sequence.
docstring_summary:
    Transform an ordered sequence of blocks into a 2D features matrix with
    shape (num blocks, num features).
function:

    def transform(self, blocks, y=None):
        """
        Transform an ordered sequence of blocks into a 2D features matrix with
        shape (num blocks, num features).

        Args:
            blocks (List[Block]): as output by :class:`Blockifier.blockify`
            y (None): This isn't used, it's only here for API consistency.

        Returns:
            `np.ndarray`: 2D array of shape (num blocks, 6), where values are
            floats corresponding to the link and text densities of
            a block and its immediate neighbors in the sequence.
        """
        return make_kohlschuetter_features(blocks)

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [26, 4] | end_point: [40, 50] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
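Because fit() is a no-op and transform() delegates to make_kohlschuetter_features(), the two methods chain in the usual scikit-learn style. A minimal sketch, assuming `blocks` was produced by Blockifier.blockify as the docstring states:

    # Sketch: `blocks` is assumed to come from Blockifier.blockify(html).
    feats = KohlschuetterFeatures()
    X = feats.fit(blocks).transform(blocks)
    # X.shape == (len(blocks), 6): link and text densities of each block
    # and of its immediate neighbors in the sequence.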
---
identifier: OracleOperations.geo_db_type
parameters: (self, f)
docstring:
    Return the geometry database type for Oracle. Unlike other spatial
    backends, no stored procedure is necessary and it's the same for all
    geometry types.
docstring_summary: (identical to docstring)
function:

    def geo_db_type(self, f):
        """
        Return the geometry database type for Oracle. Unlike other spatial
        backends, no stored procedure is necessary and it's the same for all
        geometry types.
        """
        return 'MDSYS.SDO_GEOMETRY'

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [141, 4] | end_point: [147, 35] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: OracleOperations.get_distance
parameters: (self, f, value, lookup_type)
docstring:
    Return the distance parameters given the value and the lookup type.
    On Oracle, geometry columns with a geodetic coordinate system behave
    implicitly like a geography column, and thus meters will be used as
    the distance parameter on them.
docstring_summary: (identical to docstring)
function:

    def get_distance(self, f, value, lookup_type):
        """
        Return the distance parameters given the value and the lookup type.
        On Oracle, geometry columns with a geodetic coordinate system behave
        implicitly like a geography column, and thus meters will be used as
        the distance parameter on them.
        """
        if not value:
            return []
        value = value[0]
        if isinstance(value, Distance):
            if f.geodetic(self.connection):
                dist_param = value.m
            else:
                dist_param = getattr(value, Distance.unit_attname(f.units_name(self.connection)))
        else:
            dist_param = value

        # dwithin lookups on Oracle require a special string parameter
        # that starts with "distance=".
        if lookup_type == 'dwithin':
            dist_param = 'distance=%s' % dist_param

        return [dist_param]

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [149, 4] | end_point: [172, 27] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
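A hedged example of the unit handling described above, assuming `ops` is an OracleOperations instance and `f` is a geometry field with a geodetic coordinate system:

    from django.contrib.gis.measure import Distance

    # On a geodetic column, Distance values are converted to meters; a
    # 'dwithin' lookup additionally gets Oracle's 'distance=' string form.
    ops.get_distance(f, [Distance(km=5)], 'dwithin')       # -> ['distance=5000.0']
    ops.get_distance(f, [Distance(km=5)], 'distance_lte')  # -> [5000.0]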
---
identifier: OracleOperations.spatial_aggregate_name
parameters: (self, agg_name)
docstring: Return the spatial aggregate SQL name.
docstring_summary: (identical to docstring)
function:

    def spatial_aggregate_name(self, agg_name):
        """
        Return the spatial aggregate SQL name.
        """
        agg_name = 'unionagg' if agg_name.lower() == 'union' else agg_name.lower()
        return getattr(self, agg_name)

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [179, 4] | end_point: [184, 38] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: OracleOperations.modify_insert_params
parameters: (self, placeholder, params)
docstring:
    Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial
    backend due to #10888.
docstring_summary: (identical to docstring)
function:

    def modify_insert_params(self, placeholder, params):
        """Drop out insert parameters for NULL placeholder. Needed for Oracle Spatial
        backend due to #10888.
        """
        if placeholder == 'NULL':
            return []
        return super().modify_insert_params(placeholder, params)

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [199, 4] | end_point: [205, 64] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
---
identifier: ChiaManager._wait_for_chiabox_docker
parameters: (num_trials: int = 20)
docstring:
    Returns true if the chiabox docker is up.
    Otherwise it will keep trying checking the status of the
    chiabox docker for num_trials times.
    Returns False if the docker container is not up after that
    many trials.
docstring_summary: (identical to docstring)
function:

    def _wait_for_chiabox_docker(num_trials: int = 20):
        """Returns true if the chiabox docker is up.
        Otherwise it will keep trying checking the status of the
        chiabox docker for num_trials times.
        Returns False if the docker container is not up after that
        many trials.
        """
        for i in range(num_trials):
            if i != 0:
                time.sleep(1)
            docker_status = check_chiabox_docker_status()
            if docker_status == 'running':
                return True
            logging.info(f'{i + 1}/{num_trials} trials, chiabox docker container status = {docker_status}')
        logging.error(f'chiabox docker failed to start')
        return False

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [120, 4] | end_point: [135, 20] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
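A possible call site for this polling helper. The surrounding ChiaManager workflow is not part of this row, so the snippet below is only a sketch:

    # Sketch: wait up to ~60 seconds (one check per second) for the container.
    if not ChiaManager._wait_for_chiabox_docker(num_trials=60):
        raise RuntimeError('chiabox docker container did not come up')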
---
identifier: GeoModelAdmin.media
parameters: (self)
docstring: Injects OpenLayers JavaScript into the admin.
docstring_summary: (identical to docstring)
function:

    def media(self):
        "Injects OpenLayers JavaScript into the admin."
        return super().media + Media(js=[self.openlayers_url] + self.extra_js)

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [47, 4] | end_point: [49, 78] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
---
identifier: GeoModelAdmin.formfield_for_dbfield
parameters: (self, db_field, request, **kwargs)
docstring:
    Overloaded from ModelAdmin so that an OpenLayersWidget is used
    for viewing/editing 2D GeometryFields (OpenLayers 2 does not support
    3D editing).
docstring_summary: (identical to docstring)
function:

    def formfield_for_dbfield(self, db_field, request, **kwargs):
        """
        Overloaded from ModelAdmin so that an OpenLayersWidget is used
        for viewing/editing 2D GeometryFields (OpenLayers 2 does not support
        3D editing).
        """
        if isinstance(db_field, models.GeometryField) and db_field.dim < 3:
            # Setting the widget with the newly defined widget.
            kwargs['widget'] = self.get_map_widget(db_field)
            return db_field.formfield(**kwargs)
        else:
            return super().formfield_for_dbfield(db_field, request, **kwargs)

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [51, 4] | end_point: [62, 77] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: GeoModelAdmin.get_map_widget
parameters: (self, db_field)
docstring:
    Return a subclass of the OpenLayersWidget (or whatever was specified
    in the `widget` attribute) using the settings from the attributes set
    in this class.
docstring_summary: (identical to docstring)
function:

    def get_map_widget(self, db_field):
        """
        Return a subclass of the OpenLayersWidget (or whatever was specified
        in the `widget` attribute) using the settings from the attributes set
        in this class.
        """
        is_collection = db_field.geom_type in ('MULTIPOINT', 'MULTILINESTRING', 'MULTIPOLYGON', 'GEOMETRYCOLLECTION')
        if is_collection:
            if db_field.geom_type == 'GEOMETRYCOLLECTION':
                collection_type = 'Any'
            else:
                collection_type = OGRGeomType(db_field.geom_type.replace('MULTI', ''))
        else:
            collection_type = 'None'

        class OLMap(self.widget):
            template_name = self.map_template
            geom_type = db_field.geom_type

            wms_options = ''
            if self.wms_options:
                wms_options = ["%s: '%s'" % pair for pair in self.wms_options.items()]
                wms_options = ', %s' % ', '.join(wms_options)

            params = {
                'default_lon': self.default_lon,
                'default_lat': self.default_lat,
                'default_zoom': self.default_zoom,
                'display_wkt': self.debug or self.display_wkt,
                'geom_type': OGRGeomType(db_field.geom_type),
                'field_name': db_field.name,
                'is_collection': is_collection,
                'scrollable': self.scrollable,
                'layerswitcher': self.layerswitcher,
                'collection_type': collection_type,
                'is_generic': db_field.geom_type == 'GEOMETRY',
                'is_linestring': db_field.geom_type in ('LINESTRING', 'MULTILINESTRING'),
                'is_polygon': db_field.geom_type in ('POLYGON', 'MULTIPOLYGON'),
                'is_point': db_field.geom_type in ('POINT', 'MULTIPOINT'),
                'num_zoom': self.num_zoom,
                'max_zoom': self.max_zoom,
                'min_zoom': self.min_zoom,
                'units': self.units,  # likely should get from object
                'max_resolution': self.max_resolution,
                'max_extent': self.max_extent,
                'modifiable': self.modifiable,
                'mouse_position': self.mouse_position,
                'scale_text': self.scale_text,
                'map_width': self.map_width,
                'map_height': self.map_height,
                'point_zoom': self.point_zoom,
                'srid': self.map_srid,
                'display_srid': self.display_srid,
                'wms_url': self.wms_url,
                'wms_layer': self.wms_layer,
                'wms_name': self.wms_name,
                'wms_options': wms_options,
                'debug': self.debug,
            }

        return OLMap

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [64, 4] | end_point: [123, 20] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'error', 'th'] | is_langid_reliable: False
---
identifier: unescape
parameters: (s)
docstring: The inverse of cgi.escape().
docstring_summary: (identical to docstring)
function:

    def unescape(s):
        """The inverse of cgi.escape()."""
        s = s.replace('&quot;', '"').replace('&gt;', '>').replace('&lt;', '<')
        return s.replace('&amp;', '&')

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [13, 0] | end_point: [16, 34] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'it', 'en'] | is_langid_reliable: True
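A quick round-trip check of the inverse relationship. cgi.escape() was deprecated and later removed from the standard library, so the sketch uses html.escape(quote=True), which produces the same four entities this function reverses:

    import html

    original = '<a href="x?a=1&b=2">'
    assert unescape(html.escape(original, quote=True)) == original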
---
identifier: make_request
parameters: (url, pool, *, method='GET', headers=None, version='1.1')
docstring: Start an HTTP request. Return a Connection.
docstring_summary: (identical to docstring)
function:

    def make_request(url, pool, *, method='GET', headers=None, version='1.1'):
        """Start an HTTP request. Return a Connection."""
        parts = urllib.parse.urlparse(url)
        assert parts.scheme in ('http', 'https'), repr(url)
        ssl = parts.scheme == 'https'
        port = parts.port or (443 if ssl else 80)
        path = parts.path or '/'
        path = '%s?%s' % (path, parts.query) if parts.query else path

        logger.warn('* Connecting to %s:%s using %s for %s',
                    parts.hostname, port, 'ssl' if ssl else 'tcp', url)
        conn = yield from pool.get_connection(parts.hostname, port, ssl)

        headers = dict(headers) if headers else {}  # Must use Cap-Words.
        headers.setdefault('User-Agent', 'asyncio-example-crawl/0.0')
        headers.setdefault('Host', parts.netloc)
        headers.setdefault('Accept', '*/*')
        lines = ['%s %s HTTP/%s' % (method, path, version)]
        lines.extend('%s: %s' % kv for kv in headers.items())
        for line in lines + ['']:
            logger.info('> %s', line)
        # TODO: close conn if this fails.
        conn.writer.write('\r\n'.join(lines + ['', '']).encode('latin-1'))

        return conn

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [150, 0] | end_point: [174, 15] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'lb', 'en'] | is_langid_reliable: True
---
identifier: read_response
parameters: (conn)
docstring: Read an HTTP response from a connection.
docstring_summary: (identical to docstring)
function:

    def read_response(conn):
        """Read an HTTP response from a connection."""

        @asyncio.coroutine
        def getline():
            line = (yield from conn.reader.readline()).decode('latin-1').rstrip()
            logger.info('< %s', line)
            return line

        status_line = yield from getline()
        status_parts = status_line.split(None, 2)
        if len(status_parts) != 3 or not status_parts[1].isdigit():
            logger.error('bad status_line %r', status_line)
            raise BadStatusLine(status_line)
        http_version, status, reason = status_parts
        status = int(status)
        headers = {}
        while True:
            header_line = yield from getline()
            if not header_line:
                break
            key, value = header_line.split(':', 1)
            # TODO: Continuation lines; multiple header lines per key..
            headers[key.lower()] = value.lstrip()
        if 'content-length' in headers:
            nbytes = int(headers['content-length'])
            output = asyncio.StreamReader()
            asyncio.async(length_handler(nbytes, conn.reader, output))
        elif headers.get('transfer-encoding') == 'chunked':
            output = asyncio.StreamReader()
            asyncio.async(chunked_handler(conn.reader, output))
        else:
            output = conn.reader
        return http_version[5:], status, reason, headers, output

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [178, 0] | end_point: [214, 60] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
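make_request and read_response pair naturally: one writes the request, the other parses the status line, headers, and body stream. A minimal driver sketch in the same old-style coroutine idiom, using only names defined in this file (error handling omitted):

    @asyncio.coroutine
    def fetch_once(url, pool):
        # Issue one GET and drain the (possibly chunked) body.
        conn = yield from make_request(url, pool)
        version, status, reason, headers, body = yield from read_response(conn)
        data = yield from body.read()
        # Recycle keep-alive connections back into the pool.
        conn.close(recycle=headers.get('connection', '').lower() != 'close')
        return status, data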
---
identifier: length_handler
parameters: (nbytes, input, output)
docstring: Async handler for reading a body given a Content-Length header.
docstring_summary: (identical to docstring)
function:

    def length_handler(nbytes, input, output):
        """Async handler for reading a body given a Content-Length header."""
        while nbytes > 0:
            buffer = yield from input.read(min(nbytes, 256*1024))
            if not buffer:
                logger.error('premature end for content-length')
                output.set_exception(EOFError())
                return
            output.feed_data(buffer)
            nbytes -= len(buffer)
        output.feed_eof()

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [218, 0] | end_point: [228, 21] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'gl', 'en'] | is_langid_reliable: True
---
identifier: chunked_handler
parameters: (input, output)
docstring: Async handler for reading a body using Transfer-Encoding: chunked.
docstring_summary: (identical to docstring)
function:

    def chunked_handler(input, output):
        """Async handler for reading a body using Transfer-Encoding: chunked."""
        logger.info('parsing chunked response')
        nblocks = 0
        nbytes = 0
        while True:
            size_header = yield from input.readline()
            if not size_header:
                logger.error('premature end of chunked response')
                output.set_exception(EOFError())
                return
            logger.debug('size_header = %r', size_header)
            parts = size_header.split(b';')
            size = int(parts[0], 16)
            nblocks += 1
            nbytes += size
            if size:
                logger.debug('reading chunk of %r bytes', size)
                block = yield from input.readexactly(size)
                assert len(block) == size, (len(block), size)
                output.feed_data(block)
            crlf = yield from input.readline()
            assert crlf == b'\r\n', repr(crlf)
            if not size:
                break
        logger.warn('chunked response had %r bytes in %r blocks', nbytes, nblocks)
        output.feed_eof()

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [232, 0] | end_point: [258, 21] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
---
identifier: ConnectionPool.close
parameters: (self)
docstring: Close all connections available for reuse.
docstring_summary: (identical to docstring)
function:

    def close(self):
        """Close all connections available for reuse."""
        for conns in self.connections.values():
            for conn in conns:
                conn.close()
        self.connections.clear()
        self.queue.clear()

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [40, 4] | end_point: [46, 26] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
---
identifier: ConnectionPool.get_connection
parameters: (self, host, port, ssl)
docstring: Create or reuse a connection.
docstring_summary: (identical to docstring)
function:

    def get_connection(self, host, port, ssl):
        """Create or reuse a connection."""
        port = port or (443 if ssl else 80)
        try:
            ipaddrs = yield from self.loop.getaddrinfo(host, port)
        except Exception as exc:
            logger.error('Exception %r for (%r, %r)', exc, host, port)
            raise
        logger.warn('* %s resolves to %s',
                    host, ', '.join(ip[4][0] for ip in ipaddrs))

        # Look for a reusable connection.
        for _, _, _, _, (h, p, *_) in ipaddrs:
            key = h, p, ssl
            conn = None
            conns = self.connections.get(key)
            while conns:
                conn = conns.pop(0)
                self.queue.remove(conn)
                if not conns:
                    del self.connections[key]
                if conn.stale():
                    logger.warn('closing stale connection %r', key)
                    conn.close()  # Just in case.
                else:
                    logger.warn('* Reusing pooled connection %r', key)
                    return conn

        # Create a new connection.
        conn = Connection(self, host, port, ssl)
        yield from conn.connect()
        logger.warn('* New connection %r', conn.key)
        return conn

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [49, 4] | end_point: [81, 19] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
---
identifier: ConnectionPool.recycle_connection
parameters: (self, conn)
docstring:
    Make a connection available for reuse.
    This also prunes the pool if it exceeds the size limits.
docstring_summary: (identical to docstring)
function:

    def recycle_connection(self, conn):
        """Make a connection available for reuse.
        This also prunes the pool if it exceeds the size limits.
        """
        conns = self.connections.setdefault(conn.key, [])
        conns.append(conn)
        self.queue.append(conn)

        if len(conns) > self.max_tasks:
            victims = conns  # Prune one connection for this key.
        elif len(self.queue) > self.max_pool:
            victims = self.queue  # Prune one connection for any key.
        else:
            return

        for victim in victims:
            if victim.stale():  # Prefer pruning the oldest stale connection.
                logger.warn('closing stale connection %r', victim.key)
                break
        else:
            victim = victims[0]
            logger.warn('closing oldest connection %r', victim.key)

        conns = self.connections[victim.key]
        conns.remove(victim)
        if not conns:
            del self.connections[victim.key]
        self.queue.remove(victim)
        victim.close()

function_tokens: (omitted; token-by-token duplicate of the function above)
start_point: [83, 4] | end_point: [112, 22] | language: python | docstring_language: en | docstring_language_predictions: ['en', 'en', 'en'] | is_langid_reliable: True
---
identifier: Fetcher.fetch
parameters: (self)
docstring:
    Attempt to fetch the contents of the URL.
    If successful, and the data is HTML, extract further links and
    add them to the crawler. Redirects are also added back there.
docstring_summary: Attempt to fetch the contents of the URL.
function:

    def fetch(self):
        """Attempt to fetch the contents of the URL.
        If successful, and the data is HTML, extract further links and
        add them to the crawler. Redirects are also added back there.
        """
        while self.tries < self.max_tries:
            self.tries += 1
            conn = None
            try:
                conn = yield from make_request(self.url, self.crawler.pool)
                _, status, _, headers, output = yield from read_response(conn)
                self.status, self.headers = status, headers
                self.body = yield from output.read()
                h_conn = headers.get('connection', '').lower()
                if h_conn != 'close':
                    conn.close(recycle=True)
                    conn = None
                if self.tries > 1:
                    logger.warn('try %r for %r success', self.tries, self.url)
                break
            except (BadStatusLine, OSError) as exc:
                self.exceptions.append(exc)
                logger.warn('try %r for %r raised %r',
                            self.tries, self.url, exc)
            finally:
                if conn is not None:
                    conn.close()
        else:
            # We never broke out of the while loop, i.e. all tries failed.
            logger.error('no success for %r in %r tries',
                         self.url, self.max_tries)
            return
        if status in (300, 301, 302, 303, 307) and headers.get('location'):
            next_url = headers['location']
            self.next_url = urllib.parse.urljoin(self.url, next_url)
            if self.max_redirect > 0:
                logger.warn('redirect to %r from %r', self.next_url, self.url)
                self.crawler.add_url(self.next_url, self.max_redirect-1)
            else:
                logger.error('redirect limit reached for %r from %r',
                             self.next_url, self.url)
        else:
            if status == 200:
                self.ctype = headers.get('content-type')
                self.pdict = {}
                if self.ctype:
                    self.ctype, self.pdict = cgi.parse_header(self.ctype)
                self.encoding = self.pdict.get('charset', 'utf-8')
                if self.ctype == 'text/html':
                    body = self.body.decode(self.encoding, 'replace')
                    # Replace href with (?:href|src) to follow image links.
                    self.urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)',
                                               body))
                    if self.urls:
                        logger.warn('got %r distinct urls from %r',
                                    len(self.urls), self.url)
                    self.new_urls = set()
                    for url in self.urls:
                        url = unescape(url)
                        url = urllib.parse.urljoin(self.url, url)
                        url, frag = urllib.parse.urldefrag(url)
                        if self.crawler.add_url(url):
                            self.new_urls.add(url)

function_tokens: (omitted; truncated in the source)
language: python
"(",
"url",
")",
"url",
"=",
"urllib",
".",
"parse",
".",
"urljoin",
"(",
"self",
".",
"url",
",",
"url",
")",
"url",
",",
"frag",
"=",
"urllib",
".",
"parse",
".",
"urldefrag",
"(",
"url",
")",
"if",
"self",
".",
"crawler",
".",
"add_url",
"(",
"url",
")",
":",
"self",
".",
"new_urls",
".",
"add",
"(",
"url",
")"
] | [
299,
4
] | [
362,
50
] | python | en | ['en', 'en', 'en'] | True |
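The link-extraction step in the record above is easy to check in isolation. A minimal, runnable sketch (not part of the dataset; the HTML snippet is made up) of the same href regex:

import re

# The regex is the one used in Fetcher.fetch above; the input is illustrative.
body = '<a href="/about">About</a> <a href=\'http://example.com/x\'>x</a>'
urls = set(re.findall(r'(?i)href=["\']?([^\s"\'<>]+)', body))
print(sorted(urls))  # ['/about', 'http://example.com/x']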
Crawler.close | (self) | Close resources (currently only the pool). | Close resources (currently only the pool). | def close(self):
"""Close resources (currently only the pool)."""
self.pool.close() | [
"def",
"close",
"(",
"self",
")",
":",
"self",
".",
"pool",
".",
"close",
"(",
")"
] | [
410,
4
] | [
412,
25
] | python | en | ['en', 'en', 'en'] | True |
Crawler.host_okay | (self, host) | Check if a host should be crawled.
A literal match (after lowercasing) is always good. For hosts
that don't look like IP addresses, some approximate matches
are okay depending on the strict flag.
| Check if a host should be crawled. | def host_okay(self, host):
"""Check if a host should be crawled.
A literal match (after lowercasing) is always good. For hosts
that don't look like IP addresses, some approximate matches
are okay depending on the strict flag.
"""
host = host.lower()
if host in self.root_domains:
return True
if re.match(r'\A[\d\.]*\Z', host):
return False
if self.strict:
return self._host_okay_strictish(host)
else:
return self._host_okay_lenient(host) | [
"def",
"host_okay",
"(",
"self",
",",
"host",
")",
":",
"host",
"=",
"host",
".",
"lower",
"(",
")",
"if",
"host",
"in",
"self",
".",
"root_domains",
":",
"return",
"True",
"if",
"re",
".",
"match",
"(",
"r'\\A[\\d\\.]*\\Z'",
",",
"host",
")",
":",
"return",
"False",
"if",
"self",
".",
"strict",
":",
"return",
"self",
".",
"_host_okay_strictish",
"(",
"host",
")",
"else",
":",
"return",
"self",
".",
"_host_okay_lenient",
"(",
"host",
")"
] | [
414,
4
] | [
429,
48
] | python | en | ['en', 'en', 'en'] | True |
Crawler._host_okay_strictish | (self, host) | Check if a host should be crawled, strict-ish version.
This checks for equality modulo an initial 'www.' component.
| Check if a host should be crawled, strict-ish version. | def _host_okay_strictish(self, host):
"""Check if a host should be crawled, strict-ish version.
This checks for equality modulo an initial 'www.' component.
"""
host = host[4:] if host.startswith('www.') else 'www.' + host
return host in self.root_domains | [
"def",
"_host_okay_strictish",
"(",
"self",
",",
"host",
")",
":",
"host",
"=",
"host",
"[",
"4",
":",
"]",
"if",
"host",
".",
"startswith",
"(",
"'www.'",
")",
"else",
"'www.'",
"+",
"host",
"return",
"host",
"in",
"self",
".",
"root_domains"
] | [
431,
4
] | [
437,
40
] | python | en | ['en', 'en', 'en'] | True |
Crawler._host_okay_lenient | (self, host) | Check if a host should be crawled, lenient version.
This compares the last two components of the host.
| Check if a host should be crawled, lenient version. | def _host_okay_lenient(self, host):
"""Check if a host should be crawled, lenient version.
This compares the last two components of the host.
"""
return host in self.root_domains | [
"def",
"_host_okay_lenient",
"(",
"self",
",",
"host",
")",
":",
"return",
"host",
"in",
"self",
".",
"root_domains"
] | [
439,
4
] | [
444,
40
] | python | en | ['en', 'en', 'en'] | True |
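The three host checks above compose into a single predicate. A standalone restatement of the checks as implemented (root_domains is a hypothetical crawl set, not from the records):

import re

root_domains = {'example.com'}   # hypothetical crawl roots

def host_okay(host, strict=True):
    host = host.lower()
    if host in root_domains:
        return True                        # literal match always passes
    if re.match(r'\A[\d\.]*\Z', host):
        return False                       # bare IP addresses never widen
    if strict:
        # Equality modulo a leading 'www.' component, as in the strict helper.
        host = host[4:] if host.startswith('www.') else 'www.' + host
    return host in root_domains

print(host_okay('EXAMPLE.com'))      # True
print(host_okay('127.0.0.1'))        # False
print(host_okay('www.example.com'))  # True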
Crawler.add_url | (self, url, max_redirect=None) | Add a URL to the todo list if not seen before. | Add a URL to the todo list if not seen before. | def add_url(self, url, max_redirect=None):
"""Add a URL to the todo list if not seen before."""
if self.exclude and re.search(self.exclude, url):
return False
parts = urllib.parse.urlparse(url)
if parts.scheme not in ('http', 'https'):
logger.info('skipping non-http scheme in %r', url)
return False
host, port = urllib.parse.splitport(parts.netloc)
if not self.host_okay(host):
logger.info('skipping non-root host in %r', url)
return False
if max_redirect is None:
max_redirect = self.max_redirect
if url in self.todo or url in self.busy or url in self.done:
return False
logger.warn('adding %r %r', url, max_redirect)
self.todo[url] = max_redirect
return True | [
"def",
"add_url",
"(",
"self",
",",
"url",
",",
"max_redirect",
"=",
"None",
")",
":",
"if",
"self",
".",
"exclude",
"and",
"re",
".",
"search",
"(",
"self",
".",
"exclude",
",",
"url",
")",
":",
"return",
"False",
"parts",
"=",
"urllib",
".",
"parse",
".",
"urlparse",
"(",
"url",
")",
"if",
"parts",
".",
"scheme",
"not",
"in",
"(",
"'http'",
",",
"'https'",
")",
":",
"logger",
".",
"info",
"(",
"'skipping non-http scheme in %r'",
",",
"url",
")",
"return",
"False",
"host",
",",
"port",
"=",
"urllib",
".",
"parse",
".",
"splitport",
"(",
"parts",
".",
"netloc",
")",
"if",
"not",
"self",
".",
"host_okay",
"(",
"host",
")",
":",
"logger",
".",
"info",
"(",
"'skipping non-root host in %r'",
",",
"url",
")",
"return",
"False",
"if",
"max_redirect",
"is",
"None",
":",
"max_redirect",
"=",
"self",
".",
"max_redirect",
"if",
"url",
"in",
"self",
".",
"todo",
"or",
"url",
"in",
"self",
".",
"busy",
"or",
"url",
"in",
"self",
".",
"done",
":",
"return",
"False",
"logger",
".",
"warn",
"(",
"'adding %r %r'",
",",
"url",
",",
"max_redirect",
")",
"self",
".",
"todo",
"[",
"url",
"]",
"=",
"max_redirect",
"return",
"True"
] | [
446,
4
] | [
464,
19
] | python | en | ['en', 'en', 'en'] | True |
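add_url receives links that Fetcher.fetch has already canonicalized: resolved against the page URL, then stripped of the fragment. A small illustration of that normalization (the URLs are made up):

import urllib.parse

base = 'http://example.com/a/index.html'   # hypothetical page URL
link = '../b/page.html#section'
url = urllib.parse.urljoin(base, link)     # resolve relative reference
url, frag = urllib.parse.urldefrag(url)    # drop the fragment
print(url)   # http://example.com/b/page.html
print(frag)  # section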
Crawler.crawl | (self) | Run the crawler until all finished. | Run the crawler until all finished. | def crawl(self):
"""Run the crawler until all finished."""
with (yield from self.termination):
while self.todo or self.busy:
if self.todo:
url, max_redirect = self.todo.popitem()
fetcher = Fetcher(url,
crawler=self,
max_redirect=max_redirect,
max_tries=self.max_tries,
)
self.busy[url] = fetcher
fetcher.task = asyncio.Task(self.fetch(fetcher))
else:
yield from self.termination.wait()
self.t1 = time.time() | [
"def",
"crawl",
"(",
"self",
")",
":",
"with",
"(",
"yield",
"from",
"self",
".",
"termination",
")",
":",
"while",
"self",
".",
"todo",
"or",
"self",
".",
"busy",
":",
"if",
"self",
".",
"todo",
":",
"url",
",",
"max_redirect",
"=",
"self",
".",
"todo",
".",
"popitem",
"(",
")",
"fetcher",
"=",
"Fetcher",
"(",
"url",
",",
"crawler",
"=",
"self",
",",
"max_redirect",
"=",
"max_redirect",
",",
"max_tries",
"=",
"self",
".",
"max_tries",
",",
")",
"self",
".",
"busy",
"[",
"url",
"]",
"=",
"fetcher",
"fetcher",
".",
"task",
"=",
"asyncio",
".",
"Task",
"(",
"self",
".",
"fetch",
"(",
"fetcher",
")",
")",
"else",
":",
"yield",
"from",
"self",
".",
"termination",
".",
"wait",
"(",
")",
"self",
".",
"t1",
"=",
"time",
".",
"time",
"(",
")"
] | [
467,
4
] | [
482,
29
] | python | en | ['en', 'en', 'en'] | True |
Crawler.fetch | (self, fetcher) | Call the Fetcher's fetch(), with a limit on concurrency.
Once this returns, move the fetcher from busy to done.
| Call the Fetcher's fetch(), with a limit on concurrency. | def fetch(self, fetcher):
"""Call the Fetcher's fetch(), with a limit on concurrency.
Once this returns, move the fetcher from busy to done.
"""
url = fetcher.url
with (yield from self.governor):
try:
yield from fetcher.fetch() # Fetcher gonna fetch.
finally:
# Force GC of the task, so the error is logged.
fetcher.task = None
with (yield from self.termination):
self.done[url] = fetcher
del self.busy[url]
self.termination.notify() | [
"def",
"fetch",
"(",
"self",
",",
"fetcher",
")",
":",
"url",
"=",
"fetcher",
".",
"url",
"with",
"(",
"yield",
"from",
"self",
".",
"governor",
")",
":",
"try",
":",
"yield",
"from",
"fetcher",
".",
"fetch",
"(",
")",
"# Fetcher gonna fetch.",
"finally",
":",
"# Force GC of the task, so the error is logged.",
"fetcher",
".",
"task",
"=",
"None",
"with",
"(",
"yield",
"from",
"self",
".",
"termination",
")",
":",
"self",
".",
"done",
"[",
"url",
"]",
"=",
"fetcher",
"del",
"self",
".",
"busy",
"[",
"url",
"]",
"self",
".",
"termination",
".",
"notify",
"(",
")"
] | [
485,
4
] | [
500,
37
] | python | en | ['en', 'en', 'en'] | True |
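Crawler.fetch uses the pre-3.5 `with (yield from self.governor)` idiom to cap concurrency with a semaphore. A self-contained sketch of the modern async/await equivalent of that pattern (the sleep stands in for real network I/O):

import asyncio

async def fetch_with_limit(sem, url):
    async with sem:                    # at most 10 coroutines inside at once
        await asyncio.sleep(0.01)      # stand-in for the real network I/O
        return url

async def main():
    sem = asyncio.Semaphore(10)
    urls = ['http://example.com/%d' % i for i in range(25)]
    results = await asyncio.gather(*(fetch_with_limit(sem, u) for u in urls))
    print(len(results))                # 25

asyncio.run(main())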
finalize_featurized_objects | (featurized_objects: np.ndarray,
shift_direction=PositionShift.TO_CENTER_OF_MASS
) | Processes featurized objects returned by simulator.
Args:
shift_direction: Either PositionShift.TO_CENTER_OF_MASS or
PositionShift.FROM_CENTER_OF_MASS representing which direction
to shift position of jar objects. Default is
PositionShift.TO_CENTER_OF_MASS representing the processing done
on the array returned by the simulator.
The features are by index:
- 0: x in pixels of center of mass divided by SCENE_WIDTH
- 1: y in pixels of center of mass divided by SCENE_HEIGHT
- 2: angle of the object between 0 and 2pi divided by 2pi
- 3: diameter in pixels of object divided by SCENE_WIDTH
- 4-8: One hot encoding of the object shape, according to order:
ball, bar, jar, standing sticks
- 8-14: One hot encoding of object color, according to order:
red, green, blue, purple, gray, black
| Processes featurized objects returned by simulator.
Args:
shift_direction: Either PositionShift.TO_CENTER_OF_MASS or
PositionShift.FROM_CENTER_OF_MASS representing which direction
to shift position of jar objects. Default is
PositionShift.TO_CENTER_OF_MASS representing the processing done
on the array returned by the simulator. | def finalize_featurized_objects(featurized_objects: np.ndarray,
shift_direction=PositionShift.TO_CENTER_OF_MASS
) -> np.ndarray:
assert isinstance(shift_direction, PositionShift), shift_direction
"""Processes featurized objects returned by simulator.
Args:
shift_direction: Either PositionShift.TO_CENTER_OF_MASS or
PositionShift.FROM_CENTER_OF_MASS representing which direction
to shift position of jar objects. Default is
PositionShift.TO_CENTER_OF_MASS representing the processing done
on the array returned by the simulator.
The features are by index:
- 0: x in pixels of center of mass divided by SCENE_WIDTH
- 1: y in pixels of center of mass divided by SCENE_HEIGHT
- 2: angle of the object between 0 and 2pi divided by 2pi
- 3: diameter in pixels of object divided by SCENE_WIDTH
- 4-8: One hot encoding of the object shape, according to order:
ball, bar, jar, standing sticks
- 8-14: One hot encoding of object color, according to order:
red, green, blue, purple, gray, black
"""
featurized_objects = np.copy(featurized_objects)
direction = 1.0 if shift_direction == PositionShift.TO_CENTER_OF_MASS else -1.0
is_jar = featurized_objects[:, :, FeaturizedObjects._SHAPE_START_INDEX +
scene_if.ShapeType.JAR - 1] == 1
if featurized_objects[is_jar].shape[0] > 0:
offsets = np.apply_along_axis(_get_jar_offset, 1,
featurized_objects[0, :, :][is_jar[0, :]])
offsets_expanded = np.concatenate([offsets] *
featurized_objects.shape[0],
axis=0)
angles = featurized_objects[is_jar][:, FeaturizedObjects.
_ANGLE_INDEX] * 2 * math.pi
directional_offsets = np.stack(
[
-1 * offsets_expanded * np.sin(angles),
offsets_expanded * np.cos(angles)
],
axis=-1) / constants.SCENE_WIDTH * direction
featurized_objects[is_jar, :FeaturizedObjects.
_ANGLE_INDEX] += directional_offsets
return featurized_objects | [
"def",
"finalize_featurized_objects",
"(",
"featurized_objects",
":",
"np",
".",
"ndarray",
",",
"shift_direction",
"=",
"PositionShift",
".",
"TO_CENTER_OF_MASS",
")",
"->",
"np",
".",
"ndarray",
":",
"assert",
"isinstance",
"(",
"shift_direction",
",",
"PositionShift",
")",
",",
"shift_direction",
"featurized_objects",
"=",
"np",
".",
"copy",
"(",
"featurized_objects",
")",
"direction",
"=",
"1.0",
"if",
"shift_direction",
"==",
"PositionShift",
".",
"TO_CENTER_OF_MASS",
"else",
"-",
"1.0",
"is_jar",
"=",
"featurized_objects",
"[",
":",
",",
":",
",",
"FeaturizedObjects",
".",
"_SHAPE_START_INDEX",
"+",
"scene_if",
".",
"ShapeType",
".",
"JAR",
"-",
"1",
"]",
"==",
"1",
"if",
"featurized_objects",
"[",
"is_jar",
"]",
".",
"shape",
"[",
"0",
"]",
">",
"0",
":",
"offsets",
"=",
"np",
".",
"apply_along_axis",
"(",
"_get_jar_offset",
",",
"1",
",",
"featurized_objects",
"[",
"0",
",",
":",
",",
":",
"]",
"[",
"is_jar",
"[",
"0",
",",
":",
"]",
"]",
")",
"offsets_expanded",
"=",
"np",
".",
"concatenate",
"(",
"[",
"offsets",
"]",
"*",
"featurized_objects",
".",
"shape",
"[",
"0",
"]",
",",
"axis",
"=",
"0",
")",
"angles",
"=",
"featurized_objects",
"[",
"is_jar",
"]",
"[",
":",
",",
"FeaturizedObjects",
".",
"_ANGLE_INDEX",
"]",
"*",
"2",
"*",
"math",
".",
"pi",
"directional_offsets",
"=",
"np",
".",
"stack",
"(",
"[",
"-",
"1",
"*",
"offsets_expanded",
"*",
"np",
".",
"sin",
"(",
"angles",
")",
",",
"offsets_expanded",
"*",
"np",
".",
"cos",
"(",
"angles",
")",
"]",
",",
"axis",
"=",
"-",
"1",
")",
"/",
"constants",
".",
"SCENE_WIDTH",
"*",
"direction",
"featurized_objects",
"[",
"is_jar",
",",
":",
"FeaturizedObjects",
".",
"_ANGLE_INDEX",
"]",
"+=",
"directional_offsets",
"return",
"featurized_objects"
] | [
44,
0
] | [
87,
29
] | python | en | ['en', 'en', 'en'] | True |
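The jar shift above rotates a center-of-mass offset by the object angle and adds it to the normalized (x, y) features. The core arithmetic, with illustrative stand-ins for the offset and scene constant:

import math

SCENE_WIDTH = 256.0          # illustrative stand-in for constants.SCENE_WIDTH
offset = 10.0                # hypothetical jar center-of-mass offset, pixels
angle = 0.25 * 2 * math.pi   # feature 2 stores angle / (2*pi)
dx = -offset * math.sin(angle) / SCENE_WIDTH
dy = offset * math.cos(angle) / SCENE_WIDTH
print(dx, dy)                # the shift added to features 0 (x) and 1 (y)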
FeaturizedObjects.num_objects | (self) | Number of objects in the scene. | Number of objects in the scene. | def num_objects(self) -> int:
"""Number of objects in the scene."""
return self.features.shape[1] | [
"def",
"num_objects",
"(",
"self",
")",
"->",
"int",
":",
"return",
"self",
".",
"features",
".",
"shape",
"[",
"1",
"]"
] | [
217,
4
] | [
219,
37
] | python | en | ['en', 'en', 'en'] | True |
FeaturizedObjects.num_scene_objects | (self) | Number of scene objects in the scene. | Number of scene objects in the scene. | def num_scene_objects(self) -> int:
"""Number of scene objects in the scene."""
return self.num_objects - self.num_user_inputs | [
"def",
"num_scene_objects",
"(",
"self",
")",
"->",
"int",
":",
"return",
"self",
".",
"num_objects",
"-",
"self",
".",
"num_user_inputs"
] | [
231,
4
] | [
233,
54
] | python | en | ['en', 'en', 'en'] | True |
voice | () | Respond to incoming phone calls and mention the caller's city | Respond to incoming phone calls and mention the caller's city | def voice():
"""Respond to incoming phone calls and mention the caller's city"""
# Get the caller's city from Twilio's request to our app
city = request.values['FromCity']
# Start our TwiML response
resp = VoiceResponse()
# Read a message aloud to the caller
resp.say('Never gonna give you up, {}!'.format(city), voice='alice')
# Play an audio file for the caller
resp.play('https://demo.twilio.com/docs/classic.mp3')
return str(resp) | [
"def",
"voice",
"(",
")",
":",
"# Get the caller's city from Twilio's request to our app",
"city",
"=",
"request",
".",
"values",
"[",
"'FromCity'",
"]",
"# Start our TwiML response",
"resp",
"=",
"VoiceResponse",
"(",
")",
"# Read a message aloud to the caller",
"resp",
".",
"say",
"(",
"'Never gonna give you up, {}!'",
".",
"format",
"(",
"city",
")",
",",
"voice",
"=",
"'alice'",
")",
"# Play an audio file for the caller",
"resp",
".",
"play",
"(",
"'https://demo.twilio.com/docs/classic.mp3'",
")",
"return",
"str",
"(",
"resp",
")"
] | [
7,
0
] | [
21,
20
] | python | en | ['en', 'en', 'en'] | True |
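A sketch of how the handler above might be wired into a Flask app. The route path and port are assumptions, not from the record; the Twilio import matches the library's documented TwiML API:

from flask import Flask, request
from twilio.twiml.voice_response import VoiceResponse

app = Flask(__name__)

@app.route('/voice', methods=['GET', 'POST'])   # route path is an assumption
def voice():
    # Twilio posts FromCity with the incoming call; default is a fallback.
    city = request.values.get('FromCity', 'friend')
    resp = VoiceResponse()
    resp.say('Never gonna give you up, {}!'.format(city), voice='alice')
    resp.play('https://demo.twilio.com/docs/classic.mp3')
    return str(resp)

if __name__ == '__main__':
    app.run(port=5000)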
open | (fp, mode="r") |
Load texture from a GD image file.
:param fp: GD file name, or an opened file handle.
:param mode: Optional mode. In this version, if the mode argument
is given, it must be "r".
:returns: An image instance.
:raises OSError: If the image could not be read.
|
Load texture from a GD image file. | def open(fp, mode="r"):
"""
Load texture from a GD image file.
:param fp: GD file name, or an opened file handle.
:param mode: Optional mode. In this version, if the mode argument
is given, it must be "r".
:returns: An image instance.
:raises OSError: If the image could not be read.
"""
if mode != "r":
raise ValueError("bad mode")
try:
return GdImageFile(fp)
except SyntaxError as e:
raise UnidentifiedImageError("cannot identify this image file") from e | [
"def",
"open",
"(",
"fp",
",",
"mode",
"=",
"\"r\"",
")",
":",
"if",
"mode",
"!=",
"\"r\"",
":",
"raise",
"ValueError",
"(",
"\"bad mode\"",
")",
"try",
":",
"return",
"GdImageFile",
"(",
"fp",
")",
"except",
"SyntaxError",
"as",
"e",
":",
"raise",
"UnidentifiedImageError",
"(",
"\"cannot identify this image file\"",
")",
"from",
"e"
] | [
73,
0
] | [
89,
78
] | python | en | ['en', 'error', 'th'] | False |
is_authenticated | (request) | whether or not request user is authenticated or not | whether or not request user is authenticated or not | def is_authenticated(request):
""" whether or not request user is authenticated or not """
return request.user and request.user.is_authenticated | [
"def",
"is_authenticated",
"(",
"request",
")",
":",
"return",
"request",
".",
"user",
"and",
"request",
".",
"user",
".",
"is_authenticated"
] | [
4,
0
] | [
6,
57
] | python | en | ['en', 'en', 'en'] | True |
backoff | (attempts) | Return a backoff delay, in seconds, given a number of attempts.
The delay increases very rapidly with the number of attempts:
1, 2, 4, 8, 16, 32, ... seconds
| Return a backoff delay, in seconds, given a number of attempts. | def backoff(attempts):
"""Return a backoff delay, in seconds, given a number of attempts.
The delay increases very rapidly with the number of attempts:
1, 2, 4, 8, 16, 32, ... seconds
"""
return 2 ** attempts | [
"def",
"backoff",
"(",
"attempts",
")",
":",
"return",
"2",
"**",
"attempts"
] | [
32,
0
] | [
39,
24
] | python | en | ['en', 'en', 'en'] | True |
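The resulting delays grow geometrically with the attempt count, exactly as the docstring lists:

def backoff(attempts):
    return 2 ** attempts

print([backoff(n) for n in range(6)])  # [1, 2, 4, 8, 16, 32]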
ChoiceBase.to_list | (cls) | return a list of (name, value) which can be used as choice field in models | return a list of (name, value) which can be used as choice field in models | def to_list(cls):
""" return a list of (name, value) which can be used as choice field in models"""
return [(d.value, d.name) for d in cls] | [
"def",
"to_list",
"(",
"cls",
")",
":",
"return",
"[",
"(",
"d",
".",
"value",
",",
"d",
".",
"name",
")",
"for",
"d",
"in",
"cls",
"]"
] | [
21,
4
] | [
23,
47
] | python | en | ['en', 'en', 'en'] | True |
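to_list() is meant to be mixed into an Enum. A hypothetical subclass (names and values are made up) showing the round trip into Django-style choices:

import enum

class Color(enum.Enum):          # hypothetical ChoiceBase-style enum
    RED = 'r'
    GREEN = 'g'

    @classmethod
    def to_list(cls):
        return [(d.value, d.name) for d in cls]

print(Color.to_list())           # [('r', 'RED'), ('g', 'GREEN')]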
decoder | (conv_func) |
Convert bytestrings from Python's sqlite3 interface to a regular string.
|
Convert bytestrings from Python's sqlite3 interface to a regular string.
| def decoder(conv_func):
"""
Convert bytestrings from Python's sqlite3 interface to a regular string.
"""
return lambda s: conv_func(s.decode()) | [
"def",
"decoder",
"(",
"conv_func",
")",
":",
"return",
"lambda",
"s",
":",
"conv_func",
"(",
"s",
".",
"decode",
"(",
")",
")"
] | [
37,
0
] | [
41,
42
] | python | en | ['en', 'error', 'th'] | False |
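decoder() adapts text-expecting parsers to sqlite3 converters, which always receive bytes. A runnable sketch registering a date converter (the table and column names are illustrative):

import datetime
import sqlite3

def decoder(conv_func):
    return lambda s: conv_func(s.decode())

# Converters get raw bytes; decode first, then parse.
sqlite3.register_converter('date', decoder(datetime.date.fromisoformat))

conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
conn.execute('CREATE TABLE t (d date)')
conn.execute('INSERT INTO t VALUES (?)', ('2024-01-31',))
print(conn.execute('SELECT d FROM t').fetchone()[0])  # datetime.date(2024, 1, 31)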
none_guard | (func) |
Decorator that returns None if any of the arguments to the decorated
function are None. Many SQL functions return NULL if any of their arguments
are NULL. This decorator simplifies the implementation of this for the
custom functions registered below.
|
Decorator that returns None if any of the arguments to the decorated
function are None. Many SQL functions return NULL if any of their arguments
are NULL. This decorator simplifies the implementation of this for the
custom functions registered below.
| def none_guard(func):
"""
Decorator that returns None if any of the arguments to the decorated
function are None. Many SQL functions return NULL if any of their arguments
are NULL. This decorator simplifies the implementation of this for the
custom functions registered below.
"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
return None if None in args else func(*args, **kwargs)
return wrapper | [
"def",
"none_guard",
"(",
"func",
")",
":",
"@",
"functools",
".",
"wraps",
"(",
"func",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"None",
"if",
"None",
"in",
"args",
"else",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapper"
] | [
44,
0
] | [
54,
18
] | python | en | ['en', 'error', 'th'] | False |
list_aggregate | (function) |
Return an aggregate class that accumulates values in a list and applies
the provided function to the data.
|
Return an aggregate class that accumulates values in a list and applies
the provided function to the data.
| def list_aggregate(function):
"""
Return an aggregate class that accumulates values in a list and applies
the provided function to the data.
"""
return type('ListAggregate', (list,), {'finalize': function, 'step': list.append}) | [
"def",
"list_aggregate",
"(",
"function",
")",
":",
"return",
"type",
"(",
"'ListAggregate'",
",",
"(",
"list",
",",
")",
",",
"{",
"'finalize'",
":",
"function",
",",
"'step'",
":",
"list",
".",
"append",
"}",
")"
] | [
57,
0
] | [
62,
86
] | python | en | ['en', 'error', 'th'] | False |
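list_aggregate() builds a class sqlite3 can drive as a user-defined aggregate: step() accumulates rows into the list, finalize() reduces them. A sketch registering a standard-deviation aggregate (the 'stdev' name is illustrative):

import sqlite3
import statistics

def list_aggregate(function):
    return type('ListAggregate', (list,), {'finalize': function, 'step': list.append})

conn = sqlite3.connect(':memory:')
conn.create_aggregate('stdev', 1, list_aggregate(statistics.pstdev))
conn.execute('CREATE TABLE t (x REAL)')
conn.executemany('INSERT INTO t VALUES (?)', [(1,), (2,), (3,)])
print(conn.execute('SELECT stdev(x) FROM t').fetchone()[0])  # ~0.816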
_sqlite_format_dtdelta | (conn, lhs, rhs) |
LHS and RHS can be either:
- An integer number of microseconds
- A string representing a datetime
|
LHS and RHS can be either:
- An integer number of microseconds
- A string representing a datetime
| def _sqlite_format_dtdelta(conn, lhs, rhs):
"""
LHS and RHS can be either:
- An integer number of microseconds
- A string representing a datetime
"""
try:
real_lhs = datetime.timedelta(0, 0, lhs) if isinstance(lhs, int) else backend_utils.typecast_timestamp(lhs)
real_rhs = datetime.timedelta(0, 0, rhs) if isinstance(rhs, int) else backend_utils.typecast_timestamp(rhs)
if conn.strip() == '+':
out = real_lhs + real_rhs
else:
out = real_lhs - real_rhs
except (ValueError, TypeError):
return None
# typecast_timestamp returns a date or a datetime without timezone.
# It will be formatted as "%Y-%m-%d" or "%Y-%m-%d %H:%M:%S[.%f]"
return str(out) | [
"def",
"_sqlite_format_dtdelta",
"(",
"conn",
",",
"lhs",
",",
"rhs",
")",
":",
"try",
":",
"real_lhs",
"=",
"datetime",
".",
"timedelta",
"(",
"0",
",",
"0",
",",
"lhs",
")",
"if",
"isinstance",
"(",
"lhs",
",",
"int",
")",
"else",
"backend_utils",
".",
"typecast_timestamp",
"(",
"lhs",
")",
"real_rhs",
"=",
"datetime",
".",
"timedelta",
"(",
"0",
",",
"0",
",",
"rhs",
")",
"if",
"isinstance",
"(",
"rhs",
",",
"int",
")",
"else",
"backend_utils",
".",
"typecast_timestamp",
"(",
"rhs",
")",
"if",
"conn",
".",
"strip",
"(",
")",
"==",
"'+'",
":",
"out",
"=",
"real_lhs",
"+",
"real_rhs",
"else",
":",
"out",
"=",
"real_lhs",
"-",
"real_rhs",
"except",
"(",
"ValueError",
",",
"TypeError",
")",
":",
"return",
"None",
"# typecast_timestamp returns a date or a datetime without timezone.",
"# It will be formatted as \"%Y-%m-%d\" or \"%Y-%m-%d %H:%M:%S[.%f]\"",
"return",
"str",
"(",
"out",
")"
] | [
558,
0
] | [
575,
19
] | python | en | ['en', 'error', 'th'] | False |
DatabaseWrapper.check_constraints | (self, table_names=None) |
Check each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
|
Check each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
| def check_constraints(self, table_names=None):
"""
Check each table name in `table_names` for rows with invalid foreign
key references. This method is intended to be used in conjunction with
`disable_constraint_checking()` and `enable_constraint_checking()`, to
determine if rows with invalid references were entered while constraint
checks were off.
"""
if self.features.supports_pragma_foreign_key_check:
with self.cursor() as cursor:
if table_names is None:
violations = cursor.execute('PRAGMA foreign_key_check').fetchall()
else:
violations = chain.from_iterable(
cursor.execute(
'PRAGMA foreign_key_check(%s)'
% self.ops.quote_name(table_name)
).fetchall()
for table_name in table_names
)
# See https://www.sqlite.org/pragma.html#pragma_foreign_key_check
for table_name, rowid, referenced_table_name, foreign_key_index in violations:
foreign_key = cursor.execute(
'PRAGMA foreign_key_list(%s)' % self.ops.quote_name(table_name)
).fetchall()[foreign_key_index]
column_name, referenced_column_name = foreign_key[3:5]
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
primary_key_value, bad_value = cursor.execute(
'SELECT %s, %s FROM %s WHERE rowid = %%s' % (
self.ops.quote_name(primary_key_column_name),
self.ops.quote_name(column_name),
self.ops.quote_name(table_name),
),
(rowid,),
).fetchone()
raise IntegrityError(
"The row in table '%s' with primary key '%s' has an "
"invalid foreign key: %s.%s contains a value '%s' that "
"does not have a corresponding value in %s.%s." % (
table_name, primary_key_value, table_name, column_name,
bad_value, referenced_table_name, referenced_column_name
)
)
else:
with self.cursor() as cursor:
if table_names is None:
table_names = self.introspection.table_names(cursor)
for table_name in table_names:
primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
if not primary_key_column_name:
continue
key_columns = self.introspection.get_key_columns(cursor, table_name)
for column_name, referenced_table_name, referenced_column_name in key_columns:
cursor.execute(
"""
SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
LEFT JOIN `%s` as REFERRED
ON (REFERRING.`%s` = REFERRED.`%s`)
WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL
"""
% (
primary_key_column_name, column_name, table_name,
referenced_table_name, column_name, referenced_column_name,
column_name, referenced_column_name,
)
)
for bad_row in cursor.fetchall():
raise IntegrityError(
"The row in table '%s' with primary key '%s' has an "
"invalid foreign key: %s.%s contains a value '%s' that "
"does not have a corresponding value in %s.%s." % (
table_name, bad_row[0], table_name, column_name,
bad_row[1], referenced_table_name, referenced_column_name,
)
) | [
"def",
"check_constraints",
"(",
"self",
",",
"table_names",
"=",
"None",
")",
":",
"if",
"self",
".",
"features",
".",
"supports_pragma_foreign_key_check",
":",
"with",
"self",
".",
"cursor",
"(",
")",
"as",
"cursor",
":",
"if",
"table_names",
"is",
"None",
":",
"violations",
"=",
"cursor",
".",
"execute",
"(",
"'PRAGMA foreign_key_check'",
")",
".",
"fetchall",
"(",
")",
"else",
":",
"violations",
"=",
"chain",
".",
"from_iterable",
"(",
"cursor",
".",
"execute",
"(",
"'PRAGMA foreign_key_check(%s)'",
"%",
"self",
".",
"ops",
".",
"quote_name",
"(",
"table_name",
")",
")",
".",
"fetchall",
"(",
")",
"for",
"table_name",
"in",
"table_names",
")",
"# See https://www.sqlite.org/pragma.html#pragma_foreign_key_check",
"for",
"table_name",
",",
"rowid",
",",
"referenced_table_name",
",",
"foreign_key_index",
"in",
"violations",
":",
"foreign_key",
"=",
"cursor",
".",
"execute",
"(",
"'PRAGMA foreign_key_list(%s)'",
"%",
"self",
".",
"ops",
".",
"quote_name",
"(",
"table_name",
")",
")",
".",
"fetchall",
"(",
")",
"[",
"foreign_key_index",
"]",
"column_name",
",",
"referenced_column_name",
"=",
"foreign_key",
"[",
"3",
":",
"5",
"]",
"primary_key_column_name",
"=",
"self",
".",
"introspection",
".",
"get_primary_key_column",
"(",
"cursor",
",",
"table_name",
")",
"primary_key_value",
",",
"bad_value",
"=",
"cursor",
".",
"execute",
"(",
"'SELECT %s, %s FROM %s WHERE rowid = %%s'",
"%",
"(",
"self",
".",
"ops",
".",
"quote_name",
"(",
"primary_key_column_name",
")",
",",
"self",
".",
"ops",
".",
"quote_name",
"(",
"column_name",
")",
",",
"self",
".",
"ops",
".",
"quote_name",
"(",
"table_name",
")",
",",
")",
",",
"(",
"rowid",
",",
")",
",",
")",
".",
"fetchone",
"(",
")",
"raise",
"IntegrityError",
"(",
"\"The row in table '%s' with primary key '%s' has an \"",
"\"invalid foreign key: %s.%s contains a value '%s' that \"",
"\"does not have a corresponding value in %s.%s.\"",
"%",
"(",
"table_name",
",",
"primary_key_value",
",",
"table_name",
",",
"column_name",
",",
"bad_value",
",",
"referenced_table_name",
",",
"referenced_column_name",
")",
")",
"else",
":",
"with",
"self",
".",
"cursor",
"(",
")",
"as",
"cursor",
":",
"if",
"table_names",
"is",
"None",
":",
"table_names",
"=",
"self",
".",
"introspection",
".",
"table_names",
"(",
"cursor",
")",
"for",
"table_name",
"in",
"table_names",
":",
"primary_key_column_name",
"=",
"self",
".",
"introspection",
".",
"get_primary_key_column",
"(",
"cursor",
",",
"table_name",
")",
"if",
"not",
"primary_key_column_name",
":",
"continue",
"key_columns",
"=",
"self",
".",
"introspection",
".",
"get_key_columns",
"(",
"cursor",
",",
"table_name",
")",
"for",
"column_name",
",",
"referenced_table_name",
",",
"referenced_column_name",
"in",
"key_columns",
":",
"cursor",
".",
"execute",
"(",
"\"\"\"\n SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING\n LEFT JOIN `%s` as REFERRED\n ON (REFERRING.`%s` = REFERRED.`%s`)\n WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL\n \"\"\"",
"%",
"(",
"primary_key_column_name",
",",
"column_name",
",",
"table_name",
",",
"referenced_table_name",
",",
"column_name",
",",
"referenced_column_name",
",",
"column_name",
",",
"referenced_column_name",
",",
")",
")",
"for",
"bad_row",
"in",
"cursor",
".",
"fetchall",
"(",
")",
":",
"raise",
"IntegrityError",
"(",
"\"The row in table '%s' with primary key '%s' has an \"",
"\"invalid foreign key: %s.%s contains a value '%s' that \"",
"\"does not have a corresponding value in %s.%s.\"",
"%",
"(",
"table_name",
",",
"bad_row",
"[",
"0",
"]",
",",
"table_name",
",",
"column_name",
",",
"bad_row",
"[",
"1",
"]",
",",
"referenced_table_name",
",",
"referenced_column_name",
",",
")",
")"
] | [
317,
4
] | [
391,
29
] | python | en | ['en', 'error', 'th'] | False |
DatabaseWrapper._start_transaction_under_autocommit | (self) |
Start a transaction explicitly in autocommit mode.
Staying in autocommit mode works around a bug of sqlite3 that breaks
savepoints when autocommit is disabled.
|
Start a transaction explicitly in autocommit mode. | def _start_transaction_under_autocommit(self):
"""
Start a transaction explicitly in autocommit mode.
Staying in autocommit mode works around a bug of sqlite3 that breaks
savepoints when autocommit is disabled.
"""
self.cursor().execute("BEGIN") | [
"def",
"_start_transaction_under_autocommit",
"(",
"self",
")",
":",
"self",
".",
"cursor",
"(",
")",
".",
"execute",
"(",
"\"BEGIN\"",
")"
] | [
396,
4
] | [
403,
38
] | python | en | ['en', 'error', 'th'] | False |
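The workaround relies on sqlite3's autocommit mode (isolation_level=None), where BEGIN is issued manually and savepoints then behave. A self-contained demonstration:

import sqlite3

# isolation_level=None puts sqlite3 in autocommit mode; BEGIN is ours to issue.
conn = sqlite3.connect(':memory:', isolation_level=None)
conn.execute('BEGIN')
conn.execute('CREATE TABLE t (x)')
conn.execute('SAVEPOINT sp1')
conn.execute('INSERT INTO t VALUES (1)')
conn.execute('ROLLBACK TO sp1')   # the savepoint works as expected here
conn.execute('COMMIT')
print(conn.execute('SELECT COUNT(*) FROM t').fetchone()[0])  # 0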
CompletionCommand.run | (self, options: Values, args: List[str]) | Prints the completion code of the given shell | Prints the completion code of the given shell | def run(self, options: Values, args: List[str]) -> int:
"""Prints the completion code of the given shell"""
shells = COMPLETION_SCRIPTS.keys()
shell_options = ['--' + shell for shell in sorted(shells)]
if options.shell in shells:
script = textwrap.dedent(
COMPLETION_SCRIPTS.get(options.shell, '').format(
prog=get_prog())
)
print(BASE_COMPLETION.format(script=script, shell=options.shell))
return SUCCESS
else:
sys.stderr.write(
'ERROR: You must pass {}\n' .format(' or '.join(shell_options))
)
return SUCCESS | [
"def",
"run",
"(",
"self",
",",
"options",
":",
"Values",
",",
"args",
":",
"List",
"[",
"str",
"]",
")",
"->",
"int",
":",
"shells",
"=",
"COMPLETION_SCRIPTS",
".",
"keys",
"(",
")",
"shell_options",
"=",
"[",
"'--'",
"+",
"shell",
"for",
"shell",
"in",
"sorted",
"(",
"shells",
")",
"]",
"if",
"options",
".",
"shell",
"in",
"shells",
":",
"script",
"=",
"textwrap",
".",
"dedent",
"(",
"COMPLETION_SCRIPTS",
".",
"get",
"(",
"options",
".",
"shell",
",",
"''",
")",
".",
"format",
"(",
"prog",
"=",
"get_prog",
"(",
")",
")",
")",
"print",
"(",
"BASE_COMPLETION",
".",
"format",
"(",
"script",
"=",
"script",
",",
"shell",
"=",
"options",
".",
"shell",
")",
")",
"return",
"SUCCESS",
"else",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'ERROR: You must pass {}\\n'",
".",
"format",
"(",
"' or '",
".",
"join",
"(",
"shell_options",
")",
")",
")",
"return",
"SUCCESS"
] | [
75,
4
] | [
90,
26
] | python | en | ['en', 'en', 'en'] | True |
is_django_module | (module) | Return True if the given module is nested under Django. | Return True if the given module is nested under Django. | def is_django_module(module):
"""Return True if the given module is nested under Django."""
return module.__name__.startswith('django.') | [
"def",
"is_django_module",
"(",
"module",
")",
":",
"return",
"module",
".",
"__name__",
".",
"startswith",
"(",
"'django.'",
")"
] | [
48,
0
] | [
50,
48
] | python | en | ['en', 'en', 'en'] | True |
is_django_path | (path) | Return True if the given file path is nested under Django. | Return True if the given file path is nested under Django. | def is_django_path(path):
"""Return True if the given file path is nested under Django."""
return Path(django.__file__).parent in Path(path).parents | [
"def",
"is_django_path",
"(",
"path",
")",
":",
"return",
"Path",
"(",
"django",
".",
"__file__",
")",
".",
"parent",
"in",
"Path",
"(",
"path",
")",
".",
"parents"
] | [
53,
0
] | [
55,
61
] | python | en | ['en', 'en', 'en'] | True |
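The containment test reads naturally with pathlib: a file is "under" a package when the package directory appears among the file's parents. The paths below are hypothetical:

from pathlib import Path

pkg = Path('/usr/lib/python3/django')                   # hypothetical install
f = Path('/usr/lib/python3/django/apps/registry.py')    # hypothetical module
print(pkg in f.parents)  # True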
ensure_echo_on | () |
Ensure that echo mode is enabled. Some tools such as PDB disable
it which causes usability issues after reload.
|
Ensure that echo mode is enabled. Some tools such as PDB disable
it which causes usability issues after reload.
| def ensure_echo_on():
"""
Ensure that echo mode is enabled. Some tools such as PDB disable
it which causes usability issues after reload.
"""
if not termios or not sys.stdin.isatty():
return
attr_list = termios.tcgetattr(sys.stdin)
if not attr_list[3] & termios.ECHO:
attr_list[3] |= termios.ECHO
if hasattr(signal, 'SIGTTOU'):
old_handler = signal.signal(signal.SIGTTOU, signal.SIG_IGN)
else:
old_handler = None
termios.tcsetattr(sys.stdin, termios.TCSANOW, attr_list)
if old_handler is not None:
signal.signal(signal.SIGTTOU, old_handler) | [
"def",
"ensure_echo_on",
"(",
")",
":",
"if",
"not",
"termios",
"or",
"not",
"sys",
".",
"stdin",
".",
"isatty",
"(",
")",
":",
"return",
"attr_list",
"=",
"termios",
".",
"tcgetattr",
"(",
"sys",
".",
"stdin",
")",
"if",
"not",
"attr_list",
"[",
"3",
"]",
"&",
"termios",
".",
"ECHO",
":",
"attr_list",
"[",
"3",
"]",
"|=",
"termios",
".",
"ECHO",
"if",
"hasattr",
"(",
"signal",
",",
"'SIGTTOU'",
")",
":",
"old_handler",
"=",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTTOU",
",",
"signal",
".",
"SIG_IGN",
")",
"else",
":",
"old_handler",
"=",
"None",
"termios",
".",
"tcsetattr",
"(",
"sys",
".",
"stdin",
",",
"termios",
".",
"TCSANOW",
",",
"attr_list",
")",
"if",
"old_handler",
"is",
"not",
"None",
":",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTTOU",
",",
"old_handler",
")"
] | [
89,
0
] | [
105,
54
] | python | en | ['en', 'error', 'th'] | False |
iter_modules_and_files | (modules, extra_files) | Iterate through all modules needed to be watched. | Iterate through all modules needed to be watched. | def iter_modules_and_files(modules, extra_files):
"""Iterate through all modules needed to be watched."""
sys_file_paths = []
for module in modules:
# During debugging (with PyDev) the 'typing.io' and 'typing.re' objects
# are added to sys.modules, however they are types not modules and so
# cause issues here.
if not isinstance(module, ModuleType):
continue
if module.__name__ == '__main__':
# __main__ (usually manage.py) doesn't always have a __spec__ set.
# Handle this by falling back to using __file__, resolved below.
# See https://docs.python.org/reference/import.html#main-spec
# __file__ may not exist, e.g. when running ipdb debugger.
if hasattr(module, '__file__'):
sys_file_paths.append(module.__file__)
continue
if getattr(module, '__spec__', None) is None:
continue
spec = module.__spec__
# Modules could be loaded from places without a concrete location. If
# this is the case, skip them.
if spec.has_location:
origin = spec.loader.archive if isinstance(spec.loader, zipimporter) else spec.origin
sys_file_paths.append(origin)
results = set()
for filename in itertools.chain(sys_file_paths, extra_files):
if not filename:
continue
path = Path(filename)
try:
if not path.exists():
# The module could have been removed, don't fail loudly if this
# is the case.
continue
except ValueError as e:
# Network filesystems may return null bytes in file paths.
logger.debug('"%s" raised when resolving path: "%s"', e, path)
continue
resolved_path = path.resolve().absolute()
results.add(resolved_path)
return frozenset(results) | [
"def",
"iter_modules_and_files",
"(",
"modules",
",",
"extra_files",
")",
":",
"sys_file_paths",
"=",
"[",
"]",
"for",
"module",
"in",
"modules",
":",
"# During debugging (with PyDev) the 'typing.io' and 'typing.re' objects",
"# are added to sys.modules, however they are types not modules and so",
"# cause issues here.",
"if",
"not",
"isinstance",
"(",
"module",
",",
"ModuleType",
")",
":",
"continue",
"if",
"module",
".",
"__name__",
"==",
"'__main__'",
":",
"# __main__ (usually manage.py) doesn't always have a __spec__ set.",
"# Handle this by falling back to using __file__, resolved below.",
"# See https://docs.python.org/reference/import.html#main-spec",
"# __file__ may not exists, e.g. when running ipdb debugger.",
"if",
"hasattr",
"(",
"module",
",",
"'__file__'",
")",
":",
"sys_file_paths",
".",
"append",
"(",
"module",
".",
"__file__",
")",
"continue",
"if",
"getattr",
"(",
"module",
",",
"'__spec__'",
",",
"None",
")",
"is",
"None",
":",
"continue",
"spec",
"=",
"module",
".",
"__spec__",
"# Modules could be loaded from places without a concrete location. If",
"# this is the case, skip them.",
"if",
"spec",
".",
"has_location",
":",
"origin",
"=",
"spec",
".",
"loader",
".",
"archive",
"if",
"isinstance",
"(",
"spec",
".",
"loader",
",",
"zipimporter",
")",
"else",
"spec",
".",
"origin",
"sys_file_paths",
".",
"append",
"(",
"origin",
")",
"results",
"=",
"set",
"(",
")",
"for",
"filename",
"in",
"itertools",
".",
"chain",
"(",
"sys_file_paths",
",",
"extra_files",
")",
":",
"if",
"not",
"filename",
":",
"continue",
"path",
"=",
"Path",
"(",
"filename",
")",
"try",
":",
"if",
"not",
"path",
".",
"exists",
"(",
")",
":",
"# The module could have been removed, don't fail loudly if this",
"# is the case.",
"continue",
"except",
"ValueError",
"as",
"e",
":",
"# Network filesystems may return null bytes in file paths.",
"logger",
".",
"debug",
"(",
"'\"%s\" raised when resolving path: \"%s\"'",
",",
"e",
",",
"path",
")",
"continue",
"resolved_path",
"=",
"path",
".",
"resolve",
"(",
")",
".",
"absolute",
"(",
")",
"results",
".",
"add",
"(",
"resolved_path",
")",
"return",
"frozenset",
"(",
"results",
")"
] | [
119,
0
] | [
161,
29
] | python | en | ['en', 'en', 'en'] | True |
common_roots | (paths) |
Return a tuple of common roots that are shared between the given paths.
File system watchers operate on directories and aren't cheap to create.
Try to find the minimum set of directories to watch that encompass all of
the files that need to be watched.
|
Return a tuple of common roots that are shared between the given paths.
File system watchers operate on directories and aren't cheap to create.
Try to find the minimum set of directories to watch that encompass all of
the files that need to be watched.
| def common_roots(paths):
"""
Return a tuple of common roots that are shared between the given paths.
File system watchers operate on directories and aren't cheap to create.
Try to find the minimum set of directories to watch that encompass all of
the files that need to be watched.
"""
# Inspired from Werkzeug:
# https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py
# Create a sorted list of the path components, longest first.
path_parts = sorted([x.parts for x in paths], key=len, reverse=True)
tree = {}
for chunks in path_parts:
node = tree
# Add each part of the path to the tree.
for chunk in chunks:
node = node.setdefault(chunk, {})
# Clear the last leaf in the tree.
node.clear()
# Turn the tree into a list of Path instances.
def _walk(node, path):
for prefix, child in node.items():
yield from _walk(child, path + (prefix,))
if not node:
yield Path(*path)
return tuple(_walk(tree, ())) | [
"def",
"common_roots",
"(",
"paths",
")",
":",
"# Inspired from Werkzeug:",
"# https://github.com/pallets/werkzeug/blob/7477be2853df70a022d9613e765581b9411c3c39/werkzeug/_reloader.py",
"# Create a sorted list of the path components, longest first.",
"path_parts",
"=",
"sorted",
"(",
"[",
"x",
".",
"parts",
"for",
"x",
"in",
"paths",
"]",
",",
"key",
"=",
"len",
",",
"reverse",
"=",
"True",
")",
"tree",
"=",
"{",
"}",
"for",
"chunks",
"in",
"path_parts",
":",
"node",
"=",
"tree",
"# Add each part of the path to the tree.",
"for",
"chunk",
"in",
"chunks",
":",
"node",
"=",
"node",
".",
"setdefault",
"(",
"chunk",
",",
"{",
"}",
")",
"# Clear the last leaf in the tree.",
"node",
".",
"clear",
"(",
")",
"# Turn the tree into a list of Path instances.",
"def",
"_walk",
"(",
"node",
",",
"path",
")",
":",
"for",
"prefix",
",",
"child",
"in",
"node",
".",
"items",
"(",
")",
":",
"yield",
"from",
"_walk",
"(",
"child",
",",
"path",
"+",
"(",
"prefix",
",",
")",
")",
"if",
"not",
"node",
":",
"yield",
"Path",
"(",
"*",
"path",
")",
"return",
"tuple",
"(",
"_walk",
"(",
"tree",
",",
"(",
")",
")",
")"
] | [
165,
0
] | [
192,
33
] | python | en | ['en', 'error', 'th'] | False |
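Running the same tree-collapsing logic on a toy path set shows nested directories folding into their shared root. A condensed, standalone copy for illustration (POSIX-style paths assumed):

from pathlib import Path

def common_roots_demo(paths):
    # Condensed copy of the record above, for illustration only.
    path_parts = sorted([x.parts for x in paths], key=len, reverse=True)
    tree = {}
    for chunks in path_parts:
        node = tree
        for chunk in chunks:
            node = node.setdefault(chunk, {})
        node.clear()                      # clear the last leaf in the tree
    def _walk(node, path):
        for prefix, child in node.items():
            yield from _walk(child, path + (prefix,))
        if not node:
            yield Path(*path)
    return tuple(_walk(tree, ()))

paths = {Path('/a/b'), Path('/a/b/c'), Path('/x/y')}
print(sorted(common_roots_demo(paths)))  # [PosixPath('/a/b'), PosixPath('/x/y')]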
sys_path_directories | () |
Yield absolute directories from sys.path, ignoring entries that don't
exist.
|
Yield absolute directories from sys.path, ignoring entries that don't
exist.
| def sys_path_directories():
"""
Yield absolute directories from sys.path, ignoring entries that don't
exist.
"""
for path in sys.path:
path = Path(path)
if not path.exists():
continue
resolved_path = path.resolve().absolute()
# If the path is a file (like a zip file), watch the parent directory.
if resolved_path.is_file():
yield resolved_path.parent
else:
yield resolved_path | [
"def",
"sys_path_directories",
"(",
")",
":",
"for",
"path",
"in",
"sys",
".",
"path",
":",
"path",
"=",
"Path",
"(",
"path",
")",
"if",
"not",
"path",
".",
"exists",
"(",
")",
":",
"continue",
"resolved_path",
"=",
"path",
".",
"resolve",
"(",
")",
".",
"absolute",
"(",
")",
"# If the path is a file (like a zip file), watch the parent directory.",
"if",
"resolved_path",
".",
"is_file",
"(",
")",
":",
"yield",
"resolved_path",
".",
"parent",
"else",
":",
"yield",
"resolved_path"
] | [
195,
0
] | [
209,
31
] | python | en | ['en', 'error', 'th'] | False |
get_child_arguments | () |
Return the executable. This contains a workaround for Windows if the
executable is reported to not have the .exe extension which can cause bugs
on reloading.
|
Return the executable. This contains a workaround for Windows if the
executable is reported to not have the .exe extension which can cause bugs
on reloading.
| def get_child_arguments():
"""
Return the executable. This contains a workaround for Windows if the
executable is reported to not have the .exe extension which can cause bugs
on reloading.
"""
import __main__
py_script = Path(sys.argv[0])
args = [sys.executable] + ['-W%s' % o for o in sys.warnoptions]
# __spec__ is set when the server was started with the `-m` option,
# see https://docs.python.org/3/reference/import.html#main-spec
# __spec__ may not exist, e.g. when running in a Conda env.
if getattr(__main__, '__spec__', None) is not None and __main__.__spec__.parent:
args += ['-m', __main__.__spec__.parent]
args += sys.argv[1:]
elif not py_script.exists():
# sys.argv[0] may not exist for several reasons on Windows.
# It may exist with a .exe extension or have a -script.py suffix.
exe_entrypoint = py_script.with_suffix('.exe')
if exe_entrypoint.exists():
# Should be executed directly, ignoring sys.executable.
# TODO: Remove str() when dropping support for PY37.
# args parameter accepts path-like on Windows from Python 3.8.
return [str(exe_entrypoint), *sys.argv[1:]]
script_entrypoint = py_script.with_name('%s-script.py' % py_script.name)
if script_entrypoint.exists():
# Should be executed as usual.
# TODO: Remove str() when dropping support for PY37.
# args parameter accepts path-like on Windows from Python 3.8.
return [*args, str(script_entrypoint), *sys.argv[1:]]
raise RuntimeError('Script %s does not exist.' % py_script)
else:
args += sys.argv
return args | [
"def",
"get_child_arguments",
"(",
")",
":",
"import",
"__main__",
"py_script",
"=",
"Path",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
"args",
"=",
"[",
"sys",
".",
"executable",
"]",
"+",
"[",
"'-W%s'",
"%",
"o",
"for",
"o",
"in",
"sys",
".",
"warnoptions",
"]",
"# __spec__ is set when the server was started with the `-m` option,",
"# see https://docs.python.org/3/reference/import.html#main-spec",
"# __spec__ may not exist, e.g. when running in a Conda env.",
"if",
"getattr",
"(",
"__main__",
",",
"'__spec__'",
",",
"None",
")",
"is",
"not",
"None",
"and",
"__main__",
".",
"__spec__",
".",
"parent",
":",
"args",
"+=",
"[",
"'-m'",
",",
"__main__",
".",
"__spec__",
".",
"parent",
"]",
"args",
"+=",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
"elif",
"not",
"py_script",
".",
"exists",
"(",
")",
":",
"# sys.argv[0] may not exist for several reasons on Windows.",
"# It may exist with a .exe extension or have a -script.py suffix.",
"exe_entrypoint",
"=",
"py_script",
".",
"with_suffix",
"(",
"'.exe'",
")",
"if",
"exe_entrypoint",
".",
"exists",
"(",
")",
":",
"# Should be executed directly, ignoring sys.executable.",
"# TODO: Remove str() when dropping support for PY37.",
"# args parameter accepts path-like on Windows from Python 3.8.",
"return",
"[",
"str",
"(",
"exe_entrypoint",
")",
",",
"*",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
"]",
"script_entrypoint",
"=",
"py_script",
".",
"with_name",
"(",
"'%s-script.py'",
"%",
"py_script",
".",
"name",
")",
"if",
"script_entrypoint",
".",
"exists",
"(",
")",
":",
"# Should be executed as usual.",
"# TODO: Remove str() when dropping support for PY37.",
"# args parameter accepts path-like on Windows from Python 3.8.",
"return",
"[",
"*",
"args",
",",
"str",
"(",
"script_entrypoint",
")",
",",
"*",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
"]",
"raise",
"RuntimeError",
"(",
"'Script %s does not exist.'",
"%",
"py_script",
")",
"else",
":",
"args",
"+=",
"sys",
".",
"argv",
"return",
"args"
] | [
212,
0
] | [
246,
15
] | python | en | ['en', 'error', 'th'] | False |
get_reloader | () | Return the most suitable reloader for this environment. | Return the most suitable reloader for this environment. | def get_reloader():
"""Return the most suitable reloader for this environment."""
try:
WatchmanReloader.check_availability()
except WatchmanUnavailable:
return StatReloader()
return WatchmanReloader() | [
"def",
"get_reloader",
"(",
")",
":",
"try",
":",
"WatchmanReloader",
".",
"check_availability",
"(",
")",
"except",
"WatchmanUnavailable",
":",
"return",
"StatReloader",
"(",
")",
"return",
"WatchmanReloader",
"(",
")"
] | [
603,
0
] | [
609,
29
] | python | en | ['en', 'en', 'en'] | True |
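The selection idiom generalizes: probe an optional fast backend, fall back to a portable one. A generic, self-contained restatement with illustrative names:

class Unavailable(Exception):
    pass

class FastBackend:
    @classmethod
    def check_availability(cls):
        raise Unavailable('optional dependency missing')

class PortableBackend:
    pass

def get_backend():
    try:
        FastBackend.check_availability()
    except Unavailable:
        return PortableBackend()
    return FastBackend()

print(type(get_backend()).__name__)  # PortableBackend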
BaseReloader.watched_files | (self, include_globs=True) |
Yield all files that need to be watched, including module files and
files within globs.
|
Yield all files that need to be watched, including module files and
files within globs.
| def watched_files(self, include_globs=True):
"""
Yield all files that need to be watched, including module files and
files within globs.
"""
yield from iter_all_python_module_files()
yield from self.extra_files
if include_globs:
for directory, patterns in self.directory_globs.items():
for pattern in patterns:
yield from directory.glob(pattern) | [
"def",
"watched_files",
"(",
"self",
",",
"include_globs",
"=",
"True",
")",
":",
"yield",
"from",
"iter_all_python_module_files",
"(",
")",
"yield",
"from",
"self",
".",
"extra_files",
"if",
"include_globs",
":",
"for",
"directory",
",",
"patterns",
"in",
"self",
".",
"directory_globs",
".",
"items",
"(",
")",
":",
"for",
"pattern",
"in",
"patterns",
":",
"yield",
"from",
"directory",
".",
"glob",
"(",
"pattern",
")"
] | [
283,
4
] | [
293,
54
] | python | en | ['en', 'error', 'th'] | False |
BaseReloader.wait_for_apps_ready | (self, app_reg, django_main_thread) |
Wait until Django reports that the apps have been loaded. If the given
thread has terminated before the apps are ready, then a SyntaxError or
other non-recoverable error has been raised. In that case, stop waiting
for the apps_ready event and continue processing.
Return True if the thread is alive and the ready event has been
triggered, or False if the thread is terminated while waiting for the
event.
|
Wait until Django reports that the apps have been loaded. If the given
thread has terminated before the apps are ready, then a SyntaxError or
other non-recoverable error has been raised. In that case, stop waiting
for the apps_ready event and continue processing. | def wait_for_apps_ready(self, app_reg, django_main_thread):
"""
Wait until Django reports that the apps have been loaded. If the given
thread has terminated before the apps are ready, then a SyntaxError or
other non-recoverable error has been raised. In that case, stop waiting
for the apps_ready event and continue processing.
Return True if the thread is alive and the ready event has been
triggered, or False if the thread is terminated while waiting for the
event.
"""
while django_main_thread.is_alive():
if app_reg.ready_event.wait(timeout=0.1):
return True
else:
logger.debug('Main Django thread has terminated before apps are ready.')
return False | [
"def",
"wait_for_apps_ready",
"(",
"self",
",",
"app_reg",
",",
"django_main_thread",
")",
":",
"while",
"django_main_thread",
".",
"is_alive",
"(",
")",
":",
"if",
"app_reg",
".",
"ready_event",
".",
"wait",
"(",
"timeout",
"=",
"0.1",
")",
":",
"return",
"True",
"else",
":",
"logger",
".",
"debug",
"(",
"'Main Django thread has terminated before apps are ready.'",
")",
"return",
"False"
] | [
295,
4
] | [
311,
24
] | python | en | ['en', 'error', 'th'] | False |
BaseReloader.tick | (self) |
This generator is called in a loop from run_loop. It's important that
the method takes care of pausing or otherwise waiting for a period of
time. This split between run_loop() and tick() is to improve the
testability of the reloader implementations by decoupling the work they
do from the loop.
|
This generator is called in a loop from run_loop. It's important that
the method takes care of pausing or otherwise waiting for a period of
time. This split between run_loop() and tick() is to improve the
testability of the reloader implementations by decoupling the work they
do from the loop.
| def tick(self):
"""
This generator is called in a loop from run_loop. It's important that
the method takes care of pausing or otherwise waiting for a period of
time. This split between run_loop() and tick() is to improve the
testability of the reloader implementations by decoupling the work they
do from the loop.
"""
raise NotImplementedError('subclasses must implement tick().') | [
"def",
"tick",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses must implement tick().'",
")"
] | [
339,
4
] | [
347,
70
] | python | en | ['en', 'error', 'th'] | False |
WatchmanReloader._watch_glob | (self, directory, patterns) |
Watch a directory with a specific glob. If the directory doesn't yet
exist, attempt to watch the parent directory and amend the patterns to
include this. It's important this method isn't called more than once per
directory when updating all subscriptions. Subsequent calls will
overwrite the named subscription, so it must include all possible glob
expressions.
|
Watch a directory with a specific glob. If the directory doesn't yet
exist, attempt to watch the parent directory and amend the patterns to
include this. It's important this method isn't called more than once per
directory when updating all subscriptions. Subsequent calls will
overwrite the named subscription, so it must include all possible glob
expressions.
| def _watch_glob(self, directory, patterns):
"""
Watch a directory with a specific glob. If the directory doesn't yet
exist, attempt to watch the parent directory and amend the patterns to
include this. It's important this method isn't called more than once per
directory when updating all subscriptions. Subsequent calls will
overwrite the named subscription, so it must include all possible glob
expressions.
"""
prefix = 'glob'
if not directory.exists():
if not directory.parent.exists():
logger.warning('Unable to watch directory %s as neither it nor its parent exist.', directory)
return
prefix = 'glob-parent-%s' % directory.name
patterns = ['%s/%s' % (directory.name, pattern) for pattern in patterns]
directory = directory.parent
expression = ['anyof']
for pattern in patterns:
expression.append(['match', pattern, 'wholename'])
self._subscribe(directory, '%s:%s' % (prefix, directory), expression) | [
"def",
"_watch_glob",
"(",
"self",
",",
"directory",
",",
"patterns",
")",
":",
"prefix",
"=",
"'glob'",
"if",
"not",
"directory",
".",
"exists",
"(",
")",
":",
"if",
"not",
"directory",
".",
"parent",
".",
"exists",
"(",
")",
":",
"logger",
".",
"warning",
"(",
"'Unable to watch directory %s as neither it or its parent exist.'",
",",
"directory",
")",
"return",
"prefix",
"=",
"'glob-parent-%s'",
"%",
"directory",
".",
"name",
"patterns",
"=",
"[",
"'%s/%s'",
"%",
"(",
"directory",
".",
"name",
",",
"pattern",
")",
"for",
"pattern",
"in",
"patterns",
"]",
"directory",
"=",
"directory",
".",
"parent",
"expression",
"=",
"[",
"'anyof'",
"]",
"for",
"pattern",
"in",
"patterns",
":",
"expression",
".",
"append",
"(",
"[",
"'match'",
",",
"pattern",
",",
"'wholename'",
"]",
")",
"self",
".",
"_subscribe",
"(",
"directory",
",",
"'%s:%s'",
"%",
"(",
"prefix",
",",
"directory",
")",
",",
"expression",
")"
] | [
480,
4
] | [
501,
77
] | python | en | ['en', 'error', 'th'] | False |
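To see the subscription triple _watch_glob hands to _subscribe, the standalone sketch below re-derives it with plain pathlib, no Watchman client required:

from pathlib import Path

def build_glob_subscription(directory: Path, patterns):
    # Mirrors the fallback above: when the directory does not exist yet,
    # watch its parent and prepend the directory name to every pattern.
    prefix = 'glob'
    if not directory.exists():
        if not directory.parent.exists():
            return None  # nothing watchable
        prefix = 'glob-parent-%s' % directory.name
        patterns = ['%s/%s' % (directory.name, p) for p in patterns]
        directory = directory.parent
    expression = ['anyof'] + [['match', p, 'wholename'] for p in patterns]
    return directory, '%s:%s' % (prefix, directory), expression

# With /proj present but /proj/locale absent,
# build_glob_subscription(Path('/proj/locale'), ['*/django.po']) returns
# (Path('/proj'), 'glob-parent-locale:/proj',
#  ['anyof', ['match', 'locale/*/django.po', 'wholename']]).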
WatchmanReloader.check_server_status | (self, inner_ex=None) | Return True if the server is available. | Return True if the server is available. | def check_server_status(self, inner_ex=None):
"""Return True if the server is available."""
try:
self.client.query('version')
except Exception:
raise WatchmanUnavailable(str(inner_ex)) from inner_ex
return True | [
"def",
"check_server_status",
"(",
"self",
",",
"inner_ex",
"=",
"None",
")",
":",
"try",
":",
"self",
".",
"client",
".",
"query",
"(",
"'version'",
")",
"except",
"Exception",
":",
"raise",
"WatchmanUnavailable",
"(",
"str",
"(",
"inner_ex",
")",
")",
"from",
"inner_ex",
"return",
"True"
] | [
577,
4
] | [
583,
19
] | python | en | ['en', 'en', 'en'] | True |
create_report | (BUCKET, gcsfilename, tmpdir) |
Creates report in gs://BUCKET/ based on contents in gcsfilename (gs://bucket/some/dir/filename)
|
Creates report in gs://BUCKET/ based on contents in gcsfilename (gs://bucket/some/dir/filename)
| def create_report(BUCKET, gcsfilename, tmpdir):
"""
Creates report in gs://BUCKET/ based on contents in gcsfilename (gs://bucket/some/dir/filename)
"""
# connect to BigQuery
client = bigquery.Client()
destination_table = client.get_table('sparktobq.kdd_cup')
# Specify table schema. Autodetect is not a good idea for production code
job_config = bigquery.LoadJobConfig()
schema = [
bigquery.SchemaField("duration", "INT64"),
]
for name in ['protocol_type', 'service', 'flag']:
schema.append(bigquery.SchemaField(name, "STRING"))
for name in 'src_bytes,dst_bytes,wrong_fragment,urgent,hot,num_failed_logins'.split(','):
schema.append(bigquery.SchemaField(name, "INT64"))
schema.append(bigquery.SchemaField("unused_10", "STRING"))
schema.append(bigquery.SchemaField("num_compromised", "INT64"))
schema.append(bigquery.SchemaField("unused_12", "STRING"))
for name in 'su_attempted,num_root,num_file_creations'.split(','):
schema.append(bigquery.SchemaField(name, "INT64"))
for fieldno in range(16, 41):
schema.append(bigquery.SchemaField("unused_{}".format(fieldno), "STRING"))
schema.append(bigquery.SchemaField("label", "STRING"))
job_config.schema = schema
# Load CSV data into BigQuery, replacing any rows that were there before
job_config.create_disposition = bigquery.CreateDisposition.CREATE_IF_NEEDED
job_config.write_disposition = bigquery.WriteDisposition.WRITE_TRUNCATE
job_config.skip_leading_rows = 0
job_config.source_format = bigquery.SourceFormat.CSV
load_job = client.load_table_from_uri(gcsfilename, destination_table, job_config=job_config)
print("Starting LOAD job {} for {}".format(load_job.job_id, gcsfilename))
load_job.result() # Waits for table load to complete.
print("Finished LOAD job {}".format(load_job.job_id))
# connections by protocol
sql = """
SELECT COUNT(*) AS count
FROM sparktobq.kdd_cup
GROUP BY protocol_type
ORDER by count ASC
"""
connections_by_protocol = client.query(sql).to_dataframe()
connections_by_protocol.to_csv(os.path.join(tmpdir,"connections_by_protocol.csv"))
print("Finished analyzing connections")
# attacks plot
sql = """
SELECT
protocol_type,
CASE label
WHEN 'normal.' THEN 'no attack'
ELSE 'attack'
END AS state,
COUNT(*) as total_freq,
ROUND(AVG(src_bytes), 2) as mean_src_bytes,
ROUND(AVG(dst_bytes), 2) as mean_dst_bytes,
ROUND(AVG(duration), 2) as mean_duration,
SUM(num_failed_logins) as total_failed_logins,
SUM(num_compromised) as total_compromised,
SUM(num_file_creations) as total_file_creations,
SUM(su_attempted) as total_root_attempts,
SUM(num_root) as total_root_acceses
FROM sparktobq.kdd_cup
GROUP BY protocol_type, state
ORDER BY 3 DESC
"""
attack_stats = client.query(sql).to_dataframe()
ax = attack_stats.plot.bar(x='protocol_type', subplots=True, figsize=(10,25))
ax[0].get_figure().savefig(os.path.join(tmpdir,'report.png'));
print("Finished analyzing attacks")
bucket = gcs.Client().get_bucket(BUCKET)
for blob in bucket.list_blobs(prefix='sparktobq/'):
blob.delete()
for fname in ['report.png', 'connections_by_protocol.csv']:
bucket.blob('sparktobq/{}'.format(fname)).upload_from_filename(os.path.join(tmpdir,fname))
print("Uploaded report based on {} to {}".format(gcsfilename, BUCKET)) | [
"def",
"create_report",
"(",
"BUCKET",
",",
"gcsfilename",
",",
"tmpdir",
")",
":",
"# connect to BigQuery",
"client",
"=",
"bigquery",
".",
"Client",
"(",
")",
"destination_table",
"=",
"client",
".",
"get_table",
"(",
"'sparktobq.kdd_cup'",
")",
"# Specify table schema. Autodetect is not a good idea for production code",
"job_config",
"=",
"bigquery",
".",
"LoadJobConfig",
"(",
")",
"schema",
"=",
"[",
"bigquery",
".",
"SchemaField",
"(",
"\"duration\"",
",",
"\"INT64\"",
")",
",",
"]",
"for",
"name",
"in",
"[",
"'protocol_type'",
",",
"'service'",
",",
"'flag'",
"]",
":",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"name",
",",
"\"STRING\"",
")",
")",
"for",
"name",
"in",
"'src_bytes,dst_bytes,wrong_fragment,urgent,hot,num_failed_logins'",
".",
"split",
"(",
"','",
")",
":",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"name",
",",
"\"INT64\"",
")",
")",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"\"unused_10\"",
",",
"\"STRING\"",
")",
")",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"\"num_compromised\"",
",",
"\"INT64\"",
")",
")",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"\"unused_12\"",
",",
"\"STRING\"",
")",
")",
"for",
"name",
"in",
"'su_attempted,num_root,num_file_creations'",
".",
"split",
"(",
"','",
")",
":",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"name",
",",
"\"INT64\"",
")",
")",
"for",
"fieldno",
"in",
"range",
"(",
"16",
",",
"41",
")",
":",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"\"unused_{}\"",
".",
"format",
"(",
"fieldno",
")",
",",
"\"STRING\"",
")",
")",
"schema",
".",
"append",
"(",
"bigquery",
".",
"SchemaField",
"(",
"\"label\"",
",",
"\"STRING\"",
")",
")",
"job_config",
".",
"schema",
"=",
"schema",
"# Load CSV data into BigQuery, replacing any rows that were there before",
"job_config",
".",
"create_disposition",
"=",
"bigquery",
".",
"CreateDisposition",
".",
"CREATE_IF_NEEDED",
"job_config",
".",
"write_disposition",
"=",
"bigquery",
".",
"WriteDisposition",
".",
"WRITE_TRUNCATE",
"job_config",
".",
"skip_leading_rows",
"=",
"0",
"job_config",
".",
"source_format",
"=",
"bigquery",
".",
"SourceFormat",
".",
"CSV",
"load_job",
"=",
"client",
".",
"load_table_from_uri",
"(",
"gcsfilename",
",",
"destination_table",
",",
"job_config",
"=",
"job_config",
")",
"print",
"(",
"\"Starting LOAD job {} for {}\"",
".",
"format",
"(",
"load_job",
".",
"job_id",
",",
"gcsfilename",
")",
")",
"load_job",
".",
"result",
"(",
")",
"# Waits for table load to complete.",
"print",
"(",
"\"Finished LOAD job {}\"",
".",
"format",
"(",
"load_job",
".",
"job_id",
")",
")",
"# connections by protocol",
"sql",
"=",
"\"\"\"\n SELECT COUNT(*) AS count\n FROM sparktobq.kdd_cup\n GROUP BY protocol_type\n ORDER by count ASC \n \"\"\"",
"connections_by_protocol",
"=",
"client",
".",
"query",
"(",
"sql",
")",
".",
"to_dataframe",
"(",
")",
"connections_by_protocol",
".",
"to_csv",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"\"connections_by_protocol.csv\"",
")",
")",
"print",
"(",
"\"Finished analyzing connections\"",
")",
"# attacks plot",
"sql",
"=",
"\"\"\"\n SELECT \n protocol_type, \n CASE label\n WHEN 'normal.' THEN 'no attack'\n ELSE 'attack'\n END AS state,\n COUNT(*) as total_freq,\n ROUND(AVG(src_bytes), 2) as mean_src_bytes,\n ROUND(AVG(dst_bytes), 2) as mean_dst_bytes,\n ROUND(AVG(duration), 2) as mean_duration,\n SUM(num_failed_logins) as total_failed_logins,\n SUM(num_compromised) as total_compromised,\n SUM(num_file_creations) as total_file_creations,\n SUM(su_attempted) as total_root_attempts,\n SUM(num_root) as total_root_acceses\n FROM sparktobq.kdd_cup\n GROUP BY protocol_type, state\n ORDER BY 3 DESC\n \"\"\"",
"attack_stats",
"=",
"client",
".",
"query",
"(",
"sql",
")",
".",
"to_dataframe",
"(",
")",
"ax",
"=",
"attack_stats",
".",
"plot",
".",
"bar",
"(",
"x",
"=",
"'protocol_type'",
",",
"subplots",
"=",
"True",
",",
"figsize",
"=",
"(",
"10",
",",
"25",
")",
")",
"ax",
"[",
"0",
"]",
".",
"get_figure",
"(",
")",
".",
"savefig",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"'report.png'",
")",
")",
"print",
"(",
"\"Finished analyzing attacks\"",
")",
"bucket",
"=",
"gcs",
".",
"Client",
"(",
")",
".",
"get_bucket",
"(",
"BUCKET",
")",
"for",
"blob",
"in",
"bucket",
".",
"list_blobs",
"(",
"prefix",
"=",
"'sparktobq/'",
")",
":",
"blob",
".",
"delete",
"(",
")",
"for",
"fname",
"in",
"[",
"'report.png'",
",",
"'connections_by_protocol.csv'",
"]",
":",
"bucket",
".",
"blob",
"(",
"'sparktobq/{}'",
".",
"format",
"(",
"fname",
")",
")",
".",
"upload_from_filename",
"(",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"fname",
")",
")",
"print",
"(",
"\"Uploaded report based on {} to {}\"",
".",
"format",
"(",
"gcsfilename",
",",
"BUCKET",
")",
")"
] | [
6,
0
] | [
85,
74
] | python | en | ['en', 'error', 'th'] | False |
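Running create_report needs Google Cloud credentials, the google-cloud-bigquery and google-cloud-storage packages (the row above omits the module's imports, e.g. `from google.cloud import bigquery` plus a `gcs` alias for the storage client and `os`), and an existing sparktobq.kdd_cup table. A hedged invocation with placeholder names:

import tempfile

# Assumes create_report from the row above is importable in scope. Both the
# bucket and the gs:// object below are placeholders, not values from it.
if __name__ == '__main__':
    workdir = tempfile.mkdtemp()
    create_report('my-project-bucket',
                  'gs://my-project-bucket/kddcup/kddcup.data_10_percent.csv',
                  workdir)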
guess_content_type | (filename, default="application/octet-stream") |
Guess the "Content-Type" of a file.
:param filename:
The filename to guess the "Content-Type" of using :mod:`mimetypes`.
:param default:
If no "Content-Type" can be guessed, default to `default`.
|
Guess the "Content-Type" of a file. | def guess_content_type(filename, default="application/octet-stream"):
"""
Guess the "Content-Type" of a file.
:param filename:
The filename to guess the "Content-Type" of using :mod:`mimetypes`.
:param default:
If no "Content-Type" can be guessed, default to `default`.
"""
if filename:
return mimetypes.guess_type(filename)[0] or default
return default | [
"def",
"guess_content_type",
"(",
"filename",
",",
"default",
"=",
"\"application/octet-stream\"",
")",
":",
"if",
"filename",
":",
"return",
"mimetypes",
".",
"guess_type",
"(",
"filename",
")",
"[",
"0",
"]",
"or",
"default",
"return",
"default"
] | [
9,
0
] | [
20,
18
] | python | en | ['en', 'error', 'th'] | False |
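Since guess_content_type is a thin wrapper over the standard library, its behaviour is easy to check in isolation; exact guesses can vary with the platform's mimetypes tables:

import mimetypes

def guess_content_type(filename, default="application/octet-stream"):
    if filename:
        return mimetypes.guess_type(filename)[0] or default
    return default

print(guess_content_type('report.png'))       # image/png on most platforms
print(guess_content_type('blob.unknownext'))  # application/octet-stream
print(guess_content_type(None))               # application/octet-stream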
format_header_param_rfc2231 | (name, value) |
Helper function to format and quote a single header parameter using the
strategy defined in RFC 2231.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows
`RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as ``bytes`` or ``str``.
:ret:
An RFC-2231-formatted unicode string.
|
Helper function to format and quote a single header parameter using the
strategy defined in RFC 2231. | def format_header_param_rfc2231(name, value):
"""
Helper function to format and quote a single header parameter using the
strategy defined in RFC 2231.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows
`RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as ``bytes`` or ``str``.
:ret:
An RFC-2231-formatted unicode string.
"""
if isinstance(value, six.binary_type):
value = value.decode("utf-8")
if not any(ch in value for ch in '"\\\r\n'):
result = u'%s="%s"' % (name, value)
try:
result.encode("ascii")
except (UnicodeEncodeError, UnicodeDecodeError):
pass
else:
return result
if six.PY2: # Python 2:
value = value.encode("utf-8")
# encode_rfc2231 accepts an encoded string and returns an ascii-encoded
# string in Python 2 but accepts and returns unicode strings in Python 3
value = email.utils.encode_rfc2231(value, "utf-8")
value = "%s*=%s" % (name, value)
if six.PY2: # Python 2:
value = value.decode("utf-8")
return value | [
"def",
"format_header_param_rfc2231",
"(",
"name",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"binary_type",
")",
":",
"value",
"=",
"value",
".",
"decode",
"(",
"\"utf-8\"",
")",
"if",
"not",
"any",
"(",
"ch",
"in",
"value",
"for",
"ch",
"in",
"'\"\\\\\\r\\n'",
")",
":",
"result",
"=",
"u'%s=\"%s\"'",
"%",
"(",
"name",
",",
"value",
")",
"try",
":",
"result",
".",
"encode",
"(",
"\"ascii\"",
")",
"except",
"(",
"UnicodeEncodeError",
",",
"UnicodeDecodeError",
")",
":",
"pass",
"else",
":",
"return",
"result",
"if",
"six",
".",
"PY2",
":",
"# Python 2:",
"value",
"=",
"value",
".",
"encode",
"(",
"\"utf-8\"",
")",
"# encode_rfc2231 accepts an encoded string and returns an ascii-encoded",
"# string in Python 2 but accepts and returns unicode strings in Python 3",
"value",
"=",
"email",
".",
"utils",
".",
"encode_rfc2231",
"(",
"value",
",",
"\"utf-8\"",
")",
"value",
"=",
"\"%s*=%s\"",
"%",
"(",
"name",
",",
"value",
")",
"if",
"six",
".",
"PY2",
":",
"# Python 2:",
"value",
"=",
"value",
".",
"decode",
"(",
"\"utf-8\"",
")",
"return",
"value"
] | [
23,
0
] | [
62,
16
] | python | en | ['en', 'error', 'th'] | False |
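On Python 3 the six.PY2 branches above are skipped, so the interesting path is the RFC 2231 fallback for non-ASCII values. The fragment below reproduces just that branch with the standard library; the encoded output in the comment is approximate:

import email.utils

name, value = 'filename', 'na\u00efve.txt'  # non-ASCII, so plain quoting fails
try:
    ('%s="%s"' % (name, value)).encode('ascii')
except UnicodeEncodeError:
    # Same call the function above makes: percent-encodes the UTF-8 bytes.
    header = '%s*=%s' % (name, email.utils.encode_rfc2231(value, 'utf-8'))
    print(header)  # roughly: filename*=utf-8''na%C3%AFve.txt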
format_header_param_html5 | (name, value) |
Helper function to format and quote a single header parameter using the
HTML5 strategy.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows the `HTML5 Working Draft
Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
.. _HTML5 Working Draft Section 4.10.22.7:
https://w3c.github.io/html/sec-forms.html#multipart-form-data
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as ``bytes`` or ``str``.
:ret:
A unicode string, stripped of troublesome characters.
|
Helper function to format and quote a single header parameter using the
HTML5 strategy. | def format_header_param_html5(name, value):
"""
Helper function to format and quote a single header parameter using the
HTML5 strategy.
Particularly useful for header parameters which might contain
non-ASCII values, like file names. This follows the `HTML5 Working Draft
Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
.. _HTML5 Working Draft Section 4.10.22.7:
https://w3c.github.io/html/sec-forms.html#multipart-form-data
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as ``bytes`` or ``str``.
:ret:
A unicode string, stripped of troublesome characters.
"""
if isinstance(value, six.binary_type):
value = value.decode("utf-8")
value = _replace_multiple(value, _HTML5_REPLACEMENTS)
return u'%s="%s"' % (name, value) | [
"def",
"format_header_param_html5",
"(",
"name",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"binary_type",
")",
":",
"value",
"=",
"value",
".",
"decode",
"(",
"\"utf-8\"",
")",
"value",
"=",
"_replace_multiple",
"(",
"value",
",",
"_HTML5_REPLACEMENTS",
")",
"return",
"u'%s=\"%s\"'",
"%",
"(",
"name",
",",
"value",
")"
] | [
94,
0
] | [
118,
37
] | python | en | ['en', 'error', 'th'] | False |
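The HTML5 strategy never falls back to RFC 2231: non-ASCII passes through and only a small set of troublesome characters is replaced. A rough standalone equivalent; the real _HTML5_REPLACEMENTS table is not shown in these rows, so the mapping below is an assumption:

_REPLACEMENTS = {'"': '%22', '\r': '%0D', '\n': '%0A'}  # assumed subset of the real table

def format_header_param_html5_sketch(name, value):
    # Decode bytes, then substitute each troublesome character per the table.
    if isinstance(value, bytes):
        value = value.decode('utf-8')
    for ch, repl in _REPLACEMENTS.items():
        value = value.replace(ch, repl)
    return '%s="%s"' % (name, value)

print(format_header_param_html5_sketch('filename', 'na\u00efve "v2".txt'))
# filename="naïve %22v2%22.txt" (the accented character is left as-is)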
RequestField.from_tuples | (cls, fieldname, value, header_formatter=format_header_param_html5) |
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
Supports constructing :class:`~urllib3.fields.RequestField` from
a parameter of key/value strings AND key/filetuple. A filetuple is a
(filename, data, MIME type) tuple where the MIME type is optional.
For example::
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
Field names and filenames must be unicode.
|
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters. | def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
"""
A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
Supports constructing :class:`~urllib3.fields.RequestField` from
a parameter of key/value strings AND key/filetuple. A filetuple is a
(filename, data, MIME type) tuple where the MIME type is optional.
For example::
'foo': 'bar',
'fakefile': ('foofile.txt', 'contents of foofile'),
'realfile': ('barfile.txt', open('realfile').read()),
'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
'nonamefile': 'contents of nonamefile field',
Field names and filenames must be unicode.
"""
if isinstance(value, tuple):
if len(value) == 3:
filename, data, content_type = value
else:
filename, data = value
content_type = guess_content_type(filename)
else:
filename = None
content_type = None
data = value
request_param = cls(
fieldname, data, filename=filename, header_formatter=header_formatter
)
request_param.make_multipart(content_type=content_type)
return request_param | [
"def",
"from_tuples",
"(",
"cls",
",",
"fieldname",
",",
"value",
",",
"header_formatter",
"=",
"format_header_param_html5",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"tuple",
")",
":",
"if",
"len",
"(",
"value",
")",
"==",
"3",
":",
"filename",
",",
"data",
",",
"content_type",
"=",
"value",
"else",
":",
"filename",
",",
"data",
"=",
"value",
"content_type",
"=",
"guess_content_type",
"(",
"filename",
")",
"else",
":",
"filename",
"=",
"None",
"content_type",
"=",
"None",
"data",
"=",
"value",
"request_param",
"=",
"cls",
"(",
"fieldname",
",",
"data",
",",
"filename",
"=",
"filename",
",",
"header_formatter",
"=",
"header_formatter",
")",
"request_param",
".",
"make_multipart",
"(",
"content_type",
"=",
"content_type",
")",
"return",
"request_param"
] | [
159,
4
] | [
192,
28
] | python | en | ['en', 'error', 'th'] | False |
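Exercising from_tuples directly; the import below assumes the stock urllib3 layout, and vendored copies (e.g. pip._vendor.urllib3.fields) differ only in the prefix:

from urllib3.fields import RequestField  # assumed module path

fields = [
    RequestField.from_tuples('foo', 'bar'),
    RequestField.from_tuples('fakefile', ('foofile.txt', 'contents of foofile')),
    RequestField.from_tuples(
        'typedfile', ('bazfile.bin', b'\x00\x01', 'application/octet-stream')),
]
for field in fields:
    print(field.render_headers())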
RequestField._render_part | (self, name, value) |
Overridable helper function to format a single header parameter. By
default, this calls ``self.header_formatter``.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as a unicode string.
|
Overridable helper function to format a single header parameter. By
default, this calls ``self.header_formatter``. | def _render_part(self, name, value):
"""
Overridable helper function to format a single header parameter. By
default, this calls ``self.header_formatter``.
:param name:
The name of the parameter, a string expected to be ASCII only.
:param value:
The value of the parameter, provided as a unicode string.
"""
return self.header_formatter(name, value) | [
"def",
"_render_part",
"(",
"self",
",",
"name",
",",
"value",
")",
":",
"return",
"self",
".",
"header_formatter",
"(",
"name",
",",
"value",
")"
] | [
194,
4
] | [
205,
49
] | python | en | ['en', 'error', 'th'] | False |
RequestField._render_parts | (self, header_parts) |
Helper function to format and quote a single header.
Useful for single headers that are composed of multiple items. E.g.,
'Content-Disposition' fields.
:param header_parts:
A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
as `k1="v1"; k2="v2"; ...`.
|
Helper function to format and quote a single header. | def _render_parts(self, header_parts):
"""
Helper function to format and quote a single header.
Useful for single headers that are composed of multiple items. E.g.,
'Content-Disposition' fields.
:param header_parts:
A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
as `k1="v1"; k2="v2"; ...`.
"""
parts = []
iterable = header_parts
if isinstance(header_parts, dict):
iterable = header_parts.items()
for name, value in iterable:
if value is not None:
parts.append(self._render_part(name, value))
return u"; ".join(parts) | [
"def",
"_render_parts",
"(",
"self",
",",
"header_parts",
")",
":",
"parts",
"=",
"[",
"]",
"iterable",
"=",
"header_parts",
"if",
"isinstance",
"(",
"header_parts",
",",
"dict",
")",
":",
"iterable",
"=",
"header_parts",
".",
"items",
"(",
")",
"for",
"name",
",",
"value",
"in",
"iterable",
":",
"if",
"value",
"is",
"not",
"None",
":",
"parts",
".",
"append",
"(",
"self",
".",
"_render_part",
"(",
"name",
",",
"value",
")",
")",
"return",
"u\"; \"",
".",
"join",
"(",
"parts",
")"
] | [
207,
4
] | [
227,
32
] | python | en | ['en', 'error', 'th'] | False |
RequestField.render_headers | (self) |
Renders the headers for this request field.
|
Renders the headers for this request field.
| def render_headers(self):
"""
Renders the headers for this request field.
"""
lines = []
sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
for sort_key in sort_keys:
if self.headers.get(sort_key, False):
lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
for header_name, header_value in self.headers.items():
if header_name not in sort_keys:
if header_value:
lines.append(u"%s: %s" % (header_name, header_value))
lines.append(u"\r\n")
return u"\r\n".join(lines) | [
"def",
"render_headers",
"(",
"self",
")",
":",
"lines",
"=",
"[",
"]",
"sort_keys",
"=",
"[",
"\"Content-Disposition\"",
",",
"\"Content-Type\"",
",",
"\"Content-Location\"",
"]",
"for",
"sort_key",
"in",
"sort_keys",
":",
"if",
"self",
".",
"headers",
".",
"get",
"(",
"sort_key",
",",
"False",
")",
":",
"lines",
".",
"append",
"(",
"u\"%s: %s\"",
"%",
"(",
"sort_key",
",",
"self",
".",
"headers",
"[",
"sort_key",
"]",
")",
")",
"for",
"header_name",
",",
"header_value",
"in",
"self",
".",
"headers",
".",
"items",
"(",
")",
":",
"if",
"header_name",
"not",
"in",
"sort_keys",
":",
"if",
"header_value",
":",
"lines",
".",
"append",
"(",
"u\"%s: %s\"",
"%",
"(",
"header_name",
",",
"header_value",
")",
")",
"lines",
".",
"append",
"(",
"u\"\\r\\n\"",
")",
"return",
"u\"\\r\\n\"",
".",
"join",
"(",
"lines",
")"
] | [
229,
4
] | [
246,
34
] | python | en | ['en', 'error', 'th'] | False |
RequestField.make_multipart | (
self, content_disposition=None, content_type=None, content_location=None
) |
Makes this request field into a multipart request field.
This method sets the "Content-Disposition", "Content-Type" and
"Content-Location" headers on the request parameter.
:param content_type:
The 'Content-Type' of the request body.
:param content_location:
The 'Content-Location' of the request body.
|
Makes this request field into a multipart request field. | def make_multipart(
self, content_disposition=None, content_type=None, content_location=None
):
"""
Makes this request field into a multipart request field.
This method sets the "Content-Disposition", "Content-Type" and
"Content-Location" headers on the request parameter.
:param content_type:
The 'Content-Type' of the request body.
:param content_location:
The 'Content-Location' of the request body.
"""
self.headers["Content-Disposition"] = content_disposition or u"form-data"
self.headers["Content-Disposition"] += u"; ".join(
[
u"",
self._render_parts(
((u"name", self._name), (u"filename", self._filename))
),
]
)
self.headers["Content-Type"] = content_type
self.headers["Content-Location"] = content_location | [
"def",
"make_multipart",
"(",
"self",
",",
"content_disposition",
"=",
"None",
",",
"content_type",
"=",
"None",
",",
"content_location",
"=",
"None",
")",
":",
"self",
".",
"headers",
"[",
"\"Content-Disposition\"",
"]",
"=",
"content_disposition",
"or",
"u\"form-data\"",
"self",
".",
"headers",
"[",
"\"Content-Disposition\"",
"]",
"+=",
"u\"; \"",
".",
"join",
"(",
"[",
"u\"\"",
",",
"self",
".",
"_render_parts",
"(",
"(",
"(",
"u\"name\"",
",",
"self",
".",
"_name",
")",
",",
"(",
"u\"filename\"",
",",
"self",
".",
"_filename",
")",
")",
")",
",",
"]",
")",
"self",
".",
"headers",
"[",
"\"Content-Type\"",
"]",
"=",
"content_type",
"self",
".",
"headers",
"[",
"\"Content-Location\"",
"]",
"=",
"content_location"
] | [
248,
4
] | [
273,
59
] | python | en | ['en', 'error', 'th'] | False |
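make_multipart and render_headers combine like this; the commented output shows the general shape, with ordering fixed by the sort_keys list above:

from urllib3.fields import RequestField  # assumed module path

field = RequestField('avatar', b'...png bytes...', filename='me.png')
field.make_multipart(content_type='image/png')
print(field.render_headers())
# Content-Disposition: form-data; name="avatar"; filename="me.png"
# Content-Type: image/png
# (a trailing blank line terminates the header block)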
AbstractConnectionPool.__init__ | (self, minconn, maxconn, *args, **kwargs) | Initialize the connection pool.
New 'minconn' connections are created immediately by calling
psycopg2.connect() with the given parameters. The connection pool will support a maximum of
about 'maxconn' connections.
| Initialize the connection pool. | def __init__(self, minconn, maxconn, *args, **kwargs):
"""Initialize the connection pool.
New 'minconn' connections are created immediately by calling
psycopg2.connect() with the given parameters. The connection pool will support a maximum of
about 'maxconn' connections.
"""
self.minconn = int(minconn)
self.maxconn = int(maxconn)
self.closed = False
self._args = args
self._kwargs = kwargs
self._pool = []
self._used = {}
self._rused = {} # id(conn) -> key map
self._keys = 0
for i in range(self.minconn):
self._connect() | [
"def",
"__init__",
"(",
"self",
",",
"minconn",
",",
"maxconn",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"minconn",
"=",
"int",
"(",
"minconn",
")",
"self",
".",
"maxconn",
"=",
"int",
"(",
"maxconn",
")",
"self",
".",
"closed",
"=",
"False",
"self",
".",
"_args",
"=",
"args",
"self",
".",
"_kwargs",
"=",
"kwargs",
"self",
".",
"_pool",
"=",
"[",
"]",
"self",
".",
"_used",
"=",
"{",
"}",
"self",
".",
"_rused",
"=",
"{",
"}",
"# id(conn) -> key map",
"self",
".",
"_keys",
"=",
"0",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"minconn",
")",
":",
"self",
".",
"_connect",
"(",
")"
] | [
38,
4
] | [
58,
27
] | python | en | ['en', 'en', 'en'] | True |
AbstractConnectionPool._connect | (self, key=None) | Create a new connection and assign it to 'key' if not None. | Create a new connection and assign it to 'key' if not None. | def _connect(self, key=None):
"""Create a new connection and assign it to 'key' if not None."""
conn = psycopg2.connect(*self._args, **self._kwargs)
if key is not None:
self._used[key] = conn
self._rused[id(conn)] = key
else:
self._pool.append(conn)
return conn | [
"def",
"_connect",
"(",
"self",
",",
"key",
"=",
"None",
")",
":",
"conn",
"=",
"psycopg2",
".",
"connect",
"(",
"*",
"self",
".",
"_args",
",",
"*",
"*",
"self",
".",
"_kwargs",
")",
"if",
"key",
"is",
"not",
"None",
":",
"self",
".",
"_used",
"[",
"key",
"]",
"=",
"conn",
"self",
".",
"_rused",
"[",
"id",
"(",
"conn",
")",
"]",
"=",
"key",
"else",
":",
"self",
".",
"_pool",
".",
"append",
"(",
"conn",
")",
"return",
"conn"
] | [
60,
4
] | [
68,
19
] | python | en | ['en', 'en', 'en'] | True |
AbstractConnectionPool._getkey | (self) | Return a new unique key. | Return a new unique key. | def _getkey(self):
"""Return a new unique key."""
self._keys += 1
return self._keys | [
"def",
"_getkey",
"(",
"self",
")",
":",
"self",
".",
"_keys",
"+=",
"1",
"return",
"self",
".",
"_keys"
] | [
70,
4
] | [
73,
25
] | python | ca | ['fr', 'ca', 'en'] | False |
AbstractConnectionPool._getconn | (self, key=None) | Get a free connection and assign it to 'key' if not None. | Get a free connection and assign it to 'key' if not None. | def _getconn(self, key=None):
"""Get a free connection and assign it to 'key' if not None."""
if self.closed:
raise PoolError("connection pool is closed")
if key is None:
key = self._getkey()
if key in self._used:
return self._used[key]
if self._pool:
self._used[key] = conn = self._pool.pop()
self._rused[id(conn)] = key
return conn
else:
if len(self._used) == self.maxconn:
raise PoolError("connection pool exhausted")
return self._connect(key) | [
"def",
"_getconn",
"(",
"self",
",",
"key",
"=",
"None",
")",
":",
"if",
"self",
".",
"closed",
":",
"raise",
"PoolError",
"(",
"\"connection pool is closed\"",
")",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"self",
".",
"_getkey",
"(",
")",
"if",
"key",
"in",
"self",
".",
"_used",
":",
"return",
"self",
".",
"_used",
"[",
"key",
"]",
"if",
"self",
".",
"_pool",
":",
"self",
".",
"_used",
"[",
"key",
"]",
"=",
"conn",
"=",
"self",
".",
"_pool",
".",
"pop",
"(",
")",
"self",
".",
"_rused",
"[",
"id",
"(",
"conn",
")",
"]",
"=",
"key",
"return",
"conn",
"else",
":",
"if",
"len",
"(",
"self",
".",
"_used",
")",
"==",
"self",
".",
"maxconn",
":",
"raise",
"PoolError",
"(",
"\"connection pool exhausted\"",
")",
"return",
"self",
".",
"_connect",
"(",
"key",
")"
] | [
75,
4
] | [
92,
37
] | python | en | ['en', 'en', 'en'] | True |
AbstractConnectionPool._putconn | (self, conn, key=None, close=False) | Put away a connection. | Put away a connection. | def _putconn(self, conn, key=None, close=False):
"""Put away a connection."""
if self.closed:
raise PoolError("connection pool is closed")
if key is None:
key = self._rused.get(id(conn))
if key is None:
raise PoolError("trying to put unkeyed connection")
if len(self._pool) < self.minconn and not close:
# Return the connection into a consistent state before putting
# it back into the pool
if not conn.closed:
status = conn.info.transaction_status
if status == _ext.TRANSACTION_STATUS_UNKNOWN:
# server connection lost
conn.close()
elif status != _ext.TRANSACTION_STATUS_IDLE:
# connection in error or in transaction
conn.rollback()
self._pool.append(conn)
else:
# regular idle connection
self._pool.append(conn)
# If the connection is closed, we just discard it.
else:
conn.close()
# here we check for the presence of key because it can happen that a
# thread tries to put back a connection after a call to close
if not self.closed or key in self._used:
del self._used[key]
del self._rused[id(conn)] | [
"def",
"_putconn",
"(",
"self",
",",
"conn",
",",
"key",
"=",
"None",
",",
"close",
"=",
"False",
")",
":",
"if",
"self",
".",
"closed",
":",
"raise",
"PoolError",
"(",
"\"connection pool is closed\"",
")",
"if",
"key",
"is",
"None",
":",
"key",
"=",
"self",
".",
"_rused",
".",
"get",
"(",
"id",
"(",
"conn",
")",
")",
"if",
"key",
"is",
"None",
":",
"raise",
"PoolError",
"(",
"\"trying to put unkeyed connection\"",
")",
"if",
"len",
"(",
"self",
".",
"_pool",
")",
"<",
"self",
".",
"minconn",
"and",
"not",
"close",
":",
"# Return the connection into a consistent state before putting",
"# it back into the pool",
"if",
"not",
"conn",
".",
"closed",
":",
"status",
"=",
"conn",
".",
"info",
".",
"transaction_status",
"if",
"status",
"==",
"_ext",
".",
"TRANSACTION_STATUS_UNKNOWN",
":",
"# server connection lost",
"conn",
".",
"close",
"(",
")",
"elif",
"status",
"!=",
"_ext",
".",
"TRANSACTION_STATUS_IDLE",
":",
"# connection in error or in transaction",
"conn",
".",
"rollback",
"(",
")",
"self",
".",
"_pool",
".",
"append",
"(",
"conn",
")",
"else",
":",
"# regular idle connection",
"self",
".",
"_pool",
".",
"append",
"(",
"conn",
")",
"# If the connection is closed, we just discard it.",
"else",
":",
"conn",
".",
"close",
"(",
")",
"# here we check for the presence of key because it can happen that a",
"# thread tries to put back a connection after a call to close",
"if",
"not",
"self",
".",
"closed",
"or",
"key",
"in",
"self",
".",
"_used",
":",
"del",
"self",
".",
"_used",
"[",
"key",
"]",
"del",
"self",
".",
"_rused",
"[",
"id",
"(",
"conn",
")",
"]"
] | [
94,
4
] | [
127,
37
] | python | en | ['en', 'en', 'en'] | True |
AbstractConnectionPool._closeall | (self) | Close all connections.
Note that this can cause some code to fail badly when trying to use
an already closed connection. If you call .closeall() make sure
your code can deal with it.
| Close all connections. | def _closeall(self):
"""Close all connections.
Note that this can cause some code to fail badly when trying to use
an already closed connection. If you call .closeall() make sure
your code can deal with it.
"""
if self.closed:
raise PoolError("connection pool is closed")
for conn in self._pool + list(self._used.values()):
try:
conn.close()
except Exception:
pass
self.closed = True | [
"def",
"_closeall",
"(",
"self",
")",
":",
"if",
"self",
".",
"closed",
":",
"raise",
"PoolError",
"(",
"\"connection pool is closed\"",
")",
"for",
"conn",
"in",
"self",
".",
"_pool",
"+",
"list",
"(",
"self",
".",
"_used",
".",
"values",
"(",
")",
")",
":",
"try",
":",
"conn",
".",
"close",
"(",
")",
"except",
"Exception",
":",
"pass",
"self",
".",
"closed",
"=",
"True"
] | [
129,
4
] | [
143,
26
] | python | en | ['en', 'en', 'en'] | True |
ThreadedConnectionPool.__init__ | (self, minconn, maxconn, *args, **kwargs) | Initialize the threading lock. | Initialize the threading lock. | def __init__(self, minconn, maxconn, *args, **kwargs):
"""Initialize the threading lock."""
import threading
AbstractConnectionPool.__init__(
self, minconn, maxconn, *args, **kwargs)
self._lock = threading.Lock() | [
"def",
"__init__",
"(",
"self",
",",
"minconn",
",",
"maxconn",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"import",
"threading",
"AbstractConnectionPool",
".",
"__init__",
"(",
"self",
",",
"minconn",
",",
"maxconn",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"_lock",
"=",
"threading",
".",
"Lock",
"(",
")"
] | [
157,
4
] | [
162,
37
] | python | en | ['en', 'en', 'en'] | True |
ThreadedConnectionPool.getconn | (self, key=None) | Get a free connection and assign it to 'key' if not None. | Get a free connection and assign it to 'key' if not None. | def getconn(self, key=None):
"""Get a free connection and assign it to 'key' if not None."""
self._lock.acquire()
try:
return self._getconn(key)
finally:
self._lock.release() | [
"def",
"getconn",
"(",
"self",
",",
"key",
"=",
"None",
")",
":",
"self",
".",
"_lock",
".",
"acquire",
"(",
")",
"try",
":",
"return",
"self",
".",
"_getconn",
"(",
"key",
")",
"finally",
":",
"self",
".",
"_lock",
".",
"release",
"(",
")"
] | [
164,
4
] | [
170,
32
] | python | en | ['en', 'en', 'en'] | True |
ThreadedConnectionPool.putconn | (self, conn=None, key=None, close=False) | Put away an unused connection. | Put away an unused connection. | def putconn(self, conn=None, key=None, close=False):
"""Put away an unused connection."""
self._lock.acquire()
try:
self._putconn(conn, key, close)
finally:
self._lock.release() | [
"def",
"putconn",
"(",
"self",
",",
"conn",
"=",
"None",
",",
"key",
"=",
"None",
",",
"close",
"=",
"False",
")",
":",
"self",
".",
"_lock",
".",
"acquire",
"(",
")",
"try",
":",
"self",
".",
"_putconn",
"(",
"conn",
",",
"key",
",",
"close",
")",
"finally",
":",
"self",
".",
"_lock",
".",
"release",
"(",
")"
] | [
172,
4
] | [
178,
32
] | python | en | ['en', 'en', 'en'] | True |
ThreadedConnectionPool.closeall | (self) | Close all connections (even the one currently in use.) | Close all connections (even the one currently in use.) | def closeall(self):
"""Close all connections (even the one currently in use.)"""
self._lock.acquire()
try:
self._closeall()
finally:
self._lock.release() | [
"def",
"closeall",
"(",
"self",
")",
":",
"self",
".",
"_lock",
".",
"acquire",
"(",
")",
"try",
":",
"self",
".",
"_closeall",
"(",
")",
"finally",
":",
"self",
".",
"_lock",
".",
"release",
"(",
")"
] | [
180,
4
] | [
186,
32
] | python | en | ['en', 'en', 'en'] | True |
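A full life cycle of the threaded pool, assuming the classes live at psycopg2.pool as in the released package; the DSN is a placeholder:

import psycopg2.pool

pool = psycopg2.pool.ThreadedConnectionPool(
    1, 5, 'dbname=test user=postgres')  # placeholder DSN

conn = pool.getconn()          # the lock is taken and released internally
try:
    with conn.cursor() as cur:
        cur.execute('SELECT 1')
        print(cur.fetchone())
    conn.commit()              # leave the connection idle before returning it
finally:
    pool.putconn(conn)         # back into the pool for reuse

pool.closeall()                # afterwards getconn and putconn raise PoolError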
sub_accounts | (enabled=None, ids=None, prefix=None, **options) |
List all sub accounts
:param enabled: Whether to only return enabled sub-accounts (true) or disabled accounts (false).
Default: all accounts are returned (both enabled and disabled).
:type enabled: bool, optional
:param ids: List of sub-account IDs. Up to 100. When provided, other filters are ignored.
:type ids: list, optional
:param prefix: Search by prefix of the sub-account name. Case-insensitive.
:type prefix: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: A list of sub accounts
:rtype: dict
|
List all sub accounts
:param enabled: Whether to only return enabled sub-accounts (true) or disabled accounts (false).
Default: all accounts are returned (both enabled and disabled).
:type enabled: bool, optional
:param ids: List of sub-account IDs. Up to 100. When provided, other filters are ignored.
:type ids: list, optional
:param prefix: Search by prefix of the sub-account name. Case-insensitive.
:type prefix: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: A list of sub accounts
:rtype: dict
| def sub_accounts(enabled=None, ids=None, prefix=None, **options):
"""
List all sub accounts
:param enabled: Whether to only return enabled sub-accounts (true) or disabled accounts (false).
Default: all accounts are returned (both enabled and disabled).
:type enabled: bool, optional
:param ids: List of sub-account IDs. Up to 100. When provided, other filters are ignored.
:type ids: list, optional
:param prefix: Search by prefix of the sub-account name. Case-insensitive.
:type prefix: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: A list of sub accounts
:rtype: dict
"""
uri = [SUB_ACCOUNTS_SUB_PATH]
params = {"ids": ids, "enabled": enabled, "prefix": prefix}
return _call_account_api("GET", uri, params=params, **options) | [
"def",
"sub_accounts",
"(",
"enabled",
"=",
"None",
",",
"ids",
"=",
"None",
",",
"prefix",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"SUB_ACCOUNTS_SUB_PATH",
"]",
"params",
"=",
"{",
"\"ids\"",
":",
"ids",
",",
"\"enabled\"",
":",
"enabled",
",",
"\"prefix\"",
":",
"prefix",
"}",
"return",
"_call_account_api",
"(",
"\"GET\"",
",",
"uri",
",",
"params",
"=",
"params",
",",
"*",
"*",
"options",
")"
] | [
22,
0
] | [
39,
66
] | python | en | ['en', 'error', 'th'] | False |
create_sub_account | (name, cloud_name=None, custom_attributes=None, enabled=None,
base_account=None, **options) |
Create a new sub account
:param name: Name of the new sub account
:type name: str
:param cloud_name: A case-insensitive cloud name comprised of alphanumeric and underscore characters.
* Generates an error if the cloud name is not unique across all Cloudinary accounts.
:type cloud_name: str, optional
:param custom_attributes: Any custom attributes you want to associate with the sub-account
:type custom_attributes: dict, optional
:param enabled: Whether to create the account as enabled (default is enabled).
:type enabled: bool, optional
:param base_account: ID of sub-account from which to copy settings
:type base_account: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The created sub account
:rtype: dict
|
Create a new sub account
:param name: Name of the new sub account
:type name: str
:param cloud_name: A case-insensitive cloud name comprised of alphanumeric and underscore characters.
* Generates an error if the cloud name is not unique across all Cloudinary accounts.
:type cloud_name: str, optional
:param custom_attributes: Any custom attributes you want to associate with the sub-account
:type custom_attributes: dict, optional
:param enabled: Whether to create the account as enabled (default is enabled).
:type enabled: bool, optional
:param base_account: ID of sub-account from which to copy settings
:type base_account: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The created sub account
:rtype: dict
| def create_sub_account(name, cloud_name=None, custom_attributes=None, enabled=None,
base_account=None, **options):
"""
Create a new sub account
:param name: Name of the new sub account
:type name: str
:param cloud_name: A case-insensitive cloud name comprised of alphanumeric and underscore characters.
* Generates an error if the cloud name is not unique across all Cloudinary accounts.
:type cloud_name: str, optional
:param custom_attributes: Any custom attributes you want to associate with the sub-account
:type custom_attributes: dict, optional
:param enabled: Whether to create the account as enabled (default is enabled).
:type enabled: bool, optional
:param base_account: ID of sub-account from which to copy settings
:type base_account: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The created sub account
:rtype: dict
"""
uri = [SUB_ACCOUNTS_SUB_PATH]
params = {"name": name,
"cloud_name": cloud_name,
"custom_attributes": custom_attributes,
"enabled": enabled,
"base_account": base_account}
return _call_account_api("POST", uri, params=params, **options) | [
"def",
"create_sub_account",
"(",
"name",
",",
"cloud_name",
"=",
"None",
",",
"custom_attributes",
"=",
"None",
",",
"enabled",
"=",
"None",
",",
"base_account",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"SUB_ACCOUNTS_SUB_PATH",
"]",
"params",
"=",
"{",
"\"name\"",
":",
"name",
",",
"\"cloud_name\"",
":",
"cloud_name",
",",
"\"custom_attributes\"",
":",
"custom_attributes",
",",
"\"enabled\"",
":",
"enabled",
",",
"\"base_account\"",
":",
"base_account",
"}",
"return",
"_call_account_api",
"(",
"\"POST\"",
",",
"uri",
",",
"params",
"=",
"params",
",",
"*",
"*",
"options",
")"
] | [
42,
0
] | [
68,
67
] | python | en | ['en', 'error', 'th'] | False |
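A provisioning round-trip using the two calls above might look like the sketch below; the cloudinary.provisioning module path and the 'id' response key are assumptions about the SDK's shape, and credentials are expected to come from the account-level configuration (e.g. a CLOUDINARY_ACCOUNT_URL environment variable):

import cloudinary.provisioning as provisioning  # assumed module path

created = provisioning.create_sub_account('staging', cloud_name='acme-staging')
sub_account_id = created['id']  # assumed response key

# List only enabled sub-accounts whose name starts with "acme"
print(provisioning.sub_accounts(enabled=True, prefix='acme'))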
delete_sub_account | (sub_account_id, **options) |
Delete a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Result message
:rtype: dict
|
Delete a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Result message
:rtype: dict
| def delete_sub_account(sub_account_id, **options):
"""
Delete a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Result message
:rtype: dict
"""
uri = [SUB_ACCOUNTS_SUB_PATH, sub_account_id]
return _call_account_api("delete", uri, {}, **options) | [
"def",
"delete_sub_account",
"(",
"sub_account_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"SUB_ACCOUNTS_SUB_PATH",
",",
"sub_account_id",
"]",
"return",
"_call_account_api",
"(",
"\"delete\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
71,
0
] | [
82,
58
] | python | en | ['en', 'error', 'th'] | False |
sub_account | (sub_account_id, **options) |
Get information of a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: A sub account
:rtype: dict
|
Get information of a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: A sub account
:rtype: dict
| def sub_account(sub_account_id, **options):
"""
Get information of a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: A sub account
:rtype: dict
"""
uri = [SUB_ACCOUNTS_SUB_PATH, sub_account_id]
return _call_account_api("get", uri, {}, **options) | [
"def",
"sub_account",
"(",
"sub_account_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"SUB_ACCOUNTS_SUB_PATH",
",",
"sub_account_id",
"]",
"return",
"_call_account_api",
"(",
"\"get\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
85,
0
] | [
96,
55
] | python | en | ['en', 'error', 'th'] | False |
update_sub_account | (sub_account_id, name=None, cloud_name=None, custom_attributes=None,
enabled=None, base_account=None,
**options) |
Update a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param name: Name of the account
:type name: str, optional
:param cloud_name: Unique cloud name
:type cloud_name: str, optional
:param custom_attributes: Any custom attributes you want to associate with the sub-account.
:type custom_attributes: dict, optional
:param enabled: Whether to create the account as enabled (default is enabled).
:type enabled: bool, optional
:param base_account: ID of sub-account from which to copy settings
:type base_account: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Updated sub account
:rtype: dict
|
Update a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param name: Name of the account
:type name: str, optional
:param cloud_name: Unique cloud name
:type cloud_name: str, optional
:param custom_attributes: Any custom attributes you want to associate with the sub-account.
:type custom_attributes: dict, optional
:param enabled: Whether to create the account as enabled (default is enabled).
:type enabled: bool, optional
:param base_account: ID of sub-account from which to copy settings
:type base_account: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Updated sub account
:rtype: dict
| def update_sub_account(sub_account_id, name=None, cloud_name=None, custom_attributes=None,
enabled=None, base_account=None,
**options):
"""
Update a sub account
:param sub_account_id: The id of the sub account
:type sub_account_id: str
:param name: Name of the account
:type name: str, optional
:param cloud_name: Unique cloud name
:type cloud_name: str, optional
:param custom_attributes: Any custom attributes you want to associate with the sub-account.
:type custom_attributes: dict, optional
:param enabled: Whether to create the account as enabled (default is enabled).
:type enabled: bool, optional
:param base_account: ID of sub-account from which to copy settings
:type base_account: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Updated sub account
:rtype: dict
"""
uri = [SUB_ACCOUNTS_SUB_PATH, sub_account_id]
params = {"name": name,
"cloud_name": cloud_name,
"custom_attributes": custom_attributes,
"enabled": enabled,
"base_account": base_account}
return _call_account_api("put", uri, params=params, **options) | [
"def",
"update_sub_account",
"(",
"sub_account_id",
",",
"name",
"=",
"None",
",",
"cloud_name",
"=",
"None",
",",
"custom_attributes",
"=",
"None",
",",
"enabled",
"=",
"None",
",",
"base_account",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"SUB_ACCOUNTS_SUB_PATH",
",",
"sub_account_id",
"]",
"params",
"=",
"{",
"\"name\"",
":",
"name",
",",
"\"cloud_name\"",
":",
"cloud_name",
",",
"\"custom_attributes\"",
":",
"custom_attributes",
",",
"\"enabled\"",
":",
"enabled",
",",
"\"base_account\"",
":",
"base_account",
"}",
"return",
"_call_account_api",
"(",
"\"put\"",
",",
"uri",
",",
"params",
"=",
"params",
",",
"*",
"*",
"options",
")"
] | [
99,
0
] | [
127,
66
] | python | en | ['en', 'error', 'th'] | False |
users | (user_ids=None, sub_account_id=None, pending=None, prefix=None, **options) |
List all users
:param user_ids: The ids of the users to fetch
:type user_ids: list, optional
:param sub_account_id: The id of a sub account
:type sub_account_id: str, optional
:param pending: Limit results to pending users (True),
users that are not pending (False),
or all users (None, the default).
:type pending: bool, optional
:param prefix: User prefix
:type prefix: str, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: List of users associated with the account
:rtype: dict
|
List all users
:param user_ids: The ids of the users to fetch
:type user_ids: list, optional
:param sub_account_id: The id of a sub account
:type sub_account_id: str, optional
:param pending: Limit results to pending users (True),
users that are not pending (False),
or all users (None, the default).
:type pending: bool, optional
:param prefix: User prefix
:type prefix: str, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: List of users associated with the account
:rtype: dict
| def users(user_ids=None, sub_account_id=None, pending=None, prefix=None, **options):
"""
List all users
:param user_ids: The ids of the users to fetch
:type user_ids: list, optional
:param sub_account_id: The id of a sub account
:type sub_account_id: str, optional
:param pending: Limit results to pending users (True),
users that are not pending (False),
or all users (None, the default).
:type pending: bool, optional
:param prefix: User prefix
:type prefix: str, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: List of users associated with the account
:rtype: dict
"""
uri = [USERS_SUB_PATH]
user_ids = encode_list(user_ids)
params = {"ids": user_ids,
"sub_account_id": sub_account_id,
"pending": pending,
"prefix": prefix}
return _call_account_api("get", uri, params=params, **options) | [
"def",
"users",
"(",
"user_ids",
"=",
"None",
",",
"sub_account_id",
"=",
"None",
",",
"pending",
"=",
"None",
",",
"prefix",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USERS_SUB_PATH",
"]",
"user_ids",
"=",
"encode_list",
"(",
"user_ids",
")",
"params",
"=",
"{",
"\"ids\"",
":",
"user_ids",
",",
"\"sub_account_id\"",
":",
"sub_account_id",
",",
"\"pending\"",
":",
"pending",
",",
"\"prefix\"",
":",
"prefix",
"}",
"return",
"_call_account_api",
"(",
"\"get\"",
",",
"uri",
",",
"params",
"=",
"params",
",",
"*",
"*",
"options",
")"
] | [
130,
0
] | [
154,
66
] | python | en | ['en', 'error', 'th'] | False |
create_user | (name, email, role, sub_account_ids=None, **options) |
Create a user
:param name: Username
:type name: str
:param email: User's email
:type email: str
:param role: User's role
:type role: str
:param sub_account_ids: Optional. Sub accounts to associate with the user
:type sub_account_ids: list, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: Details of created user
:rtype: dict
|
Create a user
:param name: Username
:type name: str
:param email: User's email
:type email: str
:param role: User's role
:type role: str
:param sub_account_ids: Optional. Sub accounts to associate with the user
:type sub_account_ids: list, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: Details of created user
:rtype: dict
| def create_user(name, email, role, sub_account_ids=None, **options):
"""
Create a user
:param name: Username
:type name: str
:param email: User's email
:type email: str
:param role: User's role
:type role: str
:param sub_account_ids: Optional. Sub accounts to associate with the user
:type sub_account_ids: list, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: Details of created user
:rtype: dict
"""
uri = [USERS_SUB_PATH]
params = {"name": name,
"email": email,
"role": role,
"sub_account_ids": sub_account_ids}
return _call_account_api("post", uri, params=params, **options) | [
"def",
"create_user",
"(",
"name",
",",
"email",
",",
"role",
",",
"sub_account_ids",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USERS_SUB_PATH",
"]",
"params",
"=",
"{",
"\"name\"",
":",
"name",
",",
"\"email\"",
":",
"email",
",",
"\"role\"",
":",
"role",
",",
"\"sub_account_ids\"",
":",
"sub_account_ids",
"}",
"return",
"_call_account_api",
"(",
"\"post\"",
",",
"uri",
",",
"params",
"=",
"params",
",",
"*",
"*",
"options",
")"
] | [
157,
0
] | [
178,
67
] | python | en | ['en', 'error', 'th'] | False |
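User management follows the same pattern. 'admin' and 'master_admin' below are plausible role strings (the valid set is defined by the Cloudinary API, not by these rows), and the 'id' key is again an assumption about the response:

import cloudinary.provisioning as provisioning  # assumed module path

new_user = provisioning.create_user('Jane Doe', 'jane@example.com', 'admin')
user_id = new_user['id']  # assumed response key

print(provisioning.users(pending=True))      # invitations not yet accepted
provisioning.update_user(user_id, role='master_admin')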
delete_user | (user_id, **options) |
Delete a user
:param user_id: The id of the user to delete
:type user_id: str
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: Result message
:rtype: dict
|
Delete a user
:param user_id: The id of the user to delete
:type user_id: str
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: Result message
:rtype: dict
| def delete_user(user_id, **options):
"""
Delete a user
:param user_id: The id of the user to delete
:type user_id: str
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: Result message
:rtype: dict
"""
uri = [USERS_SUB_PATH, user_id]
return _call_account_api("delete", uri, {}, **options) | [
"def",
"delete_user",
"(",
"user_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USERS_SUB_PATH",
",",
"user_id",
"]",
"return",
"_call_account_api",
"(",
"\"delete\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
181,
0
] | [
192,
58
] | python | en | ['en', 'error', 'th'] | False |
user | (user_id, **options) |
Get information of a user
:param user_id: The id of the user
:type user_id: str
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: A user
:rtype: dict
|
Get information of a user
:param user_id: The id of the user
:type user_id: str
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: A user
:rtype: dict
| def user(user_id, **options):
"""
Get information of a user
:param user_id: The id of the user
:type user_id: str
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: A user
:rtype: dict
"""
uri = [USERS_SUB_PATH, user_id]
return _call_account_api("get", uri, {}, **options) | [
"def",
"user",
"(",
"user_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USERS_SUB_PATH",
",",
"user_id",
"]",
"return",
"_call_account_api",
"(",
"\"get\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
195,
0
] | [
206,
55
] | python | en | ['en', 'error', 'th'] | False |
update_user | (user_id, name=None, email=None, role=None, sub_account_ids=None, **options) |
Update a user
:param user_id: The id of the user to update
:type user_id: str
:param name: Username
:type name: str, optional
:param email: User's email
:type email: str, optional
:param role: User's role
:type role: Role, optional
:param sub_account_ids: The list of sub-account IDs that this user can access.
Note: This parameter is ignored if the role is specified as master_admin.
:type sub_account_ids: list, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: The updated user
:rtype: dict
|
Update a user
:param user_id: The id of the user to update
:type user_id: str
:param name: Username
:type name: str, optional
:param email: User's email
:type email: str, optional
:param role: User's role
:type role: Role, optional
:param sub_account_ids: The list of sub-account IDs that this user can access.
Note: This parameter is ignored if the role is specified as master_admin.
:type sub_account_ids: list, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: The updated user
:rtype: dict
| def update_user(user_id, name=None, email=None, role=None, sub_account_ids=None, **options):
"""
Update a user
:param user_id: The id of the user to update
:type user_id: str
:param name: Username
:type name: str, optional
:param email: User's email
:type email: str, optional
:param role: User's role
:type role: Role, optional
:param sub_account_ids: The list of sub-account IDs that this user can access.
Note: This parameter is ignored if the role is specified as master_admin.
:type sub_account_ids: list, optional
:param options: Generic advanced options dict, see online documentation.
:type options: dict, optional
:return: The updated user
:rtype: dict
"""
uri = [USERS_SUB_PATH, user_id]
params = {"name": name,
"email": email,
"role": role,
"sub_account_ids": sub_account_ids}
return _call_account_api("put", uri, params=params, **options) | [
"def",
"update_user",
"(",
"user_id",
",",
"name",
"=",
"None",
",",
"email",
"=",
"None",
",",
"role",
"=",
"None",
",",
"sub_account_ids",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USERS_SUB_PATH",
",",
"user_id",
"]",
"params",
"=",
"{",
"\"name\"",
":",
"name",
",",
"\"email\"",
":",
"email",
",",
"\"role\"",
":",
"role",
",",
"\"sub_account_ids\"",
":",
"sub_account_ids",
"}",
"return",
"_call_account_api",
"(",
"\"put\"",
",",
"uri",
",",
"params",
"=",
"params",
",",
"*",
"*",
"options",
")"
] | [
209,
0
] | [
233,
66
] | python | en | ['en', 'error', 'th'] | False |
user_groups | (**options) |
List all user groups
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of user groups
:rtype: ProvisioningAPIResponse
|
List all user groups
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of user groups
:rtype: ProvisioningAPIResponse
| def user_groups(**options):
"""
List all user groups
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of user groups
:rtype: ProvisioningAPIResponse
"""
uri = [USER_GROUPS_SUB_PATH]
return _call_account_api("get", uri, {}, **options) | [
"def",
"user_groups",
"(",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
"]",
"return",
"_call_account_api",
"(",
"\"get\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
236,
0
] | [
245,
55
] | python | en | ['en', 'error', 'th'] | False |
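For completeness, a sketch of listing groups with user_groups(); the import path is an assumption and the record above does not specify the response shape, so the example only prints the raw result.

# Sketch: list all user groups in the account (import path assumed).
from cloudinary.provisioning import user_groups

groups = user_groups()
print(groups)  # response structure not specified by the record above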
create_user_group | (name, **options) |
Create a new user group
:param name: Name of the user group
:type name: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The newly created group
:rtype: dict
|
Create a new user group
:param name: Name of the user group
:type name: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The newly created group
:rtype: dict
| def create_user_group(name, **options):
"""
Create a new user group
:param name: Name of the user group
:type name: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The newly created group
:rtype: dict
"""
uri = [USER_GROUPS_SUB_PATH]
params = {"name": name}
return _call_account_api("post", uri, params, **options) | [
"def",
"create_user_group",
"(",
"name",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
"]",
"params",
"=",
"{",
"\"name\"",
":",
"name",
"}",
"return",
"_call_account_api",
"(",
"\"post\"",
",",
"uri",
",",
"params",
",",
"*",
"*",
"options",
")"
] | [
248,
0
] | [
260,
60
] | python | en | ['en', 'error', 'th'] | False |
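A sketch of create_user_group() under the same assumptions; the group name is a placeholder.

# Sketch: create a new user group (name and import path are placeholders).
from cloudinary.provisioning import create_user_group

group = create_user_group("hypothetical-group-name")
print(group)  # the newly created group, per the docstring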
update_user_group | (user_group_id, name, **options) |
Update a user group
:param user_group_id: The id of the user group to update
:type user_group_id: str
:param name: Name of the user group
:type name: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The updated group
:rtype: dict
|
Update a user group
:param user_group_id: The id of the user group to update
:type user_group_id: str
:param name: Name of the user group
:type name: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The updated group
:rtype: dict
| def update_user_group(user_group_id, name, **options):
"""
Update a user group
:param user_group_id: The id of the user group to update
:type user_group_id: str
:param name: Name of the user group
:type name: str, optional
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The updated group
:rtype: dict
"""
uri = [USER_GROUPS_SUB_PATH, user_group_id]
params = {"name": name}
return _call_account_api("put", uri, params, **options) | [
"def",
"update_user_group",
"(",
"user_group_id",
",",
"name",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
",",
"user_group_id",
"]",
"params",
"=",
"{",
"\"name\"",
":",
"name",
"}",
"return",
"_call_account_api",
"(",
"\"put\"",
",",
"uri",
",",
"params",
",",
"*",
"*",
"options",
")"
] | [
263,
0
] | [
277,
59
] | python | en | ['en', 'error', 'th'] | False |
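Renaming a group with update_user_group(), again as a hedged sketch with placeholder values.

# Sketch: rename an existing user group (id and name are placeholders).
from cloudinary.provisioning import update_user_group

renamed = update_user_group("hypothetical-group-id", "hypothetical-new-name")
print(renamed)  # the updated group, per the docstring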
delete_user_group | (user_group_id, **options) |
Delete a user group
:param user_group_id: The id of the user group to delete
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The result message
:rtype: dict
|
Delete a user group
:param user_group_id: The id of the user group to delete
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The result message
:rtype: dict
| def delete_user_group(user_group_id, **options):
"""
Delete a user group
:param user_group_id: The id of the user group to delete
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: The result message
:rtype: dict
"""
uri = [USER_GROUPS_SUB_PATH, user_group_id]
return _call_account_api("delete", uri, {}, **options) | [
"def",
"delete_user_group",
"(",
"user_group_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
",",
"user_group_id",
"]",
"return",
"_call_account_api",
"(",
"\"delete\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
280,
0
] | [
291,
58
] | python | en | ['en', 'error', 'th'] | False |
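Deleting a group is symmetric; a sketch with a placeholder id follows.

# Sketch: delete a user group by id (placeholder id, assumed import path).
from cloudinary.provisioning import delete_user_group

result = delete_user_group("hypothetical-group-id")
print(result)  # a result message dict, per the docstring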
user_group | (user_group_id, **options) |
Get information about a user group
:param user_group_id: The id of the user group
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Details of the group
:rtype: dict
|
Get information about a user group
:param user_group_id: The id of the user group
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Details of the group
:rtype: dict
| def user_group(user_group_id, **options):
"""
Get information about a user group
:param user_group_id: The id of the user group
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: Details of the group
:rtype: dict
"""
uri = [USER_GROUPS_SUB_PATH, user_group_id]
return _call_account_api("get", uri, {}, **options) | [
"def",
"user_group",
"(",
"user_group_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
",",
"user_group_id",
"]",
"return",
"_call_account_api",
"(",
"\"get\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
294,
0
] | [
305,
55
] | python | en | ['en', 'error', 'th'] | False |
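Fetching one group's details mirrors the user() accessor; placeholders as before.

# Sketch: fetch details of a single user group (placeholder id).
from cloudinary.provisioning import user_group

details = user_group("hypothetical-group-id")
print(details)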
add_user_to_group | (user_group_id, user_id, **options) |
Add a user to a user group
:param user_group_id: The id of the user group to add the user to
:type user_group_id: str
:param user_id: The user id to add
:type user_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
|
Add a user to a user group
:param user_group_id: The id of the user group to add the user to
:type user_group_id: str
:param user_id: The user id to add
:type user_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
| def add_user_to_group(user_group_id, user_id, **options):
"""
Add a user to a user group
:param user_group_id: The id of the user group to add the user to
:type user_group_id: str
:param user_id: The user id to add
:type user_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
"""
uri = [USER_GROUPS_SUB_PATH, user_group_id, "users", user_id]
return _call_account_api("post", uri, {}, **options) | [
"def",
"add_user_to_group",
"(",
"user_group_id",
",",
"user_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
",",
"user_group_id",
",",
"\"users\"",
",",
"user_id",
"]",
"return",
"_call_account_api",
"(",
"\"post\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
308,
0
] | [
321,
56
] | python | en | ['en', 'error', 'th'] | False |
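A sketch of group membership management with add_user_to_group(); both ids are placeholders.

# Sketch: add a user to a user group (both ids are placeholders).
from cloudinary.provisioning import add_user_to_group

members = add_user_to_group("hypothetical-group-id", "hypothetical-user-id")
print(members)  # list of users in the group, per the docstring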
remove_user_from_group | (user_group_id, user_id, **options) |
Remove a user from a user group
:param user_group_id: The id of the user group to remove the user from
:type user_group_id: str
:param user_id: The id of the user to remove
:type user_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
|
Remove a user from a user group
:param user_group_id: The id of the user group to remove the user from
:type user_group_id: str
:param user_id: The id of the user to remove
:type user_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
| def remove_user_from_group(user_group_id, user_id, **options):
"""
Remove a user from a user group
:param user_group_id: The id of the user group to remove the user from
:type user_group_id: str
:param user_id: The id of the user to remove
:type user_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
"""
uri = [USER_GROUPS_SUB_PATH, user_group_id, "users", user_id]
return _call_account_api("delete", uri, {}, **options) | [
"def",
"remove_user_from_group",
"(",
"user_group_id",
",",
"user_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
",",
"user_group_id",
",",
"\"users\"",
",",
"user_id",
"]",
"return",
"_call_account_api",
"(",
"\"delete\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
324,
0
] | [
337,
58
] | python | en | ['en', 'error', 'th'] | False |
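The inverse operation, sketched under the same assumptions.

# Sketch: remove a user from a user group (both ids are placeholders).
from cloudinary.provisioning import remove_user_from_group

members = remove_user_from_group("hypothetical-group-id", "hypothetical-user-id")
print(members)  # remaining users in the group, per the docstring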
user_group_users | (user_group_id, **options) |
Get all users in a user group
:param user_group_id: The id of the user group whose users to list
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
|
Get all users in a user group
:param user_group_id: The id of the user group whose users to list
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
| def user_group_users(user_group_id, **options):
"""
Get all users in a user group
:param user_group_id: The id of the user group whose users to list
:type user_group_id: str
:param options: Generic advanced options dict, see online documentation
:type options: dict, optional
:return: List of users in the group
:rtype: dict
"""
uri = [USER_GROUPS_SUB_PATH, user_group_id, "users"]
return _call_account_api("get", uri, {}, **options) | [
"def",
"user_group_users",
"(",
"user_group_id",
",",
"*",
"*",
"options",
")",
":",
"uri",
"=",
"[",
"USER_GROUPS_SUB_PATH",
",",
"user_group_id",
",",
"\"users\"",
"]",
"return",
"_call_account_api",
"(",
"\"get\"",
",",
"uri",
",",
"{",
"}",
",",
"*",
"*",
"options",
")"
] | [
340,
0
] | [
351,
55
] | python | en | ['en', 'error', 'th'] | False |
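Finally, listing a group's members, again hedged with placeholder values.

# Sketch: list the users belonging to one user group (placeholder id).
from cloudinary.provisioning import user_group_users

members = user_group_users("hypothetical-group-id")
print(members)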