identifier (string, 1-155 chars) | parameters (string, 2-6.09k chars) | docstring (string, 11-63.4k chars) | docstring_summary (string, 0-63.4k chars) | function (string, 29-99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (string, 1 class) | docstring_language (string, 2-7 chars) | docstring_language_predictions (string, 18-23 chars) | is_langid_reliable (string, 2 classes) |
---|---|---|---|---|---|---|---|---|---|---|---|
ContainerIO.read | (self, n=0) |
Read data.
:param n: Number of bytes to read. If omitted or zero,
read until end of region.
:returns: An 8-bit string.
|
Read data. | def read(self, n=0):
"""
Read data.
:param n: Number of bytes to read. If omitted or zero,
read until end of region.
:returns: An 8-bit string.
"""
if n:
n = min(n, self.length - self.pos)
else:
n = self.length - self.pos
if not n: # EOF
return b"" if "b" in self.fh.mode else ""
self.pos = self.pos + n
return self.fh.read(n) | [
"def",
"read",
"(",
"self",
",",
"n",
"=",
"0",
")",
":",
"if",
"n",
":",
"n",
"=",
"min",
"(",
"n",
",",
"self",
".",
"length",
"-",
"self",
".",
"pos",
")",
"else",
":",
"n",
"=",
"self",
".",
"length",
"-",
"self",
".",
"pos",
"if",
"not",
"n",
":",
"# EOF",
"return",
"b\"\"",
"if",
"\"b\"",
"in",
"self",
".",
"fh",
".",
"mode",
"else",
"\"\"",
"self",
".",
"pos",
"=",
"self",
".",
"pos",
"+",
"n",
"return",
"self",
".",
"fh",
".",
"read",
"(",
"n",
")"
] | [
73,
4
] | [
88,
30
] | python | en | ['en', 'error', 'th'] | False |
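A minimal usage sketch for ContainerIO.read, assuming the class is Pillow's PIL.ContainerIO (the constructor signature ContainerIO(file, offset, length) and the file name below are assumptions, not part of the row):

from PIL import ContainerIO  # assumed import path (Pillow)

with open("archive.bin", "rb") as fh:                    # illustrative file name
    region = ContainerIO.ContainerIO(fh, 128, 1024)      # expose bytes 128..1151 as a file-like region
    header = region.read(16)                             # read the first 16 bytes of the region
    rest = region.read()                                 # n=0 (the default) reads to the end of the region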
ContainerIO.readline | (self) |
Read a line of text.
:returns: An 8-bit string.
|
Read a line of text. | def readline(self):
"""
Read a line of text.
:returns: An 8-bit string.
"""
s = b"" if "b" in self.fh.mode else ""
newline_character = b"\n" if "b" in self.fh.mode else "\n"
while True:
c = self.read(1)
if not c:
break
s = s + c
if c == newline_character:
break
return s | [
"def",
"readline",
"(",
"self",
")",
":",
"s",
"=",
"b\"\"",
"if",
"\"b\"",
"in",
"self",
".",
"fh",
".",
"mode",
"else",
"\"\"",
"newline_character",
"=",
"b\"\\n\"",
"if",
"\"b\"",
"in",
"self",
".",
"fh",
".",
"mode",
"else",
"\"\\n\"",
"while",
"True",
":",
"c",
"=",
"self",
".",
"read",
"(",
"1",
")",
"if",
"not",
"c",
":",
"break",
"s",
"=",
"s",
"+",
"c",
"if",
"c",
"==",
"newline_character",
":",
"break",
"return",
"s"
] | [
90,
4
] | [
105,
16
] | python | en | ['en', 'error', 'th'] | False |
ContainerIO.readlines | (self) |
Read multiple lines of text.
:returns: A list of 8-bit strings.
|
Read multiple lines of text. | def readlines(self):
"""
Read multiple lines of text.
:returns: A list of 8-bit strings.
"""
lines = []
while True:
s = self.readline()
if not s:
break
lines.append(s)
return lines | [
"def",
"readlines",
"(",
"self",
")",
":",
"lines",
"=",
"[",
"]",
"while",
"True",
":",
"s",
"=",
"self",
".",
"readline",
"(",
")",
"if",
"not",
"s",
":",
"break",
"lines",
".",
"append",
"(",
"s",
")",
"return",
"lines"
] | [
107,
4
] | [
119,
20
] | python | en | ['en', 'error', 'th'] | False |
run_command | (*cmd, **kwargs) | The stdout of most fbcode_builder utilities is meant to be parsed. | The stdout of most fbcode_builder utilities is meant to be parsed. | def run_command(*cmd, **kwargs):
'The stdout of most fbcode_builder utilities is meant to be parsed.'
logging.debug('Running: {0} with {1}'.format(cmd, kwargs))
kwargs['stdout'] = sys.stderr
subprocess.check_call(cmd, **kwargs) | [
"def",
"run_command",
"(",
"*",
"cmd",
",",
"*",
"*",
"kwargs",
")",
":",
"logging",
".",
"debug",
"(",
"'Running: {0} with {1}'",
".",
"format",
"(",
"cmd",
",",
"kwargs",
")",
")",
"kwargs",
"[",
"'stdout'",
"]",
"=",
"sys",
".",
"stderr",
"subprocess",
".",
"check_call",
"(",
"cmd",
",",
"*",
"*",
"kwargs",
")"
] | [
24,
0
] | [
28,
40
] | python | en | ['en', 'en', 'en'] | True |
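A short usage sketch for run_command; it only wraps subprocess.check_call, logging the command and redirecting stdout to stderr. The import path is an assumption (fbcode_builder's utils module):

from utils import run_command  # assumed module name

run_command("git", "rev-parse", "HEAD")            # raises CalledProcessError on non-zero exit
run_command("make", "-j4", cwd="/tmp/build")       # extra keyword arguments are forwarded to check_call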
_inner_read_config | (path) |
Helper to read a named config file.
The grossness with the global is a workaround for this python bug:
https://bugs.python.org/issue21591
The bug prevents us from defining either a local function or a lambda
in the scope of read_fbcode_builder_config below.
|
Helper to read a named config file.
The grossness with the global is a workaround for this python bug:
https://bugs.python.org/issue21591
The bug prevents us from defining either a local function or a lambda
in the scope of read_fbcode_builder_config below.
| def _inner_read_config(path):
'''
Helper to read a named config file.
The grossness with the global is a workaround for this python bug:
https://bugs.python.org/issue21591
The bug prevents us from defining either a local function or a lambda
in the scope of read_fbcode_builder_config below.
'''
global _project_dir
full_path = os.path.join(_project_dir, path)
return read_fbcode_builder_config(full_path) | [
"def",
"_inner_read_config",
"(",
"path",
")",
":",
"global",
"_project_dir",
"full_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"_project_dir",
",",
"path",
")",
"return",
"read_fbcode_builder_config",
"(",
"full_path",
")"
] | [
40,
0
] | [
50,
48
] | python | en | ['en', 'error', 'th'] | False |
steps_for_spec | (builder, spec, processed_modules=None) |
Sets `builder` configuration, and returns all the builder steps
necessary to build `spec` and its dependencies.
Traverses the dependencies in depth-first order, honoring the sequencing
in each 'depends_on' list.
|
Sets `builder` configuration, and returns all the builder steps
necessary to build `spec` and its dependencies. | def steps_for_spec(builder, spec, processed_modules=None):
'''
Sets `builder` configuration, and returns all the builder steps
necessary to build `spec` and its dependencies.
Traverses the dependencies in depth-first order, honoring the sequencing
in each 'depends_on' list.
'''
if processed_modules is None:
processed_modules = set()
steps = []
for module in spec.get('depends_on', []):
if module not in processed_modules:
processed_modules.add(module)
steps.extend(steps_for_spec(
builder,
module.fbcode_builder_spec(builder),
processed_modules
))
steps.extend(spec.get('steps', []))
return steps | [
"def",
"steps_for_spec",
"(",
"builder",
",",
"spec",
",",
"processed_modules",
"=",
"None",
")",
":",
"if",
"processed_modules",
"is",
"None",
":",
"processed_modules",
"=",
"set",
"(",
")",
"steps",
"=",
"[",
"]",
"for",
"module",
"in",
"spec",
".",
"get",
"(",
"'depends_on'",
",",
"[",
"]",
")",
":",
"if",
"module",
"not",
"in",
"processed_modules",
":",
"processed_modules",
".",
"add",
"(",
"module",
")",
"steps",
".",
"extend",
"(",
"steps_for_spec",
"(",
"builder",
",",
"module",
".",
"fbcode_builder_spec",
"(",
"builder",
")",
",",
"processed_modules",
")",
")",
"steps",
".",
"extend",
"(",
"spec",
".",
"get",
"(",
"'steps'",
",",
"[",
"]",
")",
")",
"return",
"steps"
] | [
68,
0
] | [
88,
16
] | python | en | ['en', 'error', 'th'] | False |
handle_default_options | (options) |
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
|
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
| def handle_default_options(options):
"""
Include any default options that all commands should accept here
so that ManagementUtility can handle them before searching for
user commands.
"""
if options.settings:
os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
if options.pythonpath:
sys.path.insert(0, options.pythonpath) | [
"def",
"handle_default_options",
"(",
"options",
")",
":",
"if",
"options",
".",
"settings",
":",
"os",
".",
"environ",
"[",
"'DJANGO_SETTINGS_MODULE'",
"]",
"=",
"options",
".",
"settings",
"if",
"options",
".",
"pythonpath",
":",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"options",
".",
"pythonpath",
")"
] | [
69,
0
] | [
78,
46
] | python | en | ['en', 'error', 'th'] | False |
no_translations | (handle_func) | Decorator that forces a command to run with translations deactivated. | Decorator that forces a command to run with translations deactivated. | def no_translations(handle_func):
"""Decorator that forces a command to run with translations deactivated."""
def wrapped(*args, **kwargs):
from django.utils import translation
saved_locale = translation.get_language()
translation.deactivate_all()
try:
res = handle_func(*args, **kwargs)
finally:
if saved_locale is not None:
translation.activate(saved_locale)
return res
return wrapped | [
"def",
"no_translations",
"(",
"handle_func",
")",
":",
"def",
"wrapped",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
"django",
".",
"utils",
"import",
"translation",
"saved_locale",
"=",
"translation",
".",
"get_language",
"(",
")",
"translation",
".",
"deactivate_all",
"(",
")",
"try",
":",
"res",
"=",
"handle_func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"finally",
":",
"if",
"saved_locale",
"is",
"not",
"None",
":",
"translation",
".",
"activate",
"(",
"saved_locale",
")",
"return",
"res",
"return",
"wrapped"
] | [
81,
0
] | [
93,
18
] | python | en | ['en', 'en', 'en'] | True |
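A sketch of how the no_translations decorator is normally applied inside a Django management command (the command body is invented for illustration):

from django.core.management.base import BaseCommand, no_translations

class Command(BaseCommand):
    help = "Example command that must run with translations deactivated."

    @no_translations
    def handle(self, *args, **options):
        # Translations are deactivated for the duration of handle() and restored afterwards.
        self.stdout.write("done")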
BaseCommand.get_version | (self) |
Return the Django version, which should be correct for all built-in
Django commands. User-supplied commands can override this method to
return their own version.
|
Return the Django version, which should be correct for all built-in
Django commands. User-supplied commands can override this method to
return their own version.
| def get_version(self):
"""
Return the Django version, which should be correct for all built-in
Django commands. User-supplied commands can override this method to
return their own version.
"""
return django.get_version() | [
"def",
"get_version",
"(",
"self",
")",
":",
"return",
"django",
".",
"get_version",
"(",
")"
] | [
266,
4
] | [
272,
35
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.create_parser | (self, prog_name, subcommand, **kwargs) |
Create and return the ``ArgumentParser`` which will be used to
parse the arguments to this command.
|
Create and return the ``ArgumentParser`` which will be used to
parse the arguments to this command.
| def create_parser(self, prog_name, subcommand, **kwargs):
"""
Create and return the ``ArgumentParser`` which will be used to
parse the arguments to this command.
"""
parser = CommandParser(
prog='%s %s' % (os.path.basename(prog_name), subcommand),
description=self.help or None,
formatter_class=DjangoHelpFormatter,
missing_args_message=getattr(self, 'missing_args_message', None),
called_from_command_line=getattr(self, '_called_from_command_line', None),
**kwargs
)
parser.add_argument('--version', action='version', version=self.get_version())
parser.add_argument(
'-v', '--verbosity', default=1,
type=int, choices=[0, 1, 2, 3],
help='Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output',
)
parser.add_argument(
'--settings',
help=(
'The Python path to a settings module, e.g. '
'"myproject.settings.main". If this isn\'t provided, the '
'DJANGO_SETTINGS_MODULE environment variable will be used.'
),
)
parser.add_argument(
'--pythonpath',
help='A directory to add to the Python path, e.g. "/home/djangoprojects/myproject".',
)
parser.add_argument('--traceback', action='store_true', help='Raise on CommandError exceptions')
parser.add_argument(
'--no-color', action='store_true',
help="Don't colorize the command output.",
)
parser.add_argument(
'--force-color', action='store_true',
help='Force colorization of the command output.',
)
if self.requires_system_checks:
parser.add_argument(
'--skip-checks', action='store_true',
help='Skip system checks.',
)
self.add_arguments(parser)
return parser | [
"def",
"create_parser",
"(",
"self",
",",
"prog_name",
",",
"subcommand",
",",
"*",
"*",
"kwargs",
")",
":",
"parser",
"=",
"CommandParser",
"(",
"prog",
"=",
"'%s %s'",
"%",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"prog_name",
")",
",",
"subcommand",
")",
",",
"description",
"=",
"self",
".",
"help",
"or",
"None",
",",
"formatter_class",
"=",
"DjangoHelpFormatter",
",",
"missing_args_message",
"=",
"getattr",
"(",
"self",
",",
"'missing_args_message'",
",",
"None",
")",
",",
"called_from_command_line",
"=",
"getattr",
"(",
"self",
",",
"'_called_from_command_line'",
",",
"None",
")",
",",
"*",
"*",
"kwargs",
")",
"parser",
".",
"add_argument",
"(",
"'--version'",
",",
"action",
"=",
"'version'",
",",
"version",
"=",
"self",
".",
"get_version",
"(",
")",
")",
"parser",
".",
"add_argument",
"(",
"'-v'",
",",
"'--verbosity'",
",",
"default",
"=",
"1",
",",
"type",
"=",
"int",
",",
"choices",
"=",
"[",
"0",
",",
"1",
",",
"2",
",",
"3",
"]",
",",
"help",
"=",
"'Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output'",
",",
")",
"parser",
".",
"add_argument",
"(",
"'--settings'",
",",
"help",
"=",
"(",
"'The Python path to a settings module, e.g. '",
"'\"myproject.settings.main\". If this isn\\'t provided, the '",
"'DJANGO_SETTINGS_MODULE environment variable will be used.'",
")",
",",
")",
"parser",
".",
"add_argument",
"(",
"'--pythonpath'",
",",
"help",
"=",
"'A directory to add to the Python path, e.g. \"/home/djangoprojects/myproject\".'",
",",
")",
"parser",
".",
"add_argument",
"(",
"'--traceback'",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"'Raise on CommandError exceptions'",
")",
"parser",
".",
"add_argument",
"(",
"'--no-color'",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"\"Don't colorize the command output.\"",
",",
")",
"parser",
".",
"add_argument",
"(",
"'--force-color'",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"'Force colorization of the command output.'",
",",
")",
"if",
"self",
".",
"requires_system_checks",
":",
"parser",
".",
"add_argument",
"(",
"'--skip-checks'",
",",
"action",
"=",
"'store_true'",
",",
"help",
"=",
"'Skip system checks.'",
",",
")",
"self",
".",
"add_arguments",
"(",
"parser",
")",
"return",
"parser"
] | [
274,
4
] | [
320,
21
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.add_arguments | (self, parser) |
Entry point for subclassed commands to add custom arguments.
|
Entry point for subclassed commands to add custom arguments.
| def add_arguments(self, parser):
"""
Entry point for subclassed commands to add custom arguments.
"""
pass | [
"def",
"add_arguments",
"(",
"self",
",",
"parser",
")",
":",
"pass"
] | [
322,
4
] | [
326,
12
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.print_help | (self, prog_name, subcommand) |
Print the help message for this command, derived from
``self.usage()``.
|
Print the help message for this command, derived from
``self.usage()``.
| def print_help(self, prog_name, subcommand):
"""
Print the help message for this command, derived from
``self.usage()``.
"""
parser = self.create_parser(prog_name, subcommand)
parser.print_help() | [
"def",
"print_help",
"(",
"self",
",",
"prog_name",
",",
"subcommand",
")",
":",
"parser",
"=",
"self",
".",
"create_parser",
"(",
"prog_name",
",",
"subcommand",
")",
"parser",
".",
"print_help",
"(",
")"
] | [
328,
4
] | [
334,
27
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.run_from_argv | (self, argv) |
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command. If the
command raises a ``CommandError``, intercept it and print it sensibly
to stderr. If the ``--traceback`` option is present or the raised
``Exception`` is not ``CommandError``, raise it.
|
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command. If the
command raises a ``CommandError``, intercept it and print it sensibly
to stderr. If the ``--traceback`` option is present or the raised
``Exception`` is not ``CommandError``, raise it.
| def run_from_argv(self, argv):
"""
Set up any environment changes requested (e.g., Python path
and Django settings), then run this command. If the
command raises a ``CommandError``, intercept it and print it sensibly
to stderr. If the ``--traceback`` option is present or the raised
``Exception`` is not ``CommandError``, raise it.
"""
self._called_from_command_line = True
parser = self.create_parser(argv[0], argv[1])
options = parser.parse_args(argv[2:])
cmd_options = vars(options)
# Move positional args out of options to mimic legacy optparse
args = cmd_options.pop('args', ())
handle_default_options(options)
try:
self.execute(*args, **cmd_options)
except CommandError as e:
if options.traceback:
raise
# SystemCheckError takes care of its own formatting.
if isinstance(e, SystemCheckError):
self.stderr.write(str(e), lambda x: x)
else:
self.stderr.write('%s: %s' % (e.__class__.__name__, e))
sys.exit(e.returncode)
finally:
try:
connections.close_all()
except ImproperlyConfigured:
# Ignore if connections aren't setup at this point (e.g. no
# configured settings).
pass | [
"def",
"run_from_argv",
"(",
"self",
",",
"argv",
")",
":",
"self",
".",
"_called_from_command_line",
"=",
"True",
"parser",
"=",
"self",
".",
"create_parser",
"(",
"argv",
"[",
"0",
"]",
",",
"argv",
"[",
"1",
"]",
")",
"options",
"=",
"parser",
".",
"parse_args",
"(",
"argv",
"[",
"2",
":",
"]",
")",
"cmd_options",
"=",
"vars",
"(",
"options",
")",
"# Move positional args out of options to mimic legacy optparse",
"args",
"=",
"cmd_options",
".",
"pop",
"(",
"'args'",
",",
"(",
")",
")",
"handle_default_options",
"(",
"options",
")",
"try",
":",
"self",
".",
"execute",
"(",
"*",
"args",
",",
"*",
"*",
"cmd_options",
")",
"except",
"CommandError",
"as",
"e",
":",
"if",
"options",
".",
"traceback",
":",
"raise",
"# SystemCheckError takes care of its own formatting.",
"if",
"isinstance",
"(",
"e",
",",
"SystemCheckError",
")",
":",
"self",
".",
"stderr",
".",
"write",
"(",
"str",
"(",
"e",
")",
",",
"lambda",
"x",
":",
"x",
")",
"else",
":",
"self",
".",
"stderr",
".",
"write",
"(",
"'%s: %s'",
"%",
"(",
"e",
".",
"__class__",
".",
"__name__",
",",
"e",
")",
")",
"sys",
".",
"exit",
"(",
"e",
".",
"returncode",
")",
"finally",
":",
"try",
":",
"connections",
".",
"close_all",
"(",
")",
"except",
"ImproperlyConfigured",
":",
"# Ignore if connections aren't setup at this point (e.g. no",
"# configured settings).",
"pass"
] | [
336,
4
] | [
370,
20
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.execute | (self, *args, **options) |
Try to execute this command, performing system checks if needed (as
controlled by the ``requires_system_checks`` attribute, except if
force-skipped).
|
Try to execute this command, performing system checks if needed (as
controlled by the ``requires_system_checks`` attribute, except if
force-skipped).
| def execute(self, *args, **options):
"""
Try to execute this command, performing system checks if needed (as
controlled by the ``requires_system_checks`` attribute, except if
force-skipped).
"""
if options['force_color'] and options['no_color']:
raise CommandError("The --no-color and --force-color options can't be used together.")
if options['force_color']:
self.style = color_style(force_color=True)
elif options['no_color']:
self.style = no_style()
self.stderr.style_func = None
if options.get('stdout'):
self.stdout = OutputWrapper(options['stdout'])
if options.get('stderr'):
self.stderr = OutputWrapper(options['stderr'])
if self.requires_system_checks and not options['skip_checks']:
if self.requires_system_checks == ALL_CHECKS:
self.check()
else:
self.check(tags=self.requires_system_checks)
if self.requires_migrations_checks:
self.check_migrations()
output = self.handle(*args, **options)
if output:
if self.output_transaction:
connection = connections[options.get('database', DEFAULT_DB_ALIAS)]
output = '%s\n%s\n%s' % (
self.style.SQL_KEYWORD(connection.ops.start_transaction_sql()),
output,
self.style.SQL_KEYWORD(connection.ops.end_transaction_sql()),
)
self.stdout.write(output)
return output | [
"def",
"execute",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"if",
"options",
"[",
"'force_color'",
"]",
"and",
"options",
"[",
"'no_color'",
"]",
":",
"raise",
"CommandError",
"(",
"\"The --no-color and --force-color options can't be used together.\"",
")",
"if",
"options",
"[",
"'force_color'",
"]",
":",
"self",
".",
"style",
"=",
"color_style",
"(",
"force_color",
"=",
"True",
")",
"elif",
"options",
"[",
"'no_color'",
"]",
":",
"self",
".",
"style",
"=",
"no_style",
"(",
")",
"self",
".",
"stderr",
".",
"style_func",
"=",
"None",
"if",
"options",
".",
"get",
"(",
"'stdout'",
")",
":",
"self",
".",
"stdout",
"=",
"OutputWrapper",
"(",
"options",
"[",
"'stdout'",
"]",
")",
"if",
"options",
".",
"get",
"(",
"'stderr'",
")",
":",
"self",
".",
"stderr",
"=",
"OutputWrapper",
"(",
"options",
"[",
"'stderr'",
"]",
")",
"if",
"self",
".",
"requires_system_checks",
"and",
"not",
"options",
"[",
"'skip_checks'",
"]",
":",
"if",
"self",
".",
"requires_system_checks",
"==",
"ALL_CHECKS",
":",
"self",
".",
"check",
"(",
")",
"else",
":",
"self",
".",
"check",
"(",
"tags",
"=",
"self",
".",
"requires_system_checks",
")",
"if",
"self",
".",
"requires_migrations_checks",
":",
"self",
".",
"check_migrations",
"(",
")",
"output",
"=",
"self",
".",
"handle",
"(",
"*",
"args",
",",
"*",
"*",
"options",
")",
"if",
"output",
":",
"if",
"self",
".",
"output_transaction",
":",
"connection",
"=",
"connections",
"[",
"options",
".",
"get",
"(",
"'database'",
",",
"DEFAULT_DB_ALIAS",
")",
"]",
"output",
"=",
"'%s\\n%s\\n%s'",
"%",
"(",
"self",
".",
"style",
".",
"SQL_KEYWORD",
"(",
"connection",
".",
"ops",
".",
"start_transaction_sql",
"(",
")",
")",
",",
"output",
",",
"self",
".",
"style",
".",
"SQL_KEYWORD",
"(",
"connection",
".",
"ops",
".",
"end_transaction_sql",
"(",
")",
")",
",",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"output",
")",
"return",
"output"
] | [
372,
4
] | [
407,
21
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.check | (self, app_configs=None, tags=None, display_num_errors=False,
include_deployment_checks=False, fail_level=checks.ERROR,
databases=None) |
Use the system check framework to validate entire Django project.
Raise CommandError for any serious message (error or critical errors).
If there are only light messages (like warnings), print them to stderr
and don't raise an exception.
|
Use the system check framework to validate entire Django project.
Raise CommandError for any serious message (error or critical errors).
If there are only light messages (like warnings), print them to stderr
and don't raise an exception.
| def check(self, app_configs=None, tags=None, display_num_errors=False,
include_deployment_checks=False, fail_level=checks.ERROR,
databases=None):
"""
Use the system check framework to validate entire Django project.
Raise CommandError for any serious message (error or critical errors).
If there are only light messages (like warnings), print them to stderr
and don't raise an exception.
"""
all_issues = checks.run_checks(
app_configs=app_configs,
tags=tags,
include_deployment_checks=include_deployment_checks,
databases=databases,
)
header, body, footer = "", "", ""
visible_issue_count = 0 # excludes silenced warnings
if all_issues:
debugs = [e for e in all_issues if e.level < checks.INFO and not e.is_silenced()]
infos = [e for e in all_issues if checks.INFO <= e.level < checks.WARNING and not e.is_silenced()]
warnings = [e for e in all_issues if checks.WARNING <= e.level < checks.ERROR and not e.is_silenced()]
errors = [e for e in all_issues if checks.ERROR <= e.level < checks.CRITICAL and not e.is_silenced()]
criticals = [e for e in all_issues if checks.CRITICAL <= e.level and not e.is_silenced()]
sorted_issues = [
(criticals, 'CRITICALS'),
(errors, 'ERRORS'),
(warnings, 'WARNINGS'),
(infos, 'INFOS'),
(debugs, 'DEBUGS'),
]
for issues, group_name in sorted_issues:
if issues:
visible_issue_count += len(issues)
formatted = (
self.style.ERROR(str(e))
if e.is_serious()
else self.style.WARNING(str(e))
for e in issues)
formatted = "\n".join(sorted(formatted))
body += '\n%s:\n%s\n' % (group_name, formatted)
if visible_issue_count:
header = "System check identified some issues:\n"
if display_num_errors:
if visible_issue_count:
footer += '\n'
footer += "System check identified %s (%s silenced)." % (
"no issues" if visible_issue_count == 0 else
"1 issue" if visible_issue_count == 1 else
"%s issues" % visible_issue_count,
len(all_issues) - visible_issue_count,
)
if any(e.is_serious(fail_level) and not e.is_silenced() for e in all_issues):
msg = self.style.ERROR("SystemCheckError: %s" % header) + body + footer
raise SystemCheckError(msg)
else:
msg = header + body + footer
if msg:
if visible_issue_count:
self.stderr.write(msg, lambda x: x)
else:
self.stdout.write(msg) | [
"def",
"check",
"(",
"self",
",",
"app_configs",
"=",
"None",
",",
"tags",
"=",
"None",
",",
"display_num_errors",
"=",
"False",
",",
"include_deployment_checks",
"=",
"False",
",",
"fail_level",
"=",
"checks",
".",
"ERROR",
",",
"databases",
"=",
"None",
")",
":",
"all_issues",
"=",
"checks",
".",
"run_checks",
"(",
"app_configs",
"=",
"app_configs",
",",
"tags",
"=",
"tags",
",",
"include_deployment_checks",
"=",
"include_deployment_checks",
",",
"databases",
"=",
"databases",
",",
")",
"header",
",",
"body",
",",
"footer",
"=",
"\"\"",
",",
"\"\"",
",",
"\"\"",
"visible_issue_count",
"=",
"0",
"# excludes silenced warnings",
"if",
"all_issues",
":",
"debugs",
"=",
"[",
"e",
"for",
"e",
"in",
"all_issues",
"if",
"e",
".",
"level",
"<",
"checks",
".",
"INFO",
"and",
"not",
"e",
".",
"is_silenced",
"(",
")",
"]",
"infos",
"=",
"[",
"e",
"for",
"e",
"in",
"all_issues",
"if",
"checks",
".",
"INFO",
"<=",
"e",
".",
"level",
"<",
"checks",
".",
"WARNING",
"and",
"not",
"e",
".",
"is_silenced",
"(",
")",
"]",
"warnings",
"=",
"[",
"e",
"for",
"e",
"in",
"all_issues",
"if",
"checks",
".",
"WARNING",
"<=",
"e",
".",
"level",
"<",
"checks",
".",
"ERROR",
"and",
"not",
"e",
".",
"is_silenced",
"(",
")",
"]",
"errors",
"=",
"[",
"e",
"for",
"e",
"in",
"all_issues",
"if",
"checks",
".",
"ERROR",
"<=",
"e",
".",
"level",
"<",
"checks",
".",
"CRITICAL",
"and",
"not",
"e",
".",
"is_silenced",
"(",
")",
"]",
"criticals",
"=",
"[",
"e",
"for",
"e",
"in",
"all_issues",
"if",
"checks",
".",
"CRITICAL",
"<=",
"e",
".",
"level",
"and",
"not",
"e",
".",
"is_silenced",
"(",
")",
"]",
"sorted_issues",
"=",
"[",
"(",
"criticals",
",",
"'CRITICALS'",
")",
",",
"(",
"errors",
",",
"'ERRORS'",
")",
",",
"(",
"warnings",
",",
"'WARNINGS'",
")",
",",
"(",
"infos",
",",
"'INFOS'",
")",
",",
"(",
"debugs",
",",
"'DEBUGS'",
")",
",",
"]",
"for",
"issues",
",",
"group_name",
"in",
"sorted_issues",
":",
"if",
"issues",
":",
"visible_issue_count",
"+=",
"len",
"(",
"issues",
")",
"formatted",
"=",
"(",
"self",
".",
"style",
".",
"ERROR",
"(",
"str",
"(",
"e",
")",
")",
"if",
"e",
".",
"is_serious",
"(",
")",
"else",
"self",
".",
"style",
".",
"WARNING",
"(",
"str",
"(",
"e",
")",
")",
"for",
"e",
"in",
"issues",
")",
"formatted",
"=",
"\"\\n\"",
".",
"join",
"(",
"sorted",
"(",
"formatted",
")",
")",
"body",
"+=",
"'\\n%s:\\n%s\\n'",
"%",
"(",
"group_name",
",",
"formatted",
")",
"if",
"visible_issue_count",
":",
"header",
"=",
"\"System check identified some issues:\\n\"",
"if",
"display_num_errors",
":",
"if",
"visible_issue_count",
":",
"footer",
"+=",
"'\\n'",
"footer",
"+=",
"\"System check identified %s (%s silenced).\"",
"%",
"(",
"\"no issues\"",
"if",
"visible_issue_count",
"==",
"0",
"else",
"\"1 issue\"",
"if",
"visible_issue_count",
"==",
"1",
"else",
"\"%s issues\"",
"%",
"visible_issue_count",
",",
"len",
"(",
"all_issues",
")",
"-",
"visible_issue_count",
",",
")",
"if",
"any",
"(",
"e",
".",
"is_serious",
"(",
"fail_level",
")",
"and",
"not",
"e",
".",
"is_silenced",
"(",
")",
"for",
"e",
"in",
"all_issues",
")",
":",
"msg",
"=",
"self",
".",
"style",
".",
"ERROR",
"(",
"\"SystemCheckError: %s\"",
"%",
"header",
")",
"+",
"body",
"+",
"footer",
"raise",
"SystemCheckError",
"(",
"msg",
")",
"else",
":",
"msg",
"=",
"header",
"+",
"body",
"+",
"footer",
"if",
"msg",
":",
"if",
"visible_issue_count",
":",
"self",
".",
"stderr",
".",
"write",
"(",
"msg",
",",
"lambda",
"x",
":",
"x",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"msg",
")"
] | [
409,
4
] | [
476,
38
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.check_migrations | (self) |
Print a warning if the set of migrations on disk don't match the
migrations in the database.
|
Print a warning if the set of migrations on disk don't match the
migrations in the database.
| def check_migrations(self):
"""
Print a warning if the set of migrations on disk don't match the
migrations in the database.
"""
from django.db.migrations.executor import MigrationExecutor
try:
executor = MigrationExecutor(connections[DEFAULT_DB_ALIAS])
except ImproperlyConfigured:
# No databases are configured (or the dummy one)
return
plan = executor.migration_plan(executor.loader.graph.leaf_nodes())
if plan:
apps_waiting_migration = sorted({migration.app_label for migration, backwards in plan})
self.stdout.write(
self.style.NOTICE(
"\nYou have %(unapplied_migration_count)s unapplied migration(s). "
"Your project may not work properly until you apply the "
"migrations for app(s): %(apps_waiting_migration)s." % {
"unapplied_migration_count": len(plan),
"apps_waiting_migration": ", ".join(apps_waiting_migration),
}
)
)
self.stdout.write(self.style.NOTICE("Run 'python manage.py migrate' to apply them.")) | [
"def",
"check_migrations",
"(",
"self",
")",
":",
"from",
"django",
".",
"db",
".",
"migrations",
".",
"executor",
"import",
"MigrationExecutor",
"try",
":",
"executor",
"=",
"MigrationExecutor",
"(",
"connections",
"[",
"DEFAULT_DB_ALIAS",
"]",
")",
"except",
"ImproperlyConfigured",
":",
"# No databases are configured (or the dummy one)",
"return",
"plan",
"=",
"executor",
".",
"migration_plan",
"(",
"executor",
".",
"loader",
".",
"graph",
".",
"leaf_nodes",
"(",
")",
")",
"if",
"plan",
":",
"apps_waiting_migration",
"=",
"sorted",
"(",
"{",
"migration",
".",
"app_label",
"for",
"migration",
",",
"backwards",
"in",
"plan",
"}",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"style",
".",
"NOTICE",
"(",
"\"\\nYou have %(unapplied_migration_count)s unapplied migration(s). \"",
"\"Your project may not work properly until you apply the \"",
"\"migrations for app(s): %(apps_waiting_migration)s.\"",
"%",
"{",
"\"unapplied_migration_count\"",
":",
"len",
"(",
"plan",
")",
",",
"\"apps_waiting_migration\"",
":",
"\", \"",
".",
"join",
"(",
"apps_waiting_migration",
")",
",",
"}",
")",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"self",
".",
"style",
".",
"NOTICE",
"(",
"\"Run 'python manage.py migrate' to apply them.\"",
")",
")"
] | [
478,
4
] | [
503,
97
] | python | en | ['en', 'error', 'th'] | False |
BaseCommand.handle | (self, *args, **options) |
The actual logic of the command. Subclasses must implement
this method.
|
The actual logic of the command. Subclasses must implement
this method.
| def handle(self, *args, **options):
"""
The actual logic of the command. Subclasses must implement
this method.
"""
raise NotImplementedError('subclasses of BaseCommand must provide a handle() method') | [
"def",
"handle",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of BaseCommand must provide a handle() method'",
")"
] | [
505,
4
] | [
510,
93
] | python | en | ['en', 'error', 'th'] | False |
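The BaseCommand rows above describe the usual subclassing contract: override add_arguments() to declare options and handle() to do the work, while execute() and run_from_argv() drive checks and output. A minimal, hedged example (the app and argument names are invented):

from django.core.management.base import BaseCommand, CommandError

class Command(BaseCommand):
    help = "Close the polls given by their ids (illustrative)."

    def add_arguments(self, parser):
        parser.add_argument("poll_ids", nargs="+", type=int)

    def handle(self, *args, **options):
        for poll_id in options["poll_ids"]:
            if poll_id < 0:
                # CommandError is printed to stderr by run_from_argv() instead of a traceback.
                raise CommandError("invalid poll id %s" % poll_id)
            # A real command would load and update a model here.
            self.stdout.write(self.style.SUCCESS("Closed poll %s" % poll_id))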
AppCommand.handle_app_config | (self, app_config, **options) |
Perform the command's actions for app_config, an AppConfig instance
corresponding to an application label given on the command line.
|
Perform the command's actions for app_config, an AppConfig instance
corresponding to an application label given on the command line.
| def handle_app_config(self, app_config, **options):
"""
Perform the command's actions for app_config, an AppConfig instance
corresponding to an application label given on the command line.
"""
raise NotImplementedError(
"Subclasses of AppCommand must provide"
"a handle_app_config() method.") | [
"def",
"handle_app_config",
"(",
"self",
",",
"app_config",
",",
"*",
"*",
"options",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"Subclasses of AppCommand must provide\"",
"\"a handle_app_config() method.\"",
")"
] | [
539,
4
] | [
546,
44
] | python | en | ['en', 'error', 'th'] | False |
LabelCommand.handle_label | (self, label, **options) |
Perform the command's actions for ``label``, which will be the
string as given on the command line.
|
Perform the command's actions for ``label``, which will be the
string as given on the command line.
| def handle_label(self, label, **options):
"""
Perform the command's actions for ``label``, which will be the
string as given on the command line.
"""
raise NotImplementedError('subclasses of LabelCommand must provide a handle_label() method') | [
"def",
"handle_label",
"(",
"self",
",",
"label",
",",
"*",
"*",
"options",
")",
":",
"raise",
"NotImplementedError",
"(",
"'subclasses of LabelCommand must provide a handle_label() method'",
")"
] | [
575,
4
] | [
580,
100
] | python | en | ['en', 'error', 'th'] | False |
duration_string | (duration) | Version of str(timedelta) which is not English specific. | Version of str(timedelta) which is not English specific. | def duration_string(duration):
"""Version of str(timedelta) which is not English specific."""
days, hours, minutes, seconds, microseconds = _get_duration_components(duration)
string = '{:02d}:{:02d}:{:02d}'.format(hours, minutes, seconds)
if days:
string = '{} '.format(days) + string
if microseconds:
string += '.{:06d}'.format(microseconds)
return string | [
"def",
"duration_string",
"(",
"duration",
")",
":",
"days",
",",
"hours",
",",
"minutes",
",",
"seconds",
",",
"microseconds",
"=",
"_get_duration_components",
"(",
"duration",
")",
"string",
"=",
"'{:02d}:{:02d}:{:02d}'",
".",
"format",
"(",
"hours",
",",
"minutes",
",",
"seconds",
")",
"if",
"days",
":",
"string",
"=",
"'{} '",
".",
"format",
"(",
"days",
")",
"+",
"string",
"if",
"microseconds",
":",
"string",
"+=",
"'.{:06d}'",
".",
"format",
"(",
"microseconds",
")",
"return",
"string"
] | [
17,
0
] | [
27,
17
] | python | en | ['en', 'en', 'en'] | True |
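A quick sketch of duration_string; the expected outputs follow directly from the formatting code above (the import path django.utils.duration is assumed):

from datetime import timedelta
from django.utils.duration import duration_string  # assumed module path

duration_string(timedelta(hours=1, minutes=2, seconds=3))        # '01:02:03'
duration_string(timedelta(days=1, seconds=5, microseconds=12))   # '1 00:00:05.000012'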
CheckRegistry.register | (self, check=None, *tags, **kwargs) |
Can be used as a function or a decorator. Register given function
`f` labeled with given `tags`. The function should receive **kwargs
and return list of Errors and Warnings.
Example::
registry = CheckRegistry()
@registry.register('mytag', 'anothertag')
def my_check(app_configs, **kwargs):
# ... perform checks and collect `errors` ...
return errors
# or
registry.register(my_check, 'mytag', 'anothertag')
|
Can be used as a function or a decorator. Register given function
`f` labeled with given `tags`. The function should receive **kwargs
and return list of Errors and Warnings. | def register(self, check=None, *tags, **kwargs):
"""
Can be used as a function or a decorator. Register given function
`f` labeled with given `tags`. The function should receive **kwargs
and return list of Errors and Warnings.
Example::
registry = CheckRegistry()
@registry.register('mytag', 'anothertag')
def my_check(app_configs, **kwargs):
# ... perform checks and collect `errors` ...
return errors
# or
registry.register(my_check, 'mytag', 'anothertag')
"""
def inner(check):
if not func_accepts_kwargs(check):
raise TypeError(
'Check functions must accept keyword arguments (**kwargs).'
)
check.tags = tags
checks = self.deployment_checks if kwargs.get('deploy') else self.registered_checks
checks.add(check)
return check
if callable(check):
return inner(check)
else:
if check:
tags += (check,)
return inner | [
"def",
"register",
"(",
"self",
",",
"check",
"=",
"None",
",",
"*",
"tags",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"inner",
"(",
"check",
")",
":",
"if",
"not",
"func_accepts_kwargs",
"(",
"check",
")",
":",
"raise",
"TypeError",
"(",
"'Check functions must accept keyword arguments (**kwargs).'",
")",
"check",
".",
"tags",
"=",
"tags",
"checks",
"=",
"self",
".",
"deployment_checks",
"if",
"kwargs",
".",
"get",
"(",
"'deploy'",
")",
"else",
"self",
".",
"registered_checks",
"checks",
".",
"add",
"(",
"check",
")",
"return",
"check",
"if",
"callable",
"(",
"check",
")",
":",
"return",
"inner",
"(",
"check",
")",
"else",
":",
"if",
"check",
":",
"tags",
"+=",
"(",
"check",
",",
")",
"return",
"inner"
] | [
31,
4
] | [
62,
24
] | python | en | ['en', 'error', 'th'] | False |
CheckRegistry.run_checks | (self, app_configs=None, tags=None, include_deployment_checks=False, databases=None) |
Run all registered checks and return list of Errors and Warnings.
|
Run all registered checks and return list of Errors and Warnings.
| def run_checks(self, app_configs=None, tags=None, include_deployment_checks=False, databases=None):
"""
Run all registered checks and return list of Errors and Warnings.
"""
errors = []
checks = self.get_checks(include_deployment_checks)
if tags is not None:
checks = [check for check in checks if not set(check.tags).isdisjoint(tags)]
for check in checks:
new_errors = check(app_configs=app_configs, databases=databases)
assert is_iterable(new_errors), (
"The function %r did not return a list. All functions registered "
"with the checks registry must return a list." % check)
errors.extend(new_errors)
return errors | [
"def",
"run_checks",
"(",
"self",
",",
"app_configs",
"=",
"None",
",",
"tags",
"=",
"None",
",",
"include_deployment_checks",
"=",
"False",
",",
"databases",
"=",
"None",
")",
":",
"errors",
"=",
"[",
"]",
"checks",
"=",
"self",
".",
"get_checks",
"(",
"include_deployment_checks",
")",
"if",
"tags",
"is",
"not",
"None",
":",
"checks",
"=",
"[",
"check",
"for",
"check",
"in",
"checks",
"if",
"not",
"set",
"(",
"check",
".",
"tags",
")",
".",
"isdisjoint",
"(",
"tags",
")",
"]",
"for",
"check",
"in",
"checks",
":",
"new_errors",
"=",
"check",
"(",
"app_configs",
"=",
"app_configs",
",",
"databases",
"=",
"databases",
")",
"assert",
"is_iterable",
"(",
"new_errors",
")",
",",
"(",
"\"The function %r did not return a list. All functions registered \"",
"\"with the checks registry must return a list.\"",
"%",
"check",
")",
"errors",
".",
"extend",
"(",
"new_errors",
")",
"return",
"errors"
] | [
64,
4
] | [
80,
21
] | python | en | ['en', 'error', 'th'] | False |
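A small sketch tying the two CheckRegistry rows together: register a tagged check (mirroring the example quoted in the register() docstring) and run it; the import paths are assumptions:

from django.core.checks import Warning
from django.core.checks.registry import CheckRegistry  # assumed import path

registry = CheckRegistry()

@registry.register("mytag")
def my_check(app_configs, **kwargs):
    # A real check would inspect app_configs or settings here.
    return [Warning("example warning", id="example.W001")]

errors = registry.run_checks(tags=["mytag"])  # -> [Warning('example warning', ...)]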
ping_google | (sitemap_url=None, ping_url=PING_URL, sitemap_uses_https=True) |
Alert Google that the sitemap for the current site has been updated.
If sitemap_url is provided, it should be an absolute path to the sitemap
for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
function will attempt to deduce it by using urls.reverse().
|
Alert Google that the sitemap for the current site has been updated.
If sitemap_url is provided, it should be an absolute path to the sitemap
for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
function will attempt to deduce it by using urls.reverse().
| def ping_google(sitemap_url=None, ping_url=PING_URL, sitemap_uses_https=True):
"""
Alert Google that the sitemap for the current site has been updated.
If sitemap_url is provided, it should be an absolute path to the sitemap
for this site -- e.g., '/sitemap.xml'. If sitemap_url is not provided, this
function will attempt to deduce it by using urls.reverse().
"""
sitemap_full_url = _get_sitemap_full_url(sitemap_url, sitemap_uses_https)
params = urlencode({'sitemap': sitemap_full_url})
urlopen('%s?%s' % (ping_url, params)) | [
"def",
"ping_google",
"(",
"sitemap_url",
"=",
"None",
",",
"ping_url",
"=",
"PING_URL",
",",
"sitemap_uses_https",
"=",
"True",
")",
":",
"sitemap_full_url",
"=",
"_get_sitemap_full_url",
"(",
"sitemap_url",
",",
"sitemap_uses_https",
")",
"params",
"=",
"urlencode",
"(",
"{",
"'sitemap'",
":",
"sitemap_full_url",
"}",
")",
"urlopen",
"(",
"'%s?%s'",
"%",
"(",
"ping_url",
",",
"params",
")",
")"
] | [
17,
0
] | [
26,
41
] | python | en | ['en', 'error', 'th'] | False |
fromfile | (file_h) |
Given a string file name, returns a GEOSGeometry. The file may contain WKB,
WKT, or HEX.
|
Given a string file name, returns a GEOSGeometry. The file may contain WKB,
WKT, or HEX.
| def fromfile(file_h):
"""
Given a string file name, returns a GEOSGeometry. The file may contain WKB,
WKT, or HEX.
"""
# If given a file name, get a real handle.
if isinstance(file_h, str):
with open(file_h, 'rb') as file_h:
buf = file_h.read()
else:
buf = file_h.read()
# If we get WKB need to wrap in memoryview(), so run through regexes.
if isinstance(buf, bytes):
try:
decoded = buf.decode()
except UnicodeDecodeError:
pass
else:
if wkt_regex.match(decoded) or hex_regex.match(decoded):
return GEOSGeometry(decoded)
else:
return GEOSGeometry(buf)
return GEOSGeometry(memoryview(buf)) | [
"def",
"fromfile",
"(",
"file_h",
")",
":",
"# If given a file name, get a real handle.",
"if",
"isinstance",
"(",
"file_h",
",",
"str",
")",
":",
"with",
"open",
"(",
"file_h",
",",
"'rb'",
")",
"as",
"file_h",
":",
"buf",
"=",
"file_h",
".",
"read",
"(",
")",
"else",
":",
"buf",
"=",
"file_h",
".",
"read",
"(",
")",
"# If we get WKB need to wrap in memoryview(), so run through regexes.",
"if",
"isinstance",
"(",
"buf",
",",
"bytes",
")",
":",
"try",
":",
"decoded",
"=",
"buf",
".",
"decode",
"(",
")",
"except",
"UnicodeDecodeError",
":",
"pass",
"else",
":",
"if",
"wkt_regex",
".",
"match",
"(",
"decoded",
")",
"or",
"hex_regex",
".",
"match",
"(",
"decoded",
")",
":",
"return",
"GEOSGeometry",
"(",
"decoded",
")",
"else",
":",
"return",
"GEOSGeometry",
"(",
"buf",
")",
"return",
"GEOSGeometry",
"(",
"memoryview",
"(",
"buf",
")",
")"
] | [
3,
0
] | [
27,
40
] | python | en | ['en', 'error', 'th'] | False |
fromstr | (string, **kwargs) | Given a string value, return a GEOSGeometry object. | Given a string value, return a GEOSGeometry object. | def fromstr(string, **kwargs):
"Given a string value, return a GEOSGeometry object."
return GEOSGeometry(string, **kwargs) | [
"def",
"fromstr",
"(",
"string",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"GEOSGeometry",
"(",
"string",
",",
"*",
"*",
"kwargs",
")"
] | [
30,
0
] | [
32,
41
] | python | en | ['en', 'en', 'en'] | True |
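A hedged usage sketch for the two GEOS helpers above (fromfile and fromstr); both return GEOSGeometry objects and require the GEOS C library to be available:

from django.contrib.gis.geos import fromstr

pt = fromstr("POINT(-96.8 29.7)", srid=4326)   # WKT string -> GEOSGeometry
print(pt.geom_type, pt.srid)                   # Point 4326

# fromfile() accepts a file name or an open handle containing WKT, WKB, or HEX:
# geom = fromfile("boundary.wkt")              # file name is illustrative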
dedent_lines | (lines) | Deindent a list of lines maximally | Deindent a list of lines maximally | def dedent_lines(lines):
"""Deindent a list of lines maximally"""
return textwrap.dedent("\n".join(lines)).split("\n") | [
"def",
"dedent_lines",
"(",
"lines",
")",
":",
"return",
"textwrap",
".",
"dedent",
"(",
"\"\\n\"",
".",
"join",
"(",
"lines",
")",
")",
".",
"split",
"(",
"\"\\n\"",
")"
] | [
439,
0
] | [
441,
56
] | python | en | ['en', 'en', 'en'] | True |
Reader.__init__ | (self, data) |
Parameters
----------
data : str
String with lines separated by '\n'.
|
Parameters
----------
data : str
String with lines separated by '\n'. | def __init__(self, data):
"""
Parameters
----------
data : str
String with lines separated by '\n'.
"""
if isinstance(data, list):
self._str = data
else:
self._str = data.split('\n') # store string as list of lines
self.reset() | [
"def",
"__init__",
"(",
"self",
",",
"data",
")",
":",
"if",
"isinstance",
"(",
"data",
",",
"list",
")",
":",
"self",
".",
"_str",
"=",
"data",
"else",
":",
"self",
".",
"_str",
"=",
"data",
".",
"split",
"(",
"'\\n'",
")",
"# store string as list of lines",
"self",
".",
"reset",
"(",
")"
] | [
18,
4
] | [
31,
20
] | python | en | ['en', 'error', 'th'] | False |
NumpyDocString._parse_see_also | (self, content) |
func_name : Descriptive text
continued text
another_func_name : Descriptive text
func_name1, func_name2, :meth:`func_name`, func_name3
|
func_name : Descriptive text
continued text
another_func_name : Descriptive text
func_name1, func_name2, :meth:`func_name`, func_name3 | def _parse_see_also(self, content):
"""
func_name : Descriptive text
continued text
another_func_name : Descriptive text
func_name1, func_name2, :meth:`func_name`, func_name3
"""
items = []
def parse_item_name(text):
"""Match ':role:`name`' or 'name'"""
m = self._name_rgx.match(text)
if m:
g = m.groups()
if g[1] is None:
return g[3], None
else:
return g[2], g[1]
raise ValueError("%s is not a item name" % text)
def push_item(name, rest):
if not name:
return
name, role = parse_item_name(name)
items.append((name, list(rest), role))
del rest[:]
current_func = None
rest = []
for line in content:
if not line.strip():
continue
m = self._name_rgx.match(line)
if m and line[m.end():].strip().startswith(':'):
push_item(current_func, rest)
current_func, line = line[:m.end()], line[m.end():]
rest = [line.split(':', 1)[1].strip()]
if not rest[0]:
rest = []
elif not line.startswith(' '):
push_item(current_func, rest)
current_func = None
if ',' in line:
for func in line.split(','):
if func.strip():
push_item(func, [])
elif line.strip():
current_func = line
elif current_func is not None:
rest.append(line.strip())
push_item(current_func, rest)
return items | [
"def",
"_parse_see_also",
"(",
"self",
",",
"content",
")",
":",
"items",
"=",
"[",
"]",
"def",
"parse_item_name",
"(",
"text",
")",
":",
"\"\"\"Match ':role:`name`' or 'name'\"\"\"",
"m",
"=",
"self",
".",
"_name_rgx",
".",
"match",
"(",
"text",
")",
"if",
"m",
":",
"g",
"=",
"m",
".",
"groups",
"(",
")",
"if",
"g",
"[",
"1",
"]",
"is",
"None",
":",
"return",
"g",
"[",
"3",
"]",
",",
"None",
"else",
":",
"return",
"g",
"[",
"2",
"]",
",",
"g",
"[",
"1",
"]",
"raise",
"ValueError",
"(",
"\"%s is not a item name\"",
"%",
"text",
")",
"def",
"push_item",
"(",
"name",
",",
"rest",
")",
":",
"if",
"not",
"name",
":",
"return",
"name",
",",
"role",
"=",
"parse_item_name",
"(",
"name",
")",
"items",
".",
"append",
"(",
"(",
"name",
",",
"list",
"(",
"rest",
")",
",",
"role",
")",
")",
"del",
"rest",
"[",
":",
"]",
"current_func",
"=",
"None",
"rest",
"=",
"[",
"]",
"for",
"line",
"in",
"content",
":",
"if",
"not",
"line",
".",
"strip",
"(",
")",
":",
"continue",
"m",
"=",
"self",
".",
"_name_rgx",
".",
"match",
"(",
"line",
")",
"if",
"m",
"and",
"line",
"[",
"m",
".",
"end",
"(",
")",
":",
"]",
".",
"strip",
"(",
")",
".",
"startswith",
"(",
"':'",
")",
":",
"push_item",
"(",
"current_func",
",",
"rest",
")",
"current_func",
",",
"line",
"=",
"line",
"[",
":",
"m",
".",
"end",
"(",
")",
"]",
",",
"line",
"[",
"m",
".",
"end",
"(",
")",
":",
"]",
"rest",
"=",
"[",
"line",
".",
"split",
"(",
"':'",
",",
"1",
")",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"]",
"if",
"not",
"rest",
"[",
"0",
"]",
":",
"rest",
"=",
"[",
"]",
"elif",
"not",
"line",
".",
"startswith",
"(",
"' '",
")",
":",
"push_item",
"(",
"current_func",
",",
"rest",
")",
"current_func",
"=",
"None",
"if",
"','",
"in",
"line",
":",
"for",
"func",
"in",
"line",
".",
"split",
"(",
"','",
")",
":",
"if",
"func",
".",
"strip",
"(",
")",
":",
"push_item",
"(",
"func",
",",
"[",
"]",
")",
"elif",
"line",
".",
"strip",
"(",
")",
":",
"current_func",
"=",
"line",
"elif",
"current_func",
"is",
"not",
"None",
":",
"rest",
".",
"append",
"(",
"line",
".",
"strip",
"(",
")",
")",
"push_item",
"(",
"current_func",
",",
"rest",
")",
"return",
"items"
] | [
202,
4
] | [
256,
20
] | python | en | ['en', 'error', 'th'] | False |
NumpyDocString._parse_index | (self, section, content) |
.. index: default
:refguide: something, else, and more
|
.. index: default
:refguide: something, else, and more | def _parse_index(self, section, content):
"""
.. index: default
:refguide: something, else, and more
"""
def strip_each_in(lst):
return [s.strip() for s in lst]
out = {}
section = section.split('::')
if len(section) > 1:
out['default'] = strip_each_in(section[1].split(','))[0]
for line in content:
line = line.split(':')
if len(line) > 2:
out[line[1]] = strip_each_in(line[2].split(','))
return out | [
"def",
"_parse_index",
"(",
"self",
",",
"section",
",",
"content",
")",
":",
"def",
"strip_each_in",
"(",
"lst",
")",
":",
"return",
"[",
"s",
".",
"strip",
"(",
")",
"for",
"s",
"in",
"lst",
"]",
"out",
"=",
"{",
"}",
"section",
"=",
"section",
".",
"split",
"(",
"'::'",
")",
"if",
"len",
"(",
"section",
")",
">",
"1",
":",
"out",
"[",
"'default'",
"]",
"=",
"strip_each_in",
"(",
"section",
"[",
"1",
"]",
".",
"split",
"(",
"','",
")",
")",
"[",
"0",
"]",
"for",
"line",
"in",
"content",
":",
"line",
"=",
"line",
".",
"split",
"(",
"':'",
")",
"if",
"len",
"(",
"line",
")",
">",
"2",
":",
"out",
"[",
"line",
"[",
"1",
"]",
"]",
"=",
"strip_each_in",
"(",
"line",
"[",
"2",
"]",
".",
"split",
"(",
"','",
")",
")",
"return",
"out"
] | [
258,
4
] | [
275,
18
] | python | en | ['en', 'error', 'th'] | False |
NumpyDocString._parse_summary | (self) | Grab signature (if given) and summary | Grab signature (if given) and summary | def _parse_summary(self):
"""Grab signature (if given) and summary"""
if self._is_at_section():
return
# If several signatures present, take the last one
while True:
summary = self._doc.read_to_next_empty_line()
summary_str = " ".join([s.strip() for s in summary]).strip()
if re.compile('^([\w., ]+=)?\s*[\w\.]+\(.*\)$').match(summary_str):
self['Signature'] = summary_str
if not self._is_at_section():
continue
break
if summary is not None:
self['Summary'] = summary
if not self._is_at_section():
self['Extended Summary'] = self._read_to_next_section() | [
"def",
"_parse_summary",
"(",
"self",
")",
":",
"if",
"self",
".",
"_is_at_section",
"(",
")",
":",
"return",
"# If several signatures present, take the last one",
"while",
"True",
":",
"summary",
"=",
"self",
".",
"_doc",
".",
"read_to_next_empty_line",
"(",
")",
"summary_str",
"=",
"\" \"",
".",
"join",
"(",
"[",
"s",
".",
"strip",
"(",
")",
"for",
"s",
"in",
"summary",
"]",
")",
".",
"strip",
"(",
")",
"if",
"re",
".",
"compile",
"(",
"'^([\\w., ]+=)?\\s*[\\w\\.]+\\(.*\\)$'",
")",
".",
"match",
"(",
"summary_str",
")",
":",
"self",
"[",
"'Signature'",
"]",
"=",
"summary_str",
"if",
"not",
"self",
".",
"_is_at_section",
"(",
")",
":",
"continue",
"break",
"if",
"summary",
"is",
"not",
"None",
":",
"self",
"[",
"'Summary'",
"]",
"=",
"summary",
"if",
"not",
"self",
".",
"_is_at_section",
"(",
")",
":",
"self",
"[",
"'Extended Summary'",
"]",
"=",
"self",
".",
"_read_to_next_section",
"(",
")"
] | [
277,
4
] | [
296,
67
] | python | en | ['en', 'en', 'en'] | True |
handle_newfile | (data, context) | Background Cloud Function to be triggered by Cloud Storage.
This generic function calls the Cloud Run URL endpoint.
Args:
data (dict): The Cloud Functions event payload.
context (google.cloud.functions.Context): Metadata of triggering event.
Returns:
None; the output is written to Stackdriver Logging
| Background Cloud Function to be triggered by Cloud Storage.
This generic function calls the Cloud Run URL endpoint. | def handle_newfile(data, context):
"""Background Cloud Function to be triggered by Cloud Storage.
This generic function calls the Cloud Run URL endpoint.
Args:
data (dict): The Cloud Functions event payload.
context (google.cloud.functions.Context): Metadata of triggering event.
Returns:
None; the output is written to Stackdriver Logging
"""
payload = {
'bucket' : data['bucket'],
'filename': data['name']
}
# Notes:
# (1) Ideally, we can simply invoke mlp_babyweight.finetune from here
# However, kfp.Client() has dependencies on binaries that are not available in Cloud Functions
# Hence, this workaround of putting mlp_babyweight.py in a Docker container and invoking it
# via Cloud Run
# (2) We could reduce the traffic to Cloud Run by checking filename pattern here
# but for reusability and maintainability reasons, I'm keeping this
# Cloud Function as a simple pass-through
# receiving service url
url = os.environ.get('DESTINATION_URL', "No DESTINATION_URL")
print("Invoking Cloud Run at {} with {}".format(url, payload))
# See https://cloud.google.com/run/docs/authenticating/service-to-service
metadata_server_token_url = 'http://metadata/computeMetadata/v1/instance/service-accounts/default/identity?audience='
token_request_url = metadata_server_token_url + url
token_request_headers = {'Metadata-Flavor': 'Google'}
token_response = requests.get(token_request_url, headers=token_request_headers)
jwt = token_response.content.decode("utf-8")
# Provide the token in the request to the receiving service
headers = {
'Authorization': f'bearer {jwt}',
'Content-Type':'application/json'
}
print("Headers = {}".format(headers))
resp = requests.post(url, data=json.dumps(payload), headers=headers)
return (resp.status_code == requests.codes.ok) | [
"def",
"handle_newfile",
"(",
"data",
",",
"context",
")",
":",
"payload",
"=",
"{",
"'bucket'",
":",
"data",
"[",
"'bucket'",
"]",
",",
"'filename'",
":",
"data",
"[",
"'name'",
"]",
"}",
"# Notes: ",
"# (1) Ideally, we can simply invoke mlp_babyweight.finetune from here",
"# However, kfp.Client() has dependencies on binaries that are not available in Cloud Functions",
"# Hence, this workaround of putting mlp_babyweight.py in a Docker container and invoking it",
"# via Cloud Run",
"# (2) We could reduce the traffic to Cloud Run by checking filename pattern here",
"# but for reusability and maintainability reasons, I'm keeping this",
"# Cloud Function as a simple pass-through",
"# receiving service url",
"url",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'DESTINATION_URL'",
",",
"\"No DESTINATION_URL\"",
")",
"print",
"(",
"\"Invoking Cloud Run at {} with {}\"",
".",
"format",
"(",
"url",
",",
"payload",
")",
")",
"# See https://cloud.google.com/run/docs/authenticating/service-to-service",
"metadata_server_token_url",
"=",
"'http://metadata/computeMetadata/v1/instance/service-accounts/default/identity?audience='",
"token_request_url",
"=",
"metadata_server_token_url",
"+",
"url",
"token_request_headers",
"=",
"{",
"'Metadata-Flavor'",
":",
"'Google'",
"}",
"token_response",
"=",
"requests",
".",
"get",
"(",
"token_request_url",
",",
"headers",
"=",
"token_request_headers",
")",
"jwt",
"=",
"token_response",
".",
"content",
".",
"decode",
"(",
"\"utf-8\"",
")",
"# Provide the token in the request to the receiving service",
"headers",
"=",
"{",
"'Authorization'",
":",
"f'bearer {jwt}'",
",",
"'Content-Type'",
":",
"'application/json'",
"}",
"print",
"(",
"\"Headers = {}\"",
".",
"format",
"(",
"headers",
")",
")",
"resp",
"=",
"requests",
".",
"post",
"(",
"url",
",",
"data",
"=",
"json",
".",
"dumps",
"(",
"payload",
")",
",",
"headers",
"=",
"headers",
")",
"return",
"(",
"resp",
".",
"status_code",
"==",
"requests",
".",
"codes",
".",
"ok",
")"
] | [
18,
0
] | [
60,
50
] | python | en | ['en', 'en', 'en'] | True |
get_content_type | (mimetype, charset) | Returns the full content type string with charset for a mimetype.
If the mimetype represents text, the charset parameter will be
appended, otherwise the mimetype is returned unchanged.
:param mimetype: The mimetype to be used as content type.
:param charset: The charset to be appended for text mimetypes.
:return: The content type.
.. versionchanged:: 0.15
Any type that ends with ``+xml`` gets a charset, not just those
that start with ``application/``. Known text types such as
``application/javascript`` are also given charsets.
| Returns the full content type string with charset for a mimetype. | def get_content_type(mimetype, charset):
"""Returns the full content type string with charset for a mimetype.
If the mimetype represents text, the charset parameter will be
appended, otherwise the mimetype is returned unchanged.
:param mimetype: The mimetype to be used as content type.
:param charset: The charset to be appended for text mimetypes.
:return: The content type.
.. versionchanged:: 0.15
Any type that ends with ``+xml`` gets a charset, not just those
that start with ``application/``. Known text types such as
``application/javascript`` are also given charsets.
"""
if (
mimetype.startswith("text/")
or mimetype in _charset_mimetypes
or mimetype.endswith("+xml")
):
mimetype += "; charset=" + charset
return mimetype | [
"def",
"get_content_type",
"(",
"mimetype",
",",
"charset",
")",
":",
"if",
"(",
"mimetype",
".",
"startswith",
"(",
"\"text/\"",
")",
"or",
"mimetype",
"in",
"_charset_mimetypes",
"or",
"mimetype",
".",
"endswith",
"(",
"\"+xml\"",
")",
")",
":",
"mimetype",
"+=",
"\"; charset=\"",
"+",
"charset",
"return",
"mimetype"
] | [
265,
0
] | [
287,
19
] | python | en | ['en', 'en', 'en'] | True |
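A quick sketch of get_content_type; the results follow from the branches above. The helper is internal to Werkzeug, so no import is shown (its module path varies across versions):

get_content_type("text/html", "utf-8")                # 'text/html; charset=utf-8'
get_content_type("application/xhtml+xml", "utf-8")    # '+xml' types also get a charset
get_content_type("image/png", "utf-8")                # 'image/png' (returned unchanged)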
detect_utf_encoding | (data) | Detect which UTF encoding was used to encode the given bytes.
The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is
accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big
or little endian. Some editors or libraries may prepend a BOM.
:internal:
:param data: Bytes in unknown UTF encoding.
:return: UTF encoding name
.. versionadded:: 0.15
| Detect which UTF encoding was used to encode the given bytes. | def detect_utf_encoding(data):
"""Detect which UTF encoding was used to encode the given bytes.
The latest JSON standard (:rfc:`8259`) suggests that only UTF-8 is
accepted. Older documents allowed 8, 16, or 32. 16 and 32 can be big
or little endian. Some editors or libraries may prepend a BOM.
:internal:
:param data: Bytes in unknown UTF encoding.
:return: UTF encoding name
.. versionadded:: 0.15
"""
head = data[:4]
if head[:3] == codecs.BOM_UTF8:
return "utf-8-sig"
if b"\x00" not in head:
return "utf-8"
if head in (codecs.BOM_UTF32_BE, codecs.BOM_UTF32_LE):
return "utf-32"
if head[:2] in (codecs.BOM_UTF16_BE, codecs.BOM_UTF16_LE):
return "utf-16"
if len(head) == 4:
if head[:3] == b"\x00\x00\x00":
return "utf-32-be"
if head[::2] == b"\x00\x00":
return "utf-16-be"
if head[1:] == b"\x00\x00\x00":
return "utf-32-le"
if head[1::2] == b"\x00\x00":
return "utf-16-le"
if len(head) == 2:
return "utf-16-be" if head.startswith(b"\x00") else "utf-16-le"
return "utf-8" | [
"def",
"detect_utf_encoding",
"(",
"data",
")",
":",
"head",
"=",
"data",
"[",
":",
"4",
"]",
"if",
"head",
"[",
":",
"3",
"]",
"==",
"codecs",
".",
"BOM_UTF8",
":",
"return",
"\"utf-8-sig\"",
"if",
"b\"\\x00\"",
"not",
"in",
"head",
":",
"return",
"\"utf-8\"",
"if",
"head",
"in",
"(",
"codecs",
".",
"BOM_UTF32_BE",
",",
"codecs",
".",
"BOM_UTF32_LE",
")",
":",
"return",
"\"utf-32\"",
"if",
"head",
"[",
":",
"2",
"]",
"in",
"(",
"codecs",
".",
"BOM_UTF16_BE",
",",
"codecs",
".",
"BOM_UTF16_LE",
")",
":",
"return",
"\"utf-16\"",
"if",
"len",
"(",
"head",
")",
"==",
"4",
":",
"if",
"head",
"[",
":",
"3",
"]",
"==",
"b\"\\x00\\x00\\x00\"",
":",
"return",
"\"utf-32-be\"",
"if",
"head",
"[",
":",
":",
"2",
"]",
"==",
"b\"\\x00\\x00\"",
":",
"return",
"\"utf-16-be\"",
"if",
"head",
"[",
"1",
":",
"]",
"==",
"b\"\\x00\\x00\\x00\"",
":",
"return",
"\"utf-32-le\"",
"if",
"head",
"[",
"1",
":",
":",
"2",
"]",
"==",
"b\"\\x00\\x00\"",
":",
"return",
"\"utf-16-le\"",
"if",
"len",
"(",
"head",
")",
"==",
"2",
":",
"return",
"\"utf-16-be\"",
"if",
"head",
".",
"startswith",
"(",
"b\"\\x00\"",
")",
"else",
"\"utf-16-le\"",
"return",
"\"utf-8\""
] | [
290,
0
] | [
334,
18
] | python | en | ['en', 'en', 'en'] | True |
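A short, self-contained sketch of detect_utf_encoding on a few byte strings (assuming the function above is in scope):
import codecs
print(detect_utf_encoding(codecs.BOM_UTF8 + b'{"a": 1}'))   # 'utf-8-sig' (UTF-8 BOM detected)
print(detect_utf_encoding('{"a": 1}'.encode("utf-16-le")))  # 'utf-16-le' (inferred from the null-byte pattern)
print(detect_utf_encoding(b'{"a": 1}'))                     # 'utf-8' (no null bytes in the head)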
format_string | (string, context) | String-template format a string:
>>> format_string('$foo and ${foo}s', dict(foo=42))
'42 and 42s'
This does not do any attribute lookup etc. For more advanced string
formatting have a look at the `werkzeug.template` module.
:param string: the format string.
:param context: a dict with the variables to insert.
| String-template format a string: | def format_string(string, context):
"""String-template format a string:
>>> format_string('$foo and ${foo}s', dict(foo=42))
'42 and 42s'
This does not do any attribute lookup etc. For more advanced string
formatting have a look at the `werkzeug.template` module.
:param string: the format string.
:param context: a dict with the variables to insert.
"""
def lookup_arg(match):
x = context[match.group(1) or match.group(2)]
if not isinstance(x, string_types):
x = type(string)(x)
return x
return _format_re.sub(lookup_arg, string) | [
"def",
"format_string",
"(",
"string",
",",
"context",
")",
":",
"def",
"lookup_arg",
"(",
"match",
")",
":",
"x",
"=",
"context",
"[",
"match",
".",
"group",
"(",
"1",
")",
"or",
"match",
".",
"group",
"(",
"2",
")",
"]",
"if",
"not",
"isinstance",
"(",
"x",
",",
"string_types",
")",
":",
"x",
"=",
"type",
"(",
"string",
")",
"(",
"x",
")",
"return",
"x",
"return",
"_format_re",
".",
"sub",
"(",
"lookup_arg",
",",
"string",
")"
] | [
337,
0
] | [
356,
45
] | python | en | ['en', 'en', 'en'] | True |
secure_filename | (filename) | r"""Pass it a filename and it will return a secure version of it. This
filename can then safely be stored on a regular file system and passed
to :func:`os.path.join`. The filename returned is an ASCII only string
for maximum portability.
On windows systems the function also makes sure that the file is not
named after one of the special device files.
>>> secure_filename("My cool movie.mov")
'My_cool_movie.mov'
>>> secure_filename("../../../etc/passwd")
'etc_passwd'
>>> secure_filename(u'i contain cool \xfcml\xe4uts.txt')
'i_contain_cool_umlauts.txt'
The function might return an empty filename. It's your responsibility
to ensure that the filename is unique and that you abort or
generate a random filename if the function returned an empty one.
.. versionadded:: 0.5
:param filename: the filename to secure
| r"""Pass it a filename and it will return a secure version of it. This
filename can then safely be stored on a regular file system and passed
to :func:`os.path.join`. The filename returned is an ASCII only string
for maximum portability. | def secure_filename(filename):
r"""Pass it a filename and it will return a secure version of it. This
filename can then safely be stored on a regular file system and passed
to :func:`os.path.join`. The filename returned is an ASCII only string
for maximum portability.
On windows systems the function also makes sure that the file is not
named after one of the special device files.
>>> secure_filename("My cool movie.mov")
'My_cool_movie.mov'
>>> secure_filename("../../../etc/passwd")
'etc_passwd'
>>> secure_filename(u'i contain cool \xfcml\xe4uts.txt')
'i_contain_cool_umlauts.txt'
The function might return an empty filename. It's your responsibility
to ensure that the filename is unique and that you abort or
generate a random filename if the function returned an empty one.
.. versionadded:: 0.5
:param filename: the filename to secure
"""
if isinstance(filename, text_type):
from unicodedata import normalize
filename = normalize("NFKD", filename).encode("ascii", "ignore")
if not PY2:
filename = filename.decode("ascii")
for sep in os.path.sep, os.path.altsep:
if sep:
filename = filename.replace(sep, " ")
filename = str(_filename_ascii_strip_re.sub("", "_".join(filename.split()))).strip(
"._"
)
# on nt a couple of special files are present in each folder. We
# have to ensure that the target file is not such a filename. In
# this case we prepend an underline
if (
os.name == "nt"
and filename
and filename.split(".")[0].upper() in _windows_device_files
):
filename = "_" + filename
return filename | [
"def",
"secure_filename",
"(",
"filename",
")",
":",
"if",
"isinstance",
"(",
"filename",
",",
"text_type",
")",
":",
"from",
"unicodedata",
"import",
"normalize",
"filename",
"=",
"normalize",
"(",
"\"NFKD\"",
",",
"filename",
")",
".",
"encode",
"(",
"\"ascii\"",
",",
"\"ignore\"",
")",
"if",
"not",
"PY2",
":",
"filename",
"=",
"filename",
".",
"decode",
"(",
"\"ascii\"",
")",
"for",
"sep",
"in",
"os",
".",
"path",
".",
"sep",
",",
"os",
".",
"path",
".",
"altsep",
":",
"if",
"sep",
":",
"filename",
"=",
"filename",
".",
"replace",
"(",
"sep",
",",
"\" \"",
")",
"filename",
"=",
"str",
"(",
"_filename_ascii_strip_re",
".",
"sub",
"(",
"\"\"",
",",
"\"_\"",
".",
"join",
"(",
"filename",
".",
"split",
"(",
")",
")",
")",
")",
".",
"strip",
"(",
"\"._\"",
")",
"# on nt a couple of special files are present in each folder. We",
"# have to ensure that the target file is not such a filename. In",
"# this case we prepend an underline",
"if",
"(",
"os",
".",
"name",
"==",
"\"nt\"",
"and",
"filename",
"and",
"filename",
".",
"split",
"(",
"\".\"",
")",
"[",
"0",
"]",
".",
"upper",
"(",
")",
"in",
"_windows_device_files",
")",
":",
"filename",
"=",
"\"_\"",
"+",
"filename",
"return",
"filename"
] | [
359,
0
] | [
406,
19
] | python | en | ['en', 'en', 'en'] | True |
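The docstring's warning about empty results matters in practice; a hedged sketch (assuming werkzeug's secure_filename, or the definition above, is in scope):
name = secure_filename("../../../../")   # every character is stripped, so this returns ''
if not name:
    name = "upload.bin"                  # the caller must provide its own fallback name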
escape | (s, quote=None) | Replace special characters "&", "<", ">" and (") with HTML-safe sequences.
There is special handling for `None`, which escapes to an empty string.
.. versionchanged:: 0.9
`quote` is now implicitly on.
:param s: the string to escape.
:param quote: ignored.
| Replace special characters "&", "<", ">" and (") with HTML-safe sequences. | def escape(s, quote=None):
"""Replace special characters "&", "<", ">" and (") with HTML-safe sequences.
There is special handling for `None`, which escapes to an empty string.
.. versionchanged:: 0.9
`quote` is now implicitly on.
:param s: the string to escape.
:param quote: ignored.
"""
if s is None:
return ""
elif hasattr(s, "__html__"):
return text_type(s.__html__())
elif not isinstance(s, string_types):
s = text_type(s)
if quote is not None:
from warnings import warn
warn(
"The 'quote' parameter is no longer used as of version 0.9"
" and will be removed in version 1.0.",
DeprecationWarning,
stacklevel=2,
)
s = (
s.replace("&", "&")
.replace("<", "<")
.replace(">", ">")
.replace('"', """)
)
return s | [
"def",
"escape",
"(",
"s",
",",
"quote",
"=",
"None",
")",
":",
"if",
"s",
"is",
"None",
":",
"return",
"\"\"",
"elif",
"hasattr",
"(",
"s",
",",
"\"__html__\"",
")",
":",
"return",
"text_type",
"(",
"s",
".",
"__html__",
"(",
")",
")",
"elif",
"not",
"isinstance",
"(",
"s",
",",
"string_types",
")",
":",
"s",
"=",
"text_type",
"(",
"s",
")",
"if",
"quote",
"is",
"not",
"None",
":",
"from",
"warnings",
"import",
"warn",
"warn",
"(",
"\"The 'quote' parameter is no longer used as of version 0.9\"",
"\" and will be removed in version 1.0.\"",
",",
"DeprecationWarning",
",",
"stacklevel",
"=",
"2",
",",
")",
"s",
"=",
"(",
"s",
".",
"replace",
"(",
"\"&\"",
",",
"\"&\"",
")",
".",
"replace",
"(",
"\"<\"",
",",
"\"<\"",
")",
".",
"replace",
"(",
"\">\"",
",",
"\">\"",
")",
".",
"replace",
"(",
"'\"'",
",",
"\""\"",
")",
")",
"return",
"s"
] | [
409,
0
] | [
441,
12
] | python | en | ['en', 'en', 'en'] | True |
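A brief illustration of escape (assuming the definition above is in scope):
print(escape('<a href="x">Tom & Jerry</a>'))
# &lt;a href=&quot;x&quot;&gt;Tom &amp; Jerry&lt;/a&gt;
print(escape(None))   # '' (the special handling for None)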
unescape | (s) | The reverse function of `escape`. This unescapes all the HTML
entities, not only the XML entities inserted by `escape`.
:param s: the string to unescape.
| The reverse function of `escape`. This unescapes all the HTML
entities, not only the XML entities inserted by `escape`. | def unescape(s):
"""The reverse function of `escape`. This unescapes all the HTML
entities, not only the XML entities inserted by `escape`.
:param s: the string to unescape.
"""
def handle_match(m):
name = m.group(1)
if name in HTMLBuilder._entities:
return unichr(HTMLBuilder._entities[name])
try:
if name[:2] in ("#x", "#X"):
return unichr(int(name[2:], 16))
elif name.startswith("#"):
return unichr(int(name[1:]))
except ValueError:
pass
return u""
return _entity_re.sub(handle_match, s) | [
"def",
"unescape",
"(",
"s",
")",
":",
"def",
"handle_match",
"(",
"m",
")",
":",
"name",
"=",
"m",
".",
"group",
"(",
"1",
")",
"if",
"name",
"in",
"HTMLBuilder",
".",
"_entities",
":",
"return",
"unichr",
"(",
"HTMLBuilder",
".",
"_entities",
"[",
"name",
"]",
")",
"try",
":",
"if",
"name",
"[",
":",
"2",
"]",
"in",
"(",
"\"#x\"",
",",
"\"#X\"",
")",
":",
"return",
"unichr",
"(",
"int",
"(",
"name",
"[",
"2",
":",
"]",
",",
"16",
")",
")",
"elif",
"name",
".",
"startswith",
"(",
"\"#\"",
")",
":",
"return",
"unichr",
"(",
"int",
"(",
"name",
"[",
"1",
":",
"]",
")",
")",
"except",
"ValueError",
":",
"pass",
"return",
"u\"\"",
"return",
"_entity_re",
".",
"sub",
"(",
"handle_match",
",",
"s",
")"
] | [
444,
0
] | [
464,
42
] | python | en | ['en', 'en', 'en'] | True |
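A matching sketch for unescape (assuming the definition above and its _entity_re / HTMLBuilder._entities tables are available):
print(unescape("Tom &amp; Jerry &lt;3"))   # Tom & Jerry <3
print(unescape("&#65;&#x42;"))             # AB (decimal and hexadecimal character references)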
redirect | (location, code=302, Response=None) | Returns a response object (a WSGI application) that, if called,
redirects the client to the target location. Supported codes are
301, 302, 303, 305, 307, and 308. 300 is not supported because
it's not a real redirect and 304 because it's the answer for a
request with a request with defined If-Modified-Since headers.
.. versionadded:: 0.6
The location can now be a unicode string that is encoded using
the :func:`iri_to_uri` function.
.. versionadded:: 0.10
The class used for the Response object can now be passed in.
:param location: the location the response should redirect to.
:param code: the redirect status code. defaults to 302.
:param class Response: a Response class to use when instantiating a
response. The default is :class:`werkzeug.wrappers.Response` if
unspecified.
| Returns a response object (a WSGI application) that, if called,
redirects the client to the target location. Supported codes are
301, 302, 303, 305, 307, and 308. 300 is not supported because
it's not a real redirect and 304 because it's the answer for a
request with defined If-Modified-Since headers. | def redirect(location, code=302, Response=None):
"""Returns a response object (a WSGI application) that, if called,
redirects the client to the target location. Supported codes are
301, 302, 303, 305, 307, and 308. 300 is not supported because
it's not a real redirect and 304 because it's the answer for a
request with defined If-Modified-Since headers.
.. versionadded:: 0.6
The location can now be a unicode string that is encoded using
the :func:`iri_to_uri` function.
.. versionadded:: 0.10
The class used for the Response object can now be passed in.
:param location: the location the response should redirect to.
:param code: the redirect status code. defaults to 302.
:param class Response: a Response class to use when instantiating a
response. The default is :class:`werkzeug.wrappers.Response` if
unspecified.
"""
if Response is None:
from .wrappers import Response
display_location = escape(location)
if isinstance(location, text_type):
# Safe conversion is necessary here as we might redirect
# to a broken URI scheme (for instance itms-services).
from .urls import iri_to_uri
location = iri_to_uri(location, safe_conversion=True)
response = Response(
'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
"<title>Redirecting...</title>\n"
"<h1>Redirecting...</h1>\n"
"<p>You should be redirected automatically to target URL: "
'<a href="%s">%s</a>. If not click the link.'
% (escape(location), display_location),
code,
mimetype="text/html",
)
response.headers["Location"] = location
return response | [
"def",
"redirect",
"(",
"location",
",",
"code",
"=",
"302",
",",
"Response",
"=",
"None",
")",
":",
"if",
"Response",
"is",
"None",
":",
"from",
".",
"wrappers",
"import",
"Response",
"display_location",
"=",
"escape",
"(",
"location",
")",
"if",
"isinstance",
"(",
"location",
",",
"text_type",
")",
":",
"# Safe conversion is necessary here as we might redirect",
"# to a broken URI scheme (for instance itms-services).",
"from",
".",
"urls",
"import",
"iri_to_uri",
"location",
"=",
"iri_to_uri",
"(",
"location",
",",
"safe_conversion",
"=",
"True",
")",
"response",
"=",
"Response",
"(",
"'<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\\n'",
"\"<title>Redirecting...</title>\\n\"",
"\"<h1>Redirecting...</h1>\\n\"",
"\"<p>You should be redirected automatically to target URL: \"",
"'<a href=\"%s\">%s</a>. If not click the link.'",
"%",
"(",
"escape",
"(",
"location",
")",
",",
"display_location",
")",
",",
"code",
",",
"mimetype",
"=",
"\"text/html\"",
",",
")",
"response",
".",
"headers",
"[",
"\"Location\"",
"]",
"=",
"location",
"return",
"response"
] | [
467,
0
] | [
508,
19
] | python | en | ['en', 'en', 'en'] | True |
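A hedged usage sketch for redirect inside a view function (the view and URL are made up for illustration; assumes the redirect defined above is in scope):
def view(request):
    # send unauthenticated clients to the login page
    if request.authorization is None:
        return redirect("https://example.com/login", code=302)
    ...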
append_slash_redirect | (environ, code=301) | Redirects to the same URL but with a slash appended. The behavior
of this function is undefined if the path ends with a slash already.
:param environ: the WSGI environment for the request that triggers
the redirect.
:param code: the status code for the redirect.
| Redirects to the same URL but with a slash appended. The behavior
of this function is undefined if the path ends with a slash already. | def append_slash_redirect(environ, code=301):
"""Redirects to the same URL but with a slash appended. The behavior
of this function is undefined if the path ends with a slash already.
:param environ: the WSGI environment for the request that triggers
the redirect.
:param code: the status code for the redirect.
"""
new_path = environ["PATH_INFO"].strip("/") + "/"
query_string = environ.get("QUERY_STRING")
if query_string:
new_path += "?" + query_string
return redirect(new_path, code) | [
"def",
"append_slash_redirect",
"(",
"environ",
",",
"code",
"=",
"301",
")",
":",
"new_path",
"=",
"environ",
"[",
"\"PATH_INFO\"",
"]",
".",
"strip",
"(",
"\"/\"",
")",
"+",
"\"/\"",
"query_string",
"=",
"environ",
".",
"get",
"(",
"\"QUERY_STRING\"",
")",
"if",
"query_string",
":",
"new_path",
"+=",
"\"?\"",
"+",
"query_string",
"return",
"redirect",
"(",
"new_path",
",",
"code",
")"
] | [
511,
0
] | [
523,
35
] | python | en | ['en', 'en', 'en'] | True |
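A small WSGI sketch showing how append_slash_redirect is typically wired in (assuming the helper above is in scope):
def app(environ, start_response):
    if not environ["PATH_INFO"].endswith("/"):
        # e.g. /users?active=1 is answered with a redirect to /users/?active=1
        return append_slash_redirect(environ)(environ, start_response)
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello"]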
import_string | (import_name, silent=False) | Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
If `silent` is True the return value will be `None` if the import fails.
:param import_name: the dotted name for the object to import.
:param silent: if set to `True` import errors are ignored and
`None` is returned instead.
:return: imported object
| Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``). | def import_string(import_name, silent=False):
"""Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
If `silent` is True the return value will be `None` if the import fails.
:param import_name: the dotted name for the object to import.
:param silent: if set to `True` import errors are ignored and
`None` is returned instead.
:return: imported object
"""
# force the import name to automatically convert to strings
# __import__ is not able to handle unicode strings in the fromlist
# if the module is a package
import_name = str(import_name).replace(":", ".")
try:
try:
__import__(import_name)
except ImportError:
if "." not in import_name:
raise
else:
return sys.modules[import_name]
module_name, obj_name = import_name.rsplit(".", 1)
module = __import__(module_name, globals(), locals(), [obj_name])
try:
return getattr(module, obj_name)
except AttributeError as e:
raise ImportError(e)
except ImportError as e:
if not silent:
reraise(
ImportStringError, ImportStringError(import_name, e), sys.exc_info()[2]
) | [
"def",
"import_string",
"(",
"import_name",
",",
"silent",
"=",
"False",
")",
":",
"# force the import name to automatically convert to strings",
"# __import__ is not able to handle unicode strings in the fromlist",
"# if the module is a package",
"import_name",
"=",
"str",
"(",
"import_name",
")",
".",
"replace",
"(",
"\":\"",
",",
"\".\"",
")",
"try",
":",
"try",
":",
"__import__",
"(",
"import_name",
")",
"except",
"ImportError",
":",
"if",
"\".\"",
"not",
"in",
"import_name",
":",
"raise",
"else",
":",
"return",
"sys",
".",
"modules",
"[",
"import_name",
"]",
"module_name",
",",
"obj_name",
"=",
"import_name",
".",
"rsplit",
"(",
"\".\"",
",",
"1",
")",
"module",
"=",
"__import__",
"(",
"module_name",
",",
"globals",
"(",
")",
",",
"locals",
"(",
")",
",",
"[",
"obj_name",
"]",
")",
"try",
":",
"return",
"getattr",
"(",
"module",
",",
"obj_name",
")",
"except",
"AttributeError",
"as",
"e",
":",
"raise",
"ImportError",
"(",
"e",
")",
"except",
"ImportError",
"as",
"e",
":",
"if",
"not",
"silent",
":",
"reraise",
"(",
"ImportStringError",
",",
"ImportStringError",
"(",
"import_name",
",",
"e",
")",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
")"
] | [
526,
0
] | [
563,
13
] | python | en | ['en', 'en', 'en'] | True |
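A quick sketch of import_string against the standard library (assuming the function above is in scope):
escape_fn = import_string("xml.sax.saxutils:escape")   # colon and dotted forms are equivalent
print(escape_fn("<b>"))                                # &lt;b&gt;
print(import_string("no.such.module", silent=True))    # None instead of an exception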
find_modules | (import_path, include_packages=False, recursive=False) | Finds all the modules below a package. This can be useful to
automatically import all views / controllers so that their metaclasses /
function decorators have a chance to register themselves on the
application.
Packages are not returned unless `include_packages` is `True`. This can
also recursively list modules but in that case it will import all the
packages to get the correct load path of that module.
:param import_path: the dotted name for the package to find child modules.
:param include_packages: set to `True` if packages should be returned, too.
:param recursive: set to `True` if recursion should happen.
:return: generator
| Finds all the modules below a package. This can be useful to
automatically import all views / controllers so that their metaclasses /
function decorators have a chance to register themselves on the
application. | def find_modules(import_path, include_packages=False, recursive=False):
"""Finds all the modules below a package. This can be useful to
automatically import all views / controllers so that their metaclasses /
function decorators have a chance to register themselves on the
application.
Packages are not returned unless `include_packages` is `True`. This can
also recursively list modules but in that case it will import all the
packages to get the correct load path of that module.
:param import_path: the dotted name for the package to find child modules.
:param include_packages: set to `True` if packages should be returned, too.
:param recursive: set to `True` if recursion should happen.
:return: generator
"""
module = import_string(import_path)
path = getattr(module, "__path__", None)
if path is None:
raise ValueError("%r is not a package" % import_path)
basename = module.__name__ + "."
for _importer, modname, ispkg in pkgutil.iter_modules(path):
modname = basename + modname
if ispkg:
if include_packages:
yield modname
if recursive:
for item in find_modules(modname, include_packages, True):
yield item
else:
yield modname | [
"def",
"find_modules",
"(",
"import_path",
",",
"include_packages",
"=",
"False",
",",
"recursive",
"=",
"False",
")",
":",
"module",
"=",
"import_string",
"(",
"import_path",
")",
"path",
"=",
"getattr",
"(",
"module",
",",
"\"__path__\"",
",",
"None",
")",
"if",
"path",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"%r is not a package\"",
"%",
"import_path",
")",
"basename",
"=",
"module",
".",
"__name__",
"+",
"\".\"",
"for",
"_importer",
",",
"modname",
",",
"ispkg",
"in",
"pkgutil",
".",
"iter_modules",
"(",
"path",
")",
":",
"modname",
"=",
"basename",
"+",
"modname",
"if",
"ispkg",
":",
"if",
"include_packages",
":",
"yield",
"modname",
"if",
"recursive",
":",
"for",
"item",
"in",
"find_modules",
"(",
"modname",
",",
"include_packages",
",",
"True",
")",
":",
"yield",
"item",
"else",
":",
"yield",
"modname"
] | [
566,
0
] | [
595,
25
] | python | en | ['en', 'en', 'en'] | True |
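A sketch of find_modules over a standard-library package (assuming the function above is in scope; the exact module list depends on the Python version):
for name in find_modules("email", include_packages=True):
    print(name)   # e.g. email.charset, email.encoders, email.header, email.mime, ...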
validate_arguments | (func, args, kwargs, drop_extra=True) | Checks if the function accepts the arguments and keyword arguments.
Returns a new ``(args, kwargs)`` tuple that can safely be passed to
the function without causing a `TypeError` because the function signature
is incompatible. If `drop_extra` is set to `True` (which is the default)
any extra positional or keyword arguments are dropped automatically.
The exception raised provides three attributes:
`missing`
A set of argument names that the function expected but were
missing.
`extra`
A dict of keyword arguments that the function cannot handle but
were provided.
`extra_positional`
A list of values that were given by positional argument but the
function cannot accept.
This can be useful for decorators that forward user submitted data to
a view function::
from werkzeug.utils import ArgumentValidationError, validate_arguments
def sanitize(f):
def proxy(request):
data = request.values.to_dict()
try:
args, kwargs = validate_arguments(f, (request,), data)
except ArgumentValidationError:
raise BadRequest('The browser failed to transmit all '
'the data expected.')
return f(*args, **kwargs)
return proxy
:param func: the function the validation is performed against.
:param args: a tuple of positional arguments.
:param kwargs: a dict of keyword arguments.
:param drop_extra: set to `False` if you don't want extra arguments
to be silently dropped.
:return: tuple in the form ``(args, kwargs)``.
| Checks if the function accepts the arguments and keyword arguments.
Returns a new ``(args, kwargs)`` tuple that can safely be passed to
the function without causing a `TypeError` because the function signature
is incompatible. If `drop_extra` is set to `True` (which is the default)
any extra positional or keyword arguments are dropped automatically. | def validate_arguments(func, args, kwargs, drop_extra=True):
"""Checks if the function accepts the arguments and keyword arguments.
Returns a new ``(args, kwargs)`` tuple that can safely be passed to
the function without causing a `TypeError` because the function signature
is incompatible. If `drop_extra` is set to `True` (which is the default)
any extra positional or keyword arguments are dropped automatically.
The exception raised provides three attributes:
`missing`
A set of argument names that the function expected but were
missing.
`extra`
A dict of keyword arguments that the function cannot handle but
were provided.
`extra_positional`
A list of values that were given by positional argument but the
function cannot accept.
This can be useful for decorators that forward user submitted data to
a view function::
from werkzeug.utils import ArgumentValidationError, validate_arguments
def sanitize(f):
def proxy(request):
data = request.values.to_dict()
try:
args, kwargs = validate_arguments(f, (request,), data)
except ArgumentValidationError:
raise BadRequest('The browser failed to transmit all '
'the data expected.')
return f(*args, **kwargs)
return proxy
:param func: the function the validation is performed against.
:param args: a tuple of positional arguments.
:param kwargs: a dict of keyword arguments.
:param drop_extra: set to `False` if you don't want extra arguments
to be silently dropped.
:return: tuple in the form ``(args, kwargs)``.
"""
parser = _parse_signature(func)
args, kwargs, missing, extra, extra_positional = parser(args, kwargs)[:5]
if missing:
raise ArgumentValidationError(tuple(missing))
elif (extra or extra_positional) and not drop_extra:
raise ArgumentValidationError(None, extra, extra_positional)
return tuple(args), kwargs | [
"def",
"validate_arguments",
"(",
"func",
",",
"args",
",",
"kwargs",
",",
"drop_extra",
"=",
"True",
")",
":",
"parser",
"=",
"_parse_signature",
"(",
"func",
")",
"args",
",",
"kwargs",
",",
"missing",
",",
"extra",
",",
"extra_positional",
"=",
"parser",
"(",
"args",
",",
"kwargs",
")",
"[",
":",
"5",
"]",
"if",
"missing",
":",
"raise",
"ArgumentValidationError",
"(",
"tuple",
"(",
"missing",
")",
")",
"elif",
"(",
"extra",
"or",
"extra_positional",
")",
"and",
"not",
"drop_extra",
":",
"raise",
"ArgumentValidationError",
"(",
"None",
",",
"extra",
",",
"extra_positional",
")",
"return",
"tuple",
"(",
"args",
")",
",",
"kwargs"
] | [
598,
0
] | [
648,
30
] | python | en | ['en', 'en', 'en'] | True |
bind_arguments | (func, args, kwargs) | Bind the arguments provided into a dict. When passed a function,
a tuple of arguments and a dict of keyword arguments `bind_arguments`
returns a dict of names as the function would see it. This can be useful
to implement a cache decorator that uses the function arguments to build
the cache key based on the values of the arguments.
:param func: the function the arguments should be bound for.
:param args: tuple of positional arguments.
:param kwargs: a dict of keyword arguments.
:return: a :class:`dict` of bound keyword arguments.
| Bind the arguments provided into a dict. When passed a function,
a tuple of arguments and a dict of keyword arguments `bind_arguments`
returns a dict of names as the function would see it. This can be useful
to implement a cache decorator that uses the function arguments to build
the cache key based on the values of the arguments. | def bind_arguments(func, args, kwargs):
"""Bind the arguments provided into a dict. When passed a function,
a tuple of arguments and a dict of keyword arguments `bind_arguments`
returns a dict of names as the function would see it. This can be useful
to implement a cache decorator that uses the function arguments to build
the cache key based on the values of the arguments.
:param func: the function the arguments should be bound for.
:param args: tuple of positional arguments.
:param kwargs: a dict of keyword arguments.
:return: a :class:`dict` of bound keyword arguments.
"""
(
args,
kwargs,
missing,
extra,
extra_positional,
arg_spec,
vararg_var,
kwarg_var,
) = _parse_signature(func)(args, kwargs)
values = {}
for (name, _has_default, _default), value in zip(arg_spec, args):
values[name] = value
if vararg_var is not None:
values[vararg_var] = tuple(extra_positional)
elif extra_positional:
raise TypeError("too many positional arguments")
if kwarg_var is not None:
multikw = set(extra) & set([x[0] for x in arg_spec])
if multikw:
raise TypeError(
"got multiple values for keyword argument " + repr(next(iter(multikw)))
)
values[kwarg_var] = extra
elif extra:
raise TypeError("got unexpected keyword argument " + repr(next(iter(extra))))
return values | [
"def",
"bind_arguments",
"(",
"func",
",",
"args",
",",
"kwargs",
")",
":",
"(",
"args",
",",
"kwargs",
",",
"missing",
",",
"extra",
",",
"extra_positional",
",",
"arg_spec",
",",
"vararg_var",
",",
"kwarg_var",
",",
")",
"=",
"_parse_signature",
"(",
"func",
")",
"(",
"args",
",",
"kwargs",
")",
"values",
"=",
"{",
"}",
"for",
"(",
"name",
",",
"_has_default",
",",
"_default",
")",
",",
"value",
"in",
"zip",
"(",
"arg_spec",
",",
"args",
")",
":",
"values",
"[",
"name",
"]",
"=",
"value",
"if",
"vararg_var",
"is",
"not",
"None",
":",
"values",
"[",
"vararg_var",
"]",
"=",
"tuple",
"(",
"extra_positional",
")",
"elif",
"extra_positional",
":",
"raise",
"TypeError",
"(",
"\"too many positional arguments\"",
")",
"if",
"kwarg_var",
"is",
"not",
"None",
":",
"multikw",
"=",
"set",
"(",
"extra",
")",
"&",
"set",
"(",
"[",
"x",
"[",
"0",
"]",
"for",
"x",
"in",
"arg_spec",
"]",
")",
"if",
"multikw",
":",
"raise",
"TypeError",
"(",
"\"got multiple values for keyword argument \"",
"+",
"repr",
"(",
"next",
"(",
"iter",
"(",
"multikw",
")",
")",
")",
")",
"values",
"[",
"kwarg_var",
"]",
"=",
"extra",
"elif",
"extra",
":",
"raise",
"TypeError",
"(",
"\"got unexpected keyword argument \"",
"+",
"repr",
"(",
"next",
"(",
"iter",
"(",
"extra",
")",
")",
")",
")",
"return",
"values"
] | [
651,
0
] | [
689,
17
] | python | en | ['en', 'en', 'en'] | True |
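A minimal sketch of bind_arguments, e.g. for building a cache key from a call (the greet function is invented for the example):
def greet(name, punctuation="!"):
    return "Hello, " + name + punctuation
print(bind_arguments(greet, ("World", "?"), {}))   # {'name': 'World', 'punctuation': '?'}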
XViewMiddleware.process_view | (self, request, view_func, view_args, view_kwargs) |
If the request method is HEAD and either the IP is internal or the
user is a logged-in staff member, return a response with an x-view
header indicating the view function. This is used to look up the view
function for an arbitrary page.
|
If the request method is HEAD and either the IP is internal or the
user is a logged-in staff member, return a response with an x-view
header indicating the view function. This is used to look up the view
function for an arbitrary page.
| def process_view(self, request, view_func, view_args, view_kwargs):
"""
If the request method is HEAD and either the IP is internal or the
user is a logged-in staff member, return a response with an x-view
header indicating the view function. This is used to look up the view
function for an arbitrary page.
"""
assert hasattr(request, 'user'), (
"The XView middleware requires authentication middleware to be "
"installed. Edit your MIDDLEWARE setting to insert "
"'django.contrib.auth.middleware.AuthenticationMiddleware'."
)
if request.method == 'HEAD' and (request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS or
(request.user.is_active and request.user.is_staff)):
response = HttpResponse()
response.headers['X-View'] = get_view_name(view_func)
return response | [
"def",
"process_view",
"(",
"self",
",",
"request",
",",
"view_func",
",",
"view_args",
",",
"view_kwargs",
")",
":",
"assert",
"hasattr",
"(",
"request",
",",
"'user'",
")",
",",
"(",
"\"The XView middleware requires authentication middleware to be \"",
"\"installed. Edit your MIDDLEWARE setting to insert \"",
"\"'django.contrib.auth.middleware.AuthenticationMiddleware'.\"",
")",
"if",
"request",
".",
"method",
"==",
"'HEAD'",
"and",
"(",
"request",
".",
"META",
".",
"get",
"(",
"'REMOTE_ADDR'",
")",
"in",
"settings",
".",
"INTERNAL_IPS",
"or",
"(",
"request",
".",
"user",
".",
"is_active",
"and",
"request",
".",
"user",
".",
"is_staff",
")",
")",
":",
"response",
"=",
"HttpResponse",
"(",
")",
"response",
".",
"headers",
"[",
"'X-View'",
"]",
"=",
"get_view_name",
"(",
"view_func",
")",
"return",
"response"
] | [
11,
4
] | [
27,
27
] | python | en | ['en', 'error', 'th'] | False |
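A hypothetical, abridged settings.py fragment enabling this middleware (stock Django module paths; all other settings omitted):
MIDDLEWARE = [
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",   # required, per the assert above
    "django.contrib.admindocs.middleware.XViewMiddleware",
]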
tempdir_registry | () | Provides a scoped global tempdir registry that can be used to dictate
whether directories should be deleted.
| Provides a scoped global tempdir registry that can be used to dictate
whether directories should be deleted.
| def tempdir_registry():
# type: () -> Iterator[TempDirectoryTypeRegistry]
"""Provides a scoped global tempdir registry that can be used to dictate
whether directories should be deleted.
"""
global _tempdir_registry
old_tempdir_registry = _tempdir_registry
_tempdir_registry = TempDirectoryTypeRegistry()
try:
yield _tempdir_registry
finally:
_tempdir_registry = old_tempdir_registry | [
"def",
"tempdir_registry",
"(",
")",
":",
"# type: () -> Iterator[TempDirectoryTypeRegistry]",
"global",
"_tempdir_registry",
"old_tempdir_registry",
"=",
"_tempdir_registry",
"_tempdir_registry",
"=",
"TempDirectoryTypeRegistry",
"(",
")",
"try",
":",
"yield",
"_tempdir_registry",
"finally",
":",
"_tempdir_registry",
"=",
"old_tempdir_registry"
] | [
65,
0
] | [
76,
48
] | python | en | ['en', 'en', 'en'] | True |
TempDirectoryTypeRegistry.set_delete | (self, kind, value) | Indicate whether a TempDirectory of the given kind should be
auto-deleted.
| Indicate whether a TempDirectory of the given kind should be
auto-deleted.
| def set_delete(self, kind, value):
# type: (str, bool) -> None
"""Indicate whether a TempDirectory of the given kind should be
auto-deleted.
"""
self._should_delete[kind] = value | [
"def",
"set_delete",
"(",
"self",
",",
"kind",
",",
"value",
")",
":",
"# type: (str, bool) -> None",
"self",
".",
"_should_delete",
"[",
"kind",
"]",
"=",
"value"
] | [
46,
4
] | [
51,
41
] | python | en | ['en', 'en', 'en'] | True |
TempDirectoryTypeRegistry.get_delete | (self, kind) | Get configured auto-delete flag for a given TempDirectory type,
default True.
| Get configured auto-delete flag for a given TempDirectory type,
default True.
| def get_delete(self, kind):
# type: (str) -> bool
"""Get configured auto-delete flag for a given TempDirectory type,
default True.
"""
return self._should_delete.get(kind, True) | [
"def",
"get_delete",
"(",
"self",
",",
"kind",
")",
":",
"# type: (str) -> bool",
"return",
"self",
".",
"_should_delete",
".",
"get",
"(",
"kind",
",",
"True",
")"
] | [
53,
4
] | [
58,
50
] | python | en | ['en', 'en', 'en'] | True |
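A small sketch tying tempdir_registry and the two methods above together (pip-internal API, shown only to illustrate the calls defined here):
with tempdir_registry() as registry:
    registry.set_delete("build", False)    # keep temp dirs of kind "build", e.g. for debugging
    print(registry.get_delete("build"))    # False
    print(registry.get_delete("unpack"))   # True (default for kinds that were never configured)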
TempDirectory._create | (self, kind) | Create a temporary directory and store its path in self.path | Create a temporary directory and store its path in self.path | def _create(self, kind):
# type: (str) -> str
"""Create a temporary directory and store its path in self.path"""
# We realpath here because some systems have their default tmpdir
# symlinked to another directory. This tends to confuse build
# scripts, so we canonicalize the path by traversing potential
# symlinks here.
path = os.path.realpath(tempfile.mkdtemp(prefix=f"pip-{kind}-"))
logger.debug("Created temporary directory: %s", path)
return path | [
"def",
"_create",
"(",
"self",
",",
"kind",
")",
":",
"# type: (str) -> str",
"# We realpath here because some systems have their default tmpdir",
"# symlinked to another directory. This tends to confuse build",
"# scripts, so we canonicalize the path by traversing potential",
"# symlinks here.",
"path",
"=",
"os",
".",
"path",
".",
"realpath",
"(",
"tempfile",
".",
"mkdtemp",
"(",
"prefix",
"=",
"f\"pip-{kind}-\"",
")",
")",
"logger",
".",
"debug",
"(",
"\"Created temporary directory: %s\"",
",",
"path",
")",
"return",
"path"
] | [
166,
4
] | [
175,
19
] | python | en | ['en', 'en', 'en'] | True |
TempDirectory.cleanup | (self) | Remove the temporary directory created and reset state | Remove the temporary directory created and reset state | def cleanup(self):
# type: () -> None
"""Remove the temporary directory created and reset state"""
self._deleted = True
if not os.path.exists(self._path):
return
rmtree(self._path) | [
"def",
"cleanup",
"(",
"self",
")",
":",
"# type: () -> None",
"self",
".",
"_deleted",
"=",
"True",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"_path",
")",
":",
"return",
"rmtree",
"(",
"self",
".",
"_path",
")"
] | [
177,
4
] | [
183,
26
] | python | en | ['en', 'en', 'en'] | True |
AdjacentTempDirectory._generate_names | (cls, name) | Generates a series of temporary names.
The algorithm replaces the leading characters in the name
with ones that are valid filesystem characters, but are not
valid package names (for both Python and pip definitions of
package).
| Generates a series of temporary names. | def _generate_names(cls, name):
# type: (str) -> Iterator[str]
"""Generates a series of temporary names.
The algorithm replaces the leading characters in the name
with ones that are valid filesystem characters, but are not
valid package names (for both Python and pip definitions of
package).
"""
for i in range(1, len(name)):
for candidate in itertools.combinations_with_replacement(
cls.LEADING_CHARS, i - 1
):
new_name = "~" + "".join(candidate) + name[i:]
if new_name != name:
yield new_name
# If we make it this far, we will have to make a longer name
for i in range(len(cls.LEADING_CHARS)):
for candidate in itertools.combinations_with_replacement(
cls.LEADING_CHARS, i
):
new_name = "~" + "".join(candidate) + name
if new_name != name:
yield new_name | [
"def",
"_generate_names",
"(",
"cls",
",",
"name",
")",
":",
"# type: (str) -> Iterator[str]",
"for",
"i",
"in",
"range",
"(",
"1",
",",
"len",
"(",
"name",
")",
")",
":",
"for",
"candidate",
"in",
"itertools",
".",
"combinations_with_replacement",
"(",
"cls",
".",
"LEADING_CHARS",
",",
"i",
"-",
"1",
")",
":",
"new_name",
"=",
"\"~\"",
"+",
"\"\"",
".",
"join",
"(",
"candidate",
")",
"+",
"name",
"[",
"i",
":",
"]",
"if",
"new_name",
"!=",
"name",
":",
"yield",
"new_name",
"# If we make it this far, we will have to make a longer name",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"cls",
".",
"LEADING_CHARS",
")",
")",
":",
"for",
"candidate",
"in",
"itertools",
".",
"combinations_with_replacement",
"(",
"cls",
".",
"LEADING_CHARS",
",",
"i",
")",
":",
"new_name",
"=",
"\"~\"",
"+",
"\"\"",
".",
"join",
"(",
"candidate",
")",
"+",
"name",
"if",
"new_name",
"!=",
"name",
":",
"yield",
"new_name"
] | [
214,
4
] | [
238,
34
] | python | en | ['en', 'en', 'en'] | True |
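A hedged peek at the candidate names the classmethod above yields (later values depend on LEADING_CHARS):
import itertools
candidates = AdjacentTempDirectory._generate_names("foo")
print(list(itertools.islice(candidates, 4)))   # first entry is '~oo'; later entries swap in more leading characters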
check_version | () |
Log error and exit when the installed version of paddlepaddle is
not satisfied.
|
Log error and exit when the installed version of paddlepaddle is
not satisfied.
| def check_version():
"""
Log error and exit when the installed version of paddlepaddle is
not satisfied.
"""
err = "PaddlePaddle version 1.8.0 or higher is required, " \
"or a suitable develop version is satisfied as well. \n" \
"Please make sure the version is good with your code."
try:
pass
# paddle.utils.require_version('0.0.0')
except Exception:
logger.error(err)
sys.exit(1) | [
"def",
"check_version",
"(",
")",
":",
"err",
"=",
"\"PaddlePaddle version 1.8.0 or higher is required, \"",
"\"or a suitable develop version is satisfied as well. \\n\"",
"\"Please make sure the version is good with your code.\"",
"try",
":",
"pass",
"# paddle.utils.require_version('0.0.0')",
"except",
"Exception",
":",
"logger",
".",
"error",
"(",
"err",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | [
29,
0
] | [
42,
19
] | python | en | ['en', 'error', 'th'] | False |
check_gpu | () |
Log error and exit when using paddlepaddle cpu version.
|
Log error and exit when using paddlepaddle cpu version.
| def check_gpu():
"""
Log error and exit when using paddlepaddle cpu version.
"""
err = "You are using paddlepaddle cpu version! Please try to " \
"install paddlepaddle-gpu to run model on GPU."
try:
assert is_compiled_with_cuda()
except AssertionError:
logger.error(err)
sys.exit(1) | [
"def",
"check_gpu",
"(",
")",
":",
"err",
"=",
"\"You are using paddlepaddle cpu version! Please try to \"",
"\"install paddlepaddle-gpu to run model on GPU.\"",
"try",
":",
"assert",
"is_compiled_with_cuda",
"(",
")",
"except",
"AssertionError",
":",
"logger",
".",
"error",
"(",
"err",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | [
45,
0
] | [
56,
19
] | python | en | ['en', 'error', 'th'] | False |
check_architecture | (architecture) |
check architecture and recommend similar architectures
|
check architecture and recommend similar architectures
| def check_architecture(architecture):
"""
check architecture and recommend similar architectures
"""
assert isinstance(architecture, dict), \
("the type of architecture({}) should be dict". format(architecture))
assert "name" in architecture, \
("name must be in the architecture keys, just contains: {}". format(
architecture.keys()))
similar_names = similar_architectures(architecture["name"],
get_architectures())
model_list = ', '.join(similar_names)
err = "Architecture [{}] is not exist! Maybe you want: [{}]" \
"".format(architecture["name"], model_list)
try:
assert architecture["name"] in similar_names
except AssertionError:
logger.error(err)
sys.exit(1) | [
"def",
"check_architecture",
"(",
"architecture",
")",
":",
"assert",
"isinstance",
"(",
"architecture",
",",
"dict",
")",
",",
"(",
"\"the type of architecture({}) should be dict\"",
".",
"format",
"(",
"architecture",
")",
")",
"assert",
"\"name\"",
"in",
"architecture",
",",
"(",
"\"name must be in the architecture keys, just contains: {}\"",
".",
"format",
"(",
"architecture",
".",
"keys",
"(",
")",
")",
")",
"similar_names",
"=",
"similar_architectures",
"(",
"architecture",
"[",
"\"name\"",
"]",
",",
"get_architectures",
"(",
")",
")",
"model_list",
"=",
"', '",
".",
"join",
"(",
"similar_names",
")",
"err",
"=",
"\"Architecture [{}] is not exist! Maybe you want: [{}]\"",
"\"\"",
".",
"format",
"(",
"architecture",
"[",
"\"name\"",
"]",
",",
"model_list",
")",
"try",
":",
"assert",
"architecture",
"[",
"\"name\"",
"]",
"in",
"similar_names",
"except",
"AssertionError",
":",
"logger",
".",
"error",
"(",
"err",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | [
59,
0
] | [
78,
19
] | python | en | ['en', 'error', 'th'] | False |
check_mix | (architecture, use_mix=False) |
check mix parameter
|
check mix parameter
| def check_mix(architecture, use_mix=False):
"""
check mix parameter
"""
err = "Cannot use mix processing in GoogLeNet, " \
"please set use_mix = False."
try:
if architecture["name"] == "GoogLeNet":
assert use_mix is not True
except AssertionError:
logger.error(err)
sys.exit(1) | [
"def",
"check_mix",
"(",
"architecture",
",",
"use_mix",
"=",
"False",
")",
":",
"err",
"=",
"\"Cannot use mix processing in GoogLeNet, \"",
"\"please set use_mix = False.\"",
"try",
":",
"if",
"architecture",
"[",
"\"name\"",
"]",
"==",
"\"GoogLeNet\"",
":",
"assert",
"use_mix",
"is",
"not",
"True",
"except",
"AssertionError",
":",
"logger",
".",
"error",
"(",
"err",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | [
81,
0
] | [
92,
19
] | python | en | ['en', 'error', 'th'] | False |
check_classes_num | (classes_num) |
check classes_num
|
check classes_num
| def check_classes_num(classes_num):
"""
check classes_num
"""
err = "classes_num({}) should be a positive integer" \
"and larger than 1".format(classes_num)
try:
assert isinstance(classes_num, int)
assert classes_num > 1
except AssertionError:
logger.error(err)
sys.exit(1) | [
"def",
"check_classes_num",
"(",
"classes_num",
")",
":",
"err",
"=",
"\"classes_num({}) should be a positive integer\"",
"\"and larger than 1\"",
".",
"format",
"(",
"classes_num",
")",
"try",
":",
"assert",
"isinstance",
"(",
"classes_num",
",",
"int",
")",
"assert",
"classes_num",
">",
"1",
"except",
"AssertionError",
":",
"logger",
".",
"error",
"(",
"err",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | [
95,
0
] | [
106,
19
] | python | en | ['en', 'error', 'th'] | False |
check_data_dir | (path) |
check data_dir
|
check data_dir
| def check_data_dir(path):
"""
check data_dir
"""
err = "Data path is not exist, please given a right path" \
"".format(path)
try:
assert os.path.isdir(path)
except AssertionError:
logger.error(err)
sys.exit(1) | [
"def",
"check_data_dir",
"(",
"path",
")",
":",
"err",
"=",
"\"Data path is not exist, please given a right path\"",
"\"\"",
".",
"format",
"(",
"path",
")",
"try",
":",
"assert",
"os",
".",
"isdir",
"(",
"path",
")",
"except",
"AssertionError",
":",
"logger",
".",
"error",
"(",
"err",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | [
109,
0
] | [
119,
19
] | python | en | ['en', 'error', 'th'] | False |
check_function_params | (config, key) |
check specify config
|
check specify config
| def check_function_params(config, key):
"""
check specify config
"""
k_config = config.get(key)
assert k_config is not None, \
('{} is required in config'.format(key))
assert k_config.get('function'), \
('function is required {} config'.format(key))
params = k_config.get('params')
assert params is not None, \
('params is required in {} config'.format(key))
assert isinstance(params, dict), \
('the params in {} config should be a dict'.format(key)) | [
"def",
"check_function_params",
"(",
"config",
",",
"key",
")",
":",
"k_config",
"=",
"config",
".",
"get",
"(",
"key",
")",
"assert",
"k_config",
"is",
"not",
"None",
",",
"(",
"'{} is required in config'",
".",
"format",
"(",
"key",
")",
")",
"assert",
"k_config",
".",
"get",
"(",
"'function'",
")",
",",
"(",
"'function is required {} config'",
".",
"format",
"(",
"key",
")",
")",
"params",
"=",
"k_config",
".",
"get",
"(",
"'params'",
")",
"assert",
"params",
"is",
"not",
"None",
",",
"(",
"'params is required in {} config'",
".",
"format",
"(",
"key",
")",
")",
"assert",
"isinstance",
"(",
"params",
",",
"dict",
")",
",",
"(",
"'the params in {} config should be a dict'",
".",
"format",
"(",
"key",
")",
")"
] | [
122,
0
] | [
136,
64
] | python | en | ['en', 'error', 'th'] | False |
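An illustrative config fragment that satisfies check_function_params (the key and parameter names are made up for the example):
config = {
    "optimizer": {"function": "Momentum", "params": {"momentum": 0.9}},
}
check_function_params(config, "optimizer")   # passes; a missing 'function' or 'params' key raises AssertionError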
sanitize_content_filename | (filename: str) |
Sanitize the "filename" value from a Content-Disposition header.
|
Sanitize the "filename" value from a Content-Disposition header.
| def sanitize_content_filename(filename: str) -> str:
"""
Sanitize the "filename" value from a Content-Disposition header.
"""
return os.path.basename(filename) | [
"def",
"sanitize_content_filename",
"(",
"filename",
":",
"str",
")",
"->",
"str",
":",
"return",
"os",
".",
"path",
".",
"basename",
"(",
"filename",
")"
] | [
70,
0
] | [
74,
37
] | python | en | ['en', 'error', 'th'] | False |
parse_content_disposition | (content_disposition: str, default_filename: str) |
Parse the "filename" value from a Content-Disposition header, and
return the default filename if the result is empty.
|
Parse the "filename" value from a Content-Disposition header, and
return the default filename if the result is empty.
| def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
"""
Parse the "filename" value from a Content-Disposition header, and
return the default filename if the result is empty.
"""
_type, params = cgi.parse_header(content_disposition)
filename = params.get("filename")
if filename:
# We need to sanitize the filename to prevent directory traversal
# in case the filename contains ".." path parts.
filename = sanitize_content_filename(filename)
return filename or default_filename | [
"def",
"parse_content_disposition",
"(",
"content_disposition",
":",
"str",
",",
"default_filename",
":",
"str",
")",
"->",
"str",
":",
"_type",
",",
"params",
"=",
"cgi",
".",
"parse_header",
"(",
"content_disposition",
")",
"filename",
"=",
"params",
".",
"get",
"(",
"\"filename\"",
")",
"if",
"filename",
":",
"# We need to sanitize the filename to prevent directory traversal",
"# in case the filename contains \"..\" path parts.",
"filename",
"=",
"sanitize_content_filename",
"(",
"filename",
")",
"return",
"filename",
"or",
"default_filename"
] | [
77,
0
] | [
88,
39
] | python | en | ['en', 'error', 'th'] | False |
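A short sketch of the two helpers above, showing the directory-traversal protection (assuming they are in scope):
print(parse_content_disposition('attachment; filename="../../evil.whl"', "pkg.whl"))
# 'evil.whl' -- the path components are stripped by sanitize_content_filename
print(parse_content_disposition("attachment", "pkg.whl"))
# 'pkg.whl' -- no filename parameter, so the default is used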
_get_http_response_filename | (resp: Response, link: Link) | Get an ideal filename from the given HTTP response, falling back to
the link filename if not provided.
| Get an ideal filename from the given HTTP response, falling back to
the link filename if not provided.
| def _get_http_response_filename(resp: Response, link: Link) -> str:
"""Get an ideal filename from the given HTTP response, falling back to
the link filename if not provided.
"""
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get("content-disposition")
if content_disposition:
filename = parse_content_disposition(content_disposition, filename)
ext: Optional[str] = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
return filename | [
"def",
"_get_http_response_filename",
"(",
"resp",
":",
"Response",
",",
"link",
":",
"Link",
")",
"->",
"str",
":",
"filename",
"=",
"link",
".",
"filename",
"# fallback",
"# Have a look at the Content-Disposition header for a better guess",
"content_disposition",
"=",
"resp",
".",
"headers",
".",
"get",
"(",
"\"content-disposition\"",
")",
"if",
"content_disposition",
":",
"filename",
"=",
"parse_content_disposition",
"(",
"content_disposition",
",",
"filename",
")",
"ext",
":",
"Optional",
"[",
"str",
"]",
"=",
"splitext",
"(",
"filename",
")",
"[",
"1",
"]",
"if",
"not",
"ext",
":",
"ext",
"=",
"mimetypes",
".",
"guess_extension",
"(",
"resp",
".",
"headers",
".",
"get",
"(",
"\"content-type\"",
",",
"\"\"",
")",
")",
"if",
"ext",
":",
"filename",
"+=",
"ext",
"if",
"not",
"ext",
"and",
"link",
".",
"url",
"!=",
"resp",
".",
"url",
":",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"resp",
".",
"url",
")",
"[",
"1",
"]",
"if",
"ext",
":",
"filename",
"+=",
"ext",
"return",
"filename"
] | [
91,
0
] | [
109,
19
] | python | en | ['en', 'en', 'en'] | True |
Downloader.__call__ | (self, link: Link, location: str) | Download the file given by link into location. | Download the file given by link into location. | def __call__(self, link: Link, location: str) -> Tuple[str, str]:
"""Download the file given by link into location."""
try:
resp = _http_get_download(self._session, link)
except NetworkConnectionError as e:
assert e.response is not None
logger.critical(
"HTTP error %s while getting %s", e.response.status_code, link
)
raise
filename = _get_http_response_filename(resp, link)
filepath = os.path.join(location, filename)
chunks = _prepare_download(resp, link, self._progress_bar)
with open(filepath, "wb") as content_file:
for chunk in chunks:
content_file.write(chunk)
content_type = resp.headers.get("Content-Type", "")
return filepath, content_type | [
"def",
"__call__",
"(",
"self",
",",
"link",
":",
"Link",
",",
"location",
":",
"str",
")",
"->",
"Tuple",
"[",
"str",
",",
"str",
"]",
":",
"try",
":",
"resp",
"=",
"_http_get_download",
"(",
"self",
".",
"_session",
",",
"link",
")",
"except",
"NetworkConnectionError",
"as",
"e",
":",
"assert",
"e",
".",
"response",
"is",
"not",
"None",
"logger",
".",
"critical",
"(",
"\"HTTP error %s while getting %s\"",
",",
"e",
".",
"response",
".",
"status_code",
",",
"link",
")",
"raise",
"filename",
"=",
"_get_http_response_filename",
"(",
"resp",
",",
"link",
")",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"location",
",",
"filename",
")",
"chunks",
"=",
"_prepare_download",
"(",
"resp",
",",
"link",
",",
"self",
".",
"_progress_bar",
")",
"with",
"open",
"(",
"filepath",
",",
"\"wb\"",
")",
"as",
"content_file",
":",
"for",
"chunk",
"in",
"chunks",
":",
"content_file",
".",
"write",
"(",
"chunk",
")",
"content_type",
"=",
"resp",
".",
"headers",
".",
"get",
"(",
"\"Content-Type\"",
",",
"\"\"",
")",
"return",
"filepath",
",",
"content_type"
] | [
128,
4
] | [
147,
37
] | python | en | ['en', 'en', 'en'] | True |
BatchDownloader.__call__ | (
self, links: Iterable[Link], location: str
) | Download the files given by links into location. | Download the files given by links into location. | def __call__(
self, links: Iterable[Link], location: str
) -> Iterable[Tuple[Link, Tuple[str, str]]]:
"""Download the files given by links into location."""
for link in links:
try:
resp = _http_get_download(self._session, link)
except NetworkConnectionError as e:
assert e.response is not None
logger.critical(
"HTTP error %s while getting %s",
e.response.status_code,
link,
)
raise
filename = _get_http_response_filename(resp, link)
filepath = os.path.join(location, filename)
chunks = _prepare_download(resp, link, self._progress_bar)
with open(filepath, "wb") as content_file:
for chunk in chunks:
content_file.write(chunk)
content_type = resp.headers.get("Content-Type", "")
yield link, (filepath, content_type) | [
"def",
"__call__",
"(",
"self",
",",
"links",
":",
"Iterable",
"[",
"Link",
"]",
",",
"location",
":",
"str",
")",
"->",
"Iterable",
"[",
"Tuple",
"[",
"Link",
",",
"Tuple",
"[",
"str",
",",
"str",
"]",
"]",
"]",
":",
"for",
"link",
"in",
"links",
":",
"try",
":",
"resp",
"=",
"_http_get_download",
"(",
"self",
".",
"_session",
",",
"link",
")",
"except",
"NetworkConnectionError",
"as",
"e",
":",
"assert",
"e",
".",
"response",
"is",
"not",
"None",
"logger",
".",
"critical",
"(",
"\"HTTP error %s while getting %s\"",
",",
"e",
".",
"response",
".",
"status_code",
",",
"link",
",",
")",
"raise",
"filename",
"=",
"_get_http_response_filename",
"(",
"resp",
",",
"link",
")",
"filepath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"location",
",",
"filename",
")",
"chunks",
"=",
"_prepare_download",
"(",
"resp",
",",
"link",
",",
"self",
".",
"_progress_bar",
")",
"with",
"open",
"(",
"filepath",
",",
"\"wb\"",
")",
"as",
"content_file",
":",
"for",
"chunk",
"in",
"chunks",
":",
"content_file",
".",
"write",
"(",
"chunk",
")",
"content_type",
"=",
"resp",
".",
"headers",
".",
"get",
"(",
"\"Content-Type\"",
",",
"\"\"",
")",
"yield",
"link",
",",
"(",
"filepath",
",",
"content_type",
")"
] | [
159,
4
] | [
183,
48
] | python | en | ['en', 'en', 'en'] | True |
after_this_request | (f) | Executes a function after this request. This is useful to modify
response objects. The function is passed the response object and has
to return the same or a new one.
Example::
@app.route('/')
def index():
@after_this_request
def add_header(response):
response.headers['X-Foo'] = 'Parachute'
return response
return 'Hello World!'
This is more useful if a function other than the view function wants to
modify a response. For instance think of a decorator that wants to add
some headers without converting the return value into a response object.
.. versionadded:: 0.9
| Executes a function after this request. This is useful to modify
response objects. The function is passed the response object and has
to return the same or a new one. | def after_this_request(f):
"""Executes a function after this request. This is useful to modify
response objects. The function is passed the response object and has
to return the same or a new one.
Example::
@app.route('/')
def index():
@after_this_request
def add_header(response):
response.headers['X-Foo'] = 'Parachute'
return response
return 'Hello World!'
This is more useful if a function other than the view function wants to
modify a response. For instance think of a decorator that wants to add
some headers without converting the return value into a response object.
.. versionadded:: 0.9
"""
_request_ctx_stack.top._after_request_functions.append(f)
return f | [
"def",
"after_this_request",
"(",
"f",
")",
":",
"_request_ctx_stack",
".",
"top",
".",
"_after_request_functions",
".",
"append",
"(",
"f",
")",
"return",
"f"
] | [
53,
0
] | [
75,
12
] | python | en | ['en', 'en', 'en'] | True |
copy_current_request_context | (f) | A helper function that decorates a function to retain the current
request context. This is useful when working with greenlets. The moment
the function is decorated a copy of the request context is created and
then pushed when the function is called.
Example::
import gevent
from flask import copy_current_request_context
@app.route('/')
def index():
@copy_current_request_context
def do_some_work():
# do some work here, it can access flask.request like you
# would otherwise in the view function.
...
gevent.spawn(do_some_work)
return 'Regular response'
.. versionadded:: 0.10
| A helper function that decorates a function to retain the current
request context. This is useful when working with greenlets. The moment
the function is decorated a copy of the request context is created and
then pushed when the function is called. | def copy_current_request_context(f):
"""A helper function that decorates a function to retain the current
request context. This is useful when working with greenlets. The moment
the function is decorated a copy of the request context is created and
then pushed when the function is called.
Example::
import gevent
from flask import copy_current_request_context
@app.route('/')
def index():
@copy_current_request_context
def do_some_work():
# do some work here, it can access flask.request like you
# would otherwise in the view function.
...
gevent.spawn(do_some_work)
return 'Regular response'
.. versionadded:: 0.10
"""
top = _request_ctx_stack.top
if top is None:
raise RuntimeError('This decorator can only be used at local scopes '
'when a request context is on the stack. For instance within '
'view functions.')
reqctx = top.copy()
def wrapper(*args, **kwargs):
with reqctx:
return f(*args, **kwargs)
return update_wrapper(wrapper, f) | [
"def",
"copy_current_request_context",
"(",
"f",
")",
":",
"top",
"=",
"_request_ctx_stack",
".",
"top",
"if",
"top",
"is",
"None",
":",
"raise",
"RuntimeError",
"(",
"'This decorator can only be used at local scopes '",
"'when a request context is on the stack. For instance within '",
"'view functions.'",
")",
"reqctx",
"=",
"top",
".",
"copy",
"(",
")",
"def",
"wrapper",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"with",
"reqctx",
":",
"return",
"f",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"update_wrapper",
"(",
"wrapper",
",",
"f",
")"
] | [
78,
0
] | [
110,
37
] | python | en | ['en', 'en', 'en'] | True |
has_request_context | () | If you have code that wants to test if a request context is there or
not this function can be used. For instance, you may want to take advantage
of request information if the request object is available, but fail
silently if it is unavailable.
::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and has_request_context():
remote_addr = request.remote_addr
self.remote_addr = remote_addr
Alternatively you can also just test any of the context bound objects
(such as :class:`request` or :class:`g` for truthness)::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and request:
remote_addr = request.remote_addr
self.remote_addr = remote_addr
.. versionadded:: 0.7
| If you have code that wants to test if a request context is there or
not this function can be used. For instance, you may want to take advantage
of request information if the request object is available, but fail
silently if it is unavailable. | def has_request_context():
"""If you have code that wants to test if a request context is there or
not this function can be used. For instance, you may want to take advantage
of request information if the request object is available, but fail
silently if it is unavailable.
::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and has_request_context():
remote_addr = request.remote_addr
self.remote_addr = remote_addr
Alternatively you can also just test any of the context bound objects
(such as :class:`request` or :class:`g` for truthness)::
class User(db.Model):
def __init__(self, username, remote_addr=None):
self.username = username
if remote_addr is None and request:
remote_addr = request.remote_addr
self.remote_addr = remote_addr
.. versionadded:: 0.7
"""
return _request_ctx_stack.top is not None | [
"def",
"has_request_context",
"(",
")",
":",
"return",
"_request_ctx_stack",
".",
"top",
"is",
"not",
"None"
] | [
113,
0
] | [
142,
45
] | python | en | ['en', 'en', 'en'] | True |
has_app_context | () | Works like :func:`has_request_context` but for the application
context. You can also just do a boolean check on the
:data:`current_app` object instead.
.. versionadded:: 0.9
| Works like :func:`has_request_context` but for the application
context. You can also just do a boolean check on the
:data:`current_app` object instead. | def has_app_context():
"""Works like :func:`has_request_context` but for the application
context. You can also just do a boolean check on the
:data:`current_app` object instead.
.. versionadded:: 0.9
"""
return _app_ctx_stack.top is not None | [
"def",
"has_app_context",
"(",
")",
":",
"return",
"_app_ctx_stack",
".",
"top",
"is",
"not",
"None"
] | [
145,
0
] | [
152,
41
] | python | en | ['en', 'en', 'en'] | True |
AppContext.push | (self) | Binds the app context to the current context. | Binds the app context to the current context. | def push(self):
"""Binds the app context to the current context."""
self._refcnt += 1
if hasattr(sys, 'exc_clear'):
sys.exc_clear()
_app_ctx_stack.push(self)
appcontext_pushed.send(self.app) | [
"def",
"push",
"(",
"self",
")",
":",
"self",
".",
"_refcnt",
"+=",
"1",
"if",
"hasattr",
"(",
"sys",
",",
"'exc_clear'",
")",
":",
"sys",
".",
"exc_clear",
"(",
")",
"_app_ctx_stack",
".",
"push",
"(",
"self",
")",
"appcontext_pushed",
".",
"send",
"(",
"self",
".",
"app",
")"
] | [
173,
4
] | [
179,
40
] | python | en | ['en', 'en', 'en'] | True |
AppContext.pop | (self, exc=_sentinel) | Pops the app context. | Pops the app context. | def pop(self, exc=_sentinel):
"""Pops the app context."""
try:
self._refcnt -= 1
if self._refcnt <= 0:
if exc is _sentinel:
exc = sys.exc_info()[1]
self.app.do_teardown_appcontext(exc)
finally:
rv = _app_ctx_stack.pop()
assert rv is self, 'Popped wrong app context. (%r instead of %r)' \
% (rv, self)
appcontext_popped.send(self.app) | [
"def",
"pop",
"(",
"self",
",",
"exc",
"=",
"_sentinel",
")",
":",
"try",
":",
"self",
".",
"_refcnt",
"-=",
"1",
"if",
"self",
".",
"_refcnt",
"<=",
"0",
":",
"if",
"exc",
"is",
"_sentinel",
":",
"exc",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"self",
".",
"app",
".",
"do_teardown_appcontext",
"(",
"exc",
")",
"finally",
":",
"rv",
"=",
"_app_ctx_stack",
".",
"pop",
"(",
")",
"assert",
"rv",
"is",
"self",
",",
"'Popped wrong app context. (%r instead of %r)'",
"%",
"(",
"rv",
",",
"self",
")",
"appcontext_popped",
".",
"send",
"(",
"self",
".",
"app",
")"
] | [
181,
4
] | [
193,
40
] | python | en | ['en', 'ca', 'en'] | True |
RequestContext.copy | (self) | Creates a copy of this request context with the same request object.
This can be used to move a request context to a different greenlet.
Because the actual request object is the same this cannot be used to
move a request context to a different thread unless access to the
request object is locked.
.. versionadded:: 0.10
| Creates a copy of this request context with the same request object.
This can be used to move a request context to a different greenlet.
Because the actual request object is the same this cannot be used to
move a request context to a different thread unless access to the
request object is locked. | def copy(self):
"""Creates a copy of this request context with the same request object.
This can be used to move a request context to a different greenlet.
Because the actual request object is the same this cannot be used to
move a request context to a different thread unless access to the
request object is locked.
.. versionadded:: 0.10
"""
return self.__class__(self.app,
environ=self.request.environ,
request=self.request
) | [
"def",
"copy",
"(",
"self",
")",
":",
"return",
"self",
".",
"__class__",
"(",
"self",
".",
"app",
",",
"environ",
"=",
"self",
".",
"request",
".",
"environ",
",",
"request",
"=",
"self",
".",
"request",
")"
] | [
273,
4
] | [
285,
9
] | python | en | ['en', 'en', 'en'] | True |
RequestContext.match_request | (self) | Can be overridden by a subclass to hook into the matching
of the request.
| Can be overridden by a subclass to hook into the matching
of the request.
| def match_request(self):
"""Can be overridden by a subclass to hook into the matching
of the request.
"""
try:
url_rule, self.request.view_args = \
self.url_adapter.match(return_rule=True)
self.request.url_rule = url_rule
except HTTPException as e:
self.request.routing_exception = e | [
"def",
"match_request",
"(",
"self",
")",
":",
"try",
":",
"url_rule",
",",
"self",
".",
"request",
".",
"view_args",
"=",
"self",
".",
"url_adapter",
".",
"match",
"(",
"return_rule",
"=",
"True",
")",
"self",
".",
"request",
".",
"url_rule",
"=",
"url_rule",
"except",
"HTTPException",
"as",
"e",
":",
"self",
".",
"request",
".",
"routing_exception",
"=",
"e"
] | [
287,
4
] | [
296,
46
] | python | en | ['en', 'en', 'en'] | True |
RequestContext.push | (self) | Binds the request context to the current context. | Binds the request context to the current context. | def push(self):
"""Binds the request context to the current context."""
# If an exception occurs in debug mode or if context preservation is
# activated under exception situations exactly one context stays
# on the stack. The rationale is that you want to access that
# information under debug situations. However if someone forgets to
# pop that context again we want to make sure that on the next push
# it's invalidated, otherwise we run at risk that something leaks
# memory. This is usually only a problem in test suite since this
# functionality is not active in production environments.
top = _request_ctx_stack.top
if top is not None and top.preserved:
top.pop(top._preserved_exc)
# Before we push the request context we have to ensure that there
# is an application context.
app_ctx = _app_ctx_stack.top
if app_ctx is None or app_ctx.app != self.app:
app_ctx = self.app.app_context()
app_ctx.push()
self._implicit_app_ctx_stack.append(app_ctx)
else:
self._implicit_app_ctx_stack.append(None)
if hasattr(sys, 'exc_clear'):
sys.exc_clear()
_request_ctx_stack.push(self)
# Open the session at the moment that the request context is
# available. This allows a custom open_session method to use the
# request context (e.g. code that access database information
# stored on `g` instead of the appcontext).
self.session = self.app.open_session(self.request)
if self.session is None:
self.session = self.app.make_null_session() | [
"def",
"push",
"(",
"self",
")",
":",
"# If an exception occurs in debug mode or if context preservation is",
"# activated under exception situations exactly one context stays",
"# on the stack. The rationale is that you want to access that",
"# information under debug situations. However if someone forgets to",
"# pop that context again we want to make sure that on the next push",
"# it's invalidated, otherwise we run at risk that something leaks",
"# memory. This is usually only a problem in test suite since this",
"# functionality is not active in production environments.",
"top",
"=",
"_request_ctx_stack",
".",
"top",
"if",
"top",
"is",
"not",
"None",
"and",
"top",
".",
"preserved",
":",
"top",
".",
"pop",
"(",
"top",
".",
"_preserved_exc",
")",
"# Before we push the request context we have to ensure that there",
"# is an application context.",
"app_ctx",
"=",
"_app_ctx_stack",
".",
"top",
"if",
"app_ctx",
"is",
"None",
"or",
"app_ctx",
".",
"app",
"!=",
"self",
".",
"app",
":",
"app_ctx",
"=",
"self",
".",
"app",
".",
"app_context",
"(",
")",
"app_ctx",
".",
"push",
"(",
")",
"self",
".",
"_implicit_app_ctx_stack",
".",
"append",
"(",
"app_ctx",
")",
"else",
":",
"self",
".",
"_implicit_app_ctx_stack",
".",
"append",
"(",
"None",
")",
"if",
"hasattr",
"(",
"sys",
",",
"'exc_clear'",
")",
":",
"sys",
".",
"exc_clear",
"(",
")",
"_request_ctx_stack",
".",
"push",
"(",
"self",
")",
"# Open the session at the moment that the request context is",
"# available. This allows a custom open_session method to use the",
"# request context (e.g. code that access database information",
"# stored on `g` instead of the appcontext).",
"self",
".",
"session",
"=",
"self",
".",
"app",
".",
"open_session",
"(",
"self",
".",
"request",
")",
"if",
"self",
".",
"session",
"is",
"None",
":",
"self",
".",
"session",
"=",
"self",
".",
"app",
".",
"make_null_session",
"(",
")"
] | [
298,
4
] | [
333,
55
] | python | en | ['en', 'en', 'en'] | True |
RequestContext.pop | (self, exc=_sentinel) | Pops the request context and unbinds it by doing that. This will
also trigger the execution of functions registered by the
:meth:`~flask.Flask.teardown_request` decorator.
.. versionchanged:: 0.9
Added the `exc` argument.
| Pops the request context and unbinds it by doing that. This will
also trigger the execution of functions registered by the
:meth:`~flask.Flask.teardown_request` decorator. | def pop(self, exc=_sentinel):
"""Pops the request context and unbinds it by doing that. This will
also trigger the execution of functions registered by the
:meth:`~flask.Flask.teardown_request` decorator.
.. versionchanged:: 0.9
Added the `exc` argument.
"""
app_ctx = self._implicit_app_ctx_stack.pop()
try:
clear_request = False
if not self._implicit_app_ctx_stack:
self.preserved = False
self._preserved_exc = None
if exc is _sentinel:
exc = sys.exc_info()[1]
self.app.do_teardown_request(exc)
# If this interpreter supports clearing the exception information
# we do that now. This will only go into effect on Python 2.x,
# on 3.x it disappears automatically at the end of the exception
# stack.
if hasattr(sys, 'exc_clear'):
sys.exc_clear()
request_close = getattr(self.request, 'close', None)
if request_close is not None:
request_close()
clear_request = True
finally:
rv = _request_ctx_stack.pop()
# get rid of circular dependencies at the end of the request
# so that we don't require the GC to be active.
if clear_request:
rv.request.environ['werkzeug.request'] = None
# Get rid of the app as well if necessary.
if app_ctx is not None:
app_ctx.pop(exc)
assert rv is self, 'Popped wrong request context. ' \
'(%r instead of %r)' % (rv, self) | [
"def",
"pop",
"(",
"self",
",",
"exc",
"=",
"_sentinel",
")",
":",
"app_ctx",
"=",
"self",
".",
"_implicit_app_ctx_stack",
".",
"pop",
"(",
")",
"try",
":",
"clear_request",
"=",
"False",
"if",
"not",
"self",
".",
"_implicit_app_ctx_stack",
":",
"self",
".",
"preserved",
"=",
"False",
"self",
".",
"_preserved_exc",
"=",
"None",
"if",
"exc",
"is",
"_sentinel",
":",
"exc",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
"self",
".",
"app",
".",
"do_teardown_request",
"(",
"exc",
")",
"# If this interpreter supports clearing the exception information",
"# we do that now. This will only go into effect on Python 2.x,",
"# on 3.x it disappears automatically at the end of the exception",
"# stack.",
"if",
"hasattr",
"(",
"sys",
",",
"'exc_clear'",
")",
":",
"sys",
".",
"exc_clear",
"(",
")",
"request_close",
"=",
"getattr",
"(",
"self",
".",
"request",
",",
"'close'",
",",
"None",
")",
"if",
"request_close",
"is",
"not",
"None",
":",
"request_close",
"(",
")",
"clear_request",
"=",
"True",
"finally",
":",
"rv",
"=",
"_request_ctx_stack",
".",
"pop",
"(",
")",
"# get rid of circular dependencies at the end of the request",
"# so that we don't require the GC to be active.",
"if",
"clear_request",
":",
"rv",
".",
"request",
".",
"environ",
"[",
"'werkzeug.request'",
"]",
"=",
"None",
"# Get rid of the app as well if necessary.",
"if",
"app_ctx",
"is",
"not",
"None",
":",
"app_ctx",
".",
"pop",
"(",
"exc",
")",
"assert",
"rv",
"is",
"self",
",",
"'Popped wrong request context. '",
"'(%r instead of %r)'",
"%",
"(",
"rv",
",",
"self",
")"
] | [
335,
4
] | [
378,
49
] | python | en | ['en', 'en', 'en'] | True |
AbstractProvider.identify | (self, requirement_or_candidate) | Given a requirement, return an identifier for it.
This is used to identify a requirement, e.g. whether two requirements
should have their specifier parts merged.
| Given a requirement, return an identifier for it. | def identify(self, requirement_or_candidate):
"""Given a requirement, return an identifier for it.
This is used to identify a requirement, e.g. whether two requirements
should have their specifier parts merged.
"""
raise NotImplementedError | [
"def",
"identify",
"(",
"self",
",",
"requirement_or_candidate",
")",
":",
"raise",
"NotImplementedError"
] | [
3,
4
] | [
9,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractProvider.get_preference | (self, identifier, resolutions, candidates, information) | Produce a sort key for given requirement based on preference.
The preference is defined as "I think this requirement should be
resolved first". The lower the return value is, the more preferred
this group of arguments is.
:param identifier: An identifier as returned by ``identify()``. This
identifies the dependency matches of which should be returned.
:param resolutions: Mapping of candidates currently pinned by the
resolver. Each key is an identifier, and the value a candidate.
The candidate may conflict with requirements from ``information``.
:param candidates: Mapping of each dependency's possible candidates.
Each value is an iterator of candidates.
:param information: Mapping of requirement information of each package.
Each value is an iterator of *requirement information*.
A *requirement information* instance is a named tuple with two members:
* ``requirement`` specifies a requirement contributing to the current
list of candidates.
* ``parent`` specifies the candidate that provides (dependend on) the
requirement, or ``None`` to indicate a root requirement.
The preference could depend on a various of issues, including (not
necessarily in this order):
* Is this package pinned in the current resolution result?
* How relaxed is the requirement? Stricter ones should probably be
worked on first? (I don't know, actually.)
* How many possibilities are there to satisfy this requirement? Those
with few left should likely be worked on first, I guess?
* Are there any known conflicts for this requirement? We should
probably work on those with the most known conflicts.
A sortable value should be returned (this will be used as the ``key``
parameter of the built-in sorting function). The smaller the value is,
the more preferred this requirement is (i.e. the sorting function
is called with ``reverse=False``).
| Produce a sort key for given requirement based on preference. | def get_preference(self, identifier, resolutions, candidates, information):
"""Produce a sort key for given requirement based on preference.
The preference is defined as "I think this requirement should be
resolved first". The lower the return value is, the more preferred
this group of arguments is.
:param identifier: An identifier as returned by ``identify()``. This
identifies the dependency matches of which should be returned.
:param resolutions: Mapping of candidates currently pinned by the
resolver. Each key is an identifier, and the value a candidate.
The candidate may conflict with requirements from ``information``.
:param candidates: Mapping of each dependency's possible candidates.
Each value is an iterator of candidates.
:param information: Mapping of requirement information of each package.
Each value is an iterator of *requirement information*.
A *requirement information* instance is a named tuple with two members:
* ``requirement`` specifies a requirement contributing to the current
list of candidates.
* ``parent`` specifies the candidate that provides (dependend on) the
requirement, or ``None`` to indicate a root requirement.
The preference could depend on a various of issues, including (not
necessarily in this order):
* Is this package pinned in the current resolution result?
* How relaxed is the requirement? Stricter ones should probably be
worked on first? (I don't know, actually.)
* How many possibilities are there to satisfy this requirement? Those
with few left should likely be worked on first, I guess?
* Are there any known conflicts for this requirement? We should
probably work on those with the most known conflicts.
A sortable value should be returned (this will be used as the ``key``
parameter of the built-in sorting function). The smaller the value is,
the more preferred this requirement is (i.e. the sorting function
is called with ``reverse=False``).
"""
raise NotImplementedError | [
"def",
"get_preference",
"(",
"self",
",",
"identifier",
",",
"resolutions",
",",
"candidates",
",",
"information",
")",
":",
"raise",
"NotImplementedError"
] | [
11,
4
] | [
51,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractProvider.find_matches | (self, identifier, requirements, incompatibilities) | Find all possible candidates that satisfy given constraints.
:param identifier: An identifier as returned by ``identify()``. This
identifies the dependency matches of which should be returned.
:param requirements: A mapping of requirements that all returned
candidates must satisfy. Each key is an identifier, and the value
an iterator of requirements for that dependency.
:param incompatibilities: A mapping of known incompatibilities of
each dependency. Each key is an identifier, and the value an
iterator of incompatibilities known to the resolver. All
incompatibilities *must* be excluded from the return value.
This should try to get candidates based on the requirements' types.
For VCS, local, and archive requirements, the one-and-only match is
returned, and for a "named" requirement, the index(es) should be
consulted to find concrete candidates for this requirement.
The return value should produce candidates ordered by preference; the
most preferred candidate should come first. The return type may be one
of the following:
* A callable that returns an iterator that yields candidates.
* An collection of candidates.
* An iterable of candidates. This will be consumed immediately into a
list of candidates.
| Find all possible candidates that satisfy given constraints. | def find_matches(self, identifier, requirements, incompatibilities):
"""Find all possible candidates that satisfy given constraints.
:param identifier: An identifier as returned by ``identify()``. This
identifies the dependency matches of which should be returned.
:param requirements: A mapping of requirements that all returned
candidates must satisfy. Each key is an identifier, and the value
an iterator of requirements for that dependency.
:param incompatibilities: A mapping of known incompatibilities of
each dependency. Each key is an identifier, and the value an
iterator of incompatibilities known to the resolver. All
incompatibilities *must* be excluded from the return value.
This should try to get candidates based on the requirements' types.
For VCS, local, and archive requirements, the one-and-only match is
returned, and for a "named" requirement, the index(es) should be
consulted to find concrete candidates for this requirement.
The return value should produce candidates ordered by preference; the
most preferred candidate should come first. The return type may be one
of the following:
* A callable that returns an iterator that yields candidates.
* An collection of candidates.
* An iterable of candidates. This will be consumed immediately into a
list of candidates.
"""
raise NotImplementedError | [
"def",
"find_matches",
"(",
"self",
",",
"identifier",
",",
"requirements",
",",
"incompatibilities",
")",
":",
"raise",
"NotImplementedError"
] | [
53,
4
] | [
80,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractProvider.is_satisfied_by | (self, requirement, candidate) | Whether the given requirement can be satisfied by a candidate.
The candidate is guarenteed to have been generated from the
requirement.
A boolean should be returned to indicate whether ``candidate`` is a
viable solution to the requirement.
| Whether the given requirement can be satisfied by a candidate. | def is_satisfied_by(self, requirement, candidate):
"""Whether the given requirement can be satisfied by a candidate.
The candidate is guarenteed to have been generated from the
requirement.
A boolean should be returned to indicate whether ``candidate`` is a
viable solution to the requirement.
"""
raise NotImplementedError | [
"def",
"is_satisfied_by",
"(",
"self",
",",
"requirement",
",",
"candidate",
")",
":",
"raise",
"NotImplementedError"
] | [
82,
4
] | [
91,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractProvider.get_dependencies | (self, candidate) | Get dependencies of a candidate.
This should return a collection of requirements that `candidate`
specifies as its dependencies.
| Get dependencies of a candidate. | def get_dependencies(self, candidate):
"""Get dependencies of a candidate.
This should return a collection of requirements that `candidate`
specifies as its dependencies.
"""
raise NotImplementedError | [
"def",
"get_dependencies",
"(",
"self",
",",
"candidate",
")",
":",
"raise",
"NotImplementedError"
] | [
93,
4
] | [
99,
33
] | python | en | ['en', 'en', 'en'] | True |
AbstractResolver.resolve | (self, requirements, **kwargs) | Take a collection of constraints, spit out the resolution result.
This returns a representation of the final resolution state, with one
guarenteed attribute ``mapping`` that contains resolved candidates as
values. The keys are their respective identifiers.
:param requirements: A collection of constraints.
:param kwargs: Additional keyword arguments that subclasses may accept.
:raises: ``self.base_exception`` or its subclass.
| Take a collection of constraints, spit out the resolution result. | def resolve(self, requirements, **kwargs):
"""Take a collection of constraints, spit out the resolution result.
This returns a representation of the final resolution state, with one
guarenteed attribute ``mapping`` that contains resolved candidates as
values. The keys are their respective identifiers.
:param requirements: A collection of constraints.
:param kwargs: Additional keyword arguments that subclasses may accept.
:raises: ``self.base_exception`` or its subclass.
"""
raise NotImplementedError | [
"def",
"resolve",
"(",
"self",
",",
"requirements",
",",
"*",
"*",
"kwargs",
")",
":",
"raise",
"NotImplementedError"
] | [
111,
4
] | [
123,
33
] | python | en | ['en', 'en', 'en'] | True |
init_logger | (logpath=None, loglevel=1, quiet=False) | Initializes the logger for system messages. | Initializes the logger for system messages. | def init_logger(logpath=None, loglevel=1, quiet=False):
"Initializes the logger for system messages."
logger = logging.getLogger()
# Set the loglevel.
if loglevel > 3:
loglevel = 3 # Cap at 3 to avoid index errors.
levels = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG]
logger.setLevel(levels[loglevel])
logformat = "%(asctime)-14s %(levelname)-8s %(message)s"
formatter = logging.Formatter(logformat, "%Y-%m-%d %H:%M:%S")
if not quiet:
console_handler = logging.StreamHandler(sys.stdout)
console_handler.setFormatter(formatter)
logger.addHandler(console_handler)
log.debug("Added logging console handler.")
log.info("Loglevel is {}.".format(levels[loglevel]))
if logpath:
try:
logfile = os.path.abspath(logpath)
file_handler = logging.FileHandler(logfile)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
log.debug("Added logging file handler: {}.".format(logfile))
except IOError:
log.error("Could not attach file handler.") | [
"def",
"init_logger",
"(",
"logpath",
"=",
"None",
",",
"loglevel",
"=",
"1",
",",
"quiet",
"=",
"False",
")",
":",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
")",
"# Set the loglevel.",
"if",
"loglevel",
">",
"3",
":",
"loglevel",
"=",
"3",
"# Cap at 3 to avoid index errors.",
"levels",
"=",
"[",
"logging",
".",
"ERROR",
",",
"logging",
".",
"WARN",
",",
"logging",
".",
"INFO",
",",
"logging",
".",
"DEBUG",
"]",
"logger",
".",
"setLevel",
"(",
"levels",
"[",
"loglevel",
"]",
")",
"logformat",
"=",
"\"%(asctime)-14s %(levelname)-8s %(message)s\"",
"formatter",
"=",
"logging",
".",
"Formatter",
"(",
"logformat",
",",
"\"%Y-%m-%d %H:%M:%S\"",
")",
"if",
"not",
"quiet",
":",
"console_handler",
"=",
"logging",
".",
"StreamHandler",
"(",
"sys",
".",
"stdout",
")",
"console_handler",
".",
"setFormatter",
"(",
"formatter",
")",
"logger",
".",
"addHandler",
"(",
"console_handler",
")",
"log",
".",
"debug",
"(",
"\"Added logging console handler.\"",
")",
"log",
".",
"info",
"(",
"\"Loglevel is {}.\"",
".",
"format",
"(",
"levels",
"[",
"loglevel",
"]",
")",
")",
"if",
"logpath",
":",
"try",
":",
"logfile",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"logpath",
")",
"file_handler",
"=",
"logging",
".",
"FileHandler",
"(",
"logfile",
")",
"file_handler",
".",
"setFormatter",
"(",
"formatter",
")",
"logger",
".",
"addHandler",
"(",
"file_handler",
")",
"log",
".",
"debug",
"(",
"\"Added logging file handler: {}.\"",
".",
"format",
"(",
"logfile",
")",
")",
"except",
"IOError",
":",
"log",
".",
"error",
"(",
"\"Could not attach file handler.\"",
")"
] | [
202,
0
] | [
230,
55
] | python | en | ['en', 'en', 'en'] | True |
main | () | Entry point for the CLI DBus interface. | Entry point for the CLI DBus interface. | def main():
"Entry point for the CLI DBus interface."
args = docopt(__doc__, version="0.2")
init_logger(args["--log"], args["-v"], args["--quiet"])
spotify = BlockifyDBus()
if args["toggle"]:
spotify.playpause()
elif args["next"]:
spotify.next()
elif args["prev"]:
spotify.prev()
elif args["play"]:
spotify.play()
elif args["stop"]:
spotify.stop()
if args["openuri"]:
spotify.open_uri(args["<uri>"])
elif args["seek"]:
spotify.seek(args["<secs>"])
elif args["setpos"]:
spotify.set_pos(args["<pos>"])
if args["title"]:
print spotify.get_song_title()
elif args["artist"]:
print spotify.get_song_artist()
elif args["status"]:
print spotify.get_song_status()
elif args["all"]:
spotify.print_info()
elif args["get"]:
length = spotify.get_song_length()
m, s = divmod(length, 60)
if args["length"]:
print "{}m{}s ({})".format(m, s, length)
else:
rating = spotify.get_property("Metadata")["xesam:autoRating"]
artist = spotify.get_song_artist()
title = spotify.get_song_title()
album = spotify.get_song_album()
state = spotify.get_song_status()
print "{} - {} ({}), {}m{}s, {} ({})".format(artist, title, album,
m, s, rating, state) | [
"def",
"main",
"(",
")",
":",
"args",
"=",
"docopt",
"(",
"__doc__",
",",
"version",
"=",
"\"0.2\"",
")",
"init_logger",
"(",
"args",
"[",
"\"--log\"",
"]",
",",
"args",
"[",
"\"-v\"",
"]",
",",
"args",
"[",
"\"--quiet\"",
"]",
")",
"spotify",
"=",
"BlockifyDBus",
"(",
")",
"if",
"args",
"[",
"\"toggle\"",
"]",
":",
"spotify",
".",
"playpause",
"(",
")",
"elif",
"args",
"[",
"\"next\"",
"]",
":",
"spotify",
".",
"next",
"(",
")",
"elif",
"args",
"[",
"\"prev\"",
"]",
":",
"spotify",
".",
"prev",
"(",
")",
"elif",
"args",
"[",
"\"play\"",
"]",
":",
"spotify",
".",
"play",
"(",
")",
"elif",
"args",
"[",
"\"stop\"",
"]",
":",
"spotify",
".",
"stop",
"(",
")",
"if",
"args",
"[",
"\"openuri\"",
"]",
":",
"spotify",
".",
"open_uri",
"(",
"args",
"[",
"\"<uri>\"",
"]",
")",
"elif",
"args",
"[",
"\"seek\"",
"]",
":",
"spotify",
".",
"seek",
"(",
"args",
"[",
"\"<secs>\"",
"]",
")",
"elif",
"args",
"[",
"\"setpos\"",
"]",
":",
"spotify",
".",
"set_pos",
"(",
"args",
"[",
"\"<pos>\"",
"]",
")",
"if",
"args",
"[",
"\"title\"",
"]",
":",
"print",
"spotify",
".",
"get_song_title",
"(",
")",
"elif",
"args",
"[",
"\"artist\"",
"]",
":",
"print",
"spotify",
".",
"get_song_artist",
"(",
")",
"elif",
"args",
"[",
"\"status\"",
"]",
":",
"print",
"spotify",
".",
"get_song_status",
"(",
")",
"elif",
"args",
"[",
"\"all\"",
"]",
":",
"spotify",
".",
"print_info",
"(",
")",
"elif",
"args",
"[",
"\"get\"",
"]",
":",
"length",
"=",
"spotify",
".",
"get_song_length",
"(",
")",
"m",
",",
"s",
"=",
"divmod",
"(",
"length",
",",
"60",
")",
"if",
"args",
"[",
"\"length\"",
"]",
":",
"print",
"\"{}m{}s ({})\"",
".",
"format",
"(",
"m",
",",
"s",
",",
"length",
")",
"else",
":",
"rating",
"=",
"spotify",
".",
"get_property",
"(",
"\"Metadata\"",
")",
"[",
"\"xesam:autoRating\"",
"]",
"artist",
"=",
"spotify",
".",
"get_song_artist",
"(",
")",
"title",
"=",
"spotify",
".",
"get_song_title",
"(",
")",
"album",
"=",
"spotify",
".",
"get_song_album",
"(",
")",
"state",
"=",
"spotify",
".",
"get_song_status",
"(",
")",
"print",
"\"{} - {} ({}), {}m{}s, {} ({})\"",
".",
"format",
"(",
"artist",
",",
"title",
",",
"album",
",",
"m",
",",
"s",
",",
"rating",
",",
"state",
")"
] | [
233,
0
] | [
277,
77
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.is_running | (self) | TODO: Make this not redundant | TODO: Make this not redundant | def is_running(self):
"TODO: Make this not redundant"
if self.spotify_path is None:
return False
return True | [
"def",
"is_running",
"(",
"self",
")",
":",
"if",
"self",
".",
"spotify_path",
"is",
"None",
":",
"return",
"False",
"return",
"True"
] | [
59,
4
] | [
63,
19
] | python | en | ['en', 'sn', 'en'] | True |
BlockifyDBus.get_property | (self, key) | Gets the value from any available property. | Gets the value from any available property. | def get_property(self, key):
"Gets the value from any available property."
if self.is_running():
return self.properties.Get(self.player_path, key) | [
"def",
"get_property",
"(",
"self",
",",
"key",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"return",
"self",
".",
"properties",
".",
"Get",
"(",
"self",
".",
"player_path",
",",
"key",
")"
] | [
65,
4
] | [
68,
61
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.set_property | (self, key, value) | Sets the value for any available property. | Sets the value for any available property. | def set_property(self, key, value):
"Sets the value for any available property."
if self.is_running():
return self.properties.Set(self.player_path, key, value) | [
"def",
"set_property",
"(",
"self",
",",
"key",
",",
"value",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"return",
"self",
".",
"properties",
".",
"Set",
"(",
"self",
".",
"player_path",
",",
"key",
",",
"value",
")"
] | [
70,
4
] | [
73,
68
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.playpause | (self) | Toggles the current song between Play and Pause. | Toggles the current song between Play and Pause. | def playpause(self):
"Toggles the current song between Play and Pause."
if self.is_running():
can_pause = self.get_property("CanPause")
can_play = self.get_property("CanPlay")
if can_pause and can_play:
self.player.PlayPause()
else:
log.warn("Cannot Play/Pause") | [
"def",
"playpause",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"can_pause",
"=",
"self",
".",
"get_property",
"(",
"\"CanPause\"",
")",
"can_play",
"=",
"self",
".",
"get_property",
"(",
"\"CanPlay\"",
")",
"if",
"can_pause",
"and",
"can_play",
":",
"self",
".",
"player",
".",
"PlayPause",
"(",
")",
"else",
":",
"log",
".",
"warn",
"(",
"\"Cannot Play/Pause\"",
")"
] | [
75,
4
] | [
83,
45
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.play | (self) | DEFUNCT: Tries to play the current title. | DEFUNCT: Tries to play the current title. | def play(self):
"DEFUNCT: Tries to play the current title."
if self.is_running():
can_play = self.get_property("CanPlay")
if can_play:
self.player.Play()
else:
log.warn("Cannot Play") | [
"def",
"play",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"can_play",
"=",
"self",
".",
"get_property",
"(",
"\"CanPlay\"",
")",
"if",
"can_play",
":",
"self",
".",
"player",
".",
"Play",
"(",
")",
"else",
":",
"log",
".",
"warn",
"(",
"\"Cannot Play\"",
")"
] | [
85,
4
] | [
92,
39
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.stop | (self) | Tries to stop playback. PlayPause is probably preferable. | Tries to stop playback. PlayPause is probably preferable. | def stop(self):
"Tries to stop playback. PlayPause is probably preferable."
if self.is_running():
self.player.Stop() | [
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"self",
".",
"player",
".",
"Stop",
"(",
")"
] | [
94,
4
] | [
97,
30
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.next | (self) | Tries to skip to next song. | Tries to skip to next song. | def next(self):
"Tries to skip to next song."
if self.is_running():
can_next = self.get_property("CanGoNext")
if can_next:
self.player.Next()
else:
log.warn("Cannot Go Next") | [
"def",
"next",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"can_next",
"=",
"self",
".",
"get_property",
"(",
"\"CanGoNext\"",
")",
"if",
"can_next",
":",
"self",
".",
"player",
".",
"Next",
"(",
")",
"else",
":",
"log",
".",
"warn",
"(",
"\"Cannot Go Next\"",
")"
] | [
99,
4
] | [
106,
42
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.prev | (self) | Tries to go back to last song. | Tries to go back to last song. | def prev(self):
"Tries to go back to last song."
if self.is_running():
can_prev = self.get_property("CanGoPrevious")
if can_prev:
self.player.Previous()
else:
log.warn("Cannot Go Previous.") | [
"def",
"prev",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"can_prev",
"=",
"self",
".",
"get_property",
"(",
"\"CanGoPrevious\"",
")",
"if",
"can_prev",
":",
"self",
".",
"player",
".",
"Previous",
"(",
")",
"else",
":",
"log",
".",
"warn",
"(",
"\"Cannot Go Previous.\"",
")"
] | [
108,
4
] | [
115,
47
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.seek | (self, seconds) | DEFUNCT: Calls seek method. | DEFUNCT: Calls seek method. | def seek(self, seconds):
"DEFUNCT: Calls seek method."
if self.is_running():
can_seek = self.get_property("CanSeek")
if can_seek:
self.player.Seek(seconds)
else:
log.warn("Cannot Seek.") | [
"def",
"seek",
"(",
"self",
",",
"seconds",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"can_seek",
"=",
"self",
".",
"get_property",
"(",
"\"CanSeek\"",
")",
"if",
"can_seek",
":",
"self",
".",
"player",
".",
"Seek",
"(",
"seconds",
")",
"else",
":",
"log",
".",
"warn",
"(",
"\"Cannot Seek.\"",
")"
] | [
125,
4
] | [
132,
40
] | python | af | ['id', 'af', 'en'] | False |
BlockifyDBus.get_song_status | (self) | Get current PlaybackStatus (Paused/Playing...). | Get current PlaybackStatus (Paused/Playing...). | def get_song_status(self):
"Get current PlaybackStatus (Paused/Playing...)."
if self.is_running():
return self.get_property("PlaybackStatus") | [
"def",
"get_song_status",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"return",
"self",
".",
"get_property",
"(",
"\"PlaybackStatus\"",
")"
] | [
134,
4
] | [
137,
54
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.get_song_length | (self) | Gets the length of current song from metadata (in seconds). | Gets the length of current song from metadata (in seconds). | def get_song_length(self):
"Gets the length of current song from metadata (in seconds)."
if self.is_running():
metadata = self.get_property("Metadata")
if metadata:
return int(metadata["mpris:length"] / 1000000) | [
"def",
"get_song_length",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"metadata",
"=",
"self",
".",
"get_property",
"(",
"\"Metadata\"",
")",
"if",
"metadata",
":",
"return",
"int",
"(",
"metadata",
"[",
"\"mpris:length\"",
"]",
"/",
"1000000",
")"
] | [
139,
4
] | [
144,
62
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.get_song_title | (self) | Gets title of current song from metadata | Gets title of current song from metadata | def get_song_title(self):
"Gets title of current song from metadata"
if self.is_running():
metadata = self.get_property("Metadata")
if metadata:
return metadata["xesam:title"].encode("utf-8") | [
"def",
"get_song_title",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"metadata",
"=",
"self",
".",
"get_property",
"(",
"\"Metadata\"",
")",
"if",
"metadata",
":",
"return",
"metadata",
"[",
"\"xesam:title\"",
"]",
".",
"encode",
"(",
"\"utf-8\"",
")"
] | [
146,
4
] | [
151,
62
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.get_song_album | (self) | Gets album of current song from metadata | Gets album of current song from metadata | def get_song_album(self):
"Gets album of current song from metadata"
if self.is_running():
metadata = self.get_property("Metadata")
if metadata:
return metadata["xesam:album"].encode("utf-8") | [
"def",
"get_song_album",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"metadata",
"=",
"self",
".",
"get_property",
"(",
"\"Metadata\"",
")",
"if",
"metadata",
":",
"return",
"metadata",
"[",
"\"xesam:album\"",
"]",
".",
"encode",
"(",
"\"utf-8\"",
")"
] | [
153,
4
] | [
158,
62
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.get_song_artist | (self) | Gets the artist of current song from metadata | Gets the artist of current song from metadata | def get_song_artist(self):
"Gets the artist of current song from metadata"
if self.is_running():
metadata = self.get_property("Metadata")
if metadata:
return str(metadata["xesam:artist"][0]) | [
"def",
"get_song_artist",
"(",
"self",
")",
":",
"if",
"self",
".",
"is_running",
"(",
")",
":",
"metadata",
"=",
"self",
".",
"get_property",
"(",
"\"Metadata\"",
")",
"if",
"metadata",
":",
"return",
"str",
"(",
"metadata",
"[",
"\"xesam:artist\"",
"]",
"[",
"0",
"]",
")"
] | [
160,
4
] | [
165,
55
] | python | en | ['en', 'en', 'en'] | True |
BlockifyDBus.print_info | (self) | Print all the DBus info we can get our hands on. | Print all the DBus info we can get our hands on. | def print_info(self):
"Print all the DBus info we can get our hands on."
try:
interfaces = self.properties.GetAll(self.player_path)
metadata = self.get_property("Metadata")
i_keys = list(map(str, interfaces.keys()))
i_keys.remove("Metadata")
i_keys.sort()
for i in i_keys:
if len(i) < 7:
print i, "\t\t= ", self.get_property(i)
else:
print i, "\t= ", self.get_property(i)
print ""
d_keys = list(metadata.keys())
d_keys.sort()
for k in d_keys:
d = k.split(":")[1]
if d == "artist":
print d, "\t\t= ", metadata[k][0]
# elif d == "length":
elif len(d) < 7:
print d, "\t\t= ", metadata[k]
else:
print d, "\t= ", metadata[k]
except AttributeError as e:
log.error("Could not get properties: {}".format(e)) | [
"def",
"print_info",
"(",
"self",
")",
":",
"try",
":",
"interfaces",
"=",
"self",
".",
"properties",
".",
"GetAll",
"(",
"self",
".",
"player_path",
")",
"metadata",
"=",
"self",
".",
"get_property",
"(",
"\"Metadata\"",
")",
"i_keys",
"=",
"list",
"(",
"map",
"(",
"str",
",",
"interfaces",
".",
"keys",
"(",
")",
")",
")",
"i_keys",
".",
"remove",
"(",
"\"Metadata\"",
")",
"i_keys",
".",
"sort",
"(",
")",
"for",
"i",
"in",
"i_keys",
":",
"if",
"len",
"(",
"i",
")",
"<",
"7",
":",
"print",
"i",
",",
"\"\\t\\t= \"",
",",
"self",
".",
"get_property",
"(",
"i",
")",
"else",
":",
"print",
"i",
",",
"\"\\t= \"",
",",
"self",
".",
"get_property",
"(",
"i",
")",
"print",
"\"\"",
"d_keys",
"=",
"list",
"(",
"metadata",
".",
"keys",
"(",
")",
")",
"d_keys",
".",
"sort",
"(",
")",
"for",
"k",
"in",
"d_keys",
":",
"d",
"=",
"k",
".",
"split",
"(",
"\":\"",
")",
"[",
"1",
"]",
"if",
"d",
"==",
"\"artist\"",
":",
"print",
"d",
",",
"\"\\t\\t= \"",
",",
"metadata",
"[",
"k",
"]",
"[",
"0",
"]",
"# elif d == \"length\":",
"elif",
"len",
"(",
"d",
")",
"<",
"7",
":",
"print",
"d",
",",
"\"\\t\\t= \"",
",",
"metadata",
"[",
"k",
"]",
"else",
":",
"print",
"d",
",",
"\"\\t= \"",
",",
"metadata",
"[",
"k",
"]",
"except",
"AttributeError",
"as",
"e",
":",
"log",
".",
"error",
"(",
"\"Could not get properties: {}\"",
".",
"format",
"(",
"e",
")",
")"
] | [
167,
4
] | [
199,
63
] | python | en | ['en', 'en', 'en'] | True |
index | () | The home page has a list of prior translations and a form to
ask for a new translation.
| The home page has a list of prior translations and a form to
ask for a new translation.
| def index():
""" The home page has a list of prior translations and a form to
ask for a new translation.
"""
doc_list = []
docs = db.collection('translations').stream()
for doc in docs:
doc_list.append(doc.to_dict())
return render_template('index.html', translations=doc_list) | [
"def",
"index",
"(",
")",
":",
"doc_list",
"=",
"[",
"]",
"docs",
"=",
"db",
".",
"collection",
"(",
"'translations'",
")",
".",
"stream",
"(",
")",
"for",
"doc",
"in",
"docs",
":",
"doc_list",
".",
"append",
"(",
"doc",
".",
"to_dict",
"(",
")",
")",
"return",
"render_template",
"(",
"'index.html'",
",",
"translations",
"=",
"doc_list",
")"
] | [
40,
0
] | [
50,
63
] | python | en | ['en', 'en', 'en'] | True |
translate | () | Handle a request to translate a string (form field 'v') to a given
language (form field 'lang'), by sending a PubSub message to a topic.
| Handle a request to translate a string (form field 'v') to a given
language (form field 'lang'), by sending a PubSub message to a topic.
| def translate():
""" Handle a request to translate a string (form field 'v') to a given
language (form field 'lang'), by sending a PubSub message to a topic.
"""
source_string = request.form.get('v', '')
to_language = request.form.get('lang', '')
if source_string == '':
error_message = 'Empty value'
return error_message, 400
if to_language not in ACCEPTABLE_LANGUAGES:
error_message = 'Unsupported language: {}'.format(to_language)
return error_message, 400
message = {
'Original': source_string,
'Language': to_language,
'Translated': '',
'OriginalLanguage': '',
}
topic_name = 'projects/{}/topics/{}'.format(
os.getenv('GOOGLE_CLOUD_PROJECT'), 'translate'
)
publisher.publish(topic_name, json.dumps(message).encode('utf8'))
return redirect('/') | [
"def",
"translate",
"(",
")",
":",
"source_string",
"=",
"request",
".",
"form",
".",
"get",
"(",
"'v'",
",",
"''",
")",
"to_language",
"=",
"request",
".",
"form",
".",
"get",
"(",
"'lang'",
",",
"''",
")",
"if",
"source_string",
"==",
"''",
":",
"error_message",
"=",
"'Empty value'",
"return",
"error_message",
",",
"400",
"if",
"to_language",
"not",
"in",
"ACCEPTABLE_LANGUAGES",
":",
"error_message",
"=",
"'Unsupported language: {}'",
".",
"format",
"(",
"to_language",
")",
"return",
"error_message",
",",
"400",
"message",
"=",
"{",
"'Original'",
":",
"source_string",
",",
"'Language'",
":",
"to_language",
",",
"'Translated'",
":",
"''",
",",
"'OriginalLanguage'",
":",
"''",
",",
"}",
"topic_name",
"=",
"'projects/{}/topics/{}'",
".",
"format",
"(",
"os",
".",
"getenv",
"(",
"'GOOGLE_CLOUD_PROJECT'",
")",
",",
"'translate'",
")",
"publisher",
".",
"publish",
"(",
"topic_name",
",",
"json",
".",
"dumps",
"(",
"message",
")",
".",
"encode",
"(",
"'utf8'",
")",
")",
"return",
"redirect",
"(",
"'/'",
")"
] | [
56,
0
] | [
82,
24
] | python | en | ['en', 'en', 'en'] | True |
Command.show_list | (self, connection, app_names=None) |
Show a list of all migrations on the system, or only those of
some named apps.
|
Show a list of all migrations on the system, or only those of
some named apps.
| def show_list(self, connection, app_names=None):
"""
Show a list of all migrations on the system, or only those of
some named apps.
"""
# Load migrations from disk/DB
loader = MigrationLoader(connection, ignore_no_migrations=True)
graph = loader.graph
# If we were passed a list of apps, validate it
if app_names:
self._validate_app_names(loader, app_names)
# Otherwise, show all apps in alphabetic order
else:
app_names = sorted(loader.migrated_apps)
# For each app, print its migrations in order from oldest (roots) to
# newest (leaves).
for app_name in app_names:
self.stdout.write(app_name, self.style.MIGRATE_LABEL)
shown = set()
for node in graph.leaf_nodes(app_name):
for plan_node in graph.forwards_plan(node):
if plan_node not in shown and plan_node[0] == app_name:
# Give it a nice title if it's a squashed one
title = plan_node[1]
if graph.nodes[plan_node].replaces:
title += " (%s squashed migrations)" % len(graph.nodes[plan_node].replaces)
applied_migration = loader.applied_migrations.get(plan_node)
# Mark it as applied/unapplied
if applied_migration:
output = ' [X] %s' % title
if self.verbosity >= 2:
output += ' (applied at %s)' % applied_migration.applied.strftime('%Y-%m-%d %H:%M:%S')
self.stdout.write(output)
else:
self.stdout.write(" [ ] %s" % title)
shown.add(plan_node)
# If we didn't print anything, then a small message
if not shown:
self.stdout.write(" (no migrations)", self.style.ERROR) | [
"def",
"show_list",
"(",
"self",
",",
"connection",
",",
"app_names",
"=",
"None",
")",
":",
"# Load migrations from disk/DB",
"loader",
"=",
"MigrationLoader",
"(",
"connection",
",",
"ignore_no_migrations",
"=",
"True",
")",
"graph",
"=",
"loader",
".",
"graph",
"# If we were passed a list of apps, validate it",
"if",
"app_names",
":",
"self",
".",
"_validate_app_names",
"(",
"loader",
",",
"app_names",
")",
"# Otherwise, show all apps in alphabetic order",
"else",
":",
"app_names",
"=",
"sorted",
"(",
"loader",
".",
"migrated_apps",
")",
"# For each app, print its migrations in order from oldest (roots) to",
"# newest (leaves).",
"for",
"app_name",
"in",
"app_names",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"app_name",
",",
"self",
".",
"style",
".",
"MIGRATE_LABEL",
")",
"shown",
"=",
"set",
"(",
")",
"for",
"node",
"in",
"graph",
".",
"leaf_nodes",
"(",
"app_name",
")",
":",
"for",
"plan_node",
"in",
"graph",
".",
"forwards_plan",
"(",
"node",
")",
":",
"if",
"plan_node",
"not",
"in",
"shown",
"and",
"plan_node",
"[",
"0",
"]",
"==",
"app_name",
":",
"# Give it a nice title if it's a squashed one",
"title",
"=",
"plan_node",
"[",
"1",
"]",
"if",
"graph",
".",
"nodes",
"[",
"plan_node",
"]",
".",
"replaces",
":",
"title",
"+=",
"\" (%s squashed migrations)\"",
"%",
"len",
"(",
"graph",
".",
"nodes",
"[",
"plan_node",
"]",
".",
"replaces",
")",
"applied_migration",
"=",
"loader",
".",
"applied_migrations",
".",
"get",
"(",
"plan_node",
")",
"# Mark it as applied/unapplied",
"if",
"applied_migration",
":",
"output",
"=",
"' [X] %s'",
"%",
"title",
"if",
"self",
".",
"verbosity",
">=",
"2",
":",
"output",
"+=",
"' (applied at %s)'",
"%",
"applied_migration",
".",
"applied",
".",
"strftime",
"(",
"'%Y-%m-%d %H:%M:%S'",
")",
"self",
".",
"stdout",
".",
"write",
"(",
"output",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\" [ ] %s\"",
"%",
"title",
")",
"shown",
".",
"add",
"(",
"plan_node",
")",
"# If we didn't print anything, then a small message",
"if",
"not",
"shown",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\" (no migrations)\"",
",",
"self",
".",
"style",
".",
"ERROR",
")"
] | [
64,
4
] | [
102,
71
] | python | en | ['en', 'error', 'th'] | False |
Command.show_plan | (self, connection, app_names=None) |
Show all known migrations (or only those of the specified app_names)
in the order they will be applied.
|
Show all known migrations (or only those of the specified app_names)
in the order they will be applied.
| def show_plan(self, connection, app_names=None):
"""
Show all known migrations (or only those of the specified app_names)
in the order they will be applied.
"""
# Load migrations from disk/DB
loader = MigrationLoader(connection)
graph = loader.graph
if app_names:
self._validate_app_names(loader, app_names)
targets = [key for key in graph.leaf_nodes() if key[0] in app_names]
else:
targets = graph.leaf_nodes()
plan = []
seen = set()
# Generate the plan
for target in targets:
for migration in graph.forwards_plan(target):
if migration not in seen:
node = graph.node_map[migration]
plan.append(node)
seen.add(migration)
# Output
def print_deps(node):
out = []
for parent in sorted(node.parents):
out.append("%s.%s" % parent.key)
if out:
return " ... (%s)" % ", ".join(out)
return ""
for node in plan:
deps = ""
if self.verbosity >= 2:
deps = print_deps(node)
if node.key in loader.applied_migrations:
self.stdout.write("[X] %s.%s%s" % (node.key[0], node.key[1], deps))
else:
self.stdout.write("[ ] %s.%s%s" % (node.key[0], node.key[1], deps))
if not plan:
self.stdout.write('(no migrations)', self.style.ERROR) | [
"def",
"show_plan",
"(",
"self",
",",
"connection",
",",
"app_names",
"=",
"None",
")",
":",
"# Load migrations from disk/DB",
"loader",
"=",
"MigrationLoader",
"(",
"connection",
")",
"graph",
"=",
"loader",
".",
"graph",
"if",
"app_names",
":",
"self",
".",
"_validate_app_names",
"(",
"loader",
",",
"app_names",
")",
"targets",
"=",
"[",
"key",
"for",
"key",
"in",
"graph",
".",
"leaf_nodes",
"(",
")",
"if",
"key",
"[",
"0",
"]",
"in",
"app_names",
"]",
"else",
":",
"targets",
"=",
"graph",
".",
"leaf_nodes",
"(",
")",
"plan",
"=",
"[",
"]",
"seen",
"=",
"set",
"(",
")",
"# Generate the plan",
"for",
"target",
"in",
"targets",
":",
"for",
"migration",
"in",
"graph",
".",
"forwards_plan",
"(",
"target",
")",
":",
"if",
"migration",
"not",
"in",
"seen",
":",
"node",
"=",
"graph",
".",
"node_map",
"[",
"migration",
"]",
"plan",
".",
"append",
"(",
"node",
")",
"seen",
".",
"add",
"(",
"migration",
")",
"# Output",
"def",
"print_deps",
"(",
"node",
")",
":",
"out",
"=",
"[",
"]",
"for",
"parent",
"in",
"sorted",
"(",
"node",
".",
"parents",
")",
":",
"out",
".",
"append",
"(",
"\"%s.%s\"",
"%",
"parent",
".",
"key",
")",
"if",
"out",
":",
"return",
"\" ... (%s)\"",
"%",
"\", \"",
".",
"join",
"(",
"out",
")",
"return",
"\"\"",
"for",
"node",
"in",
"plan",
":",
"deps",
"=",
"\"\"",
"if",
"self",
".",
"verbosity",
">=",
"2",
":",
"deps",
"=",
"print_deps",
"(",
"node",
")",
"if",
"node",
".",
"key",
"in",
"loader",
".",
"applied_migrations",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"[X] %s.%s%s\"",
"%",
"(",
"node",
".",
"key",
"[",
"0",
"]",
",",
"node",
".",
"key",
"[",
"1",
"]",
",",
"deps",
")",
")",
"else",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"\"[ ] %s.%s%s\"",
"%",
"(",
"node",
".",
"key",
"[",
"0",
"]",
",",
"node",
".",
"key",
"[",
"1",
"]",
",",
"deps",
")",
")",
"if",
"not",
"plan",
":",
"self",
".",
"stdout",
".",
"write",
"(",
"'(no migrations)'",
",",
"self",
".",
"style",
".",
"ERROR",
")"
] | [
104,
4
] | [
146,
66
] | python | en | ['en', 'error', 'th'] | False |
register_handler | (handler) |
Install application-specific BUFR image handler.
:param handler: Handler object.
|
Install application-specific BUFR image handler. | def register_handler(handler):
"""
Install application-specific BUFR image handler.
:param handler: Handler object.
"""
global _handler
_handler = handler | [
"def",
"register_handler",
"(",
"handler",
")",
":",
"global",
"_handler",
"_handler",
"=",
"handler"
] | [
16,
0
] | [
23,
22
] | python | en | ['en', 'error', 'th'] | False |