Dataset schema (column name, type, observed value range):

    identifier                      string      lengths 1 to 155
    parameters                      string      lengths 2 to 6.09k
    docstring                       string      lengths 11 to 63.4k
    docstring_summary               string      lengths 0 to 63.4k
    function                        string      lengths 29 to 99.8k
    function_tokens                 sequence
    start_point                     sequence
    end_point                       sequence
    language                        string      1 distinct value
    docstring_language              string      lengths 2 to 7
    docstring_language_predictions  string      lengths 18 to 23
    is_langid_reliable              string      2 distinct values
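The rows below are raw records in this schema. As a quick orientation, here is a minimal sketch of how a dataset with these columns could be loaded and inspected with the Hugging Face `datasets` library; the dataset path "user/code-docstrings" is a hypothetical placeholder, not this dataset's actual identifier.

from datasets import load_dataset

# "user/code-docstrings" is a hypothetical placeholder path.
ds = load_dataset("user/code-docstrings", split="train")

row = ds[0]
print(row["identifier"])         # e.g. "MigrationWriter.as_string"
print(row["parameters"])         # e.g. "(self)"
print(row["docstring_summary"])  # short form of the docstring
print(row["start_point"], row["end_point"])  # [row, column] pairs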
MigrationWriter.as_string
(self)
Return a string of the file contents.
Return a string of the file contents.
def as_string(self):
    """Return a string of the file contents."""
    items = {
        "replaces_str": "",
        "initial_str": "",
    }

    imports = set()

    # Deconstruct operations
    operations = []
    for operation in self.migration.operations:
        operation_string, operation_imports = OperationWriter(operation).serialize()
        imports.update(operation_imports)
        operations.append(operation_string)
    items["operations"] = "\n".join(operations) + "\n" if operations else ""

    # Format dependencies and write out swappable dependencies right
    dependencies = []
    for dependency in self.migration.dependencies:
        if dependency[0] == "__setting__":
            dependencies.append("        migrations.swappable_dependency(settings.%s)," % dependency[1])
            imports.add("from django.conf import settings")
        else:
            dependencies.append("        %s," % self.serialize(dependency)[0])
    items["dependencies"] = "\n".join(dependencies) + "\n" if dependencies else ""

    # Format imports nicely, swapping imports of functions from migration files
    # for comments
    migration_imports = set()
    for line in list(imports):
        if re.match(r"^import (.*)\.\d+[^\s]*$", line):
            migration_imports.add(line.split("import")[1].strip())
            imports.remove(line)
            self.needs_manual_porting = True

    # django.db.migrations is always used, but models import may not be.
    # If models import exists, merge it with migrations import.
    if "from django.db import models" in imports:
        imports.discard("from django.db import models")
        imports.add("from django.db import migrations, models")
    else:
        imports.add("from django.db import migrations")

    # Sort imports by the package / module to be imported (the part after
    # "from" in "from ... import ..." or after "import" in "import ...").
    sorted_imports = sorted(imports, key=lambda i: i.split()[1])
    items["imports"] = "\n".join(sorted_imports) + "\n" if imports else ""
    if migration_imports:
        items["imports"] += (
            "\n\n# Functions from the following migrations need manual "
            "copying.\n# Move them and any dependencies into this file, "
            "then update the\n# RunPython operations to refer to the local "
            "versions:\n# %s"
        ) % "\n# ".join(sorted(migration_imports))

    # If there's a replaces, make a string for it
    if self.migration.replaces:
        items['replaces_str'] = "\n    replaces = %s\n" % self.serialize(self.migration.replaces)[0]
    # Hinting that goes into comment
    if self.include_header:
        items['migration_header'] = MIGRATION_HEADER_TEMPLATE % {
            'version': get_version(),
            'timestamp': now().strftime("%Y-%m-%d %H:%M"),
        }
    else:
        items['migration_header'] = ""

    if self.migration.initial:
        items['initial_str'] = "\n    initial = True\n"

    return MIGRATION_TEMPLATE % items
[ "def", "as_string", "(", "self", ")", ":", "items", "=", "{", "\"replaces_str\"", ":", "\"\"", ",", "\"initial_str\"", ":", "\"\"", ",", "}", "imports", "=", "set", "(", ")", "# Deconstruct operations", "operations", "=", "[", "]", "for", "operation", "in", "self", ".", "migration", ".", "operations", ":", "operation_string", ",", "operation_imports", "=", "OperationWriter", "(", "operation", ")", ".", "serialize", "(", ")", "imports", ".", "update", "(", "operation_imports", ")", "operations", ".", "append", "(", "operation_string", ")", "items", "[", "\"operations\"", "]", "=", "\"\\n\"", ".", "join", "(", "operations", ")", "+", "\"\\n\"", "if", "operations", "else", "\"\"", "# Format dependencies and write out swappable dependencies right", "dependencies", "=", "[", "]", "for", "dependency", "in", "self", ".", "migration", ".", "dependencies", ":", "if", "dependency", "[", "0", "]", "==", "\"__setting__\"", ":", "dependencies", ".", "append", "(", "\" migrations.swappable_dependency(settings.%s),\"", "%", "dependency", "[", "1", "]", ")", "imports", ".", "add", "(", "\"from django.conf import settings\"", ")", "else", ":", "dependencies", ".", "append", "(", "\" %s,\"", "%", "self", ".", "serialize", "(", "dependency", ")", "[", "0", "]", ")", "items", "[", "\"dependencies\"", "]", "=", "\"\\n\"", ".", "join", "(", "dependencies", ")", "+", "\"\\n\"", "if", "dependencies", "else", "\"\"", "# Format imports nicely, swapping imports of functions from migration files", "# for comments", "migration_imports", "=", "set", "(", ")", "for", "line", "in", "list", "(", "imports", ")", ":", "if", "re", ".", "match", "(", "r\"^import (.*)\\.\\d+[^\\s]*$\"", ",", "line", ")", ":", "migration_imports", ".", "add", "(", "line", ".", "split", "(", "\"import\"", ")", "[", "1", "]", ".", "strip", "(", ")", ")", "imports", ".", "remove", "(", "line", ")", "self", ".", "needs_manual_porting", "=", "True", "# django.db.migrations is always used, but models import may not be.", "# If models import exists, merge it with migrations import.", "if", "\"from django.db import models\"", "in", "imports", ":", "imports", ".", "discard", "(", "\"from django.db import models\"", ")", "imports", ".", "add", "(", "\"from django.db import migrations, models\"", ")", "else", ":", "imports", ".", "add", "(", "\"from django.db import migrations\"", ")", "# Sort imports by the package / module to be imported (the part after", "# \"from\" in \"from ... 
import ...\" or after \"import\" in \"import ...\").", "sorted_imports", "=", "sorted", "(", "imports", ",", "key", "=", "lambda", "i", ":", "i", ".", "split", "(", ")", "[", "1", "]", ")", "items", "[", "\"imports\"", "]", "=", "\"\\n\"", ".", "join", "(", "sorted_imports", ")", "+", "\"\\n\"", "if", "imports", "else", "\"\"", "if", "migration_imports", ":", "items", "[", "\"imports\"", "]", "+=", "(", "\"\\n\\n# Functions from the following migrations need manual \"", "\"copying.\\n# Move them and any dependencies into this file, \"", "\"then update the\\n# RunPython operations to refer to the local \"", "\"versions:\\n# %s\"", ")", "%", "\"\\n# \"", ".", "join", "(", "sorted", "(", "migration_imports", ")", ")", "# If there's a replaces, make a string for it", "if", "self", ".", "migration", ".", "replaces", ":", "items", "[", "'replaces_str'", "]", "=", "\"\\n replaces = %s\\n\"", "%", "self", ".", "serialize", "(", "self", ".", "migration", ".", "replaces", ")", "[", "0", "]", "# Hinting that goes into comment", "if", "self", ".", "include_header", ":", "items", "[", "'migration_header'", "]", "=", "MIGRATION_HEADER_TEMPLATE", "%", "{", "'version'", ":", "get_version", "(", ")", ",", "'timestamp'", ":", "now", "(", ")", ".", "strftime", "(", "\"%Y-%m-%d %H:%M\"", ")", ",", "}", "else", ":", "items", "[", "'migration_header'", "]", "=", "\"\"", "if", "self", ".", "migration", ".", "initial", ":", "items", "[", "'initial_str'", "]", "=", "\"\\n initial = True\\n\"", "return", "MIGRATION_TEMPLATE", "%", "items" ]
[ 128, 4 ]
[ 198, 41 ]
python
en
['en', 'en', 'en']
True
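The start_point ([128, 4]) and end_point ([198, 41]) fields above look like tree-sitter style (row, column) pairs. Assuming 0-indexed rows and an end-exclusive end column — an assumption, since the dump does not document this — here is a sketch of slicing the function back out of a source checkout (the file path is illustrative):

# Assumes tree-sitter style coordinates: 0-indexed rows, end column exclusive.
def extract_span(source_path, start_point, end_point):
    with open(source_path, encoding="utf-8") as f:
        lines = f.readlines()
    start_row, start_col = start_point
    end_row, end_col = end_point
    snippet = lines[start_row:end_row + 1]
    snippet[-1] = snippet[-1][:end_col]   # trim the tail of the last line first
    snippet[0] = snippet[0][start_col:]   # then the head of the first line
    return "".join(snippet)

# Illustrative path into a Django checkout:
# print(extract_span("django/db/migrations/writer.py", [128, 4], [198, 41]))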
FrameSymbolVisitor.visit_Name
(self, node, store_as_param=False, **kwargs)
All assignments to names go through this function.
All assignments to names go through this function.
def visit_Name(self, node, store_as_param=False, **kwargs):
    """All assignments to names go through this function."""
    if store_as_param or node.ctx == 'param':
        self.symbols.declare_parameter(node.name)
    elif node.ctx == 'store':
        self.symbols.store(node.name)
    elif node.ctx == 'load':
        self.symbols.load(node.name)
[ "def", "visit_Name", "(", "self", ",", "node", ",", "store_as_param", "=", "False", ",", "*", "*", "kwargs", ")", ":", "if", "store_as_param", "or", "node", ".", "ctx", "==", "'param'", ":", "self", ".", "symbols", ".", "declare_parameter", "(", "node", ".", "name", ")", "elif", "node", ".", "ctx", "==", "'store'", ":", "self", ".", "symbols", ".", "store", "(", "node", ".", "name", ")", "elif", "node", ".", "ctx", "==", "'load'", ":", "self", ".", "symbols", ".", "load", "(", "node", ".", "name", ")" ]
[ 208, 4 ]
[ 215, 40 ]
python
en
['en', 'en', 'en']
True
FrameSymbolVisitor.visit_Assign
(self, node, **kwargs)
Visit assignments in the correct order.
Visit assignments in the correct order.
def visit_Assign(self, node, **kwargs):
    """Visit assignments in the correct order."""
    self.visit(node.node, **kwargs)
    self.visit(node.target, **kwargs)
[ "def", "visit_Assign", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "self", ".", "visit", "(", "node", ".", "node", ",", "*", "*", "kwargs", ")", "self", ".", "visit", "(", "node", ".", "target", ",", "*", "*", "kwargs", ")" ]
[ 253, 4 ]
[ 256, 41 ]
python
en
['en', 'en', 'en']
True
FrameSymbolVisitor.visit_For
(self, node, **kwargs)
Visiting stops at for blocks. However the block sequence is visited as part of the outer scope.
Visiting stops at for blocks. However the block sequence is visited as part of the outer scope.
def visit_For(self, node, **kwargs):
    """Visiting stops at for blocks.  However the block sequence
    is visited as part of the outer scope.
    """
    self.visit(node.iter, **kwargs)
[ "def", "visit_For", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "self", ".", "visit", "(", "node", ".", "iter", ",", "*", "*", "kwargs", ")" ]
[ 258, 4 ]
[ 262, 39 ]
python
en
['en', 'en', 'en']
True
FrameSymbolVisitor.visit_AssignBlock
(self, node, **kwargs)
Stop visiting at block assigns.
Stop visiting at block assigns.
def visit_AssignBlock(self, node, **kwargs):
    """Stop visiting at block assigns."""
    self.visit(node.target, **kwargs)
[ "def", "visit_AssignBlock", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":", "self", ".", "visit", "(", "node", ".", "target", ",", "*", "*", "kwargs", ")" ]
[ 274, 4 ]
[ 276, 41 ]
python
en
['en', 'fil', 'en']
True
FrameSymbolVisitor.visit_Scope
(self, node, **kwargs)
Stop visiting at scopes.
Stop visiting at scopes.
def visit_Scope(self, node, **kwargs):
    """Stop visiting at scopes."""
[ "def", "visit_Scope", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":" ]
[ 278, 4 ]
[ 279, 38 ]
python
en
['en', 'fil', 'en']
True
FrameSymbolVisitor.visit_Block
(self, node, **kwargs)
Stop visiting at blocks.
Stop visiting at blocks.
def visit_Block(self, node, **kwargs):
    """Stop visiting at blocks."""
[ "def", "visit_Block", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":" ]
[ 281, 4 ]
[ 282, 38 ]
python
en
['en', 'fil', 'en']
True
FrameSymbolVisitor.visit_OverlayScope
(self, node, **kwargs)
Do not visit into overlay scopes.
Do not visit into overlay scopes.
def visit_OverlayScope(self, node, **kwargs):
    """Do not visit into overlay scopes."""
[ "def", "visit_OverlayScope", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":" ]
[ 284, 4 ]
[ 285, 47 ]
python
en
['en', 'en', 'en']
True
HashedFilesMixin.file_hash
(self, name, content=None)
Return a hash of the file with the given name and optional content.
Return a hash of the file with the given name and optional content.
def file_hash(self, name, content=None):
    """
    Return a hash of the file with the given name and optional content.
    """
    if content is None:
        return None
    md5 = hashlib.md5()
    for chunk in content.chunks():
        md5.update(chunk)
    return md5.hexdigest()[:12]
[ "def", "file_hash", "(", "self", ",", "name", ",", "content", "=", "None", ")", ":", "if", "content", "is", "None", ":", "return", "None", "md5", "=", "hashlib", ".", "md5", "(", ")", "for", "chunk", "in", "content", ".", "chunks", "(", ")", ":", "md5", ".", "update", "(", "chunk", ")", "return", "md5", ".", "hexdigest", "(", ")", "[", ":", "12", "]" ]
[ 67, 4 ]
[ 76, 35 ]
python
en
['en', 'error', 'th']
False
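The record above truncates an MD5 hex digest to its first 12 characters to build cache-busting names. For intuition, the same hashing in isolation, using plain bytes instead of Django's chunked File API:

import hashlib

content = b"body { color: red; }"
digest = hashlib.md5(content).hexdigest()
print(digest[:12])  # first 12 hex characters, as file_hash() returns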
HashedFilesMixin._url
(self, hashed_name_func, name, force=False, hashed_files=None)
Return the non-hashed URL in DEBUG mode.
Return the non-hashed URL in DEBUG mode.
def _url(self, hashed_name_func, name, force=False, hashed_files=None):
    """
    Return the non-hashed URL in DEBUG mode.
    """
    if settings.DEBUG and not force:
        hashed_name, fragment = name, ''
    else:
        clean_name, fragment = urldefrag(name)
        if urlsplit(clean_name).path.endswith('/'):  # don't hash paths
            hashed_name = name
        else:
            args = (clean_name,)
            if hashed_files is not None:
                args += (hashed_files,)
            hashed_name = hashed_name_func(*args)

    final_url = super().url(hashed_name)

    # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
    # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
    query_fragment = '?#' in name  # [sic!]
    if fragment or query_fragment:
        urlparts = list(urlsplit(final_url))
        if fragment and not urlparts[4]:
            urlparts[4] = fragment
        if query_fragment and not urlparts[3]:
            urlparts[2] += '?'
        final_url = urlunsplit(urlparts)

    return unquote(final_url)
[ "def", "_url", "(", "self", ",", "hashed_name_func", ",", "name", ",", "force", "=", "False", ",", "hashed_files", "=", "None", ")", ":", "if", "settings", ".", "DEBUG", "and", "not", "force", ":", "hashed_name", ",", "fragment", "=", "name", ",", "''", "else", ":", "clean_name", ",", "fragment", "=", "urldefrag", "(", "name", ")", "if", "urlsplit", "(", "clean_name", ")", ".", "path", ".", "endswith", "(", "'/'", ")", ":", "# don't hash paths", "hashed_name", "=", "name", "else", ":", "args", "=", "(", "clean_name", ",", ")", "if", "hashed_files", "is", "not", "None", ":", "args", "+=", "(", "hashed_files", ",", ")", "hashed_name", "=", "hashed_name_func", "(", "*", "args", ")", "final_url", "=", "super", "(", ")", ".", "url", "(", "hashed_name", ")", "# Special casing for a @font-face hack, like url(myfont.eot?#iefix\")", "# http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax", "query_fragment", "=", "'?#'", "in", "name", "# [sic!]", "if", "fragment", "or", "query_fragment", ":", "urlparts", "=", "list", "(", "urlsplit", "(", "final_url", ")", ")", "if", "fragment", "and", "not", "urlparts", "[", "4", "]", ":", "urlparts", "[", "4", "]", "=", "fragment", "if", "query_fragment", "and", "not", "urlparts", "[", "3", "]", ":", "urlparts", "[", "2", "]", "+=", "'?'", "final_url", "=", "urlunsplit", "(", "urlparts", ")", "return", "unquote", "(", "final_url", ")" ]
[ 111, 4 ]
[ 140, 33 ]
python
en
['en', 'error', 'th']
False
HashedFilesMixin.url
(self, name, force=False)
Return the non-hashed URL in DEBUG mode.
Return the non-hashed URL in DEBUG mode.
def url(self, name, force=False):
    """
    Return the non-hashed URL in DEBUG mode.
    """
    return self._url(self.stored_name, name, force)
[ "def", "url", "(", "self", ",", "name", ",", "force", "=", "False", ")", ":", "return", "self", ".", "_url", "(", "self", ".", "stored_name", ",", "name", ",", "force", ")" ]
[ 142, 4 ]
[ 146, 55 ]
python
en
['en', 'error', 'th']
False
HashedFilesMixin.url_converter
(self, name, hashed_files, template=None)
Return the custom URL converter for the given file name.
Return the custom URL converter for the given file name.
def url_converter(self, name, hashed_files, template=None):
    """
    Return the custom URL converter for the given file name.
    """
    if template is None:
        template = self.default_template

    def converter(matchobj):
        """
        Convert the matched URL to a normalized and hashed URL.

        This requires figuring out which files the matched URL resolves
        to and calling the url() method of the storage.
        """
        matched, url = matchobj.groups()

        # Ignore absolute/protocol-relative and data-uri URLs.
        if re.match(r'^[a-z]+:', url):
            return matched

        # Ignore absolute URLs that don't point to a static file (dynamic
        # CSS / JS?). Note that STATIC_URL cannot be empty.
        if url.startswith('/') and not url.startswith(settings.STATIC_URL):
            return matched

        # Strip off the fragment so a path-like fragment won't interfere.
        url_path, fragment = urldefrag(url)

        if url_path.startswith('/'):
            # Otherwise the condition above would have returned prematurely.
            assert url_path.startswith(settings.STATIC_URL)
            target_name = url_path[len(settings.STATIC_URL):]
        else:
            # We're using the posixpath module to mix paths and URLs conveniently.
            source_name = name if os.sep == '/' else name.replace(os.sep, '/')
            target_name = posixpath.join(posixpath.dirname(source_name), url_path)

        # Determine the hashed name of the target file with the storage backend.
        hashed_url = self._url(
            self._stored_name, unquote(target_name),
            force=True, hashed_files=hashed_files,
        )

        transformed_url = '/'.join(url_path.split('/')[:-1] + hashed_url.split('/')[-1:])

        # Restore the fragment that was stripped off earlier.
        if fragment:
            transformed_url += ('?#' if '?#' in url else '#') + fragment

        # Return the hashed version to the file
        return template % unquote(transformed_url)

    return converter
[ "def", "url_converter", "(", "self", ",", "name", ",", "hashed_files", ",", "template", "=", "None", ")", ":", "if", "template", "is", "None", ":", "template", "=", "self", ".", "default_template", "def", "converter", "(", "matchobj", ")", ":", "\"\"\"\n Convert the matched URL to a normalized and hashed URL.\n\n This requires figuring out which files the matched URL resolves\n to and calling the url() method of the storage.\n \"\"\"", "matched", ",", "url", "=", "matchobj", ".", "groups", "(", ")", "# Ignore absolute/protocol-relative and data-uri URLs.", "if", "re", ".", "match", "(", "r'^[a-z]+:'", ",", "url", ")", ":", "return", "matched", "# Ignore absolute URLs that don't point to a static file (dynamic", "# CSS / JS?). Note that STATIC_URL cannot be empty.", "if", "url", ".", "startswith", "(", "'/'", ")", "and", "not", "url", ".", "startswith", "(", "settings", ".", "STATIC_URL", ")", ":", "return", "matched", "# Strip off the fragment so a path-like fragment won't interfere.", "url_path", ",", "fragment", "=", "urldefrag", "(", "url", ")", "if", "url_path", ".", "startswith", "(", "'/'", ")", ":", "# Otherwise the condition above would have returned prematurely.", "assert", "url_path", ".", "startswith", "(", "settings", ".", "STATIC_URL", ")", "target_name", "=", "url_path", "[", "len", "(", "settings", ".", "STATIC_URL", ")", ":", "]", "else", ":", "# We're using the posixpath module to mix paths and URLs conveniently.", "source_name", "=", "name", "if", "os", ".", "sep", "==", "'/'", "else", "name", ".", "replace", "(", "os", ".", "sep", ",", "'/'", ")", "target_name", "=", "posixpath", ".", "join", "(", "posixpath", ".", "dirname", "(", "source_name", ")", ",", "url_path", ")", "# Determine the hashed name of the target file with the storage backend.", "hashed_url", "=", "self", ".", "_url", "(", "self", ".", "_stored_name", ",", "unquote", "(", "target_name", ")", ",", "force", "=", "True", ",", "hashed_files", "=", "hashed_files", ",", ")", "transformed_url", "=", "'/'", ".", "join", "(", "url_path", ".", "split", "(", "'/'", ")", "[", ":", "-", "1", "]", "+", "hashed_url", ".", "split", "(", "'/'", ")", "[", "-", "1", ":", "]", ")", "# Restore the fragment that was stripped off earlier.", "if", "fragment", ":", "transformed_url", "+=", "(", "'?#'", "if", "'?#'", "in", "url", "else", "'#'", ")", "+", "fragment", "# Return the hashed version to the file", "return", "template", "%", "unquote", "(", "transformed_url", ")", "return", "converter" ]
[ 148, 4 ]
[ 200, 24 ]
python
en
['en', 'error', 'th']
False
HashedFilesMixin.post_process
(self, paths, dry_run=False, **options)
Post process the given dictionary of files (called from collectstatic).

Processing is actually two separate operations:

1. renaming files to include a hash of their content for cache-busting,
   and copying those files to the target storage.
2. adjusting files which contain references to other files so they
   refer to the cache-busting filenames.

If either of these are performed on a file, then that file is considered
post-processed.
Post process the given dictionary of files (called from collectstatic).
def post_process(self, paths, dry_run=False, **options):
    """
    Post process the given dictionary of files (called from collectstatic).

    Processing is actually two separate operations:

    1. renaming files to include a hash of their content for cache-busting,
       and copying those files to the target storage.
    2. adjusting files which contain references to other files so they
       refer to the cache-busting filenames.

    If either of these are performed on a file, then that file is considered
    post-processed.
    """
    # don't even dare to process the files if we're in dry run mode
    if dry_run:
        return

    # where to store the new paths
    hashed_files = {}

    # build a list of adjustable files
    adjustable_paths = [
        path for path in paths
        if matches_patterns(path, self._patterns)
    ]
    # Do a single pass first. Post-process all files once, then repeat for
    # adjustable files.
    for name, hashed_name, processed, _ in self._post_process(paths, adjustable_paths, hashed_files):
        yield name, hashed_name, processed

    paths = {path: paths[path] for path in adjustable_paths}

    for i in range(self.max_post_process_passes):
        substitutions = False
        for name, hashed_name, processed, subst in self._post_process(paths, adjustable_paths, hashed_files):
            yield name, hashed_name, processed
            substitutions = substitutions or subst

        if not substitutions:
            break

    if substitutions:
        yield 'All', None, RuntimeError('Max post-process passes exceeded.')

    # Store the processed paths
    self.hashed_files.update(hashed_files)
[ "def", "post_process", "(", "self", ",", "paths", ",", "dry_run", "=", "False", ",", "*", "*", "options", ")", ":", "# don't even dare to process the files if we're in dry run mode", "if", "dry_run", ":", "return", "# where to store the new paths", "hashed_files", "=", "{", "}", "# build a list of adjustable files", "adjustable_paths", "=", "[", "path", "for", "path", "in", "paths", "if", "matches_patterns", "(", "path", ",", "self", ".", "_patterns", ")", "]", "# Do a single pass first. Post-process all files once, then repeat for", "# adjustable files.", "for", "name", ",", "hashed_name", ",", "processed", ",", "_", "in", "self", ".", "_post_process", "(", "paths", ",", "adjustable_paths", ",", "hashed_files", ")", ":", "yield", "name", ",", "hashed_name", ",", "processed", "paths", "=", "{", "path", ":", "paths", "[", "path", "]", "for", "path", "in", "adjustable_paths", "}", "for", "i", "in", "range", "(", "self", ".", "max_post_process_passes", ")", ":", "substitutions", "=", "False", "for", "name", ",", "hashed_name", ",", "processed", ",", "subst", "in", "self", ".", "_post_process", "(", "paths", ",", "adjustable_paths", ",", "hashed_files", ")", ":", "yield", "name", ",", "hashed_name", ",", "processed", "substitutions", "=", "substitutions", "or", "subst", "if", "not", "substitutions", ":", "break", "if", "substitutions", ":", "yield", "'All'", ",", "None", ",", "RuntimeError", "(", "'Max post-process passes exceeded.'", ")", "# Store the processed paths", "self", ".", "hashed_files", ".", "update", "(", "hashed_files", ")" ]
[ 202, 4 ]
[ 248, 46 ]
python
en
['en', 'error', 'th']
False
Command.load_label
(self, fixture_label)
Load fixtures files for a given label.
Load fixtures files for a given label.
def load_label(self, fixture_label):
    """Load fixtures files for a given label."""
    show_progress = self.verbosity >= 3
    for fixture_file, fixture_dir, fixture_name in self.find_fixtures(fixture_label):
        _, ser_fmt, cmp_fmt = self.parse_name(os.path.basename(fixture_file))
        open_method, mode = self.compression_formats[cmp_fmt]
        fixture = open_method(fixture_file, mode)
        try:
            self.fixture_count += 1
            objects_in_fixture = 0
            loaded_objects_in_fixture = 0
            if self.verbosity >= 2:
                self.stdout.write(
                    "Installing %s fixture '%s' from %s."
                    % (ser_fmt, fixture_name, humanize(fixture_dir))
                )

            objects = serializers.deserialize(
                ser_fmt, fixture, using=self.using, ignorenonexistent=self.ignore,
                handle_forward_references=True,
            )

            for obj in objects:
                objects_in_fixture += 1
                if (obj.object._meta.app_config in self.excluded_apps or
                        type(obj.object) in self.excluded_models):
                    continue
                if router.allow_migrate_model(self.using, obj.object.__class__):
                    loaded_objects_in_fixture += 1
                    self.models.add(obj.object.__class__)
                    try:
                        obj.save(using=self.using)
                        if show_progress:
                            self.stdout.write(
                                '\rProcessed %i object(s).' % loaded_objects_in_fixture,
                                ending=''
                            )
                    # psycopg2 raises ValueError if data contains NUL chars.
                    except (DatabaseError, IntegrityError, ValueError) as e:
                        e.args = ("Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s" % {
                            'object_label': obj.object._meta.label,
                            'pk': obj.object.pk,
                            'error_msg': e,
                        },)
                        raise
                if obj.deferred_fields:
                    self.objs_with_deferred_fields.append(obj)
            if objects and show_progress:
                self.stdout.write()  # Add a newline after progress indicator.
            self.loaded_object_count += loaded_objects_in_fixture
            self.fixture_object_count += objects_in_fixture
        except Exception as e:
            if not isinstance(e, CommandError):
                e.args = ("Problem installing fixture '%s': %s" % (fixture_file, e),)
            raise
        finally:
            fixture.close()

        # Warn if the fixture we loaded contains 0 objects.
        if objects_in_fixture == 0:
            warnings.warn(
                "No fixture data found for '%s'. (File format may be "
                "invalid.)" % fixture_name,
                RuntimeWarning
            )
[ "def", "load_label", "(", "self", ",", "fixture_label", ")", ":", "show_progress", "=", "self", ".", "verbosity", ">=", "3", "for", "fixture_file", ",", "fixture_dir", ",", "fixture_name", "in", "self", ".", "find_fixtures", "(", "fixture_label", ")", ":", "_", ",", "ser_fmt", ",", "cmp_fmt", "=", "self", ".", "parse_name", "(", "os", ".", "path", ".", "basename", "(", "fixture_file", ")", ")", "open_method", ",", "mode", "=", "self", ".", "compression_formats", "[", "cmp_fmt", "]", "fixture", "=", "open_method", "(", "fixture_file", ",", "mode", ")", "try", ":", "self", ".", "fixture_count", "+=", "1", "objects_in_fixture", "=", "0", "loaded_objects_in_fixture", "=", "0", "if", "self", ".", "verbosity", ">=", "2", ":", "self", ".", "stdout", ".", "write", "(", "\"Installing %s fixture '%s' from %s.\"", "%", "(", "ser_fmt", ",", "fixture_name", ",", "humanize", "(", "fixture_dir", ")", ")", ")", "objects", "=", "serializers", ".", "deserialize", "(", "ser_fmt", ",", "fixture", ",", "using", "=", "self", ".", "using", ",", "ignorenonexistent", "=", "self", ".", "ignore", ",", "handle_forward_references", "=", "True", ",", ")", "for", "obj", "in", "objects", ":", "objects_in_fixture", "+=", "1", "if", "(", "obj", ".", "object", ".", "_meta", ".", "app_config", "in", "self", ".", "excluded_apps", "or", "type", "(", "obj", ".", "object", ")", "in", "self", ".", "excluded_models", ")", ":", "continue", "if", "router", ".", "allow_migrate_model", "(", "self", ".", "using", ",", "obj", ".", "object", ".", "__class__", ")", ":", "loaded_objects_in_fixture", "+=", "1", "self", ".", "models", ".", "add", "(", "obj", ".", "object", ".", "__class__", ")", "try", ":", "obj", ".", "save", "(", "using", "=", "self", ".", "using", ")", "if", "show_progress", ":", "self", ".", "stdout", ".", "write", "(", "'\\rProcessed %i object(s).'", "%", "loaded_objects_in_fixture", ",", "ending", "=", "''", ")", "# psycopg2 raises ValueError if data contains NUL chars.", "except", "(", "DatabaseError", ",", "IntegrityError", ",", "ValueError", ")", "as", "e", ":", "e", ".", "args", "=", "(", "\"Could not load %(object_label)s(pk=%(pk)s): %(error_msg)s\"", "%", "{", "'object_label'", ":", "obj", ".", "object", ".", "_meta", ".", "label", ",", "'pk'", ":", "obj", ".", "object", ".", "pk", ",", "'error_msg'", ":", "e", ",", "}", ",", ")", "raise", "if", "obj", ".", "deferred_fields", ":", "self", ".", "objs_with_deferred_fields", ".", "append", "(", "obj", ")", "if", "objects", "and", "show_progress", ":", "self", ".", "stdout", ".", "write", "(", ")", "# Add a newline after progress indicator.", "self", ".", "loaded_object_count", "+=", "loaded_objects_in_fixture", "self", ".", "fixture_object_count", "+=", "objects_in_fixture", "except", "Exception", "as", "e", ":", "if", "not", "isinstance", "(", "e", ",", "CommandError", ")", ":", "e", ".", "args", "=", "(", "\"Problem installing fixture '%s': %s\"", "%", "(", "fixture_file", ",", "e", ")", ",", ")", "raise", "finally", ":", "fixture", ".", "close", "(", ")", "# Warn if the fixture we loaded contains 0 objects.", "if", "objects_in_fixture", "==", "0", ":", "warnings", ".", "warn", "(", "\"No fixture data found for '%s'. (File format may be \"", "\"invalid.)\"", "%", "fixture_name", ",", "RuntimeWarning", ")" ]
[ 158, 4 ]
[ 222, 17 ]
python
en
['en', 'en', 'en']
True
Command.find_fixtures
(self, fixture_label)
Find fixture files for a given label.
Find fixture files for a given label.
def find_fixtures(self, fixture_label):
    """Find fixture files for a given label."""
    if fixture_label == READ_STDIN:
        return [(READ_STDIN, None, READ_STDIN)]

    fixture_name, ser_fmt, cmp_fmt = self.parse_name(fixture_label)
    databases = [self.using, None]
    cmp_fmts = list(self.compression_formats) if cmp_fmt is None else [cmp_fmt]
    ser_fmts = self.serialization_formats if ser_fmt is None else [ser_fmt]

    if self.verbosity >= 2:
        self.stdout.write("Loading '%s' fixtures..." % fixture_name)

    if os.path.isabs(fixture_name):
        fixture_dirs = [os.path.dirname(fixture_name)]
        fixture_name = os.path.basename(fixture_name)
    else:
        fixture_dirs = self.fixture_dirs
        if os.path.sep in os.path.normpath(fixture_name):
            fixture_dirs = [os.path.join(dir_, os.path.dirname(fixture_name))
                            for dir_ in fixture_dirs]
            fixture_name = os.path.basename(fixture_name)

    suffixes = (
        '.'.join(ext for ext in combo if ext)
        for combo in product(databases, ser_fmts, cmp_fmts)
    )
    targets = {'.'.join((fixture_name, suffix)) for suffix in suffixes}

    fixture_files = []
    for fixture_dir in fixture_dirs:
        if self.verbosity >= 2:
            self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))
        fixture_files_in_dir = []
        path = os.path.join(fixture_dir, fixture_name)
        for candidate in glob.iglob(glob.escape(path) + '*'):
            if os.path.basename(candidate) in targets:
                # Save the fixture_dir and fixture_name for future error messages.
                fixture_files_in_dir.append((candidate, fixture_dir, fixture_name))

        if self.verbosity >= 2 and not fixture_files_in_dir:
            self.stdout.write("No fixture '%s' in %s." %
                              (fixture_name, humanize(fixture_dir)))

        # Check kept for backwards-compatibility; it isn't clear why
        # duplicates are only allowed in different directories.
        if len(fixture_files_in_dir) > 1:
            raise CommandError(
                "Multiple fixtures named '%s' in %s. Aborting." %
                (fixture_name, humanize(fixture_dir)))
        fixture_files.extend(fixture_files_in_dir)

    if not fixture_files:
        raise CommandError("No fixture named '%s' found." % fixture_name)

    return fixture_files
[ "def", "find_fixtures", "(", "self", ",", "fixture_label", ")", ":", "if", "fixture_label", "==", "READ_STDIN", ":", "return", "[", "(", "READ_STDIN", ",", "None", ",", "READ_STDIN", ")", "]", "fixture_name", ",", "ser_fmt", ",", "cmp_fmt", "=", "self", ".", "parse_name", "(", "fixture_label", ")", "databases", "=", "[", "self", ".", "using", ",", "None", "]", "cmp_fmts", "=", "list", "(", "self", ".", "compression_formats", ")", "if", "cmp_fmt", "is", "None", "else", "[", "cmp_fmt", "]", "ser_fmts", "=", "self", ".", "serialization_formats", "if", "ser_fmt", "is", "None", "else", "[", "ser_fmt", "]", "if", "self", ".", "verbosity", ">=", "2", ":", "self", ".", "stdout", ".", "write", "(", "\"Loading '%s' fixtures...\"", "%", "fixture_name", ")", "if", "os", ".", "path", ".", "isabs", "(", "fixture_name", ")", ":", "fixture_dirs", "=", "[", "os", ".", "path", ".", "dirname", "(", "fixture_name", ")", "]", "fixture_name", "=", "os", ".", "path", ".", "basename", "(", "fixture_name", ")", "else", ":", "fixture_dirs", "=", "self", ".", "fixture_dirs", "if", "os", ".", "path", ".", "sep", "in", "os", ".", "path", ".", "normpath", "(", "fixture_name", ")", ":", "fixture_dirs", "=", "[", "os", ".", "path", ".", "join", "(", "dir_", ",", "os", ".", "path", ".", "dirname", "(", "fixture_name", ")", ")", "for", "dir_", "in", "fixture_dirs", "]", "fixture_name", "=", "os", ".", "path", ".", "basename", "(", "fixture_name", ")", "suffixes", "=", "(", "'.'", ".", "join", "(", "ext", "for", "ext", "in", "combo", "if", "ext", ")", "for", "combo", "in", "product", "(", "databases", ",", "ser_fmts", ",", "cmp_fmts", ")", ")", "targets", "=", "{", "'.'", ".", "join", "(", "(", "fixture_name", ",", "suffix", ")", ")", "for", "suffix", "in", "suffixes", "}", "fixture_files", "=", "[", "]", "for", "fixture_dir", "in", "fixture_dirs", ":", "if", "self", ".", "verbosity", ">=", "2", ":", "self", ".", "stdout", ".", "write", "(", "\"Checking %s for fixtures...\"", "%", "humanize", "(", "fixture_dir", ")", ")", "fixture_files_in_dir", "=", "[", "]", "path", "=", "os", ".", "path", ".", "join", "(", "fixture_dir", ",", "fixture_name", ")", "for", "candidate", "in", "glob", ".", "iglob", "(", "glob", ".", "escape", "(", "path", ")", "+", "'*'", ")", ":", "if", "os", ".", "path", ".", "basename", "(", "candidate", ")", "in", "targets", ":", "# Save the fixture_dir and fixture_name for future error messages.", "fixture_files_in_dir", ".", "append", "(", "(", "candidate", ",", "fixture_dir", ",", "fixture_name", ")", ")", "if", "self", ".", "verbosity", ">=", "2", "and", "not", "fixture_files_in_dir", ":", "self", ".", "stdout", ".", "write", "(", "\"No fixture '%s' in %s.\"", "%", "(", "fixture_name", ",", "humanize", "(", "fixture_dir", ")", ")", ")", "# Check kept for backwards-compatibility; it isn't clear why", "# duplicates are only allowed in different directories.", "if", "len", "(", "fixture_files_in_dir", ")", ">", "1", ":", "raise", "CommandError", "(", "\"Multiple fixtures named '%s' in %s. Aborting.\"", "%", "(", "fixture_name", ",", "humanize", "(", "fixture_dir", ")", ")", ")", "fixture_files", ".", "extend", "(", "fixture_files_in_dir", ")", "if", "not", "fixture_files", ":", "raise", "CommandError", "(", "\"No fixture named '%s' found.\"", "%", "fixture_name", ")", "return", "fixture_files" ]
[ 225, 4 ]
[ 280, 28 ]
python
en
['en', 'en', 'en']
True
Command.fixture_dirs
(self)
Return a list of fixture directories. The list contains the 'fixtures' subdirectory of each installed application, if it exists, the directories in FIXTURE_DIRS, and the current directory.
Return a list of fixture directories.
def fixture_dirs(self):
    """
    Return a list of fixture directories.

    The list contains the 'fixtures' subdirectory of each installed
    application, if it exists, the directories in FIXTURE_DIRS, and the
    current directory.
    """
    dirs = []
    fixture_dirs = settings.FIXTURE_DIRS
    if len(fixture_dirs) != len(set(fixture_dirs)):
        raise ImproperlyConfigured("settings.FIXTURE_DIRS contains duplicates.")
    for app_config in apps.get_app_configs():
        app_label = app_config.label
        app_dir = os.path.join(app_config.path, 'fixtures')
        if app_dir in fixture_dirs:
            raise ImproperlyConfigured(
                "'%s' is a default fixture directory for the '%s' app "
                "and cannot be listed in settings.FIXTURE_DIRS." % (app_dir, app_label)
            )

        if self.app_label and app_label != self.app_label:
            continue
        if os.path.isdir(app_dir):
            dirs.append(app_dir)
    dirs.extend(fixture_dirs)
    dirs.append('')
    return [os.path.realpath(d) for d in dirs]
[ "def", "fixture_dirs", "(", "self", ")", ":", "dirs", "=", "[", "]", "fixture_dirs", "=", "settings", ".", "FIXTURE_DIRS", "if", "len", "(", "fixture_dirs", ")", "!=", "len", "(", "set", "(", "fixture_dirs", ")", ")", ":", "raise", "ImproperlyConfigured", "(", "\"settings.FIXTURE_DIRS contains duplicates.\"", ")", "for", "app_config", "in", "apps", ".", "get_app_configs", "(", ")", ":", "app_label", "=", "app_config", ".", "label", "app_dir", "=", "os", ".", "path", ".", "join", "(", "app_config", ".", "path", ",", "'fixtures'", ")", "if", "app_dir", "in", "fixture_dirs", ":", "raise", "ImproperlyConfigured", "(", "\"'%s' is a default fixture directory for the '%s' app \"", "\"and cannot be listed in settings.FIXTURE_DIRS.\"", "%", "(", "app_dir", ",", "app_label", ")", ")", "if", "self", ".", "app_label", "and", "app_label", "!=", "self", ".", "app_label", ":", "continue", "if", "os", ".", "path", ".", "isdir", "(", "app_dir", ")", ":", "dirs", ".", "append", "(", "app_dir", ")", "dirs", ".", "extend", "(", "fixture_dirs", ")", "dirs", ".", "append", "(", "''", ")", "return", "[", "os", ".", "path", ".", "realpath", "(", "d", ")", "for", "d", "in", "dirs", "]" ]
[ 283, 4 ]
[ 310, 50 ]
python
en
['en', 'error', 'th']
False
Command.parse_name
(self, fixture_name)
Split fixture name into name, serialization format, compression format.
Split fixture name into name, serialization format, compression format.
def parse_name(self, fixture_name):
    """
    Split fixture name into name, serialization format, compression format.
    """
    if fixture_name == READ_STDIN:
        if not self.format:
            raise CommandError('--format must be specified when reading from stdin.')
        return READ_STDIN, self.format, 'stdin'

    parts = fixture_name.rsplit('.', 2)

    if len(parts) > 1 and parts[-1] in self.compression_formats:
        cmp_fmt = parts[-1]
        parts = parts[:-1]
    else:
        cmp_fmt = None

    if len(parts) > 1:
        if parts[-1] in self.serialization_formats:
            ser_fmt = parts[-1]
            parts = parts[:-1]
        else:
            raise CommandError(
                "Problem installing fixture '%s': %s is not a known "
                "serialization format." % ('.'.join(parts[:-1]), parts[-1]))
    else:
        ser_fmt = None

    name = '.'.join(parts)

    return name, ser_fmt, cmp_fmt
[ "def", "parse_name", "(", "self", ",", "fixture_name", ")", ":", "if", "fixture_name", "==", "READ_STDIN", ":", "if", "not", "self", ".", "format", ":", "raise", "CommandError", "(", "'--format must be specified when reading from stdin.'", ")", "return", "READ_STDIN", ",", "self", ".", "format", ",", "'stdin'", "parts", "=", "fixture_name", ".", "rsplit", "(", "'.'", ",", "2", ")", "if", "len", "(", "parts", ")", ">", "1", "and", "parts", "[", "-", "1", "]", "in", "self", ".", "compression_formats", ":", "cmp_fmt", "=", "parts", "[", "-", "1", "]", "parts", "=", "parts", "[", ":", "-", "1", "]", "else", ":", "cmp_fmt", "=", "None", "if", "len", "(", "parts", ")", ">", "1", ":", "if", "parts", "[", "-", "1", "]", "in", "self", ".", "serialization_formats", ":", "ser_fmt", "=", "parts", "[", "-", "1", "]", "parts", "=", "parts", "[", ":", "-", "1", "]", "else", ":", "raise", "CommandError", "(", "\"Problem installing fixture '%s': %s is not a known \"", "\"serialization format.\"", "%", "(", "'.'", ".", "join", "(", "parts", "[", ":", "-", "1", "]", ")", ",", "parts", "[", "-", "1", "]", ")", ")", "else", ":", "ser_fmt", "=", "None", "name", "=", "'.'", ".", "join", "(", "parts", ")", "return", "name", ",", "ser_fmt", ",", "cmp_fmt" ]
[ 312, 4 ]
[ 342, 37 ]
python
en
['en', 'error', 'th']
False
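For context, the parsing above means a label like 'books.json.gz' splits into ('books', 'json', 'gz'). Here is a standalone sketch of the same right-to-left splitting; the two format sets are illustrative subsets, not the command's actual registries:

COMPRESSION_FORMATS = {'gz', 'zip', 'bz2'}       # illustrative subset
SERIALIZATION_FORMATS = {'json', 'xml', 'yaml'}  # illustrative subset

def split_fixture_label(label):
    # Peel off at most a compression suffix, then a serialization suffix.
    parts = label.rsplit('.', 2)
    cmp_fmt = parts.pop() if len(parts) > 1 and parts[-1] in COMPRESSION_FORMATS else None
    ser_fmt = parts.pop() if len(parts) > 1 and parts[-1] in SERIALIZATION_FORMATS else None
    return '.'.join(parts), ser_fmt, cmp_fmt

print(split_fixture_label('books.json.gz'))  # ('books', 'json', 'gz')
print(split_fixture_label('books'))          # ('books', None, None)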
inject_rename_contenttypes_operations
(plan=None, apps=global_apps, using=DEFAULT_DB_ALIAS, **kwargs)
Insert a `RenameContentType` operation after every planned `RenameModel` operation.
Insert a `RenameContentType` operation after every planned `RenameModel` operation.
def inject_rename_contenttypes_operations(plan=None, apps=global_apps, using=DEFAULT_DB_ALIAS, **kwargs):
    """
    Insert a `RenameContentType` operation after every planned `RenameModel`
    operation.
    """
    if plan is None:
        return

    # Determine whether or not the ContentType model is available.
    try:
        ContentType = apps.get_model('contenttypes', 'ContentType')
    except LookupError:
        available = False
    else:
        if not router.allow_migrate_model(using, ContentType):
            return
        available = True

    for migration, backward in plan:
        if (migration.app_label, migration.name) == ('contenttypes', '0001_initial'):
            # There's no point in going forward if the initial contenttypes
            # migration is unapplied as the ContentType model will be
            # unavailable from this point.
            if backward:
                break
            else:
                available = True
                continue
        # The ContentType model is not available yet.
        if not available:
            continue
        inserts = []
        for index, operation in enumerate(migration.operations):
            if isinstance(operation, migrations.RenameModel):
                operation = RenameContentType(
                    migration.app_label, operation.old_name_lower, operation.new_name_lower
                )
                inserts.append((index + 1, operation))
        for inserted, (index, operation) in enumerate(inserts):
            migration.operations.insert(inserted + index, operation)
[ "def", "inject_rename_contenttypes_operations", "(", "plan", "=", "None", ",", "apps", "=", "global_apps", ",", "using", "=", "DEFAULT_DB_ALIAS", ",", "*", "*", "kwargs", ")", ":", "if", "plan", "is", "None", ":", "return", "# Determine whether or not the ContentType model is available.", "try", ":", "ContentType", "=", "apps", ".", "get_model", "(", "'contenttypes'", ",", "'ContentType'", ")", "except", "LookupError", ":", "available", "=", "False", "else", ":", "if", "not", "router", ".", "allow_migrate_model", "(", "using", ",", "ContentType", ")", ":", "return", "available", "=", "True", "for", "migration", ",", "backward", "in", "plan", ":", "if", "(", "migration", ".", "app_label", ",", "migration", ".", "name", ")", "==", "(", "'contenttypes'", ",", "'0001_initial'", ")", ":", "# There's no point in going forward if the initial contenttypes", "# migration is unapplied as the ContentType model will be", "# unavailable from this point.", "if", "backward", ":", "break", "else", ":", "available", "=", "True", "continue", "# The ContentType model is not available yet.", "if", "not", "available", ":", "continue", "inserts", "=", "[", "]", "for", "index", ",", "operation", "in", "enumerate", "(", "migration", ".", "operations", ")", ":", "if", "isinstance", "(", "operation", ",", "migrations", ".", "RenameModel", ")", ":", "operation", "=", "RenameContentType", "(", "migration", ".", "app_label", ",", "operation", ".", "old_name_lower", ",", "operation", ".", "new_name_lower", ")", "inserts", ".", "append", "(", "(", "index", "+", "1", ",", "operation", ")", ")", "for", "inserted", ",", "(", "index", ",", "operation", ")", "in", "enumerate", "(", "inserts", ")", ":", "migration", ".", "operations", ".", "insert", "(", "inserted", "+", "index", ",", "operation", ")" ]
[ 45, 0 ]
[ 84, 68 ]
python
en
['en', 'error', 'th']
False
create_contenttypes
(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, apps=global_apps, **kwargs)
Create content types for models in the given app.
Create content types for models in the given app.
def create_contenttypes(app_config, verbosity=2, interactive=True, using=DEFAULT_DB_ALIAS, apps=global_apps, **kwargs):
    """
    Create content types for models in the given app.
    """
    if not app_config.models_module:
        return

    app_label = app_config.label
    try:
        app_config = apps.get_app_config(app_label)
        ContentType = apps.get_model('contenttypes', 'ContentType')
    except LookupError:
        return

    content_types, app_models = get_contenttypes_and_models(app_config, using, ContentType)

    if not app_models:
        return

    cts = [
        ContentType(
            app_label=app_label,
            model=model_name,
        )
        for (model_name, model) in app_models.items()
        if model_name not in content_types
    ]
    ContentType.objects.using(using).bulk_create(cts)
    if verbosity >= 2:
        for ct in cts:
            print("Adding content type '%s | %s'" % (ct.app_label, ct.model))
[ "def", "create_contenttypes", "(", "app_config", ",", "verbosity", "=", "2", ",", "interactive", "=", "True", ",", "using", "=", "DEFAULT_DB_ALIAS", ",", "apps", "=", "global_apps", ",", "*", "*", "kwargs", ")", ":", "if", "not", "app_config", ".", "models_module", ":", "return", "app_label", "=", "app_config", ".", "label", "try", ":", "app_config", "=", "apps", ".", "get_app_config", "(", "app_label", ")", "ContentType", "=", "apps", ".", "get_model", "(", "'contenttypes'", ",", "'ContentType'", ")", "except", "LookupError", ":", "return", "content_types", ",", "app_models", "=", "get_contenttypes_and_models", "(", "app_config", ",", "using", ",", "ContentType", ")", "if", "not", "app_models", ":", "return", "cts", "=", "[", "ContentType", "(", "app_label", "=", "app_label", ",", "model", "=", "model_name", ",", ")", "for", "(", "model_name", ",", "model", ")", "in", "app_models", ".", "items", "(", ")", "if", "model_name", "not", "in", "content_types", "]", "ContentType", ".", "objects", ".", "using", "(", "using", ")", ".", "bulk_create", "(", "cts", ")", "if", "verbosity", ">=", "2", ":", "for", "ct", "in", "cts", ":", "print", "(", "\"Adding content type '%s | %s'\"", "%", "(", "ct", ".", "app_label", ",", "ct", ".", "model", ")", ")" ]
[ 104, 0 ]
[ 134, 77 ]
python
en
['en', 'error', 'th']
False
EmailBackend._get_filename
(self)
Return a unique file name.
Return a unique file name.
def _get_filename(self):
    """Return a unique file name."""
    if self._fname is None:
        timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
        fname = "%s-%s.log" % (timestamp, abs(id(self)))
        self._fname = os.path.join(self.file_path, fname)
    return self._fname
[ "def", "_get_filename", "(", "self", ")", ":", "if", "self", ".", "_fname", "is", "None", ":", "timestamp", "=", "datetime", ".", "datetime", ".", "now", "(", ")", ".", "strftime", "(", "\"%Y%m%d-%H%M%S\"", ")", "fname", "=", "\"%s-%s.log\"", "%", "(", "timestamp", ",", "abs", "(", "id", "(", "self", ")", ")", ")", "self", ".", "_fname", "=", "os", ".", "path", ".", "join", "(", "self", ".", "file_path", ",", "fname", ")", "return", "self", ".", "_fname" ]
[ 44, 4 ]
[ 50, 26 ]
python
en
['fr', 'it', 'en']
False
is_known_charset
(charset)
Checks if the given charset is known to Python.
Checks if the given charset is known to Python.
def is_known_charset(charset):
    """Checks if the given charset is known to Python."""
    try:
        codecs.lookup(charset)
    except LookupError:
        return False
    return True
[ "def", "is_known_charset", "(", "charset", ")", ":", "try", ":", "codecs", ".", "lookup", "(", "charset", ")", "except", "LookupError", ":", "return", "False", "return", "True" ]
[ 33, 0 ]
[ 39, 15 ]
python
en
['en', 'en', 'en']
True
ProtobufRequestMixin.parse_protobuf
(self, proto_type)
Parse the data into an instance of proto_type.
Parse the data into an instance of proto_type.
def parse_protobuf(self, proto_type):
    """Parse the data into an instance of proto_type."""
    warnings.warn(
        "'werkzeug.contrib.wrappers.ProtobufRequestMixin' is"
        " deprecated as of version 0.15 and will be removed in"
        " version 1.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    if "protobuf" not in self.environ.get("CONTENT_TYPE", ""):
        raise BadRequest("Not a Protobuf request")

    obj = proto_type()
    try:
        obj.ParseFromString(self.data)
    except Exception:
        raise BadRequest("Unable to parse Protobuf request")

    # Fail if not all required fields are set
    if self.protobuf_check_initialization and not obj.IsInitialized():
        raise BadRequest("Partial Protobuf request")

    return obj
[ "def", "parse_protobuf", "(", "self", ",", "proto_type", ")", ":", "warnings", ".", "warn", "(", "\"'werkzeug.contrib.wrappers.ProtobufRequestMixin' is\"", "\" deprecated as of version 0.15 and will be removed in\"", "\" version 1.0.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ",", ")", "if", "\"protobuf\"", "not", "in", "self", ".", "environ", ".", "get", "(", "\"CONTENT_TYPE\"", ",", "\"\"", ")", ":", "raise", "BadRequest", "(", "\"Not a Protobuf request\"", ")", "obj", "=", "proto_type", "(", ")", "try", ":", "obj", ".", "ParseFromString", "(", "self", ".", "data", ")", "except", "Exception", ":", "raise", "BadRequest", "(", "\"Unable to parse Protobuf request\"", ")", "# Fail if not all required fields are set", "if", "self", ".", "protobuf_check_initialization", "and", "not", "obj", ".", "IsInitialized", "(", ")", ":", "raise", "BadRequest", "(", "\"Partial Protobuf request\"", ")", "return", "obj" ]
[ 81, 4 ]
[ 103, 18 ]
python
en
['en', 'en', 'en']
True
ReverseSlashBehaviorRequestMixin.path
(self)
Requested path as unicode. This works a bit like the regular path info in the WSGI environment but will not include a leading slash.
Requested path as unicode. This works a bit like the regular path info in the WSGI environment but will not include a leading slash.
def path(self):
    """Requested path as unicode.  This works a bit like the regular path
    info in the WSGI environment but will not include a leading slash.
    """
    warnings.warn(
        "'werkzeug.contrib.wrappers.ReverseSlashBehaviorRequestMixin'"
        " is deprecated as of version 0.15 and will be removed in"
        " version 1.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    path = wsgi_decoding_dance(
        self.environ.get("PATH_INFO") or "", self.charset, self.encoding_errors
    )
    return path.lstrip("/")
[ "def", "path", "(", "self", ")", ":", "warnings", ".", "warn", "(", "\"'werkzeug.contrib.wrappers.ReverseSlashBehaviorRequestMixin'\"", "\" is deprecated as of version 0.15 and will be removed in\"", "\" version 1.0.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ",", ")", "path", "=", "wsgi_decoding_dance", "(", "self", ".", "environ", ".", "get", "(", "\"PATH_INFO\"", ")", "or", "\"\"", ",", "self", ".", "charset", ",", "self", ".", "encoding_errors", ")", "return", "path", ".", "lstrip", "(", "\"/\"", ")" ]
[ 221, 4 ]
[ 235, 31 ]
python
en
['en', 'en', 'en']
True
ReverseSlashBehaviorRequestMixin.script_root
(self)
The root path of the script including a trailing slash.
The root path of the script including a trailing slash.
def script_root(self):
    """The root path of the script including a trailing slash."""
    warnings.warn(
        "'werkzeug.contrib.wrappers.ReverseSlashBehaviorRequestMixin'"
        " is deprecated as of version 0.15 and will be removed in"
        " version 1.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    path = wsgi_decoding_dance(
        self.environ.get("SCRIPT_NAME") or "", self.charset, self.encoding_errors
    )
    return path.rstrip("/") + "/"
[ "def", "script_root", "(", "self", ")", ":", "warnings", ".", "warn", "(", "\"'werkzeug.contrib.wrappers.ReverseSlashBehaviorRequestMixin'\"", "\" is deprecated as of version 0.15 and will be removed in\"", "\" version 1.0.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ",", ")", "path", "=", "wsgi_decoding_dance", "(", "self", ".", "environ", ".", "get", "(", "\"SCRIPT_NAME\"", ")", "or", "\"\"", ",", "self", ".", "charset", ",", "self", ".", "encoding_errors", ")", "return", "path", ".", "rstrip", "(", "\"/\"", ")", "+", "\"/\"" ]
[ 238, 4 ]
[ 250, 37 ]
python
en
['en', 'en', 'en']
True
DynamicCharsetRequestMixin.unknown_charset
(self, charset)
Called if a charset was provided but is not supported by the Python
codecs module. By default latin1 is assumed then to not lose any
information; you may override this method to change the behavior.

:param charset: the charset that was not found.
:return: the replacement charset.
Called if a charset was provided but is not supported by the Python codecs module. By default latin1 is assumed then to not lose any information; you may override this method to change the behavior.
def unknown_charset(self, charset):
    """Called if a charset was provided but is not supported by the
    Python codecs module.  By default latin1 is assumed then to not
    lose any information; you may override this method to change
    the behavior.

    :param charset: the charset that was not found.
    :return: the replacement charset.
    """
    return "latin1"
[ "def", "unknown_charset", "(", "self", ",", "charset", ")", ":", "return", "\"latin1\"" ]
[ 288, 4 ]
[ 297, 23 ]
python
en
['en', 'en', 'en']
True
DynamicCharsetRequestMixin.charset
(self)
The charset from the content type.
The charset from the content type.
def charset(self):
    """The charset from the content type."""
    warnings.warn(
        "'werkzeug.contrib.wrappers.DynamicCharsetRequestMixin'"
        " is deprecated as of version 0.15 and will be removed in"
        " version 1.0.",
        DeprecationWarning,
        stacklevel=2,
    )
    header = self.environ.get("CONTENT_TYPE")
    if header:
        ct, options = parse_options_header(header)
        charset = options.get("charset")
        if charset:
            if is_known_charset(charset):
                return charset
            return self.unknown_charset(charset)
    return self.default_charset
[ "def", "charset", "(", "self", ")", ":", "warnings", ".", "warn", "(", "\"'werkzeug.contrib.wrappers.DynamicCharsetRequestMixin'\"", "\" is deprecated as of version 0.15 and will be removed in\"", "\" version 1.0.\"", ",", "DeprecationWarning", ",", "stacklevel", "=", "2", ",", ")", "header", "=", "self", ".", "environ", ".", "get", "(", "\"CONTENT_TYPE\"", ")", "if", "header", ":", "ct", ",", "options", "=", "parse_options_header", "(", "header", ")", "charset", "=", "options", ".", "get", "(", "\"charset\"", ")", "if", "charset", ":", "if", "is_known_charset", "(", "charset", ")", ":", "return", "charset", "return", "self", ".", "unknown_charset", "(", "charset", ")", "return", "self", ".", "default_charset" ]
[ 300, 4 ]
[ 317, 35 ]
python
en
['en', 'en', 'en']
True
_WKBReader.read
(self, wkb)
Return a _pointer_ to C GEOS Geometry object from the given WKB.
Return a _pointer_ to C GEOS Geometry object from the given WKB.
def read(self, wkb):
    "Return a _pointer_ to C GEOS Geometry object from the given WKB."
    if isinstance(wkb, memoryview):
        wkb_s = bytes(wkb)
        return wkb_reader_read(self.ptr, wkb_s, len(wkb_s))
    elif isinstance(wkb, (bytes, str)):
        return wkb_reader_read_hex(self.ptr, wkb, len(wkb))
    else:
        raise TypeError
[ "def", "read", "(", "self", ",", "wkb", ")", ":", "if", "isinstance", "(", "wkb", ",", "memoryview", ")", ":", "wkb_s", "=", "bytes", "(", "wkb", ")", "return", "wkb_reader_read", "(", "self", ".", "ptr", ",", "wkb_s", ",", "len", "(", "wkb_s", ")", ")", "elif", "isinstance", "(", "wkb", ",", "(", "bytes", ",", "str", ")", ")", ":", "return", "wkb_reader_read_hex", "(", "self", ".", "ptr", ",", "wkb", ",", "len", "(", "wkb", ")", ")", "else", ":", "raise", "TypeError" ]
[ 146, 4 ]
[ 154, 27 ]
python
en
['en', 'en', 'en']
True
WKTWriter.write
(self, geom)
Return the WKT representation of the given geometry.
Return the WKT representation of the given geometry.
def write(self, geom):
    "Return the WKT representation of the given geometry."
    return wkt_writer_write(self.ptr, geom.ptr)
[ "def", "write", "(", "self", ",", "geom", ")", ":", "return", "wkt_writer_write", "(", "self", ".", "ptr", ",", "geom", ".", "ptr", ")" ]
[ 174, 4 ]
[ 176, 51 ]
python
en
['en', 'en', 'en']
True
WKBWriter.write
(self, geom)
Return the WKB representation of the given geometry.
Return the WKB representation of the given geometry.
def write(self, geom):
    "Return the WKB representation of the given geometry."
    from django.contrib.gis.geos import Polygon
    geom = self._handle_empty_point(geom)
    wkb = wkb_writer_write(self.ptr, geom.ptr, byref(c_size_t()))
    if self.geos_version < (3, 6, 1) and isinstance(geom, Polygon) and geom.empty:
        # Fix GEOS output for empty polygon.
        # See https://trac.osgeo.org/geos/ticket/680.
        wkb = wkb[:-8] + b'\0' * 4
    return memoryview(wkb)
[ "def", "write", "(", "self", ",", "geom", ")", ":", "from", "django", ".", "contrib", ".", "gis", ".", "geos", "import", "Polygon", "geom", "=", "self", ".", "_handle_empty_point", "(", "geom", ")", "wkb", "=", "wkb_writer_write", "(", "self", ".", "ptr", ",", "geom", ".", "ptr", ",", "byref", "(", "c_size_t", "(", ")", ")", ")", "if", "self", ".", "geos_version", "<", "(", "3", ",", "6", ",", "1", ")", "and", "isinstance", "(", "geom", ",", "Polygon", ")", "and", "geom", ".", "empty", ":", "# Fix GEOS output for empty polygon.", "# See https://trac.osgeo.org/geos/ticket/680.", "wkb", "=", "wkb", "[", ":", "-", "8", "]", "+", "b'\\0'", "*", "4", "return", "memoryview", "(", "wkb", ")" ]
[ 233, 4 ]
[ 242, 30 ]
python
en
['en', 'en', 'en']
True
WKBWriter.write_hex
(self, geom)
Return the HEXEWKB representation of the given geometry.
Return the HEXEWKB representation of the given geometry.
def write_hex(self, geom):
    "Return the HEXEWKB representation of the given geometry."
    from django.contrib.gis.geos.polygon import Polygon
    geom = self._handle_empty_point(geom)
    wkb = wkb_writer_write_hex(self.ptr, geom.ptr, byref(c_size_t()))
    if self.geos_version < (3, 6, 1) and isinstance(geom, Polygon) and geom.empty:
        wkb = wkb[:-16] + b'0' * 8
    return wkb
[ "def", "write_hex", "(", "self", ",", "geom", ")", ":", "from", "django", ".", "contrib", ".", "gis", ".", "geos", ".", "polygon", "import", "Polygon", "geom", "=", "self", ".", "_handle_empty_point", "(", "geom", ")", "wkb", "=", "wkb_writer_write_hex", "(", "self", ".", "ptr", ",", "geom", ".", "ptr", ",", "byref", "(", "c_size_t", "(", ")", ")", ")", "if", "self", ".", "geos_version", "<", "(", "3", ",", "6", ",", "1", ")", "and", "isinstance", "(", "geom", ",", "Polygon", ")", "and", "geom", ".", "empty", ":", "wkb", "=", "wkb", "[", ":", "-", "16", "]", "+", "b'0'", "*", "8", "return", "wkb" ]
[ 244, 4 ]
[ 251, 18 ]
python
en
['en', 'en', 'en']
True
Local._get_context_id
(self)
Get the ID we should use for looking up variables
Get the ID we should use for looking up variables
def _get_context_id(self):
    """
    Get the ID we should use for looking up variables
    """
    # Prevent a circular reference
    from .sync import AsyncToSync, SyncToAsync

    # First, pull the current task if we can
    context_id = SyncToAsync.get_current_task()
    context_is_async = True
    # OK, let's try for a thread ID
    if context_id is None:
        context_id = threading.current_thread()
        context_is_async = False
    # If we're thread-critical, we stop here, as we can't share contexts.
    if self._thread_critical:
        return context_id
    # Now, take those and see if we can resolve them through the launch maps
    for i in range(sys.getrecursionlimit()):
        try:
            if context_is_async:
                # Tasks have a source thread in AsyncToSync
                context_id = AsyncToSync.launch_map[context_id]
                context_is_async = False
            else:
                # Threads have a source task in SyncToAsync
                context_id = SyncToAsync.launch_map[context_id]
                context_is_async = True
        except KeyError:
            break
    else:
        # Catch infinite loops (they happen if you are screwing around
        # with AsyncToSync implementations)
        raise RuntimeError("Infinite launch_map loops")
    return context_id
[ "def", "_get_context_id", "(", "self", ")", ":", "# Prevent a circular reference", "from", ".", "sync", "import", "AsyncToSync", ",", "SyncToAsync", "# First, pull the current task if we can", "context_id", "=", "SyncToAsync", ".", "get_current_task", "(", ")", "context_is_async", "=", "True", "# OK, let's try for a thread ID", "if", "context_id", "is", "None", ":", "context_id", "=", "threading", ".", "current_thread", "(", ")", "context_is_async", "=", "False", "# If we're thread-critical, we stop here, as we can't share contexts.", "if", "self", ".", "_thread_critical", ":", "return", "context_id", "# Now, take those and see if we can resolve them through the launch maps", "for", "i", "in", "range", "(", "sys", ".", "getrecursionlimit", "(", ")", ")", ":", "try", ":", "if", "context_is_async", ":", "# Tasks have a source thread in AsyncToSync", "context_id", "=", "AsyncToSync", ".", "launch_map", "[", "context_id", "]", "context_is_async", "=", "False", "else", ":", "# Threads have a source task in SyncToAsync", "context_id", "=", "SyncToAsync", ".", "launch_map", "[", "context_id", "]", "context_is_async", "=", "True", "except", "KeyError", ":", "break", "else", ":", "# Catch infinite loops (they happen if you are screwing around", "# with AsyncToSync implementations)", "raise", "RuntimeError", "(", "\"Infinite launch_map loops\"", ")", "return", "context_id" ]
[ 45, 4 ]
[ 79, 25 ]
python
en
['en', 'error', 'th']
False
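A sketch of the behavior this context-ID resolution enables, assuming a version of asgiref that still uses launch_map: a Local value set in a task stays visible inside sync_to_async, because the worker thread's context resolves back to the launching task.

import asyncio
from asgiref.local import Local
from asgiref.sync import sync_to_async

local = Local()  # thread_critical=False, so contexts may be shared via the map

def read_value():
    # Runs in a worker thread; _get_context_id() walks launch_map back to the task.
    return local.value

async def main():
    local.value = 42
    assert await sync_to_async(read_value)() == 42

asyncio.run(main())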
BaseEvaluation.__init__
(self, sep='\t', metrics=None, all_but_one_eval=False, verbose=True, as_table=False, table_sep='\t')
Base class for evaluation strategies :param sep: Delimiter for input files :type sep: str, default '\t' :param metrics: List of evaluation metrics :type metrics: list, default None :param all_but_one_eval: If True, considers only one pair (u, i) from the test set to evaluate the ranking :type all_but_one_eval: bool, default False :param verbose: Print the evaluation results :type verbose: bool, default True :param as_table: Print the evaluation results as a table (only works with verbose=True) :type as_table: bool, default False :param table_sep: Delimiter for printed results (only works with verbose=True and as_table=True) :type table_sep: str, default '\t'
Base class for evaluation strategies
def __init__(self, sep='\t', metrics=None, all_but_one_eval=False, verbose=True, as_table=False, table_sep='\t'):
        """
        Base class for evaluation strategies

        :param sep: Delimiter for input files
        :type sep: str, default '\t'

        :param metrics: List of evaluation metrics
        :type metrics: list, default None

        :param all_but_one_eval: If True, considers only one pair (u, i) from the test set to evaluate the ranking
        :type all_but_one_eval: bool, default False

        :param verbose: Print the evaluation results
        :type verbose: bool, default True

        :param as_table: Print the evaluation results as a table (only works with verbose=True)
        :type as_table: bool, default False

        :param table_sep: Delimiter for printed results (only works with verbose=True and as_table=True)
        :type table_sep: str, default '\t'

        """
        self.sep = sep
        self.all_but_one_eval = all_but_one_eval
        self.metrics = metrics
        self.verbose = verbose
        self.as_table = as_table
        self.table_sep = table_sep
[ "def", "__init__", "(", "self", ",", "sep", "=", "'\\t'", ",", "metrics", "=", "None", ",", "all_but_one_eval", "=", "False", ",", "verbose", "=", "True", ",", "as_table", "=", "False", ",", "table_sep", "=", "'\\t'", ")", ":", "self", ".", "sep", "=", "sep", "self", ".", "all_but_one_eval", "=", "all_but_one_eval", "self", ".", "metrics", "=", "metrics", "self", ".", "verbose", "=", "verbose", "self", ".", "as_table", "=", "as_table", "self", ".", "table_sep", "=", "table_sep" ]
[ 20, 4 ]
[ 48, 34 ]
python
en
['en', 'error', 'th']
False
BaseEvaluation.evaluate
(self, predictions, test_set)
Method to be implemented for each strategy using their respective metrics. Use read() in ReadFile to transform your file into a dict :param predictions: Dictionary with ranking information :type predictions: dict :param test_set: Dictionary with test set information. :type test_set: dict
Method to be implemented for each strategy using their respective metrics. Use read() in ReadFile to transform your file into a dict
def evaluate(self, predictions, test_set):
        """
        Method to be implemented for each strategy using their respective metrics.
        Use read() in ReadFile to transform your file into a dict

        :param predictions: Dictionary with ranking information
        :type predictions: dict

        :param test_set: Dictionary with test set information.
        :type test_set: dict

        """
        raise NotImplementedError
[ "def", "evaluate", "(", "self", ",", "predictions", ",", "test_set", ")", ":", "raise", "NotImplemented" ]
[ 50, 4 ]
[ 62, 28 ]
python
en
['en', 'error', 'th']
False
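Since evaluate() is abstract, each concrete strategy supplies its own metric loop. A hypothetical minimal subclass for illustration only (the PrecisionEvaluation name, the precision computation, and the assumed dict shapes are not part of the library):

class PrecisionEvaluation(BaseEvaluation):
    def evaluate(self, predictions, test_set):
        # Assumed shapes: predictions = {user: {item: score}},
        # test_set = {'feedback': {user: {item: value}}}
        hits = total = 0
        for user, ranked_items in predictions.items():
            relevant = set(test_set['feedback'].get(user, {}))
            hits += sum(1 for item in ranked_items if item in relevant)
            total += len(ranked_items)
        results = {'PREC': round(hits / max(total, 1), 6)}
        if self.verbose:
            self.print_results(results)
        return results

ev = PrecisionEvaluation(metrics=['prec'])  # print_results() looks up metric.upper()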
BaseEvaluation.evaluate_with_files
(self, prediction_file, test_file)
Method to evaluate predictions using files :param prediction_file: Predictions file with at least 2 columns for item recommendation (e.g. user item [score (optional)]) and 3 columns for rating prediction (e.g. user item rating) :type prediction_file: str :param test_file: Test file :type test_file: str :return: Dictionary with all evaluation metrics and results :rtype: dict
Method to evaluate predictions using files
def evaluate_with_files(self, prediction_file, test_file):
        """
        Method to evaluate predictions using files

        :param prediction_file: Predictions file with at least 2 columns for item recommendation
        (e.g. user item [score (optional)]) and 3 columns for rating prediction (e.g. user item rating)
        :type prediction_file: str

        :param test_file: Test file
        :type test_file: str

        :return: Dictionary with all evaluation metrics and results
        :rtype: dict

        """
        predict = ReadFile(prediction_file, sep=self.sep).read()
        test_set = ReadFile(test_file, sep=self.sep).read()

        return self.evaluate(predict['feedback'], test_set)
[ "def", "evaluate_with_files", "(", "self", ",", "prediction_file", ",", "test_file", ")", ":", "predict", "=", "ReadFile", "(", "prediction_file", ",", "sep", "=", "self", ".", "sep", ")", ".", "read", "(", ")", "test_set", "=", "ReadFile", "(", "test_file", ",", "sep", "=", "self", ".", "sep", ")", ".", "read", "(", ")", "return", "self", ".", "evaluate", "(", "predict", "[", "'feedback'", "]", ",", "test_set", ")" ]
[ 64, 4 ]
[ 83, 59 ]
python
en
['en', 'error', 'th']
False
BaseEvaluation.evaluate_recommender
(self, predictions, test_set)
Method to evaluate recommender results. This method should be called by item recommender algorithms :param predictions: List with recommender output. e.g. [[user, item, score], [user, item2, score] ...] :type predictions: list :param test_set: Dictionary with test set information. :type test_set: dict :return: Dictionary with all evaluation metrics and results :rtype: dict
Method to evaluate recommender results. This method should be called by item recommender algorithms
def evaluate_recommender(self, predictions, test_set):
        """
        Method to evaluate recommender results. This method should be called by item recommender algorithms

        :param predictions: List with recommender output. e.g. [[user, item, score], [user, item2, score] ...]
        :type predictions: list

        :param test_set: Dictionary with test set information.
        :type test_set: dict

        :return: Dictionary with all evaluation metrics and results
        :rtype: dict

        """
        predictions_dict = {}

        for sample in predictions:
            predictions_dict.setdefault(sample[0], {}).update({sample[1]: sample[2]})

        return self.evaluate(predictions_dict, test_set)
[ "def", "evaluate_recommender", "(", "self", ",", "predictions", ",", "test_set", ")", ":", "predictions_dict", "=", "{", "}", "for", "sample", "in", "predictions", ":", "predictions_dict", ".", "setdefault", "(", "sample", "[", "0", "]", ",", "{", "}", ")", ".", "update", "(", "{", "sample", "[", "1", "]", ":", "sample", "[", "2", "]", "}", ")", "return", "self", ".", "evaluate", "(", "predictions_dict", ",", "test_set", ")" ]
[ 85, 4 ]
[ 105, 56 ]
python
en
['en', 'error', 'th']
False
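The setdefault loop above reshapes the recommender's flat triples into the nested dict that evaluate() expects; a standalone illustration with made-up values:

predictions = [('u1', 'i1', 0.9), ('u1', 'i2', 0.7), ('u2', 'i1', 0.4)]
predictions_dict = {}
for user, item, score in predictions:
    predictions_dict.setdefault(user, {})[item] = score
# {'u1': {'i1': 0.9, 'i2': 0.7}, 'u2': {'i1': 0.4}}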
BaseEvaluation.evaluate_folds
(self, folds_dir, predictions_file_name, test_file_name, k_folds=10)
Evaluate ranking in a set of folds. Fold names must be integers starting at 0, e.g. a directory '/home/user/folds' containing the folds 0, 1, ..., 9. :param folds_dir: Directory of folds :type folds_dir: str :param k_folds: Number of folds :type k_folds: int, default 10 :param predictions_file_name: Name of the ranking file :type predictions_file_name: str :param test_file_name: Name of the test file :type test_file_name: str :return: Dictionary with all evaluation metrics and results :rtype: dict
Evaluate ranking in a set of folds. Fold names must be integers starting at 0, e.g. a directory '/home/user/folds' containing the folds 0, 1, ..., 9.
def evaluate_folds(self, folds_dir, predictions_file_name, test_file_name, k_folds=10):
        """
        Evaluate ranking in a set of folds. Fold names must be integers starting at 0,
        e.g. a directory '/home/user/folds' containing the folds 0, 1, ..., 9.

        :param folds_dir: Directory of folds
        :type folds_dir: str

        :param k_folds: Number of folds
        :type k_folds: int, default 10

        :param predictions_file_name: Name of the ranking file
        :type predictions_file_name: str

        :param test_file_name: Name of the test file
        :type test_file_name: str

        :return: Dictionary with all evaluation metrics and results
        :rtype: dict

        """
        folds_results = defaultdict()

        for fold in range(k_folds):
            predictions_file = folds_dir + str(fold) + '/' + predictions_file_name
            test_file = folds_dir + str(fold) + '/' + test_file_name

            for key, value in self.evaluate_with_files(predictions_file, test_file).items():
                folds_results[key] = folds_results.get(key, 0) + value

        folds_results = {k: round(v / k_folds, 6) for k, v in folds_results.items()}

        if self.verbose:
            self.print_results(folds_results)

        return folds_results
[ "def", "evaluate_folds", "(", "self", ",", "folds_dir", ",", "predictions_file_name", ",", "test_file_name", ",", "k_folds", "=", "10", ")", ":", "folds_results", "=", "defaultdict", "(", ")", "for", "fold", "in", "range", "(", "k_folds", ")", ":", "predictions_file", "=", "folds_dir", "+", "str", "(", "fold", ")", "+", "'/'", "+", "predictions_file_name", "test_file", "=", "folds_dir", "+", "str", "(", "fold", ")", "+", "'/'", "+", "test_file_name", "for", "key", ",", "value", "in", "self", ".", "evaluate_with_files", "(", "predictions_file", ",", "test_file", ")", ".", "items", "(", ")", ":", "folds_results", "[", "key", "]", "=", "folds_results", ".", "get", "(", "key", ",", "0", ")", "+", "value", "folds_results", "=", "{", "k", ":", "round", "(", "v", "/", "k_folds", ",", "6", ")", "for", "k", ",", "v", "in", "folds_results", ".", "items", "(", ")", "}", "if", "self", ".", "verbose", ":", "self", ".", "print_results", "(", "folds_results", ")", "return", "folds_results" ]
[ 107, 4 ]
[ 143, 28 ]
python
en
['en', 'error', 'th']
False
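A hedged call sketch for the fold evaluation above (paths and file names are illustrative, and PrecisionEvaluation is the hypothetical subclass sketched earlier); note that folds_dir is joined with '+', so the trailing slash matters:

ev = PrecisionEvaluation(metrics=['prec'])  # any concrete BaseEvaluation subclass
results = ev.evaluate_folds(
    folds_dir='/home/user/folds/',
    predictions_file_name='rank.dat',
    test_file_name='test.dat',
    k_folds=10,
)  # averages each metric over folds 0..9 and rounds to 6 decimals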
BaseEvaluation.print_results
(self, evaluation_results)
Method to print the results :param evaluation_results: Dictionary with results. e.g. {metric: value} :type evaluation_results: dict
Method to print the results
def print_results(self, evaluation_results):
        """
        Method to print the results

        :param evaluation_results: Dictionary with results. e.g. {metric: value}
        :type evaluation_results: dict

        """
        if self.as_table:
            header = ''
            values = ''
            for metric in self.metrics:
                header += metric.upper() + self.table_sep
                values += str(evaluation_results[metric.upper()]) + self.table_sep
            print(header)
            print(values)
        else:
            evaluation = 'Eval:: '
            for metrics in self.metrics:
                evaluation += metrics.upper() + ': ' + str(evaluation_results[metrics.upper()]) + ' '
            print(evaluation)
[ "def", "print_results", "(", "self", ",", "evaluation_results", ")", ":", "if", "self", ".", "as_table", ":", "header", "=", "''", "values", "=", "''", "for", "metric", "in", "self", ".", "metrics", ":", "header", "+=", "metric", ".", "upper", "(", ")", "+", "self", ".", "table_sep", "values", "+=", "str", "(", "evaluation_results", "[", "metric", ".", "upper", "(", ")", "]", ")", "+", "self", ".", "table_sep", "print", "(", "header", ")", "print", "(", "values", ")", "else", ":", "evaluation", "=", "'Eval:: '", "for", "metrics", "in", "self", ".", "metrics", ":", "evaluation", "+=", "metrics", ".", "upper", "(", ")", "+", "': '", "+", "str", "(", "evaluation_results", "[", "metrics", ".", "upper", "(", ")", "]", ")", "+", "' '", "print", "(", "evaluation", ")" ]
[ 145, 4 ]
[ 167, 29 ]
python
en
['en', 'error', 'th']
False
MostPopular.__init__
(self, train_file=None, test_file=None, output_file=None, sep='\t', output_sep='\t')
Most Popular for Item Recommendation This algorithm predicts a rank for each user based on the feedback counts of users and items Usage:: >> MostPopular(train, test).compute() :param train_file: File which contains the train set. This file needs to have at least 3 columns (user item feedback_value). :type train_file: str :param test_file: File which contains the test set. This file needs to have at least 3 columns (user item feedback_value). :type test_file: str, default None :param output_file: File with dir to write the final predictions :type output_file: str, default None :param sep: Delimiter for input files :type sep: str, default '\t' :param output_sep: Delimiter for output file :type output_sep: str, default '\t'
Most Popular for Item Recommendation
def __init__(self, train_file=None, test_file=None, output_file=None, sep='\t', output_sep='\t'):
        """
        Most Popular for Item Recommendation

        This algorithm predicts a rank for each user based on the feedback counts of users and items

        Usage::

            >> MostPopular(train, test).compute()

        :param train_file: File which contains the train set. This file needs to have at least 3 columns
        (user item feedback_value).
        :type train_file: str

        :param test_file: File which contains the test set. This file needs to have at least 3 columns
        (user item feedback_value).
        :type test_file: str, default None

        :param output_file: File with dir to write the final predictions
        :type output_file: str, default None

        :param sep: Delimiter for input files
        :type sep: str, default '\t'

        :param output_sep: Delimiter for output file
        :type output_sep: str, default '\t'

        """
        super(MostPopular, self).__init__(train_file=train_file, test_file=test_file, output_file=output_file,
                                          sep=sep, output_sep=output_sep)

        self.recommender_name = 'Most Popular'
[ "def", "__init__", "(", "self", ",", "train_file", "=", "None", ",", "test_file", "=", "None", ",", "output_file", "=", "None", ",", "sep", "=", "'\\t'", ",", "output_sep", "=", "'\\t'", ")", ":", "super", "(", "MostPopular", ",", "self", ")", ".", "__init__", "(", "train_file", "=", "train_file", ",", "test_file", "=", "test_file", ",", "output_file", "=", "output_file", ",", "sep", "=", "sep", ",", "output_sep", "=", "output_sep", ")", "self", ".", "recommender_name", "=", "'Most Popular'" ]
[ 19, 4 ]
[ 51, 46 ]
python
en
['en', 'error', 'th']
False
MostPopular.predict
(self)
This method predicts the final result, building a rank for each user of the train set.
This method predicts the final result, building a rank for each user of the train set.
def predict(self):
        """
        This method predicts the final result, building a rank for each user of the train set

        """
        if self.test_file is not None:
            for user in self.test_set['users']:
                for item in self.test_set['feedback'][user]:
                    count_value = 0
                    feedback_value = 0

                    for user_v in self.train_set['users_viewed_item'].get(item, []):
                        feedback_value += self.train_set['feedback'][user_v][item]
                        count_value += 1

                    if feedback_value == 0:
                        feedback_value = np.mean(list(self.train_set['feedback'][user].values()))
                    else:
                        feedback_value /= count_value

                    self.predictions.append((user, item, feedback_value))
        else:
            raise NotImplementedError
[ "def", "predict", "(", "self", ")", ":", "if", "self", ".", "test_file", "is", "not", "None", ":", "for", "user", "in", "self", ".", "test_set", "[", "'users'", "]", ":", "for", "item", "in", "self", ".", "test_set", "[", "'feedback'", "]", "[", "user", "]", ":", "count_value", "=", "0", "feedback_value", "=", "0", "for", "user_v", "in", "self", ".", "train_set", "[", "'users_viewed_item'", "]", ".", "get", "(", "item", ",", "[", "]", ")", ":", "feedback_value", "+=", "self", ".", "train_set", "[", "'feedback'", "]", "[", "user_v", "]", "[", "item", "]", "count_value", "+=", "1", "if", "feedback_value", "==", "0", ":", "feedback_value", "=", "np", ".", "mean", "(", "list", "(", "self", ".", "train_set", "[", "'feedback'", "]", "[", "user", "]", ".", "values", "(", ")", ")", ")", "else", ":", "feedback_value", "/=", "count_value", "self", ".", "predictions", ".", "append", "(", "(", "user", ",", "item", ",", "feedback_value", ")", ")", "else", ":", "raise", "NotImplemented" ]
[ 53, 4 ]
[ 77, 32 ]
python
en
['en', 'error', 'th']
False
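The score assigned to each test pair is the item's mean training feedback, with a fallback to the user's own mean for unseen items; the arithmetic in isolation, with made-up data:

train_feedback = {'u1': {'i1': 5, 'i2': 3}, 'u2': {'i1': 4}}
users_viewed_item = {'i1': ['u1', 'u2']}

ratings = [train_feedback[u]['i1'] for u in users_viewed_item.get('i1', [])]
score = sum(ratings) / len(ratings)  # 4.5 for item i1
fallback = sum(train_feedback['u1'].values()) / len(train_feedback['u1'])  # 4.0 for an unseen item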
MostPopular.compute
(self, verbose=True, metrics=None, verbose_evaluation=True, as_table=False, table_sep='\t')
Extends compute method from BaseItemRecommendation. Method to run recommender algorithm :param verbose: Print recommender and database information :type verbose: bool, default True :param metrics: List of evaluation measures :type metrics: list, default None :param verbose_evaluation: Print the evaluation results :type verbose_evaluation: bool, default True :param as_table: Print the evaluation results as a table :type as_table: bool, default False :param table_sep: Delimiter for printed results (only works with verbose=True and as_table=True) :type table_sep: str, default '\t'
Extends compute method from BaseItemRecommendation. Method to run recommender algorithm
def compute(self, verbose=True, metrics=None, verbose_evaluation=True, as_table=False, table_sep='\t'):
        """
        Extends compute method from BaseItemRecommendation. Method to run recommender algorithm

        :param verbose: Print recommender and database information
        :type verbose: bool, default True

        :param metrics: List of evaluation measures
        :type metrics: list, default None

        :param verbose_evaluation: Print the evaluation results
        :type verbose_evaluation: bool, default True

        :param as_table: Print the evaluation results as a table
        :type as_table: bool, default False

        :param table_sep: Delimiter for printed results (only works with verbose=True and as_table=True)
        :type table_sep: str, default '\t'

        """
        super(MostPopular, self).compute(verbose=verbose)

        if verbose:
            print("prediction_time:: %4f sec" % timed(self.predict))
            print('\n')
        else:
            self.predict()

        self.write_predictions()

        if self.test_file is not None:
            self.evaluate(metrics, verbose_evaluation, as_table=as_table, table_sep=table_sep)
[ "def", "compute", "(", "self", ",", "verbose", "=", "True", ",", "metrics", "=", "None", ",", "verbose_evaluation", "=", "True", ",", "as_table", "=", "False", ",", "table_sep", "=", "'\\t'", ")", ":", "super", "(", "MostPopular", ",", "self", ")", ".", "compute", "(", "verbose", "=", "verbose", ")", "if", "verbose", ":", "print", "(", "\"prediction_time:: %4f sec\"", "%", "timed", "(", "self", ".", "predict", ")", ")", "print", "(", "'\\n'", ")", "else", ":", "self", ".", "predict", "(", ")", "self", ".", "write_predictions", "(", ")", "if", "self", ".", "test_file", "is", "not", "None", ":", "self", ".", "evaluate", "(", "metrics", ",", "verbose_evaluation", ",", "as_table", "=", "as_table", ",", "table_sep", "=", "table_sep", ")" ]
[ 79, 4 ]
[ 112, 94 ]
python
en
['en', 'error', 'th']
False
create_generic_related_manager
(superclass, rel)
Factory function to create a manager that subclasses another manager (generally the default manager of a given model) and adds behaviors specific to generic relations.
Factory function to create a manager that subclasses another manager (generally the default manager of a given model) and adds behaviors specific to generic relations.
def create_generic_related_manager(superclass, rel):
    """
    Factory function to create a manager that subclasses another manager
    (generally the default manager of a given model) and adds behaviors
    specific to generic relations.
    """

    class GenericRelatedObjectManager(superclass):
        def __init__(self, instance=None):
            super().__init__()

            self.instance = instance

            self.model = rel.model
            self.get_content_type = functools.partial(
                ContentType.objects.db_manager(instance._state.db).get_for_model,
                for_concrete_model=rel.field.for_concrete_model,
            )
            self.content_type = self.get_content_type(instance)
            self.content_type_field_name = rel.field.content_type_field_name
            self.object_id_field_name = rel.field.object_id_field_name
            self.prefetch_cache_name = rel.field.attname
            self.pk_val = instance.pk

            self.core_filters = {
                '%s__pk' % self.content_type_field_name: self.content_type.id,
                self.object_id_field_name: self.pk_val,
            }

        def __call__(self, *, manager):
            manager = getattr(self.model, manager)
            manager_class = create_generic_related_manager(manager.__class__, rel)
            return manager_class(instance=self.instance)

        do_not_call_in_templates = True

        def __str__(self):
            return repr(self)

        def _apply_rel_filters(self, queryset):
            """
            Filter the queryset for the instance this manager is bound to.
            """
            db = self._db or router.db_for_read(self.model, instance=self.instance)
            return queryset.using(db).filter(**self.core_filters)

        def _remove_prefetched_objects(self):
            try:
                self.instance._prefetched_objects_cache.pop(self.prefetch_cache_name)
            except (AttributeError, KeyError):
                pass  # nothing to clear from cache

        def get_queryset(self):
            try:
                return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
            except (AttributeError, KeyError):
                queryset = super().get_queryset()
                return self._apply_rel_filters(queryset)

        def get_prefetch_queryset(self, instances, queryset=None):
            if queryset is None:
                queryset = super().get_queryset()

            queryset._add_hints(instance=instances[0])
            queryset = queryset.using(queryset._db or self._db)
            # Group instances by content types.
            content_type_queries = (
                models.Q(**{
                    '%s__pk' % self.content_type_field_name: content_type_id,
                    '%s__in' % self.object_id_field_name: {obj.pk for obj in objs}
                })
                for content_type_id, objs in itertools.groupby(
                    sorted(instances, key=lambda obj: self.get_content_type(obj).pk),
                    lambda obj: self.get_content_type(obj).pk,
                )
            )
            query = functools.reduce(operator.or_, content_type_queries)
            # We (possibly) need to convert object IDs to the type of the
            # instances' PK in order to match up instances:
            object_id_converter = instances[0]._meta.pk.to_python
            content_type_id_field_name = '%s_id' % self.content_type_field_name
            return (
                queryset.filter(query),
                lambda relobj: (
                    object_id_converter(getattr(relobj, self.object_id_field_name)),
                    getattr(relobj, content_type_id_field_name),
                ),
                lambda obj: (obj.pk, self.get_content_type(obj).pk),
                False,
                self.prefetch_cache_name,
                False,
            )

        def add(self, *objs, bulk=True):
            self._remove_prefetched_objects()
            db = router.db_for_write(self.model, instance=self.instance)

            def check_and_update_obj(obj):
                if not isinstance(obj, self.model):
                    raise TypeError("'%s' instance expected, got %r" % (
                        self.model._meta.object_name, obj
                    ))
                setattr(obj, self.content_type_field_name, self.content_type)
                setattr(obj, self.object_id_field_name, self.pk_val)

            if bulk:
                pks = []
                for obj in objs:
                    if obj._state.adding or obj._state.db != db:
                        raise ValueError(
                            "%r instance isn't saved. Use bulk=False or save "
                            "the object first." % obj
                        )
                    check_and_update_obj(obj)
                    pks.append(obj.pk)
                self.model._base_manager.using(db).filter(pk__in=pks).update(**{
                    self.content_type_field_name: self.content_type,
                    self.object_id_field_name: self.pk_val,
                })
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in objs:
                        check_and_update_obj(obj)
                        obj.save()
        add.alters_data = True

        def remove(self, *objs, bulk=True):
            if not objs:
                return
            self._clear(self.filter(pk__in=[o.pk for o in objs]), bulk)
        remove.alters_data = True

        def clear(self, *, bulk=True):
            self._clear(self, bulk)
        clear.alters_data = True

        def _clear(self, queryset, bulk):
            self._remove_prefetched_objects()
            db = router.db_for_write(self.model, instance=self.instance)
            queryset = queryset.using(db)
            if bulk:
                # `QuerySet.delete()` creates its own atomic block which
                # contains the `pre_delete` and `post_delete` signal handlers.
                queryset.delete()
            else:
                with transaction.atomic(using=db, savepoint=False):
                    for obj in queryset:
                        obj.delete()
        _clear.alters_data = True

        def set(self, objs, *, bulk=True, clear=False):
            # Force evaluation of `objs` in case it's a queryset whose value
            # could be affected by `manager.clear()`. Refs #19816.
            objs = tuple(objs)

            db = router.db_for_write(self.model, instance=self.instance)
            with transaction.atomic(using=db, savepoint=False):
                if clear:
                    self.clear()
                    self.add(*objs, bulk=bulk)
                else:
                    old_objs = set(self.using(db).all())
                    new_objs = []
                    for obj in objs:
                        if obj in old_objs:
                            old_objs.remove(obj)
                        else:
                            new_objs.append(obj)

                    self.remove(*old_objs)
                    self.add(*new_objs, bulk=bulk)
        set.alters_data = True

        def create(self, **kwargs):
            self._remove_prefetched_objects()
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super().using(db).create(**kwargs)
        create.alters_data = True

        def get_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super().using(db).get_or_create(**kwargs)
        get_or_create.alters_data = True

        def update_or_create(self, **kwargs):
            kwargs[self.content_type_field_name] = self.content_type
            kwargs[self.object_id_field_name] = self.pk_val
            db = router.db_for_write(self.model, instance=self.instance)
            return super().using(db).update_or_create(**kwargs)
        update_or_create.alters_data = True

    return GenericRelatedObjectManager
[ "def", "create_generic_related_manager", "(", "superclass", ",", "rel", ")", ":", "class", "GenericRelatedObjectManager", "(", "superclass", ")", ":", "def", "__init__", "(", "self", ",", "instance", "=", "None", ")", ":", "super", "(", ")", ".", "__init__", "(", ")", "self", ".", "instance", "=", "instance", "self", ".", "model", "=", "rel", ".", "model", "self", ".", "get_content_type", "=", "functools", ".", "partial", "(", "ContentType", ".", "objects", ".", "db_manager", "(", "instance", ".", "_state", ".", "db", ")", ".", "get_for_model", ",", "for_concrete_model", "=", "rel", ".", "field", ".", "for_concrete_model", ",", ")", "self", ".", "content_type", "=", "self", ".", "get_content_type", "(", "instance", ")", "self", ".", "content_type_field_name", "=", "rel", ".", "field", ".", "content_type_field_name", "self", ".", "object_id_field_name", "=", "rel", ".", "field", ".", "object_id_field_name", "self", ".", "prefetch_cache_name", "=", "rel", ".", "field", ".", "attname", "self", ".", "pk_val", "=", "instance", ".", "pk", "self", ".", "core_filters", "=", "{", "'%s__pk'", "%", "self", ".", "content_type_field_name", ":", "self", ".", "content_type", ".", "id", ",", "self", ".", "object_id_field_name", ":", "self", ".", "pk_val", ",", "}", "def", "__call__", "(", "self", ",", "*", ",", "manager", ")", ":", "manager", "=", "getattr", "(", "self", ".", "model", ",", "manager", ")", "manager_class", "=", "create_generic_related_manager", "(", "manager", ".", "__class__", ",", "rel", ")", "return", "manager_class", "(", "instance", "=", "self", ".", "instance", ")", "do_not_call_in_templates", "=", "True", "def", "__str__", "(", "self", ")", ":", "return", "repr", "(", "self", ")", "def", "_apply_rel_filters", "(", "self", ",", "queryset", ")", ":", "\"\"\"\n Filter the queryset for the instance this manager is bound to.\n \"\"\"", "db", "=", "self", ".", "_db", "or", "router", ".", "db_for_read", "(", "self", ".", "model", ",", "instance", "=", "self", ".", "instance", ")", "return", "queryset", ".", "using", "(", "db", ")", ".", "filter", "(", "*", "*", "self", ".", "core_filters", ")", "def", "_remove_prefetched_objects", "(", "self", ")", ":", "try", ":", "self", ".", "instance", ".", "_prefetched_objects_cache", ".", "pop", "(", "self", ".", "prefetch_cache_name", ")", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "pass", "# nothing to clear from cache", "def", "get_queryset", "(", "self", ")", ":", "try", ":", "return", "self", ".", "instance", ".", "_prefetched_objects_cache", "[", "self", ".", "prefetch_cache_name", "]", "except", "(", "AttributeError", ",", "KeyError", ")", ":", "queryset", "=", "super", "(", ")", ".", "get_queryset", "(", ")", "return", "self", ".", "_apply_rel_filters", "(", "queryset", ")", "def", "get_prefetch_queryset", "(", "self", ",", "instances", ",", "queryset", "=", "None", ")", ":", "if", "queryset", "is", "None", ":", "queryset", "=", "super", "(", ")", ".", "get_queryset", "(", ")", "queryset", ".", "_add_hints", "(", "instance", "=", "instances", "[", "0", "]", ")", "queryset", "=", "queryset", ".", "using", "(", "queryset", ".", "_db", "or", "self", ".", "_db", ")", "# Group instances by content types.", "content_type_queries", "=", "(", "models", ".", "Q", "(", "*", "*", "{", "'%s__pk'", "%", "self", ".", "content_type_field_name", ":", "content_type_id", ",", "'%s__in'", "%", "self", ".", "object_id_field_name", ":", "{", "obj", ".", "pk", "for", "obj", "in", "objs", "}", "}", ")", "for", "content_type_id", ",", "objs", 
"in", "itertools", ".", "groupby", "(", "sorted", "(", "instances", ",", "key", "=", "lambda", "obj", ":", "self", ".", "get_content_type", "(", "obj", ")", ".", "pk", ")", ",", "lambda", "obj", ":", "self", ".", "get_content_type", "(", "obj", ")", ".", "pk", ",", ")", ")", "query", "=", "functools", ".", "reduce", "(", "operator", ".", "or_", ",", "content_type_queries", ")", "# We (possibly) need to convert object IDs to the type of the", "# instances' PK in order to match up instances:", "object_id_converter", "=", "instances", "[", "0", "]", ".", "_meta", ".", "pk", ".", "to_python", "content_type_id_field_name", "=", "'%s_id'", "%", "self", ".", "content_type_field_name", "return", "(", "queryset", ".", "filter", "(", "query", ")", ",", "lambda", "relobj", ":", "(", "object_id_converter", "(", "getattr", "(", "relobj", ",", "self", ".", "object_id_field_name", ")", ")", ",", "getattr", "(", "relobj", ",", "content_type_id_field_name", ")", ",", ")", ",", "lambda", "obj", ":", "(", "obj", ".", "pk", ",", "self", ".", "get_content_type", "(", "obj", ")", ".", "pk", ")", ",", "False", ",", "self", ".", "prefetch_cache_name", ",", "False", ",", ")", "def", "add", "(", "self", ",", "*", "objs", ",", "bulk", "=", "True", ")", ":", "self", ".", "_remove_prefetched_objects", "(", ")", "db", "=", "router", ".", "db_for_write", "(", "self", ".", "model", ",", "instance", "=", "self", ".", "instance", ")", "def", "check_and_update_obj", "(", "obj", ")", ":", "if", "not", "isinstance", "(", "obj", ",", "self", ".", "model", ")", ":", "raise", "TypeError", "(", "\"'%s' instance expected, got %r\"", "%", "(", "self", ".", "model", ".", "_meta", ".", "object_name", ",", "obj", ")", ")", "setattr", "(", "obj", ",", "self", ".", "content_type_field_name", ",", "self", ".", "content_type", ")", "setattr", "(", "obj", ",", "self", ".", "object_id_field_name", ",", "self", ".", "pk_val", ")", "if", "bulk", ":", "pks", "=", "[", "]", "for", "obj", "in", "objs", ":", "if", "obj", ".", "_state", ".", "adding", "or", "obj", ".", "_state", ".", "db", "!=", "db", ":", "raise", "ValueError", "(", "\"%r instance isn't saved. 
Use bulk=False or save \"", "\"the object first.\"", "%", "obj", ")", "check_and_update_obj", "(", "obj", ")", "pks", ".", "append", "(", "obj", ".", "pk", ")", "self", ".", "model", ".", "_base_manager", ".", "using", "(", "db", ")", ".", "filter", "(", "pk__in", "=", "pks", ")", ".", "update", "(", "*", "*", "{", "self", ".", "content_type_field_name", ":", "self", ".", "content_type", ",", "self", ".", "object_id_field_name", ":", "self", ".", "pk_val", ",", "}", ")", "else", ":", "with", "transaction", ".", "atomic", "(", "using", "=", "db", ",", "savepoint", "=", "False", ")", ":", "for", "obj", "in", "objs", ":", "check_and_update_obj", "(", "obj", ")", "obj", ".", "save", "(", ")", "add", ".", "alters_data", "=", "True", "def", "remove", "(", "self", ",", "*", "objs", ",", "bulk", "=", "True", ")", ":", "if", "not", "objs", ":", "return", "self", ".", "_clear", "(", "self", ".", "filter", "(", "pk__in", "=", "[", "o", ".", "pk", "for", "o", "in", "objs", "]", ")", ",", "bulk", ")", "remove", ".", "alters_data", "=", "True", "def", "clear", "(", "self", ",", "*", ",", "bulk", "=", "True", ")", ":", "self", ".", "_clear", "(", "self", ",", "bulk", ")", "clear", ".", "alters_data", "=", "True", "def", "_clear", "(", "self", ",", "queryset", ",", "bulk", ")", ":", "self", ".", "_remove_prefetched_objects", "(", ")", "db", "=", "router", ".", "db_for_write", "(", "self", ".", "model", ",", "instance", "=", "self", ".", "instance", ")", "queryset", "=", "queryset", ".", "using", "(", "db", ")", "if", "bulk", ":", "# `QuerySet.delete()` creates its own atomic block which", "# contains the `pre_delete` and `post_delete` signal handlers.", "queryset", ".", "delete", "(", ")", "else", ":", "with", "transaction", ".", "atomic", "(", "using", "=", "db", ",", "savepoint", "=", "False", ")", ":", "for", "obj", "in", "queryset", ":", "obj", ".", "delete", "(", ")", "_clear", ".", "alters_data", "=", "True", "def", "set", "(", "self", ",", "objs", ",", "*", ",", "bulk", "=", "True", ",", "clear", "=", "False", ")", ":", "# Force evaluation of `objs` in case it's a queryset whose value", "# could be affected by `manager.clear()`. 
Refs #19816.", "objs", "=", "tuple", "(", "objs", ")", "db", "=", "router", ".", "db_for_write", "(", "self", ".", "model", ",", "instance", "=", "self", ".", "instance", ")", "with", "transaction", ".", "atomic", "(", "using", "=", "db", ",", "savepoint", "=", "False", ")", ":", "if", "clear", ":", "self", ".", "clear", "(", ")", "self", ".", "add", "(", "*", "objs", ",", "bulk", "=", "bulk", ")", "else", ":", "old_objs", "=", "set", "(", "self", ".", "using", "(", "db", ")", ".", "all", "(", ")", ")", "new_objs", "=", "[", "]", "for", "obj", "in", "objs", ":", "if", "obj", "in", "old_objs", ":", "old_objs", ".", "remove", "(", "obj", ")", "else", ":", "new_objs", ".", "append", "(", "obj", ")", "self", ".", "remove", "(", "*", "old_objs", ")", "self", ".", "add", "(", "*", "new_objs", ",", "bulk", "=", "bulk", ")", "set", ".", "alters_data", "=", "True", "def", "create", "(", "self", ",", "*", "*", "kwargs", ")", ":", "self", ".", "_remove_prefetched_objects", "(", ")", "kwargs", "[", "self", ".", "content_type_field_name", "]", "=", "self", ".", "content_type", "kwargs", "[", "self", ".", "object_id_field_name", "]", "=", "self", ".", "pk_val", "db", "=", "router", ".", "db_for_write", "(", "self", ".", "model", ",", "instance", "=", "self", ".", "instance", ")", "return", "super", "(", ")", ".", "using", "(", "db", ")", ".", "create", "(", "*", "*", "kwargs", ")", "create", ".", "alters_data", "=", "True", "def", "get_or_create", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "self", ".", "content_type_field_name", "]", "=", "self", ".", "content_type", "kwargs", "[", "self", ".", "object_id_field_name", "]", "=", "self", ".", "pk_val", "db", "=", "router", ".", "db_for_write", "(", "self", ".", "model", ",", "instance", "=", "self", ".", "instance", ")", "return", "super", "(", ")", ".", "using", "(", "db", ")", ".", "get_or_create", "(", "*", "*", "kwargs", ")", "get_or_create", ".", "alters_data", "=", "True", "def", "update_or_create", "(", "self", ",", "*", "*", "kwargs", ")", ":", "kwargs", "[", "self", ".", "content_type_field_name", "]", "=", "self", ".", "content_type", "kwargs", "[", "self", ".", "object_id_field_name", "]", "=", "self", ".", "pk_val", "db", "=", "router", ".", "db_for_write", "(", "self", ".", "model", ",", "instance", "=", "self", ".", "instance", ")", "return", "super", "(", ")", ".", "using", "(", "db", ")", ".", "update_or_create", "(", "*", "*", "kwargs", ")", "update_or_create", ".", "alters_data", "=", "True", "return", "GenericRelatedObjectManager" ]
[ 507, 0 ]
[ 702, 38 ]
python
en
['en', 'error', 'th']
False
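A sketch of the manager this factory produces in use, following the tagging pattern from the Django documentation (the model names are illustrative):

from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.contenttypes.models import ContentType
from django.db import models

class TaggedItem(models.Model):
    tag = models.SlugField()
    content_type = models.ForeignKey(ContentType, on_delete=models.CASCADE)
    object_id = models.PositiveIntegerField()
    content_object = GenericForeignKey('content_type', 'object_id')

class Bookmark(models.Model):
    url = models.URLField()
    tags = GenericRelation(TaggedItem)

# bookmark.tags is a GenericRelatedObjectManager; create()/add()/set()/clear()
# fill in content_type and object_id automatically, e.g.:
# bookmark.tags.create(tag='django')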
GenericForeignKey.get_filter_kwargs_for_object
(self, obj)
See corresponding method on Field
See corresponding method on Field
def get_filter_kwargs_for_object(self, obj):
        """See corresponding method on Field"""
        return {
            self.fk_field: getattr(obj, self.fk_field),
            self.ct_field: getattr(obj, self.ct_field),
        }
[ "def", "get_filter_kwargs_for_object", "(", "self", ",", "obj", ")", ":", "return", "{", "self", ".", "fk_field", ":", "getattr", "(", "obj", ",", "self", ".", "fk_field", ")", ",", "self", ".", "ct_field", ":", "getattr", "(", "obj", ",", "self", ".", "ct_field", ")", ",", "}" ]
[ 56, 4 ]
[ 61, 9 ]
python
en
['en', 'af', 'en']
True
GenericForeignKey.get_forward_related_filter
(self, obj)
See corresponding method on RelatedField
See corresponding method on RelatedField
def get_forward_related_filter(self, obj):
        """See corresponding method on RelatedField"""
        return {
            self.fk_field: obj.pk,
            self.ct_field: ContentType.objects.get_for_model(obj).pk,
        }
[ "def", "get_forward_related_filter", "(", "self", ",", "obj", ")", ":", "return", "{", "self", ".", "fk_field", ":", "obj", ".", "pk", ",", "self", ".", "ct_field", ":", "ContentType", ".", "objects", ".", "get_for_model", "(", "obj", ")", ".", "pk", ",", "}" ]
[ 63, 4 ]
[ 68, 9 ]
python
en
['en', 'sr', 'en']
True
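For a GenericForeignKey declared as GenericForeignKey('content_type', 'object_id'), the two helpers above produce filter kwargs of this shape (gfk, tagged_item, and bookmark are illustrative names, not part of the source):

gfk.get_filter_kwargs_for_object(tagged_item)
# {'object_id': tagged_item.object_id, 'content_type': tagged_item.content_type}

gfk.get_forward_related_filter(bookmark)
# {'object_id': bookmark.pk, 'content_type': <ContentType pk for Bookmark>}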
GenericForeignKey._check_content_type_field
(self)
Check if field named `field_name` in model `model` exists and is a valid content_type field (is a ForeignKey to ContentType).
Check if field named `field_name` in model `model` exists and is a valid content_type field (is a ForeignKey to ContentType).
def _check_content_type_field(self):
        """
        Check if field named `field_name` in model `model` exists and is a
        valid content_type field (is a ForeignKey to ContentType).
        """
        try:
            field = self.model._meta.get_field(self.ct_field)
        except FieldDoesNotExist:
            return [
                checks.Error(
                    "The GenericForeignKey content type references the "
                    "nonexistent field '%s.%s'." % (
                        self.model._meta.object_name, self.ct_field
                    ),
                    obj=self,
                    id='contenttypes.E002',
                )
            ]
        else:
            if not isinstance(field, models.ForeignKey):
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey." % (
                            self.model._meta.object_name, self.ct_field
                        ),
                        hint=(
                            "GenericForeignKeys must use a ForeignKey to "
                            "'contenttypes.ContentType' as the 'content_type' field."
                        ),
                        obj=self,
                        id='contenttypes.E003',
                    )
                ]
            elif field.remote_field.model != ContentType:
                return [
                    checks.Error(
                        "'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." % (
                            self.model._meta.object_name, self.ct_field
                        ),
                        hint=(
                            "GenericForeignKeys must use a ForeignKey to "
                            "'contenttypes.ContentType' as the 'content_type' field."
                        ),
                        obj=self,
                        id='contenttypes.E004',
                    )
                ]
            else:
                return []
[ "def", "_check_content_type_field", "(", "self", ")", ":", "try", ":", "field", "=", "self", ".", "model", ".", "_meta", ".", "get_field", "(", "self", ".", "ct_field", ")", "except", "FieldDoesNotExist", ":", "return", "[", "checks", ".", "Error", "(", "\"The GenericForeignKey content type references the \"", "\"nonexistent field '%s.%s'.\"", "%", "(", "self", ".", "model", ".", "_meta", ".", "object_name", ",", "self", ".", "ct_field", ")", ",", "obj", "=", "self", ",", "id", "=", "'contenttypes.E002'", ",", ")", "]", "else", ":", "if", "not", "isinstance", "(", "field", ",", "models", ".", "ForeignKey", ")", ":", "return", "[", "checks", ".", "Error", "(", "\"'%s.%s' is not a ForeignKey.\"", "%", "(", "self", ".", "model", ".", "_meta", ".", "object_name", ",", "self", ".", "ct_field", ")", ",", "hint", "=", "(", "\"GenericForeignKeys must use a ForeignKey to \"", "\"'contenttypes.ContentType' as the 'content_type' field.\"", ")", ",", "obj", "=", "self", ",", "id", "=", "'contenttypes.E003'", ",", ")", "]", "elif", "field", ".", "remote_field", ".", "model", "!=", "ContentType", ":", "return", "[", "checks", ".", "Error", "(", "\"'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'.\"", "%", "(", "self", ".", "model", ".", "_meta", ".", "object_name", ",", "self", ".", "ct_field", ")", ",", "hint", "=", "(", "\"GenericForeignKeys must use a ForeignKey to \"", "\"'contenttypes.ContentType' as the 'content_type' field.\"", ")", ",", "obj", "=", "self", ",", "id", "=", "'contenttypes.E004'", ",", ")", "]", "else", ":", "return", "[", "]" ]
[ 108, 4 ]
[ 156, 25 ]
python
en
['en', 'error', 'th']
False
GenericRelation._is_matching_generic_foreign_key
(self, field)
Return True if field is a GenericForeignKey whose content type and object id fields correspond to the equivalent attributes on this GenericRelation.
Return True if field is a GenericForeignKey whose content type and object id fields correspond to the equivalent attributes on this GenericRelation.
def _is_matching_generic_foreign_key(self, field):
        """
        Return True if field is a GenericForeignKey whose content type and
        object id fields correspond to the equivalent attributes on this
        GenericRelation.
        """
        return (
            isinstance(field, GenericForeignKey) and
            field.ct_field == self.content_type_field_name and
            field.fk_field == self.object_id_field_name
        )
[ "def", "_is_matching_generic_foreign_key", "(", "self", ",", "field", ")", ":", "return", "(", "isinstance", "(", "field", ",", "GenericForeignKey", ")", "and", "field", ".", "ct_field", "==", "self", ".", "content_type_field_name", "and", "field", ".", "fk_field", "==", "self", ".", "object_id_field_name", ")" ]
[ 322, 4 ]
[ 332, 9 ]
python
en
['en', 'error', 'th']
False
GenericRelation._get_path_info_with_parent
(self, filtered_relation)
Return the path that joins the current model through any parent models. The idea is that if you have a GFK defined on a parent model then we need to join the parent model first, then the child model.
Return the path that joins the current model through any parent models. The idea is that if you have a GFK defined on a parent model then we need to join the parent model first, then the child model.
def _get_path_info_with_parent(self, filtered_relation):
        """
        Return the path that joins the current model through any parent models.
        The idea is that if you have a GFK defined on a parent model then we
        need to join the parent model first, then the child model.
        """
        # With an inheritance chain ChildTag -> Tag and Tag defines the
        # GenericForeignKey, and a TaggedItem model has a GenericRelation to
        # ChildTag, then we need to generate a join from TaggedItem to Tag
        # (as Tag.object_id == TaggedItem.pk), and another join from Tag to
        # ChildTag (as that is where the relation is to). Do this by first
        # generating a join to the parent model, then generating joins to the
        # child models.
        path = []
        opts = self.remote_field.model._meta.concrete_model._meta
        parent_opts = opts.get_field(self.object_id_field_name).model._meta
        target = parent_opts.pk
        path.append(PathInfo(
            from_opts=self.model._meta,
            to_opts=parent_opts,
            target_fields=(target,),
            join_field=self.remote_field,
            m2m=True,
            direct=False,
            filtered_relation=filtered_relation,
        ))
        # Collect joins needed for the parent -> child chain. This is easiest
        # to do if we collect joins for the child -> parent chain and then
        # reverse the direction (call to reverse() and use of
        # field.remote_field.get_path_info()).
        parent_field_chain = []
        while parent_opts != opts:
            field = opts.get_ancestor_link(parent_opts.model)
            parent_field_chain.append(field)
            opts = field.remote_field.model._meta
        parent_field_chain.reverse()
        for field in parent_field_chain:
            path.extend(field.remote_field.get_path_info())
        return path
[ "def", "_get_path_info_with_parent", "(", "self", ",", "filtered_relation", ")", ":", "# With an inheritance chain ChildTag -> Tag and Tag defines the", "# GenericForeignKey, and a TaggedItem model has a GenericRelation to", "# ChildTag, then we need to generate a join from TaggedItem to Tag", "# (as Tag.object_id == TaggedItem.pk), and another join from Tag to", "# ChildTag (as that is where the relation is to). Do this by first", "# generating a join to the parent model, then generating joins to the", "# child models.", "path", "=", "[", "]", "opts", "=", "self", ".", "remote_field", ".", "model", ".", "_meta", ".", "concrete_model", ".", "_meta", "parent_opts", "=", "opts", ".", "get_field", "(", "self", ".", "object_id_field_name", ")", ".", "model", ".", "_meta", "target", "=", "parent_opts", ".", "pk", "path", ".", "append", "(", "PathInfo", "(", "from_opts", "=", "self", ".", "model", ".", "_meta", ",", "to_opts", "=", "parent_opts", ",", "target_fields", "=", "(", "target", ",", ")", ",", "join_field", "=", "self", ".", "remote_field", ",", "m2m", "=", "True", ",", "direct", "=", "False", ",", "filtered_relation", "=", "filtered_relation", ",", ")", ")", "# Collect joins needed for the parent -> child chain. This is easiest", "# to do if we collect joins for the child -> parent chain and then", "# reverse the direction (call to reverse() and use of", "# field.remote_field.get_path_info()).", "parent_field_chain", "=", "[", "]", "while", "parent_opts", "!=", "opts", ":", "field", "=", "opts", ".", "get_ancestor_link", "(", "parent_opts", ".", "model", ")", "parent_field_chain", ".", "append", "(", "field", ")", "opts", "=", "field", ".", "remote_field", ".", "model", ".", "_meta", "parent_field_chain", ".", "reverse", "(", ")", "for", "field", "in", "parent_field_chain", ":", "path", ".", "extend", "(", "field", ".", "remote_field", ".", "get_path_info", "(", ")", ")", "return", "path" ]
[ 357, 4 ]
[ 395, 19 ]
python
en
['en', 'error', 'th']
False
GenericRelation.get_content_type
(self)
Return the content type associated with this field's model.
Return the content type associated with this field's model.
def get_content_type(self):
        """
        Return the content type associated with this field's model.
        """
        return ContentType.objects.get_for_model(self.model,
                                                 for_concrete_model=self.for_concrete_model)
[ "def", "get_content_type", "(", "self", ")", ":", "return", "ContentType", ".", "objects", ".", "get_for_model", "(", "self", ".", "model", ",", "for_concrete_model", "=", "self", ".", "for_concrete_model", ")" ]
[ 460, 4 ]
[ 465, 92 ]
python
en
['en', 'error', 'th']
False
GenericRelation.bulk_related_objects
(self, objs, using=DEFAULT_DB_ALIAS)
Return all objects related to ``objs`` via this ``GenericRelation``.
Return all objects related to ``objs`` via this ``GenericRelation``.
def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS):
        """
        Return all objects related to ``objs`` via this ``GenericRelation``.
        """
        return self.remote_field.model._base_manager.db_manager(using).filter(**{
            "%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(
                self.model, for_concrete_model=self.for_concrete_model).pk,
            "%s__in" % self.object_id_field_name: [obj.pk for obj in objs]
        })
[ "def", "bulk_related_objects", "(", "self", ",", "objs", ",", "using", "=", "DEFAULT_DB_ALIAS", ")", ":", "return", "self", ".", "remote_field", ".", "model", ".", "_base_manager", ".", "db_manager", "(", "using", ")", ".", "filter", "(", "*", "*", "{", "\"%s__pk\"", "%", "self", ".", "content_type_field_name", ":", "ContentType", ".", "objects", ".", "db_manager", "(", "using", ")", ".", "get_for_model", "(", "self", ".", "model", ",", "for_concrete_model", "=", "self", ".", "for_concrete_model", ")", ".", "pk", ",", "\"%s__in\"", "%", "self", ".", "object_id_field_name", ":", "[", "obj", ".", "pk", "for", "obj", "in", "objs", "]", "}", ")" ]
[ 475, 4 ]
[ 483, 10 ]
python
en
['en', 'error', 'th']
False
Storage.__init__
(self, service_name, user_name)
Constructor. Args: service_name: string, The name of the service under which the credentials are stored. user_name: string, The name of the user to store credentials for.
Constructor.
def __init__(self, service_name, user_name):
        """Constructor.

        Args:
            service_name: string, The name of the service under which the
                          credentials are stored.
            user_name: string, The name of the user to store credentials for.
        """
        super(Storage, self).__init__(lock=threading.Lock())
        self._service_name = service_name
        self._user_name = user_name
[ "def", "__init__", "(", "self", ",", "service_name", ",", "user_name", ")", ":", "super", "(", "Storage", ",", "self", ")", ".", "__init__", "(", "lock", "=", "threading", ".", "Lock", "(", ")", ")", "self", ".", "_service_name", "=", "service_name", "self", ".", "_user_name", "=", "user_name" ]
[ 49, 4 ]
[ 59, 35 ]
python
en
['en', 'en', 'en']
False
Storage.locked_get
(self)
Retrieve Credential from file. Returns: oauth2client.client.Credentials
Retrieve Credential from file.
def locked_get(self):
        """Retrieve Credential from file.

        Returns:
            oauth2client.client.Credentials
        """
        credentials = None
        content = keyring.get_password(self._service_name, self._user_name)

        if content is not None:
            try:
                credentials = client.Credentials.new_from_json(content)
                credentials.set_store(self)
            except ValueError:
                pass
        return credentials
[ "def", "locked_get", "(", "self", ")", ":", "credentials", "=", "None", "content", "=", "keyring", ".", "get_password", "(", "self", ".", "_service_name", ",", "self", ".", "_user_name", ")", "if", "content", "is", "not", "None", ":", "try", ":", "credentials", "=", "client", ".", "Credentials", ".", "new_from_json", "(", "content", ")", "credentials", ".", "set_store", "(", "self", ")", "except", "ValueError", ":", "pass", "return", "credentials" ]
[ 61, 4 ]
[ 77, 26 ]
python
en
['en', 'en', 'en']
True
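A hedged usage sketch for the keyring-backed store above, assuming oauth2client and keyring are installed and a system keyring backend is available (the base Storage class supplies the locking get()/put() wrappers around these locked_* methods; the service and user names are illustrative):

from oauth2client.contrib.keyring_storage import Storage

store = Storage('my_app', 'alice@example.com')
credentials = store.get()  # None on first run, or when the stored JSON is invalid
if credentials is not None and not credentials.invalid:
    pass  # use credentials
# store.put(credentials) serializes them as JSON under (service_name, user_name)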
Storage.locked_put
(self, credentials)
Write Credentials to file. Args: credentials: Credentials, the credentials to store.
Write Credentials to file.
def locked_put(self, credentials):
        """Write Credentials to file.

        Args:
            credentials: Credentials, the credentials to store.
        """
        keyring.set_password(self._service_name, self._user_name,
                             credentials.to_json())
[ "def", "locked_put", "(", "self", ",", "credentials", ")", ":", "keyring", ".", "set_password", "(", "self", ".", "_service_name", ",", "self", ".", "_user_name", ",", "credentials", ".", "to_json", "(", ")", ")" ]
[ 79, 4 ]
[ 86, 51 ]
python
en
['en', 'en', 'en']
True
Storage.locked_delete
(self)
Delete Credentials. Overwrites the stored credentials for (service_name, user_name) with an empty string.
Delete Credentials.
def locked_delete(self):
        """Delete Credentials.

        Overwrites the stored credentials for (service_name, user_name) with
        an empty string.
        """
        keyring.set_password(self._service_name, self._user_name, '')
[ "def", "locked_delete", "(", "self", ")", ":", "keyring", ".", "set_password", "(", "self", ".", "_service_name", ",", "self", ".", "_user_name", ",", "''", ")" ]
[ 88, 4 ]
[ 94, 69 ]
python
de
['de', 'it', 'en']
False
get_version
(version=None)
Return a PEP 440-compliant version number from VERSION.
Return a PEP 440-compliant version number from VERSION.
def get_version(version=None):
    """Return a PEP 440-compliant version number from VERSION."""
    version = get_complete_version(version)

    # Now build the two parts of the version number:
    # main = X.Y[.Z]
    # sub = .devN - for pre-alpha releases
    #     | {a|b|rc}N - for alpha, beta, and rc releases

    main = get_main_version(version)

    sub = ''
    if version[3] == 'alpha' and version[4] == 0:
        git_changeset = get_git_changeset()
        if git_changeset:
            sub = '.dev%s' % git_changeset

    elif version[3] != 'final':
        mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'rc'}
        sub = mapping[version[3]] + str(version[4])

    return main + sub
[ "def", "get_version", "(", "version", "=", "None", ")", ":", "version", "=", "get_complete_version", "(", "version", ")", "# Now build the two parts of the version number:", "# main = X.Y[.Z]", "# sub = .devN - for pre-alpha releases", "# | {a|b|rc}N - for alpha, beta, and rc releases", "main", "=", "get_main_version", "(", "version", ")", "sub", "=", "''", "if", "version", "[", "3", "]", "==", "'alpha'", "and", "version", "[", "4", "]", "==", "0", ":", "git_changeset", "=", "get_git_changeset", "(", ")", "if", "git_changeset", ":", "sub", "=", "'.dev%s'", "%", "git_changeset", "elif", "version", "[", "3", "]", "!=", "'final'", ":", "mapping", "=", "{", "'alpha'", ":", "'a'", ",", "'beta'", ":", "'b'", ",", "'rc'", ":", "'rc'", "}", "sub", "=", "mapping", "[", "version", "[", "3", "]", "]", "+", "str", "(", "version", "[", "4", "]", ")", "return", "main", "+", "sub" ]
[ 17, 0 ]
[ 38, 21 ]
python
en
['en', 'en', 'en']
True
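Concrete inputs and outputs for the versioning scheme above (the .dev suffix uses whatever timestamp get_git_changeset() returns for the checkout):

get_version((3, 0, 0, 'final', 0))   # '3.0'
get_version((3, 0, 1, 'final', 0))   # '3.0.1'
get_version((3, 0, 0, 'rc', 2))      # '3.0rc2'
get_version((3, 1, 0, 'alpha', 0))   # e.g. '3.1.dev20190926134500'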
get_main_version
(version=None)
Return main version (X.Y[.Z]) from VERSION.
Return main version (X.Y[.Z]) from VERSION.
def get_main_version(version=None):
    """Return main version (X.Y[.Z]) from VERSION."""
    version = get_complete_version(version)
    parts = 2 if version[2] == 0 else 3
    return '.'.join(str(x) for x in version[:parts])
[ "def", "get_main_version", "(", "version", "=", "None", ")", ":", "version", "=", "get_complete_version", "(", "version", ")", "parts", "=", "2", "if", "version", "[", "2", "]", "==", "0", "else", "3", "return", "'.'", ".", "join", "(", "str", "(", "x", ")", "for", "x", "in", "version", "[", ":", "parts", "]", ")" ]
[ 41, 0 ]
[ 45, 52 ]
python
en
['en', 'en', 'en']
True
get_complete_version
(version=None)
Return a tuple of the django version. If version argument is non-empty, check for correctness of the tuple provided.
Return a tuple of the django version. If version argument is non-empty, check for correctness of the tuple provided.
def get_complete_version(version=None):
    """
    Return a tuple of the django version. If version argument is non-empty,
    check for correctness of the tuple provided.
    """
    if version is None:
        from django import VERSION as version
    else:
        assert len(version) == 5
        assert version[3] in ('alpha', 'beta', 'rc', 'final')

    return version
[ "def", "get_complete_version", "(", "version", "=", "None", ")", ":", "if", "version", "is", "None", ":", "from", "django", "import", "VERSION", "as", "version", "else", ":", "assert", "len", "(", "version", ")", "==", "5", "assert", "version", "[", "3", "]", "in", "(", "'alpha'", ",", "'beta'", ",", "'rc'", ",", "'final'", ")", "return", "version" ]
[ 48, 0 ]
[ 59, 18 ]
python
en
['en', 'error', 'th']
False
get_git_changeset
()
Return a numeric identifier of the latest git changeset. The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format. This value isn't guaranteed to be unique, but collisions are very unlikely, so it's sufficient for generating the development version numbers.
Return a numeric identifier of the latest git changeset.
def get_git_changeset():
    """Return a numeric identifier of the latest git changeset.

    The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format.
    This value isn't guaranteed to be unique, but collisions are very unlikely,
    so it's sufficient for generating the development version numbers.
    """
    repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    # Pass the argument list directly, without shell=True: with shell=True on
    # POSIX, the list after the first element is not forwarded to git.
    git_log = subprocess.run(
        ['git', 'log', '--pretty=format:%ct', '--quiet', '-1', 'HEAD'],
        stdout=subprocess.PIPE, stderr=subprocess.PIPE,
        cwd=repo_dir, universal_newlines=True,
    )
    timestamp = git_log.stdout
    try:
        timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
    except ValueError:
        return None
    return timestamp.strftime('%Y%m%d%H%M%S')
[ "def", "get_git_changeset", "(", ")", ":", "repo_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "__file__", ")", ")", ")", "git_log", "=", "subprocess", ".", "run", "(", "[", "'git'", ",", "'log'", ",", "'--pretty=format:%ct'", ",", "'--quiet'", ",", "'-1'", ",", "'HEAD'", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "shell", "=", "True", ",", "cwd", "=", "repo_dir", ",", "universal_newlines", "=", "True", ",", ")", "timestamp", "=", "git_log", ".", "stdout", "try", ":", "timestamp", "=", "datetime", ".", "datetime", ".", "utcfromtimestamp", "(", "int", "(", "timestamp", ")", ")", "except", "ValueError", ":", "return", "None", "return", "timestamp", ".", "strftime", "(", "'%Y%m%d%H%M%S'", ")" ]
[ 71, 0 ]
[ 89, 45 ]
python
en
['en', 'en', 'en']
True
get_version_tuple
(version)
Return a tuple of version numbers (e.g. (1, 2, 3)) from the version string (e.g. '1.2.3').
Return a tuple of version numbers (e.g. (1, 2, 3)) from the version string (e.g. '1.2.3').
def get_version_tuple(version):
    """
    Return a tuple of version numbers (e.g. (1, 2, 3)) from the version
    string (e.g. '1.2.3').
    """
    loose_version = LooseVersion(version)
    version_numbers = []
    for item in loose_version.version:
        if not isinstance(item, int):
            break
        version_numbers.append(item)
    return tuple(version_numbers)
[ "def", "get_version_tuple", "(", "version", ")", ":", "loose_version", "=", "LooseVersion", "(", "version", ")", "version_numbers", "=", "[", "]", "for", "item", "in", "loose_version", ".", "version", ":", "if", "not", "isinstance", "(", "item", ",", "int", ")", ":", "break", "version_numbers", ".", "append", "(", "item", ")", "return", "tuple", "(", "version_numbers", ")" ]
[ 92, 0 ]
[ 103, 33 ]
python
en
['en', 'error', 'th']
False
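A self-contained sketch of the same truncate-at-first-non-integer behavior, assuming distutils' LooseVersion is available (it is deprecated in recent Pythons):

from distutils.version import LooseVersion

def version_tuple(version):
    numbers = []
    for item in LooseVersion(version).version:
        if not isinstance(item, int):
            break
        numbers.append(item)
    return tuple(numbers)

print(version_tuple("1.2.3"))     # (1, 2, 3)
print(version_tuple("2.0.4rc1"))  # (2, 0, 4) -- stops at the first non-integer part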
Deserializer
(stream_or_string, **options)
Deserialize a stream or string of JSON data.
Deserialize a stream or string of JSON data.
def Deserializer(stream_or_string, **options):
    """Deserialize a stream or string of JSON data."""
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode()
    if isinstance(stream_or_string, (bytes, str)):
        stream_or_string = stream_or_string.split("\n")

    for line in stream_or_string:
        if not line.strip():
            continue
        try:
            yield from PythonDeserializer([json.loads(line)], **options)
        except (GeneratorExit, DeserializationError):
            raise
        except Exception as exc:
            raise DeserializationError() from exc
[ "def", "Deserializer", "(", "stream_or_string", ",", "*", "*", "options", ")", ":", "if", "isinstance", "(", "stream_or_string", ",", "bytes", ")", ":", "stream_or_string", "=", "stream_or_string", ".", "decode", "(", ")", "if", "isinstance", "(", "stream_or_string", ",", "(", "bytes", ",", "str", ")", ")", ":", "stream_or_string", "=", "stream_or_string", ".", "split", "(", "\"\\n\"", ")", "for", "line", "in", "stream_or_string", ":", "if", "not", "line", ".", "strip", "(", ")", ":", "continue", "try", ":", "yield", "from", "PythonDeserializer", "(", "[", "json", ".", "loads", "(", "line", ")", "]", ",", "*", "*", "options", ")", "except", "(", "GeneratorExit", ",", "DeserializationError", ")", ":", "raise", "except", "Exception", "as", "exc", ":", "raise", "DeserializationError", "(", ")", "from", "exc" ]
[ 41, 0 ]
[ 56, 49 ]
python
en
['en', 'en', 'en']
True
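The generator above depends on Django's PythonDeserializer; the following minimal sketch reproduces just its line-splitting behavior (decode bytes, split on newlines, skip blanks, parse each line) with nothing but the standard library:

import json

def iter_jsonl(stream_or_string):
    if isinstance(stream_or_string, bytes):
        stream_or_string = stream_or_string.decode()
    if isinstance(stream_or_string, str):
        stream_or_string = stream_or_string.split("\n")
    for line in stream_or_string:
        if line.strip():
            yield json.loads(line)

print(list(iter_jsonl('{"pk": 1}\n\n{"pk": 2}\n')))  # [{'pk': 1}, {'pk': 2}]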
make_setuptools_shim_args
( setup_py_path, # type: str global_options=None, # type: Sequence[str] no_user_config=False, # type: bool unbuffered_output=False, # type: bool )
Get setuptools command arguments with shim wrapped setup file invocation. :param setup_py_path: The path to setup.py to be wrapped. :param global_options: Additional global options. :param no_user_config: If True, disables personal user configuration. :param unbuffered_output: If True, adds the unbuffered switch to the argument list.
Get setuptools command arguments with shim wrapped setup file invocation.
def make_setuptools_shim_args(
    setup_py_path,  # type: str
    global_options=None,  # type: Sequence[str]
    no_user_config=False,  # type: bool
    unbuffered_output=False,  # type: bool
):
    # type: (...) -> List[str]
    """
    Get setuptools command arguments with shim wrapped setup file invocation.

    :param setup_py_path: The path to setup.py to be wrapped.
    :param global_options: Additional global options.
    :param no_user_config: If True, disables personal user configuration.
    :param unbuffered_output: If True, adds the unbuffered switch to the
      argument list.
    """
    args = [sys.executable]
    if unbuffered_output:
        args += ["-u"]
    args += ["-c", _SETUPTOOLS_SHIM.format(setup_py_path)]
    if global_options:
        args += global_options
    if no_user_config:
        args += ["--no-user-cfg"]
    return args
[ "def", "make_setuptools_shim_args", "(", "setup_py_path", ",", "# type: str", "global_options", "=", "None", ",", "# type: Sequence[str]", "no_user_config", "=", "False", ",", "# type: bool", "unbuffered_output", "=", "False", ",", "# type: bool", ")", ":", "# type: (...) -> List[str]", "args", "=", "[", "sys", ".", "executable", "]", "if", "unbuffered_output", ":", "args", "+=", "[", "\"-u\"", "]", "args", "+=", "[", "\"-c\"", ",", "_SETUPTOOLS_SHIM", ".", "format", "(", "setup_py_path", ")", "]", "if", "global_options", ":", "args", "+=", "global_options", "if", "no_user_config", ":", "args", "+=", "[", "\"--no-user-cfg\"", "]", "return", "args" ]
[ 20, 0 ]
[ 44, 15 ]
python
en
['en', 'error', 'th']
False
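_SETUPTOOLS_SHIM is a private pip template, so this sketch substitutes a hypothetical SHIM string just to show the shape of the argument list the helper builds:

import sys

SHIM = "exec(open({!r}).read())"  # hypothetical stand-in for pip's private shim

def shim_args(setup_py_path, global_options=None, no_user_config=False,
              unbuffered_output=False):
    args = [sys.executable]
    if unbuffered_output:
        args += ["-u"]
    args += ["-c", SHIM.format(setup_py_path)]
    if global_options:
        args += list(global_options)
    if no_user_config:
        args += ["--no-user-cfg"]
    return args

print(shim_args("pkg/setup.py", global_options=["--verbose"],
                no_user_config=True, unbuffered_output=True))
# [<python>, '-u', '-c', "exec(open('pkg/setup.py').read())", '--verbose', '--no-user-cfg']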
TestDataLoader.__init__
(self, in_path = "./", sampling_mode = 'link', type_constrain = True)
for link prediction
for link prediction
def __init__(self, in_path="./", sampling_mode='link', type_constrain=True):
    base_file = os.path.abspath(os.path.join(os.path.dirname(__file__), "../release/Base.so"))
    self.lib = ctypes.cdll.LoadLibrary(base_file)
    """for link prediction"""
    self.lib.getHeadBatch.argtypes = [
        ctypes.c_void_p,
        ctypes.c_void_p,
        ctypes.c_void_p,
    ]
    self.lib.getTailBatch.argtypes = [
        ctypes.c_void_p,
        ctypes.c_void_p,
        ctypes.c_void_p,
    ]
    """for triple classification"""
    self.lib.getTestBatch.argtypes = [
        ctypes.c_void_p,
        ctypes.c_void_p,
        ctypes.c_void_p,
        ctypes.c_void_p,
        ctypes.c_void_p,
        ctypes.c_void_p,
    ]
    """set essential parameters"""
    self.in_path = in_path
    self.sampling_mode = sampling_mode
    self.type_constrain = type_constrain
    self.read()
[ "def", "__init__", "(", "self", ",", "in_path", "=", "\"./\"", ",", "sampling_mode", "=", "'link'", ",", "type_constrain", "=", "True", ")", ":", "base_file", "=", "os", ".", "path", ".", "abspath", "(", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "__file__", ")", ",", "\"../release/Base.so\"", ")", ")", "self", ".", "lib", "=", "ctypes", ".", "cdll", ".", "LoadLibrary", "(", "base_file", ")", "self", ".", "lib", ".", "getHeadBatch", ".", "argtypes", "=", "[", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "]", "self", ".", "lib", ".", "getTailBatch", ".", "argtypes", "=", "[", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "]", "\"\"\"for triple classification\"\"\"", "self", ".", "lib", ".", "getTestBatch", ".", "argtypes", "=", "[", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_void_p", ",", "]", "\"\"\"set essential parameters\"\"\"", "self", ".", "in_path", "=", "in_path", "self", ".", "sampling_mode", "=", "sampling_mode", "self", ".", "type_constrain", "=", "type_constrain", "self", ".", "read", "(", ")" ]
[ 26, 1 ]
[ 53, 13 ]
python
en
['en', 'en', 'en']
True
TimestampSigner.get_timestamp
(self)
Returns the current timestamp. The function must return an integer.
Returns the current timestamp. The function must return an integer.
def get_timestamp(self):
    """Returns the current timestamp. The function must return an
    integer.
    """
    return int(time.time())
[ "def", "get_timestamp", "(", "self", ")", ":", "return", "int", "(", "time", ".", "time", "(", ")", ")" ]
[ 23, 4 ]
[ 27, 31 ]
python
en
['en', 'en', 'en']
True
TimestampSigner.timestamp_to_datetime
(self, ts)
Used to convert the timestamp from :meth:`get_timestamp` into a datetime object.
Used to convert the timestamp from :meth:`get_timestamp` into a datetime object.
def timestamp_to_datetime(self, ts):
    """Used to convert the timestamp from :meth:`get_timestamp` into a
    datetime object.
    """
    return datetime.utcfromtimestamp(ts)
[ "def", "timestamp_to_datetime", "(", "self", ",", "ts", ")", ":", "return", "datetime", ".", "utcfromtimestamp", "(", "ts", ")" ]
[ 29, 4 ]
[ 33, 44 ]
python
en
['en', 'en', 'en']
True
TimestampSigner.sign
(self, value)
Signs the given string and also attaches time information.
Signs the given string and also attaches time information.
def sign(self, value):
    """Signs the given string and also attaches time information."""
    value = want_bytes(value)
    timestamp = base64_encode(int_to_bytes(self.get_timestamp()))
    sep = want_bytes(self.sep)
    value = value + sep + timestamp
    return value + sep + self.get_signature(value)
[ "def", "sign", "(", "self", ",", "value", ")", ":", "value", "=", "want_bytes", "(", "value", ")", "timestamp", "=", "base64_encode", "(", "int_to_bytes", "(", "self", ".", "get_timestamp", "(", ")", ")", ")", "sep", "=", "want_bytes", "(", "self", ".", "sep", ")", "value", "=", "value", "+", "sep", "+", "timestamp", "return", "value", "+", "sep", "+", "self", ".", "get_signature", "(", "value", ")" ]
[ 35, 4 ]
[ 41, 54 ]
python
en
['en', 'en', 'en']
True
TimestampSigner.unsign
(self, value, max_age=None, return_timestamp=False)
Works like the regular :meth:`.Signer.unsign` but can also validate the time. See the base docstring of the class for the general behavior. If ``return_timestamp`` is ``True`` the timestamp of the signature will be returned as a naive :class:`datetime.datetime` object in UTC.
Works like the regular :meth:`.Signer.unsign` but can also validate the time. See the base docstring of the class for the general behavior. If ``return_timestamp`` is ``True`` the timestamp of the signature will be returned as a naive :class:`datetime.datetime` object in UTC.
def unsign(self, value, max_age=None, return_timestamp=False):
    """Works like the regular :meth:`.Signer.unsign` but can also
    validate the time. See the base docstring of the class for the
    general behavior. If ``return_timestamp`` is ``True`` the
    timestamp of the signature will be returned as a naive
    :class:`datetime.datetime` object in UTC.
    """
    try:
        result = Signer.unsign(self, value)
        sig_error = None
    except BadSignature as e:
        sig_error = e
        result = e.payload or b""
    sep = want_bytes(self.sep)

    # If there is no timestamp in the result there is something
    # seriously wrong. In case there was a signature error, we raise
    # that one directly, otherwise we have a weird situation in
    # which we shouldn't have come except someone uses a time-based
    # serializer on non-timestamp data, so catch that.
    if sep not in result:
        if sig_error:
            raise sig_error
        raise BadTimeSignature("timestamp missing", payload=result)

    value, timestamp = result.rsplit(sep, 1)
    try:
        timestamp = bytes_to_int(base64_decode(timestamp))
    except Exception:
        timestamp = None

    # Signature is *not* okay. Raise a proper error now that we have
    # split the value and the timestamp.
    if sig_error is not None:
        raise BadTimeSignature(
            text_type(sig_error), payload=value, date_signed=timestamp
        )

    # Signature was okay but the timestamp is actually not there or
    # malformed. Should not happen, but we handle it anyway.
    if timestamp is None:
        raise BadTimeSignature("Malformed timestamp", payload=value)

    # Check timestamp is not older than max_age
    if max_age is not None:
        age = self.get_timestamp() - timestamp
        if age > max_age:
            raise SignatureExpired(
                "Signature age %s > %s seconds" % (age, max_age),
                payload=value,
                date_signed=self.timestamp_to_datetime(timestamp),
            )

    if return_timestamp:
        return value, self.timestamp_to_datetime(timestamp)
    return value
[ "def", "unsign", "(", "self", ",", "value", ",", "max_age", "=", "None", ",", "return_timestamp", "=", "False", ")", ":", "try", ":", "result", "=", "Signer", ".", "unsign", "(", "self", ",", "value", ")", "sig_error", "=", "None", "except", "BadSignature", "as", "e", ":", "sig_error", "=", "e", "result", "=", "e", ".", "payload", "or", "b\"\"", "sep", "=", "want_bytes", "(", "self", ".", "sep", ")", "# If there is no timestamp in the result there is something", "# seriously wrong. In case there was a signature error, we raise", "# that one directly, otherwise we have a weird situation in", "# which we shouldn't have come except someone uses a time-based", "# serializer on non-timestamp data, so catch that.", "if", "sep", "not", "in", "result", ":", "if", "sig_error", ":", "raise", "sig_error", "raise", "BadTimeSignature", "(", "\"timestamp missing\"", ",", "payload", "=", "result", ")", "value", ",", "timestamp", "=", "result", ".", "rsplit", "(", "sep", ",", "1", ")", "try", ":", "timestamp", "=", "bytes_to_int", "(", "base64_decode", "(", "timestamp", ")", ")", "except", "Exception", ":", "timestamp", "=", "None", "# Signature is *not* okay. Raise a proper error now that we have", "# split the value and the timestamp.", "if", "sig_error", "is", "not", "None", ":", "raise", "BadTimeSignature", "(", "text_type", "(", "sig_error", ")", ",", "payload", "=", "value", ",", "date_signed", "=", "timestamp", ")", "# Signature was okay but the timestamp is actually not there or", "# malformed. Should not happen, but we handle it anyway.", "if", "timestamp", "is", "None", ":", "raise", "BadTimeSignature", "(", "\"Malformed timestamp\"", ",", "payload", "=", "value", ")", "# Check timestamp is not older than max_age", "if", "max_age", "is", "not", "None", ":", "age", "=", "self", ".", "get_timestamp", "(", ")", "-", "timestamp", "if", "age", ">", "max_age", ":", "raise", "SignatureExpired", "(", "\"Signature age %s > %s seconds\"", "%", "(", "age", ",", "max_age", ")", ",", "payload", "=", "value", ",", "date_signed", "=", "self", ".", "timestamp_to_datetime", "(", "timestamp", ")", ",", ")", "if", "return_timestamp", ":", "return", "value", ",", "self", ".", "timestamp_to_datetime", "(", "timestamp", ")", "return", "value" ]
[ 43, 4 ]
[ 98, 20 ]
python
en
['en', 'en', 'en']
True
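A round-trip sketch of the sign/unsign pair above, assuming the itsdangerous package (where TimestampSigner lives) is installed:

import time
from itsdangerous import TimestampSigner, SignatureExpired

signer = TimestampSigner("secret-key")
token = signer.sign("hello")             # b'hello.<timestamp>.<signature>'
print(signer.unsign(token, max_age=60))  # b'hello' while the token is fresh

time.sleep(2)
try:
    signer.unsign(token, max_age=1)      # now older than one second
except SignatureExpired as exc:
    print("expired:", exc)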
TimestampSigner.validate
(self, signed_value, max_age=None)
Only validates the given signed value. Returns ``True`` if the signature exists and is valid.
Only validates the given signed value. Returns ``True`` if the signature exists and is valid.
def validate(self, signed_value, max_age=None):
    """Only validates the given signed value. Returns ``True`` if
    the signature exists and is valid."""
    try:
        self.unsign(signed_value, max_age=max_age)
        return True
    except BadSignature:
        return False
[ "def", "validate", "(", "self", ",", "signed_value", ",", "max_age", "=", "None", ")", ":", "try", ":", "self", ".", "unsign", "(", "signed_value", ",", "max_age", "=", "max_age", ")", "return", "True", "except", "BadSignature", ":", "return", "False" ]
[ 100, 4 ]
[ 107, 24 ]
python
en
['en', 'en', 'en']
True
TimedSerializer.loads
(self, s, max_age=None, return_timestamp=False, salt=None)
Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the signature validation fails. If a ``max_age`` is provided it will ensure the signature is not older than that time in seconds. In case the signature is outdated, :exc:`.SignatureExpired` is raised. All arguments are forwarded to the signer's :meth:`~TimestampSigner.unsign` method.
Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the signature validation fails. If a ``max_age`` is provided it will ensure the signature is not older than that time in seconds. In case the signature is outdated, :exc:`.SignatureExpired` is raised. All arguments are forwarded to the signer's :meth:`~TimestampSigner.unsign` method.
def loads(self, s, max_age=None, return_timestamp=False, salt=None):
    """Reverse of :meth:`dumps`, raises :exc:`.BadSignature` if the
    signature validation fails. If a ``max_age`` is provided it will
    ensure the signature is not older than that time in seconds. In
    case the signature is outdated, :exc:`.SignatureExpired` is
    raised. All arguments are forwarded to the signer's
    :meth:`~TimestampSigner.unsign` method.
    """
    s = want_bytes(s)
    last_exception = None
    for signer in self.iter_unsigners(salt):
        try:
            base64d, timestamp = signer.unsign(s, max_age, return_timestamp=True)
            payload = self.load_payload(base64d)
            if return_timestamp:
                return payload, timestamp
            return payload
        # If we get a signature expired it means we could read the
        # signature but it's invalid. In that case we do not want to
        # try the next signer.
        except SignatureExpired:
            raise
        except BadSignature as err:
            last_exception = err
    raise last_exception
[ "def", "loads", "(", "self", ",", "s", ",", "max_age", "=", "None", ",", "return_timestamp", "=", "False", ",", "salt", "=", "None", ")", ":", "s", "=", "want_bytes", "(", "s", ")", "last_exception", "=", "None", "for", "signer", "in", "self", ".", "iter_unsigners", "(", "salt", ")", ":", "try", ":", "base64d", ",", "timestamp", "=", "signer", ".", "unsign", "(", "s", ",", "max_age", ",", "return_timestamp", "=", "True", ")", "payload", "=", "self", ".", "load_payload", "(", "base64d", ")", "if", "return_timestamp", ":", "return", "payload", ",", "timestamp", "return", "payload", "# If we get a signature expired it means we could read the", "# signature but it's invalid. In that case we do not want to", "# try the next signer.", "except", "SignatureExpired", ":", "raise", "except", "BadSignature", "as", "err", ":", "last_exception", "=", "err", "raise", "last_exception" ]
[ 117, 4 ]
[ 141, 28 ]
python
en
['en', 'la', 'en']
True
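The same time-checked behavior at the serializer level, again a sketch assuming the itsdangerous version documented above:

from itsdangerous import TimedSerializer

s = TimedSerializer("secret-key")
token = s.dumps({"user": 1})
print(s.loads(token, max_age=3600))                # {'user': 1}
payload, signed_at = s.loads(token, return_timestamp=True)
print(payload, signed_at)                          # payload plus a naive UTC datetime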
_glibc_version_string_confstr
()
Primary implementation of glibc_version_string using os.confstr.
Primary implementation of glibc_version_string using os.confstr.
def _glibc_version_string_confstr() -> Optional[str]:
    """
    Primary implementation of glibc_version_string using os.confstr.
    """
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
    try:
        # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
        version_string = os.confstr("CS_GNU_LIBC_VERSION")
        assert version_string is not None
        _, version = version_string.split()
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version
[ "def", "_glibc_version_string_confstr", "(", ")", "->", "Optional", "[", "str", "]", ":", "# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely", "# to be broken or missing. This strategy is used in the standard library", "# platform module.", "# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183", "try", ":", "# os.confstr(\"CS_GNU_LIBC_VERSION\") returns a string like \"glibc 2.17\".", "version_string", "=", "os", ".", "confstr", "(", "\"CS_GNU_LIBC_VERSION\"", ")", "assert", "version_string", "is", "not", "None", "_", ",", "version", "=", "version_string", ".", "split", "(", ")", "except", "(", "AssertionError", ",", "AttributeError", ",", "OSError", ",", "ValueError", ")", ":", "# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...", "return", "None", "return", "version" ]
[ 134, 0 ]
[ 150, 18 ]
python
en
['en', 'error', 'th']
False
_glibc_version_string_ctypes
()
Fallback implementation of glibc_version_string using ctypes.
Fallback implementation of glibc_version_string using ctypes.
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can't proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
[ "def", "_glibc_version_string_ctypes", "(", ")", "->", "Optional", "[", "str", "]", ":", "try", ":", "import", "ctypes", "except", "ImportError", ":", "return", "None", "# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen", "# manpage says, \"If filename is NULL, then the returned handle is for the", "# main program\". This way we can let the linker do the work to figure out", "# which libc our process is actually using.", "#", "# We must also handle the special case where the executable is not a", "# dynamically linked executable. This can occur when using musl libc,", "# for example. In this situation, dlopen() will error, leading to an", "# OSError. Interestingly, at least in the case of musl, there is no", "# errno set on the OSError. The single string argument used to construct", "# OSError comes from libc itself and is therefore not portable to", "# hard code here. In any case, failure to call dlopen() means we", "# can proceed, so we bail on our attempt.", "try", ":", "process_namespace", "=", "ctypes", ".", "CDLL", "(", "None", ")", "except", "OSError", ":", "return", "None", "try", ":", "gnu_get_libc_version", "=", "process_namespace", ".", "gnu_get_libc_version", "except", "AttributeError", ":", "# Symbol doesn't exist -> therefore, we are not linked to", "# glibc.", "return", "None", "# Call gnu_get_libc_version, which returns a string like \"2.5\"", "gnu_get_libc_version", ".", "restype", "=", "ctypes", ".", "c_char_p", "version_str", ":", "str", "=", "gnu_get_libc_version", "(", ")", "# py2 / py3 compatibility:", "if", "not", "isinstance", "(", "version_str", ",", "str", ")", ":", "version_str", "=", "version_str", ".", "decode", "(", "\"ascii\"", ")", "return", "version_str" ]
[ 153, 0 ]
[ 194, 22 ]
python
en
['en', 'error', 'th']
False
_glibc_version_string
()
Returns glibc version string, or None if not using glibc.
Returns glibc version string, or None if not using glibc.
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
[ "def", "_glibc_version_string", "(", ")", "->", "Optional", "[", "str", "]", ":", "return", "_glibc_version_string_confstr", "(", ")", "or", "_glibc_version_string_ctypes", "(", ")" ]
[ 197, 0 ]
[ 199, 76 ]
python
en
['en', 'en', 'en']
True
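A condensed, runnable sketch of the confstr-first strategy the three helpers above implement (the ctypes fallback is omitted for brevity); it prints None on non-glibc systems:

import os

def glibc_version_string():
    try:
        version_string = os.confstr("CS_GNU_LIBC_VERSION")  # e.g. "glibc 2.31"
        if version_string:
            return version_string.split()[1]
    except (AttributeError, OSError, ValueError):
        pass  # not POSIX, or the confstr name is unsupported
    return None

print(glibc_version_string())  # e.g. '2.31' on glibc systems, None elsewhere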
_parse_glibc_version
(version_str: str)
Parse glibc version. We use a regexp instead of str.split because we want to discard any random junk that might come after the minor version -- this might happen in patched/forked versions of glibc (e.g. Linaro's version of glibc uses version strings like "2.20-2014.11"). See gh-3588.
Parse glibc version.
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
    """Parse glibc version.

    We use a regexp instead of str.split because we want to discard any
    random junk that might come after the minor version -- this might
    happen in patched/forked versions of glibc (e.g. Linaro's version of
    glibc uses version strings like "2.20-2014.11"). See gh-3588.
    """
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    if not m:
        warnings.warn(
            "Expected glibc version with 2 components major.minor,"
            " got: %s" % version_str,
            RuntimeWarning,
        )
        return -1, -1
    return int(m.group("major")), int(m.group("minor"))
[ "def", "_parse_glibc_version", "(", "version_str", ":", "str", ")", "->", "Tuple", "[", "int", ",", "int", "]", ":", "m", "=", "re", ".", "match", "(", "r\"(?P<major>[0-9]+)\\.(?P<minor>[0-9]+)\"", ",", "version_str", ")", "if", "not", "m", ":", "warnings", ".", "warn", "(", "\"Expected glibc version with 2 components major.minor,\"", "\" got: %s\"", "%", "version_str", ",", "RuntimeWarning", ",", ")", "return", "-", "1", ",", "-", "1", "return", "int", "(", "m", ".", "group", "(", "\"major\"", ")", ")", ",", "int", "(", "m", ".", "group", "(", "\"minor\"", ")", ")" ]
[ 202, 0 ]
[ 218, 55 ]
python
it
['it', 'it', 'it']
True
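The regexp's tolerance for vendor suffixes is easy to demonstrate with a standalone re-implementation:

import re

def parse_glibc_version(version_str):
    m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
    return (int(m.group("major")), int(m.group("minor"))) if m else (-1, -1)

print(parse_glibc_version("2.20-2014.11"))  # (2, 20) -- Linaro-style suffix ignored
print(parse_glibc_version("not-glibc"))     # (-1, -1)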
Left.__init__
(self, expression, length, **extra)
expression: the name of a field, or an expression returning a string length: the number of characters to return from the start of the string
expression: the name of a field, or an expression returning a string length: the number of characters to return from the start of the string
def __init__(self, expression, length, **extra):
    """
    expression: the name of a field, or an expression returning a string
    length: the number of characters to return from the start of the string
    """
    if not hasattr(length, 'resolve_expression'):
        if length < 1:
            raise ValueError("'length' must be greater than 0.")
    super().__init__(expression, length, **extra)
[ "def", "__init__", "(", "self", ",", "expression", ",", "length", ",", "*", "*", "extra", ")", ":", "if", "not", "hasattr", "(", "length", ",", "'resolve_expression'", ")", ":", "if", "length", "<", "1", ":", "raise", "ValueError", "(", "\"'length' must be greater than 0.\"", ")", "super", "(", ")", ".", "__init__", "(", "expression", ",", "length", ",", "*", "*", "extra", ")" ]
[ 123, 4 ]
[ 131, 53 ]
python
en
['en', 'error', 'th']
False
Substr.__init__
(self, expression, pos, length=None, **extra)
expression: the name of a field, or an expression returning a string pos: an integer > 0, or an expression returning an integer length: an optional number of characters to return
expression: the name of a field, or an expression returning a string pos: an integer > 0, or an expression returning an integer length: an optional number of characters to return
def __init__(self, expression, pos, length=None, **extra):
    """
    expression: the name of a field, or an expression returning a string
    pos: an integer > 0, or an expression returning an integer
    length: an optional number of characters to return
    """
    if not hasattr(pos, 'resolve_expression'):
        if pos < 1:
            raise ValueError("'pos' must be greater than 0")
    expressions = [expression, pos]
    if length is not None:
        expressions.append(length)
    super().__init__(*expressions, **extra)
[ "def", "__init__", "(", "self", ",", "expression", ",", "pos", ",", "length", "=", "None", ",", "*", "*", "extra", ")", ":", "if", "not", "hasattr", "(", "pos", ",", "'resolve_expression'", ")", ":", "if", "pos", "<", "1", ":", "raise", "ValueError", "(", "\"'pos' must be greater than 0\"", ")", "expressions", "=", "[", "expression", ",", "pos", "]", "if", "length", "is", "not", "None", ":", "expressions", ".", "append", "(", "length", ")", "super", "(", ")", ".", "__init__", "(", "*", "expressions", ",", "*", "*", "extra", ")" ]
[ 294, 4 ]
[ 306, 47 ]
python
en
['en', 'error', 'th']
False
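A usage sketch for the two database functions above. The queryset lines assume a configured Django project with a hypothetical Author model, so they are left as comments; the argument validation runs without any database:

from django.db.models.functions import Left, Substr

# In a configured project one would annotate querysets with SQL-computed slices:
#   Author.objects.annotate(initial=Left('name', 1))
#   Author.objects.annotate(middle=Substr('name', 2, 3))  # 3 chars from position 2

try:
    Left('name', 0)
except ValueError as exc:
    print(exc)  # 'length' must be greater than 0.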
_get_dist
(metadata_directory: str)
Return a pkg_resources.Distribution for the provided metadata directory.
Return a pkg_resources.Distribution for the provided metadata directory.
def _get_dist(metadata_directory: str) -> Distribution:
    """Return a pkg_resources.Distribution for the provided
    metadata directory.
    """
    dist_dir = metadata_directory.rstrip(os.sep)

    # Build a PathMetadata object, from path to metadata. :wink:
    base_dir, dist_dir_name = os.path.split(dist_dir)
    metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

    # Determine the correct Distribution object type.
    if dist_dir.endswith(".egg-info"):
        dist_cls = pkg_resources.Distribution
        dist_name = os.path.splitext(dist_dir_name)[0]
    else:
        assert dist_dir.endswith(".dist-info")
        dist_cls = pkg_resources.DistInfoDistribution
        dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

    return dist_cls(
        base_dir,
        project_name=dist_name,
        metadata=metadata,
    )
[ "def", "_get_dist", "(", "metadata_directory", ":", "str", ")", "->", "Distribution", ":", "dist_dir", "=", "metadata_directory", ".", "rstrip", "(", "os", ".", "sep", ")", "# Build a PathMetadata object, from path to metadata. :wink:", "base_dir", ",", "dist_dir_name", "=", "os", ".", "path", ".", "split", "(", "dist_dir", ")", "metadata", "=", "pkg_resources", ".", "PathMetadata", "(", "base_dir", ",", "dist_dir", ")", "# Determine the correct Distribution object type.", "if", "dist_dir", ".", "endswith", "(", "\".egg-info\"", ")", ":", "dist_cls", "=", "pkg_resources", ".", "Distribution", "dist_name", "=", "os", ".", "path", ".", "splitext", "(", "dist_dir_name", ")", "[", "0", "]", "else", ":", "assert", "dist_dir", ".", "endswith", "(", "\".dist-info\"", ")", "dist_cls", "=", "pkg_resources", ".", "DistInfoDistribution", "dist_name", "=", "os", ".", "path", ".", "splitext", "(", "dist_dir_name", ")", "[", "0", "]", ".", "split", "(", "\"-\"", ")", "[", "0", "]", "return", "dist_cls", "(", "base_dir", ",", "project_name", "=", "dist_name", ",", "metadata", "=", "metadata", ",", ")" ]
[ 59, 0 ]
[ 82, 5 ]
python
en
['en', 'en', 'en']
True
InstallRequirement.format_debug
(self)
An un-tested helper for getting state, for debugging.
An un-tested helper for getting state, for debugging.
def format_debug(self) -> str:
    """An un-tested helper for getting state, for debugging.
    """
    attributes = vars(self)
    names = sorted(attributes)

    state = (
        "{}={!r}".format(attr, attributes[attr]) for attr in sorted(names)
    )
    return '<{name} object: {{{state}}}>'.format(
        name=self.__class__.__name__,
        state=", ".join(state),
    )
[ "def", "format_debug", "(", "self", ")", "->", "str", ":", "attributes", "=", "vars", "(", "self", ")", "names", "=", "sorted", "(", "attributes", ")", "state", "=", "(", "\"{}={!r}\"", ".", "format", "(", "attr", ",", "attributes", "[", "attr", "]", ")", "for", "attr", "in", "sorted", "(", "names", ")", ")", "return", "'<{name} object: {{{state}}}>'", ".", "format", "(", "name", "=", "self", ".", "__class__", ".", "__name__", ",", "state", "=", "\", \"", ".", "join", "(", "state", ")", ",", ")" ]
[ 224, 4 ]
[ 236, 9 ]
python
en
['da', 'en', 'en']
True
InstallRequirement.is_pinned
(self)
Return whether I am pinned to an exact version. For example, some-package==1.2 is pinned; some-package>1.2 is not.
Return whether I am pinned to an exact version.
def is_pinned(self) -> bool:
    """Return whether I am pinned to an exact version.

    For example, some-package==1.2 is pinned; some-package>1.2 is not.
    """
    specifiers = self.specifier
    return (len(specifiers) == 1 and
            next(iter(specifiers)).operator in {'==', '==='})
[ "def", "is_pinned", "(", "self", ")", "->", "bool", ":", "specifiers", "=", "self", ".", "specifier", "return", "(", "len", "(", "specifiers", ")", "==", "1", "and", "next", "(", "iter", "(", "specifiers", ")", ")", ".", "operator", "in", "{", "'=='", ",", "'==='", "}", ")" ]
[ 250, 4 ]
[ 257, 65 ]
python
en
['en', 'en', 'en']
True
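The pinning test is easy to reproduce with the standalone packaging library, re-implemented outside of pip:

from packaging.requirements import Requirement

def is_pinned(req):
    specifiers = req.specifier
    return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

print(is_pinned(Requirement("some-package==1.2")))  # True
print(is_pinned(Requirement("some-package>1.2")))   # False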
InstallRequirement.has_hash_options
(self)
Return whether any known-good hashes are specified as options. These activate --require-hashes mode; hashes specified as part of a URL do not.
Return whether any known-good hashes are specified as options.
def has_hash_options(self) -> bool:
    """Return whether any known-good hashes are specified as options.

    These activate --require-hashes mode; hashes specified as part of a
    URL do not.
    """
    return bool(self.hash_options)
[ "def", "has_hash_options", "(", "self", ")", "->", "bool", ":", "return", "bool", "(", "self", ".", "hash_options", ")" ]
[ 272, 4 ]
[ 279, 38 ]
python
en
['en', 'en', 'en']
True
InstallRequirement.hashes
(self, trust_internet: bool = True)
Return a hash-comparer that considers my option- and URL-based hashes to be known-good. Hashes in URLs--ones embedded in the requirements file, not ones downloaded from an index server--are almost peers with ones from flags. They satisfy --require-hashes (whether it was implicitly or explicitly activated) but do not activate it. md5 and sha224 are not allowed in flags, which should nudge people toward good algos. We always OR all hashes together, even ones from URLs. :param trust_internet: Whether to trust URL-based (#md5=...) hashes downloaded from the internet, as by populate_link()
Return a hash-comparer that considers my option- and URL-based hashes to be known-good.
def hashes(self, trust_internet: bool = True) -> Hashes:
    """Return a hash-comparer that considers my option- and URL-based
    hashes to be known-good.

    Hashes in URLs--ones embedded in the requirements file, not ones
    downloaded from an index server--are almost peers with ones from
    flags. They satisfy --require-hashes (whether it was implicitly or
    explicitly activated) but do not activate it. md5 and sha224 are not
    allowed in flags, which should nudge people toward good algos. We
    always OR all hashes together, even ones from URLs.

    :param trust_internet: Whether to trust URL-based (#md5=...) hashes
        downloaded from the internet, as by populate_link()
    """
    good_hashes = self.hash_options.copy()
    link = self.link if trust_internet else self.original_link
    if link and link.hash:
        good_hashes.setdefault(link.hash_name, []).append(link.hash)
    return Hashes(good_hashes)
[ "def", "hashes", "(", "self", ",", "trust_internet", ":", "bool", "=", "True", ")", "->", "Hashes", ":", "good_hashes", "=", "self", ".", "hash_options", ".", "copy", "(", ")", "link", "=", "self", ".", "link", "if", "trust_internet", "else", "self", ".", "original_link", "if", "link", "and", "link", ".", "hash", ":", "good_hashes", ".", "setdefault", "(", "link", ".", "hash_name", ",", "[", "]", ")", ".", "append", "(", "link", ".", "hash", ")", "return", "Hashes", "(", "good_hashes", ")" ]
[ 281, 4 ]
[ 300, 34 ]
python
en
['en', 'en', 'en']
True
InstallRequirement.from_path
(self)
Format a nice indicator to show where this "comes from"
Format a nice indicator to show where this "comes from"
def from_path(self) -> Optional[str]:
    """Format a nice indicator to show where this "comes from"
    """
    if self.req is None:
        return None
    s = str(self.req)
    if self.comes_from:
        if isinstance(self.comes_from, str):
            comes_from = self.comes_from
        else:
            comes_from = self.comes_from.from_path()
        if comes_from:
            s += '->' + comes_from
    return s
[ "def", "from_path", "(", "self", ")", "->", "Optional", "[", "str", "]", ":", "if", "self", ".", "req", "is", "None", ":", "return", "None", "s", "=", "str", "(", "self", ".", "req", ")", "if", "self", ".", "comes_from", ":", "if", "isinstance", "(", "self", ".", "comes_from", ",", "str", ")", ":", "comes_from", "=", "self", ".", "comes_from", "else", ":", "comes_from", "=", "self", ".", "comes_from", ".", "from_path", "(", ")", "if", "comes_from", ":", "s", "+=", "'->'", "+", "comes_from", "return", "s" ]
[ 302, 4 ]
[ 315, 16 ]
python
en
['en', 'en', 'en']
True
InstallRequirement._set_requirement
(self)
Set requirement after generating metadata.
Set requirement after generating metadata.
def _set_requirement(self) -> None:
    """Set requirement after generating metadata.
    """
    assert self.req is None
    assert self.metadata is not None
    assert self.source_dir is not None

    # Construct a Requirement object from the generated metadata
    if isinstance(parse_version(self.metadata["Version"]), Version):
        op = "=="
    else:
        op = "==="

    self.req = Requirement(
        "".join([
            self.metadata["Name"],
            op,
            self.metadata["Version"],
        ])
    )
[ "def", "_set_requirement", "(", "self", ")", "->", "None", ":", "assert", "self", ".", "req", "is", "None", "assert", "self", ".", "metadata", "is", "not", "None", "assert", "self", ".", "source_dir", "is", "not", "None", "# Construct a Requirement object from the generated metadata", "if", "isinstance", "(", "parse_version", "(", "self", ".", "metadata", "[", "\"Version\"", "]", ")", ",", "Version", ")", ":", "op", "=", "\"==\"", "else", ":", "op", "=", "\"===\"", "self", ".", "req", "=", "Requirement", "(", "\"\"", ".", "join", "(", "[", "self", ".", "metadata", "[", "\"Name\"", "]", ",", "op", ",", "self", ".", "metadata", "[", "\"Version\"", "]", ",", "]", ")", ")" ]
[ 360, 4 ]
[ 379, 9 ]
python
en
['da', 'en', 'en']
True
InstallRequirement.check_if_exists
(self, use_user_site: bool)
Find an installed distribution that satisfies or conflicts with this requirement, and set self.satisfied_by or self.should_reinstall appropriately.
Find an installed distribution that satisfies or conflicts with this requirement, and set self.satisfied_by or self.should_reinstall appropriately.
def check_if_exists(self, use_user_site: bool) -> None:
    """Find an installed distribution that satisfies or conflicts
    with this requirement, and set self.satisfied_by or
    self.should_reinstall appropriately.
    """
    if self.req is None:
        return
    existing_dist = get_distribution(self.req.name)
    if not existing_dist:
        return

    # pkg_resources may contain a different copy of packaging.version from
    # pip if the downstream distributor does a poor job debundling pip.
    # We avoid existing_dist.parsed_version and let SpecifierSet.contains
    # parse the version instead.
    existing_version = existing_dist.version
    version_compatible = (
        existing_version is not None and
        self.req.specifier.contains(existing_version, prereleases=True)
    )
    if not version_compatible:
        self.satisfied_by = None
        if use_user_site:
            if dist_in_usersite(existing_dist):
                self.should_reinstall = True
            elif (running_under_virtualenv() and
                    dist_in_site_packages(existing_dist)):
                raise InstallationError(
                    "Will not install to the user site because it will "
                    "lack sys.path precedence to {} in {}".format(
                        existing_dist.project_name, existing_dist.location)
                )
        else:
            self.should_reinstall = True
    else:
        if self.editable:
            self.should_reinstall = True
            # when installing editables, nothing pre-existing should ever
            # satisfy
            self.satisfied_by = None
        else:
            self.satisfied_by = existing_dist
[ "def", "check_if_exists", "(", "self", ",", "use_user_site", ":", "bool", ")", "->", "None", ":", "if", "self", ".", "req", "is", "None", ":", "return", "existing_dist", "=", "get_distribution", "(", "self", ".", "req", ".", "name", ")", "if", "not", "existing_dist", ":", "return", "# pkg_resouces may contain a different copy of packaging.version from", "# pip in if the downstream distributor does a poor job debundling pip.", "# We avoid existing_dist.parsed_version and let SpecifierSet.contains", "# parses the version instead.", "existing_version", "=", "existing_dist", ".", "version", "version_compatible", "=", "(", "existing_version", "is", "not", "None", "and", "self", ".", "req", ".", "specifier", ".", "contains", "(", "existing_version", ",", "prereleases", "=", "True", ")", ")", "if", "not", "version_compatible", ":", "self", ".", "satisfied_by", "=", "None", "if", "use_user_site", ":", "if", "dist_in_usersite", "(", "existing_dist", ")", ":", "self", ".", "should_reinstall", "=", "True", "elif", "(", "running_under_virtualenv", "(", ")", "and", "dist_in_site_packages", "(", "existing_dist", ")", ")", ":", "raise", "InstallationError", "(", "\"Will not install to the user site because it will \"", "\"lack sys.path precedence to {} in {}\"", ".", "format", "(", "existing_dist", ".", "project_name", ",", "existing_dist", ".", "location", ")", ")", "else", ":", "self", ".", "should_reinstall", "=", "True", "else", ":", "if", "self", ".", "editable", ":", "self", ".", "should_reinstall", "=", "True", "# when installing editables, nothing pre-existing should ever", "# satisfy", "self", ".", "satisfied_by", "=", "None", "else", ":", "self", ".", "satisfied_by", "=", "existing_dist" ]
[ 396, 4 ]
[ 437, 49 ]
python
en
['en', 'en', 'en']
True
InstallRequirement.load_pyproject_toml
(self)
Load the pyproject.toml file. After calling this routine, all of the attributes related to PEP 517 processing for this requirement have been set. In particular, the use_pep517 attribute can be used to determine whether we should follow the PEP 517 or legacy (setup.py) code path.
Load the pyproject.toml file.
def load_pyproject_toml(self) -> None:
    """Load the pyproject.toml file.

    After calling this routine, all of the attributes related to PEP 517
    processing for this requirement have been set. In particular, the
    use_pep517 attribute can be used to determine whether we should
    follow the PEP 517 or legacy (setup.py) code path.
    """
    pyproject_toml_data = load_pyproject_toml(
        self.use_pep517,
        self.pyproject_toml_path,
        self.setup_py_path,
        str(self)
    )

    if pyproject_toml_data is None:
        self.use_pep517 = False
        return

    self.use_pep517 = True
    requires, backend, check, backend_path = pyproject_toml_data
    self.requirements_to_check = check
    self.pyproject_requires = requires
    self.pep517_backend = Pep517HookCaller(
        self.unpacked_source_directory, backend, backend_path=backend_path,
    )
[ "def", "load_pyproject_toml", "(", "self", ")", "->", "None", ":", "pyproject_toml_data", "=", "load_pyproject_toml", "(", "self", ".", "use_pep517", ",", "self", ".", "pyproject_toml_path", ",", "self", ".", "setup_py_path", ",", "str", "(", "self", ")", ")", "if", "pyproject_toml_data", "is", "None", ":", "self", ".", "use_pep517", "=", "False", "return", "self", ".", "use_pep517", "=", "True", "requires", ",", "backend", ",", "check", ",", "backend_path", "=", "pyproject_toml_data", "self", ".", "requirements_to_check", "=", "check", "self", ".", "pyproject_requires", "=", "requires", "self", ".", "pep517_backend", "=", "Pep517HookCaller", "(", "self", ".", "unpacked_source_directory", ",", "backend", ",", "backend_path", "=", "backend_path", ",", ")" ]
[ 465, 4 ]
[ 490, 9 ]
python
en
['en', 'en', 'en']
True
InstallRequirement._generate_metadata
(self)
Invokes metadata generator functions, with the required arguments.
Invokes metadata generator functions, with the required arguments.
def _generate_metadata(self) -> str:
    """Invokes metadata generator functions, with the required arguments.
    """
    if not self.use_pep517:
        assert self.unpacked_source_directory
        if not os.path.exists(self.setup_py_path):
            raise InstallationError(
                f'File "setup.py" not found for legacy project {self}.'
            )
        return generate_metadata_legacy(
            build_env=self.build_env,
            setup_py_path=self.setup_py_path,
            source_dir=self.unpacked_source_directory,
            isolated=self.isolated,
            details=self.name or f"from {self.link}"
        )

    assert self.pep517_backend is not None
    return generate_metadata(
        build_env=self.build_env,
        backend=self.pep517_backend,
    )
[ "def", "_generate_metadata", "(", "self", ")", "->", "str", ":", "if", "not", "self", ".", "use_pep517", ":", "assert", "self", ".", "unpacked_source_directory", "if", "not", "os", ".", "path", ".", "exists", "(", "self", ".", "setup_py_path", ")", ":", "raise", "InstallationError", "(", "f'File \"setup.py\" not found for legacy project {self}.'", ")", "return", "generate_metadata_legacy", "(", "build_env", "=", "self", ".", "build_env", ",", "setup_py_path", "=", "self", ".", "setup_py_path", ",", "source_dir", "=", "self", ".", "unpacked_source_directory", ",", "isolated", "=", "self", ".", "isolated", ",", "details", "=", "self", ".", "name", "or", "f\"from {self.link}\"", ")", "assert", "self", ".", "pep517_backend", "is", "not", "None", "return", "generate_metadata", "(", "build_env", "=", "self", ".", "build_env", ",", "backend", "=", "self", ".", "pep517_backend", ",", ")" ]
[ 492, 4 ]
[ 516, 9 ]
python
en
['en', 'en', 'en']
True
InstallRequirement.prepare_metadata
(self)
Ensure that project metadata is available. Under PEP 517, call the backend hook to prepare the metadata. Under legacy processing, call setup.py egg-info.
Ensure that project metadata is available.
def prepare_metadata(self) -> None:
    """Ensure that project metadata is available.

    Under PEP 517, call the backend hook to prepare the metadata.
    Under legacy processing, call setup.py egg-info.
    """
    assert self.source_dir

    with indent_log():
        self.metadata_directory = self._generate_metadata()

    # Act on the newly generated metadata, based on the name and version.
    if not self.name:
        self._set_requirement()
    else:
        self.warn_on_mismatching_name()

    self.assert_source_matches_version()
[ "def", "prepare_metadata", "(", "self", ")", "->", "None", ":", "assert", "self", ".", "source_dir", "with", "indent_log", "(", ")", ":", "self", ".", "metadata_directory", "=", "self", ".", "_generate_metadata", "(", ")", "# Act on the newly generated metadata, based on the name and version.", "if", "not", "self", ".", "name", ":", "self", ".", "_set_requirement", "(", ")", "else", ":", "self", ".", "warn_on_mismatching_name", "(", ")", "self", ".", "assert_source_matches_version", "(", ")" ]
[ 518, 4 ]
[ 535, 44 ]
python
en
['en', 'en', 'en']
True
InstallRequirement.ensure_has_source_dir
( self, parent_dir: str, autodelete: bool = False, parallel_builds: bool = False, )
Ensure that a source_dir is set. This will create a temporary build dir if the name of the requirement isn't known yet. :param parent_dir: The ideal pip parent_dir for the source_dir. Generally src_dir for editables and build_dir for sdists. :return: self.source_dir
Ensure that a source_dir is set.
def ensure_has_source_dir(
    self,
    parent_dir: str,
    autodelete: bool = False,
    parallel_builds: bool = False,
) -> None:
    """Ensure that a source_dir is set.

    This will create a temporary build dir if the name of the requirement
    isn't known yet.

    :param parent_dir: The ideal pip parent_dir for the source_dir.
        Generally src_dir for editables and build_dir for sdists.
    :return: self.source_dir
    """
    if self.source_dir is None:
        self.source_dir = self.ensure_build_location(
            parent_dir,
            autodelete=autodelete,
            parallel_builds=parallel_builds,
        )
[ "def", "ensure_has_source_dir", "(", "self", ",", "parent_dir", ":", "str", ",", "autodelete", ":", "bool", "=", "False", ",", "parallel_builds", ":", "bool", "=", "False", ",", ")", "->", "None", ":", "if", "self", ".", "source_dir", "is", "None", ":", "self", ".", "source_dir", "=", "self", ".", "ensure_build_location", "(", "parent_dir", ",", "autodelete", "=", "autodelete", ",", "parallel_builds", "=", "parallel_builds", ",", ")" ]
[ 565, 4 ]
[ 585, 13 ]
python
en
['en', 'fr', 'en']
True
InstallRequirement.uninstall
( self, auto_confirm: bool = False, verbose: bool = False )
Uninstall the distribution currently satisfying this requirement. Prompts before removing or modifying files unless ``auto_confirm`` is True. Refuses to delete or modify files outside of ``sys.prefix`` - thus uninstallation within a virtual environment can only modify that virtual environment, even if the virtualenv is linked to global site-packages.
Uninstall the distribution currently satisfying this requirement.
def uninstall(
    self, auto_confirm: bool = False, verbose: bool = False
) -> Optional[UninstallPathSet]:
    """
    Uninstall the distribution currently satisfying this requirement.

    Prompts before removing or modifying files unless
    ``auto_confirm`` is True.

    Refuses to delete or modify files outside of ``sys.prefix`` -
    thus uninstallation within a virtual environment can only
    modify that virtual environment, even if the virtualenv is
    linked to global site-packages.
    """
    assert self.req
    dist = get_distribution(self.req.name)
    if not dist:
        logger.warning("Skipping %s as it is not installed.", self.name)
        return None
    logger.info('Found existing installation: %s', dist)

    uninstalled_pathset = UninstallPathSet.from_dist(dist)
    uninstalled_pathset.remove(auto_confirm, verbose)
    return uninstalled_pathset
[ "def", "uninstall", "(", "self", ",", "auto_confirm", ":", "bool", "=", "False", ",", "verbose", ":", "bool", "=", "False", ")", "->", "Optional", "[", "UninstallPathSet", "]", ":", "assert", "self", ".", "req", "dist", "=", "get_distribution", "(", "self", ".", "req", ".", "name", ")", "if", "not", "dist", ":", "logger", ".", "warning", "(", "\"Skipping %s as it is not installed.\"", ",", "self", ".", "name", ")", "return", "None", "logger", ".", "info", "(", "'Found existing installation: %s'", ",", "dist", ")", "uninstalled_pathset", "=", "UninstallPathSet", ".", "from_dist", "(", "dist", ")", "uninstalled_pathset", ".", "remove", "(", "auto_confirm", ",", "verbose", ")", "return", "uninstalled_pathset" ]
[ 609, 4 ]
[ 633, 34 ]
python
en
['en', 'error', 'th']
False
InstallRequirement.archive
(self, build_dir: Optional[str])
Saves archive to provided build_dir. Used for saving downloaded VCS requirements as part of `pip download`.
Saves archive to provided build_dir.
def archive(self, build_dir: Optional[str]) -> None:
    """Saves archive to provided build_dir.

    Used for saving downloaded VCS requirements as part of `pip download`.
    """
    assert self.source_dir
    if build_dir is None:
        return

    create_archive = True
    archive_name = '{}-{}.zip'.format(self.name, self.metadata["version"])
    archive_path = os.path.join(build_dir, archive_name)

    if os.path.exists(archive_path):
        response = ask_path_exists(
            'The file {} exists. (i)gnore, (w)ipe, '
            '(b)ackup, (a)bort '.format(
                display_path(archive_path)),
            ('i', 'w', 'b', 'a'))
        if response == 'i':
            create_archive = False
        elif response == 'w':
            logger.warning('Deleting %s', display_path(archive_path))
            os.remove(archive_path)
        elif response == 'b':
            dest_file = backup_dir(archive_path)
            logger.warning(
                'Backing up %s to %s',
                display_path(archive_path),
                display_path(dest_file),
            )
            shutil.move(archive_path, dest_file)
        elif response == 'a':
            sys.exit(-1)

    if not create_archive:
        return

    zip_output = zipfile.ZipFile(
        archive_path, 'w', zipfile.ZIP_DEFLATED, allowZip64=True,
    )
    with zip_output:
        dir = os.path.normcase(
            os.path.abspath(self.unpacked_source_directory)
        )
        for dirpath, dirnames, filenames in os.walk(dir):
            for dirname in dirnames:
                dir_arcname = self._get_archive_name(
                    dirname, parentdir=dirpath, rootdir=dir,
                )
                zipdir = zipfile.ZipInfo(dir_arcname + '/')
                zipdir.external_attr = 0x1ED << 16  # 0o755
                zip_output.writestr(zipdir, '')
            for filename in filenames:
                file_arcname = self._get_archive_name(
                    filename, parentdir=dirpath, rootdir=dir,
                )
                filename = os.path.join(dirpath, filename)
                zip_output.write(filename, file_arcname)

    logger.info('Saved %s', display_path(archive_path))
[ "def", "archive", "(", "self", ",", "build_dir", ":", "Optional", "[", "str", "]", ")", "->", "None", ":", "assert", "self", ".", "source_dir", "if", "build_dir", "is", "None", ":", "return", "create_archive", "=", "True", "archive_name", "=", "'{}-{}.zip'", ".", "format", "(", "self", ".", "name", ",", "self", ".", "metadata", "[", "\"version\"", "]", ")", "archive_path", "=", "os", ".", "path", ".", "join", "(", "build_dir", ",", "archive_name", ")", "if", "os", ".", "path", ".", "exists", "(", "archive_path", ")", ":", "response", "=", "ask_path_exists", "(", "'The file {} exists. (i)gnore, (w)ipe, '", "'(b)ackup, (a)bort '", ".", "format", "(", "display_path", "(", "archive_path", ")", ")", ",", "(", "'i'", ",", "'w'", ",", "'b'", ",", "'a'", ")", ")", "if", "response", "==", "'i'", ":", "create_archive", "=", "False", "elif", "response", "==", "'w'", ":", "logger", ".", "warning", "(", "'Deleting %s'", ",", "display_path", "(", "archive_path", ")", ")", "os", ".", "remove", "(", "archive_path", ")", "elif", "response", "==", "'b'", ":", "dest_file", "=", "backup_dir", "(", "archive_path", ")", "logger", ".", "warning", "(", "'Backing up %s to %s'", ",", "display_path", "(", "archive_path", ")", ",", "display_path", "(", "dest_file", ")", ",", ")", "shutil", ".", "move", "(", "archive_path", ",", "dest_file", ")", "elif", "response", "==", "'a'", ":", "sys", ".", "exit", "(", "-", "1", ")", "if", "not", "create_archive", ":", "return", "zip_output", "=", "zipfile", ".", "ZipFile", "(", "archive_path", ",", "'w'", ",", "zipfile", ".", "ZIP_DEFLATED", ",", "allowZip64", "=", "True", ",", ")", "with", "zip_output", ":", "dir", "=", "os", ".", "path", ".", "normcase", "(", "os", ".", "path", ".", "abspath", "(", "self", ".", "unpacked_source_directory", ")", ")", "for", "dirpath", ",", "dirnames", ",", "filenames", "in", "os", ".", "walk", "(", "dir", ")", ":", "for", "dirname", "in", "dirnames", ":", "dir_arcname", "=", "self", ".", "_get_archive_name", "(", "dirname", ",", "parentdir", "=", "dirpath", ",", "rootdir", "=", "dir", ",", ")", "zipdir", "=", "zipfile", ".", "ZipInfo", "(", "dir_arcname", "+", "'/'", ")", "zipdir", ".", "external_attr", "=", "0x1ED", "<<", "16", "# 0o755", "zip_output", ".", "writestr", "(", "zipdir", ",", "''", ")", "for", "filename", "in", "filenames", ":", "file_arcname", "=", "self", ".", "_get_archive_name", "(", "filename", ",", "parentdir", "=", "dirpath", ",", "rootdir", "=", "dir", ",", ")", "filename", "=", "os", ".", "path", ".", "join", "(", "dirpath", ",", "filename", ")", "zip_output", ".", "write", "(", "filename", ",", "file_arcname", ")", "logger", ".", "info", "(", "'Saved %s'", ",", "display_path", "(", "archive_path", ")", ")" ]
[ 649, 4 ]
[ 709, 59 ]
python
en
['en', 'en', 'en']
True
split_template_path
(template)
Split a path into segments and perform a sanity check. If it detects '..' in the path it will raise a `TemplateNotFound` error.
Split a path into segments and perform a sanity check. If it detects '..' in the path it will raise a `TemplateNotFound` error.
def split_template_path(template):
    """Split a path into segments and perform a sanity check.  If it detects
    '..' in the path it will raise a `TemplateNotFound` error.
    """
    pieces = []
    for piece in template.split('/'):
        if path.sep in piece \
           or (path.altsep and path.altsep in piece) or \
           piece == path.pardir:
            raise TemplateNotFound(template)
        elif piece and piece != '.':
            pieces.append(piece)
    return pieces
[ "def", "split_template_path", "(", "template", ")", ":", "pieces", "=", "[", "]", "for", "piece", "in", "template", ".", "split", "(", "'/'", ")", ":", "if", "path", ".", "sep", "in", "piece", "or", "(", "path", ".", "altsep", "and", "path", ".", "altsep", "in", "piece", ")", "or", "piece", "==", "path", ".", "pardir", ":", "raise", "TemplateNotFound", "(", "template", ")", "elif", "piece", "and", "piece", "!=", "'.'", ":", "pieces", ".", "append", "(", "piece", ")", "return", "pieces" ]
[ 21, 0 ]
[ 33, 17 ]
python
en
['en', 'en', 'en']
True
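Assuming Jinja2 is installed, the helper can be exercised directly:

from jinja2 import TemplateNotFound
from jinja2.loaders import split_template_path

print(split_template_path("users/profile.html"))     # ['users', 'profile.html']
print(split_template_path("./users//profile.html"))  # same: '.' and '' are dropped
try:
    split_template_path("../secret.txt")
except TemplateNotFound as exc:
    print("rejected:", exc)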
BaseLoader.get_source
(self, environment, template)
Get the template source, filename and reload helper for a template. It's passed the environment and template name and has to return a tuple in the form ``(source, filename, uptodate)`` or raise a `TemplateNotFound` error if it can't locate the template. The source part of the returned tuple must be the source of the template as a unicode string or an ASCII bytestring. The filename should be the name of the file on the filesystem if it was loaded from there, otherwise `None`. The filename is used by Python for the tracebacks if no loader extension is used. The last item in the tuple is the `uptodate` function. If auto reloading is enabled it's always called to check if the template changed. No arguments are passed so the function must store the old state somewhere (for example in a closure). If it returns `False` the template will be reloaded.
Get the template source, filename and reload helper for a template. It's passed the environment and template name and has to return a tuple in the form ``(source, filename, uptodate)`` or raise a `TemplateNotFound` error if it can't locate the template.
def get_source(self, environment, template):
    """Get the template source, filename and reload helper for a template.
    It's passed the environment and template name and has to return a
    tuple in the form ``(source, filename, uptodate)`` or raise a
    `TemplateNotFound` error if it can't locate the template.

    The source part of the returned tuple must be the source of the
    template as a unicode string or an ASCII bytestring.  The filename
    should be the name of the file on the filesystem if it was loaded
    from there, otherwise `None`.  The filename is used by Python for
    the tracebacks if no loader extension is used.

    The last item in the tuple is the `uptodate` function.  If auto
    reloading is enabled it's always called to check if the template
    changed.  No arguments are passed so the function must store the
    old state somewhere (for example in a closure).  If it returns
    `False` the template will be reloaded.
    """
    if not self.has_source_access:
        raise RuntimeError('%s cannot provide access to the source' %
                           self.__class__.__name__)
    raise TemplateNotFound(template)
[ "def", "get_source", "(", "self", ",", "environment", ",", "template", ")", ":", "if", "not", "self", ".", "has_source_access", ":", "raise", "RuntimeError", "(", "'%s cannot provide access to the source'", "%", "self", ".", "__class__", ".", "__name__", ")", "raise", "TemplateNotFound", "(", "template", ")" ]
[ 69, 4 ]
[ 90, 40 ]
python
en
['en', 'en', 'en']
True
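A minimal custom loader showing the (source, filename, uptodate) contract described above. Jinja2 ships a similar DictLoader, so this MappingLoader is purely illustrative:

from jinja2 import BaseLoader, Environment, TemplateNotFound

class MappingLoader(BaseLoader):
    def __init__(self, mapping):
        self.mapping = mapping

    def get_source(self, environment, template):
        try:
            source = self.mapping[template]
        except KeyError:
            raise TemplateNotFound(template)
        # In-memory source: no filename, and it is always up to date.
        return source, None, lambda: True

env = Environment(loader=MappingLoader({"hello.txt": "Hello {{ name }}!"}))
print(env.get_template("hello.txt").render(name="world"))  # Hello world!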
BaseLoader.list_templates
(self)
Iterates over all templates. If the loader does not support that it should raise a :exc:`TypeError` which is the default behavior.
Iterates over all templates. If the loader does not support that it should raise a :exc:`TypeError` which is the default behavior.
def list_templates(self):
    """Iterates over all templates.  If the loader does not support that
    it should raise a :exc:`TypeError` which is the default behavior.
    """
    raise TypeError('this loader cannot iterate over all templates')
[ "def", "list_templates", "(", "self", ")", ":", "raise", "TypeError", "(", "'this loader cannot iterate over all templates'", ")" ]
[ 92, 4 ]
[ 96, 72 ]
python
en
['en', 'en', 'en']
True
BaseLoader.load
(self, environment, name, globals=None)
Loads a template. This method looks up the template in the cache or loads one by calling :meth:`get_source`. Subclasses should not override this method as loaders working on collections of other loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`) will not call this method but `get_source` directly.
Loads a template. This method looks up the template in the cache or loads one by calling :meth:`get_source`. Subclasses should not override this method as loaders working on collections of other loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`) will not call this method but `get_source` directly.
def load(self, environment, name, globals=None):
    """Loads a template.  This method looks up the template in the cache
    or loads one by calling :meth:`get_source`.  Subclasses should not
    override this method as loaders working on collections of other
    loaders (such as :class:`PrefixLoader` or :class:`ChoiceLoader`)
    will not call this method but `get_source` directly.
    """
    code = None
    if globals is None:
        globals = {}

    # first we try to get the source for this template together
    # with the filename and the uptodate function.
    source, filename, uptodate = self.get_source(environment, name)

    # try to load the code from the bytecode cache if there is a
    # bytecode cache configured.
    bcc = environment.bytecode_cache
    if bcc is not None:
        bucket = bcc.get_bucket(environment, name, filename, source)
        code = bucket.code

    # if we don't have code so far (not cached, no longer up to
    # date) etc. we compile the template
    if code is None:
        code = environment.compile(source, name, filename)

    # if the bytecode cache is available and the bucket doesn't
    # have a code so far, we give the bucket the new code and put
    # it back to the bytecode cache.
    if bcc is not None and bucket.code is None:
        bucket.code = code
        bcc.set_bucket(bucket)

    return environment.template_class.from_code(environment, code,
                                                globals, uptodate)
[ "def", "load", "(", "self", ",", "environment", ",", "name", ",", "globals", "=", "None", ")", ":", "code", "=", "None", "if", "globals", "is", "None", ":", "globals", "=", "{", "}", "# first we try to get the source for this template together", "# with the filename and the uptodate function.", "source", ",", "filename", ",", "uptodate", "=", "self", ".", "get_source", "(", "environment", ",", "name", ")", "# try to load the code from the bytecode cache if there is a", "# bytecode cache configured.", "bcc", "=", "environment", ".", "bytecode_cache", "if", "bcc", "is", "not", "None", ":", "bucket", "=", "bcc", ".", "get_bucket", "(", "environment", ",", "name", ",", "filename", ",", "source", ")", "code", "=", "bucket", ".", "code", "# if we don't have code so far (not cached, no longer up to", "# date) etc. we compile the template", "if", "code", "is", "None", ":", "code", "=", "environment", ".", "compile", "(", "source", ",", "name", ",", "filename", ")", "# if the bytecode cache is available and the bucket doesn't", "# have a code so far, we give the bucket the new code and put", "# it back to the bytecode cache.", "if", "bcc", "is", "not", "None", "and", "bucket", ".", "code", "is", "None", ":", "bucket", ".", "code", "=", "code", "bcc", ".", "set_bucket", "(", "bucket", ")", "return", "environment", ".", "template_class", ".", "from_code", "(", "environment", ",", "code", ",", "globals", ",", "uptodate", ")" ]
[ 99, 4 ]
[ 134, 70 ]
python
en
['en', 'en', 'en']
True
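The bytecode-cache branch in load() only activates when the environment is given a cache. A short usage sketch; the directory paths and template name are assumptions:

from jinja2 import Environment, FileSystemLoader, FileSystemBytecodeCache

env = Environment(
    loader=FileSystemLoader("templates"),
    bytecode_cache=FileSystemBytecodeCache("/tmp/jinja_cache"),
    auto_reload=True,  # the uptodate() helper is consulted on lookups
)
tmpl = env.get_template("index.html")  # compiles and fills the cache
tmpl = env.get_template("index.html")  # may reuse cached bytecode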
BaseDatabaseFeatures.supports_explaining_query_execution
(self)
Does this backend support explaining query execution?
Does this backend support explaining query execution?
def supports_explaining_query_execution(self):
    """Does this backend support explaining query execution?"""
    return self.connection.ops.explain_prefix is not None
[ "def", "supports_explaining_query_execution", "(", "self", ")", ":", "return", "self", ".", "connection", ".", "ops", ".", "explain_prefix", "is", "not", "None" ]
[ 340, 4 ]
[ 342, 61 ]
python
en
['en', 'en', 'en']
True
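On the calling side, this feature flag gates QuerySet.explain() (Django 2.1+), which prepends the backend's explain_prefix to the compiled SQL. A hedged sketch; the Author model is hypothetical and a configured Django project is assumed:

from django.db import connection

if connection.features.supports_explaining_query_execution:
    plan = Author.objects.filter(name__startswith="A").explain()
    print(plan)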
BaseDatabaseFeatures.supports_transactions
(self)
Confirm support for transactions.
Confirm support for transactions.
def supports_transactions(self):
    """Confirm support for transactions."""
    with self.connection.cursor() as cursor:
        cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
        self.connection.set_autocommit(False)
        cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
        self.connection.rollback()
        self.connection.set_autocommit(True)
        cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
        count, = cursor.fetchone()
        cursor.execute('DROP TABLE ROLLBACK_TEST')
    return count == 0
[ "def", "supports_transactions", "(", "self", ")", ":", "with", "self", ".", "connection", ".", "cursor", "(", ")", "as", "cursor", ":", "cursor", ".", "execute", "(", "'CREATE TABLE ROLLBACK_TEST (X INT)'", ")", "self", ".", "connection", ".", "set_autocommit", "(", "False", ")", "cursor", ".", "execute", "(", "'INSERT INTO ROLLBACK_TEST (X) VALUES (8)'", ")", "self", ".", "connection", ".", "rollback", "(", ")", "self", ".", "connection", ".", "set_autocommit", "(", "True", ")", "cursor", ".", "execute", "(", "'SELECT COUNT(X) FROM ROLLBACK_TEST'", ")", "count", ",", "=", "cursor", ".", "fetchone", "(", ")", "cursor", ".", "execute", "(", "'DROP TABLE ROLLBACK_TEST'", ")", "return", "count", "==", "0" ]
[ 345, 4 ]
[ 356, 25 ]
python
en
['en', 'en', 'en']
True
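The same probe can be expressed directly against the DB-API. A self-contained sketch using sqlite3, which does support rollback, so the probe returns True:

import sqlite3

def probe_transactions(path=":memory:"):
    conn = sqlite3.connect(path)
    try:
        cur = conn.cursor()
        cur.execute("CREATE TABLE ROLLBACK_TEST (X INT)")
        conn.commit()
        cur.execute("INSERT INTO ROLLBACK_TEST (X) VALUES (8)")
        conn.rollback()  # a transactional backend discards the INSERT
        cur.execute("SELECT COUNT(X) FROM ROLLBACK_TEST")
        (count,) = cur.fetchone()
        cur.execute("DROP TABLE ROLLBACK_TEST")
        conn.commit()
        return count == 0
    finally:
        conn.close()

print(probe_transactions())  # True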
HTTPConnection.host
(self)
Getter method to remove any trailing dots that indicate the hostname is an FQDN.

In general, SSL certificates don't include the trailing dot indicating a fully-qualified domain name, and thus, they don't validate properly when checked against a domain name that includes the dot. In addition, some servers may not expect to receive the trailing dot when provided.

However, the hostname with trailing dot is critical to DNS resolution; doing a lookup with the trailing dot will properly only resolve the appropriate FQDN, whereas a lookup without a trailing dot will search the system's search domain list. Thus, it's important to keep the original host around for use only in those cases where it's appropriate (i.e., when doing DNS lookup to establish the actual TCP connection across which we're going to send HTTP requests).
Getter method to remove any trailing dots that indicate the hostname is an FQDN.
def host(self):
    """
    Getter method to remove any trailing dots that indicate the hostname
    is an FQDN.

    In general, SSL certificates don't include the trailing dot indicating
    a fully-qualified domain name, and thus, they don't validate properly
    when checked against a domain name that includes the dot. In addition,
    some servers may not expect to receive the trailing dot when provided.

    However, the hostname with trailing dot is critical to DNS resolution;
    doing a lookup with the trailing dot will properly only resolve the
    appropriate FQDN, whereas a lookup without a trailing dot will search
    the system's search domain list. Thus, it's important to keep the
    original host around for use only in those cases where it's
    appropriate (i.e., when doing DNS lookup to establish the actual TCP
    connection across which we're going to send HTTP requests).
    """
    return self._dns_host.rstrip(".")
[ "def", "host", "(", "self", ")", ":", "return", "self", ".", "_dns_host", ".", "rstrip", "(", "\".\"", ")" ]
[ 127, 4 ]
[ 143, 41 ]
python
en
['en', 'error', 'th']
False
HTTPConnection.host
(self, value)
Setter for the `host` property. We assume that only urllib3 uses the _dns_host attribute; httplib itself only uses `host`, and it seems reasonable that other libraries follow suit.
Setter for the `host` property.
def host(self, value):
    """
    Setter for the `host` property.

    We assume that only urllib3 uses the _dns_host attribute; httplib
    itself only uses `host`, and it seems reasonable that other libraries
    follow suit.
    """
    self._dns_host = value
[ "def", "host", "(", "self", ",", "value", ")", ":", "self", ".", "_dns_host", "=", "value" ]
[ 146, 4 ]
[ 153, 30 ]
python
en
['en', 'error', 'th']
False
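The getter/setter pair is easiest to see in isolation. Hostish below is an illustrative stand-in (not urllib3's class) showing why two views of the hostname coexist:

class Hostish:
    def __init__(self, host):
        self._dns_host = host  # keeps the trailing dot for DNS lookups

    @property
    def host(self):
        # Strip the FQDN trailing dot for TLS matching and Host headers.
        return self._dns_host.rstrip(".")

    @host.setter
    def host(self, value):
        self._dns_host = value

h = Hostish("example.com.")
print(h.host)       # example.com   (certificate validation, headers)
print(h._dns_host)  # example.com.  (actual DNS lookup)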
HTTPConnection._new_conn
(self)
Establish a socket connection and set nodelay settings on it.

:return: New socket connection.
Establish a socket connection and set nodelay settings on it.
def _new_conn(self):
    """Establish a socket connection and set nodelay settings on it.

    :return: New socket connection.
    """
    extra_kw = {}
    if self.source_address:
        extra_kw["source_address"] = self.source_address

    if self.socket_options:
        extra_kw["socket_options"] = self.socket_options

    try:
        conn = connection.create_connection(
            (self._dns_host, self.port), self.timeout, **extra_kw
        )
    except SocketTimeout:
        raise ConnectTimeoutError(
            self,
            "Connection to %s timed out. (connect timeout=%s)"
            % (self.host, self.timeout),
        )
    except SocketError as e:
        raise NewConnectionError(
            self, "Failed to establish a new connection: %s" % e
        )

    return conn
[ "def", "_new_conn", "(", "self", ")", ":", "extra_kw", "=", "{", "}", "if", "self", ".", "source_address", ":", "extra_kw", "[", "\"source_address\"", "]", "=", "self", ".", "source_address", "if", "self", ".", "socket_options", ":", "extra_kw", "[", "\"socket_options\"", "]", "=", "self", ".", "socket_options", "try", ":", "conn", "=", "connection", ".", "create_connection", "(", "(", "self", ".", "_dns_host", ",", "self", ".", "port", ")", ",", "self", ".", "timeout", ",", "*", "*", "extra_kw", ")", "except", "SocketTimeout", ":", "raise", "ConnectTimeoutError", "(", "self", ",", "\"Connection to %s timed out. (connect timeout=%s)\"", "%", "(", "self", ".", "host", ",", "self", ".", "timeout", ")", ",", ")", "except", "SocketError", "as", "e", ":", "raise", "NewConnectionError", "(", "self", ",", "\"Failed to establish a new connection: %s\"", "%", "e", ")", "return", "conn" ]
[ 155, 4 ]
[ 184, 19 ]
python
en
['en', 'st', 'en']
True
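The same connect-and-translate-errors pattern can be sketched with only the standard library; socket.create_connection is what urllib3's connection helper wraps, and TimeoutError/ConnectionError stand in for urllib3's exception types here:

import socket

def open_tcp(host, port, timeout=5.0, source_address=None):
    extra_kw = {}
    if source_address:
        extra_kw["source_address"] = source_address
    try:
        return socket.create_connection((host, port), timeout, **extra_kw)
    except socket.timeout:
        raise TimeoutError(
            "Connection to %s timed out. (connect timeout=%s)" % (host, timeout)
        )
    except OSError as e:
        raise ConnectionError("Failed to establish a new connection: %s" % e)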
HTTPConnection.request_chunked
(self, method, url, body=None, headers=None)
Alternative to the common request method, which sends the body with chunked encoding and not as one block
Alternative to the common request method, which sends the body with chunked encoding and not as one block
def request_chunked(self, method, url, body=None, headers=None):
    """
    Alternative to the common request method, which sends the
    body with chunked encoding and not as one block
    """
    headers = headers or {}
    header_keys = set([six.ensure_str(k.lower()) for k in headers])
    skip_accept_encoding = "accept-encoding" in header_keys
    skip_host = "host" in header_keys
    self.putrequest(
        method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
    )
    if "user-agent" not in header_keys:
        self.putheader("User-Agent", _get_default_user_agent())
    for header, value in headers.items():
        self.putheader(header, value)
    if "transfer-encoding" not in header_keys:
        self.putheader("Transfer-Encoding", "chunked")
    self.endheaders()

    if body is not None:
        stringish_types = six.string_types + (bytes,)
        if isinstance(body, stringish_types):
            body = (body,)
        for chunk in body:
            if not chunk:
                continue
            if not isinstance(chunk, bytes):
                chunk = chunk.encode("utf8")
            len_str = hex(len(chunk))[2:]
            to_send = bytearray(len_str.encode())
            to_send += b"\r\n"
            to_send += chunk
            to_send += b"\r\n"
            self.send(to_send)

    # After the if clause, to always have a closed body
    self.send(b"0\r\n\r\n")
[ "def", "request_chunked", "(", "self", ",", "method", ",", "url", ",", "body", "=", "None", ",", "headers", "=", "None", ")", ":", "headers", "=", "headers", "or", "{", "}", "header_keys", "=", "set", "(", "[", "six", ".", "ensure_str", "(", "k", ".", "lower", "(", ")", ")", "for", "k", "in", "headers", "]", ")", "skip_accept_encoding", "=", "\"accept-encoding\"", "in", "header_keys", "skip_host", "=", "\"host\"", "in", "header_keys", "self", ".", "putrequest", "(", "method", ",", "url", ",", "skip_accept_encoding", "=", "skip_accept_encoding", ",", "skip_host", "=", "skip_host", ")", "if", "\"user-agent\"", "not", "in", "header_keys", ":", "self", ".", "putheader", "(", "\"User-Agent\"", ",", "_get_default_user_agent", "(", ")", ")", "for", "header", ",", "value", "in", "headers", ".", "items", "(", ")", ":", "self", ".", "putheader", "(", "header", ",", "value", ")", "if", "\"transfer-encoding\"", "not", "in", "header_keys", ":", "self", ".", "putheader", "(", "\"Transfer-Encoding\"", ",", "\"chunked\"", ")", "self", ".", "endheaders", "(", ")", "if", "body", "is", "not", "None", ":", "stringish_types", "=", "six", ".", "string_types", "+", "(", "bytes", ",", ")", "if", "isinstance", "(", "body", ",", "stringish_types", ")", ":", "body", "=", "(", "body", ",", ")", "for", "chunk", "in", "body", ":", "if", "not", "chunk", ":", "continue", "if", "not", "isinstance", "(", "chunk", ",", "bytes", ")", ":", "chunk", "=", "chunk", ".", "encode", "(", "\"utf8\"", ")", "len_str", "=", "hex", "(", "len", "(", "chunk", ")", ")", "[", "2", ":", "]", "to_send", "=", "bytearray", "(", "len_str", ".", "encode", "(", ")", ")", "to_send", "+=", "b\"\\r\\n\"", "to_send", "+=", "chunk", "to_send", "+=", "b\"\\r\\n\"", "self", ".", "send", "(", "to_send", ")", "# After the if clause, to always have a closed body", "self", ".", "send", "(", "b\"0\\r\\n\\r\\n\"", ")" ]
[ 235, 4 ]
[ 272, 31 ]
python
en
['en', 'error', 'th']
False
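The framing inside the loop is plain HTTP/1.1 chunked encoding: hex length, CRLF, payload, CRLF, with a zero-length chunk as terminator. A standalone sketch of just the framing:

def frame_chunk(chunk: bytes) -> bytes:
    # One chunk on the wire: hex length, CRLF, payload, CRLF.
    return hex(len(chunk))[2:].encode() + b"\r\n" + chunk + b"\r\n"

body = b"".join(frame_chunk(c) for c in (b"hello, ", b"world"))
body += b"0\r\n\r\n"  # zero-length chunk closes the body
print(body)  # b'7\r\nhello, \r\n5\r\nworld\r\n0\r\n\r\n'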
HTTPSConnection.set_cert
( self, key_file=None, cert_file=None, cert_reqs=None, key_password=None, ca_certs=None, assert_hostname=None, assert_fingerprint=None, ca_cert_dir=None, ca_cert_data=None, )
This method should only be called once, before the connection is used.
This method should only be called once, before the connection is used.
def set_cert(
    self,
    key_file=None,
    cert_file=None,
    cert_reqs=None,
    key_password=None,
    ca_certs=None,
    assert_hostname=None,
    assert_fingerprint=None,
    ca_cert_dir=None,
    ca_cert_data=None,
):
    """
    This method should only be called once, before the connection is used.
    """
    # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
    # have an SSLContext object in which case we'll use its verify_mode.
    if cert_reqs is None:
        if self.ssl_context is not None:
            cert_reqs = self.ssl_context.verify_mode
        else:
            cert_reqs = resolve_cert_reqs(None)

    self.key_file = key_file
    self.cert_file = cert_file
    self.cert_reqs = cert_reqs
    self.key_password = key_password
    self.assert_hostname = assert_hostname
    self.assert_fingerprint = assert_fingerprint
    self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
    self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
    self.ca_cert_data = ca_cert_data
[ "def", "set_cert", "(", "self", ",", "key_file", "=", "None", ",", "cert_file", "=", "None", ",", "cert_reqs", "=", "None", ",", "key_password", "=", "None", ",", "ca_certs", "=", "None", ",", "assert_hostname", "=", "None", ",", "assert_fingerprint", "=", "None", ",", "ca_cert_dir", "=", "None", ",", "ca_cert_data", "=", "None", ",", ")", ":", "# If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also", "# have an SSLContext object in which case we'll use its verify_mode.", "if", "cert_reqs", "is", "None", ":", "if", "self", ".", "ssl_context", "is", "not", "None", ":", "cert_reqs", "=", "self", ".", "ssl_context", ".", "verify_mode", "else", ":", "cert_reqs", "=", "resolve_cert_reqs", "(", "None", ")", "self", ".", "key_file", "=", "key_file", "self", ".", "cert_file", "=", "cert_file", "self", ".", "cert_reqs", "=", "cert_reqs", "self", ".", "key_password", "=", "key_password", "self", ".", "assert_hostname", "=", "assert_hostname", "self", ".", "assert_fingerprint", "=", "assert_fingerprint", "self", ".", "ca_certs", "=", "ca_certs", "and", "os", ".", "path", ".", "expanduser", "(", "ca_certs", ")", "self", ".", "ca_cert_dir", "=", "ca_cert_dir", "and", "os", ".", "path", ".", "expanduser", "(", "ca_cert_dir", ")", "self", ".", "ca_cert_data", "=", "ca_cert_data" ]
[ 317, 4 ]
[ 348, 40 ]
python
en
['en', 'error', 'th']
False
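Callers normally don't invoke set_cert() directly; the same settings are passed through the pool constructors. A sketch with assumed file paths:

import urllib3

http = urllib3.PoolManager(
    cert_reqs="CERT_REQUIRED",
    ca_certs="/etc/ssl/certs/ca-certificates.crt",  # assumed CA bundle path
    cert_file="/path/to/client.pem",                # optional client cert
    key_file="/path/to/client.key",
)
resp = http.request("GET", "https://example.com/")
print(resp.status)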
HTTPSConnection._connect_tls_proxy
(self, hostname, conn)
Establish a TLS connection to the proxy using the provided SSL context.
Establish a TLS connection to the proxy using the provided SSL context.
def _connect_tls_proxy(self, hostname, conn):
    """
    Establish a TLS connection to the proxy using the provided SSL context.
    """
    proxy_config = self.proxy_config
    ssl_context = proxy_config.ssl_context
    if ssl_context:
        # If the user provided a proxy context, we assume CA and client
        # certificates have already been set
        return ssl_wrap_socket(
            sock=conn,
            server_hostname=hostname,
            ssl_context=ssl_context,
        )

    ssl_context = create_proxy_ssl_context(
        self.ssl_version,
        self.cert_reqs,
        self.ca_certs,
        self.ca_cert_dir,
        self.ca_cert_data,
    )
    # By default urllib3's SSLContext disables `check_hostname` and uses
    # a custom check. For proxies we're good with relying on the default
    # verification.
    ssl_context.check_hostname = True

    # If no cert was provided, use only the default options for server
    # certificate validation
    return ssl_wrap_socket(
        sock=conn,
        ca_certs=self.ca_certs,
        ca_cert_dir=self.ca_cert_dir,
        ca_cert_data=self.ca_cert_data,
        server_hostname=hostname,
        ssl_context=ssl_context,
    )
[ "def", "_connect_tls_proxy", "(", "self", ",", "hostname", ",", "conn", ")", ":", "proxy_config", "=", "self", ".", "proxy_config", "ssl_context", "=", "proxy_config", ".", "ssl_context", "if", "ssl_context", ":", "# If the user provided a proxy context, we assume CA and client", "# certificates have already been set", "return", "ssl_wrap_socket", "(", "sock", "=", "conn", ",", "server_hostname", "=", "hostname", ",", "ssl_context", "=", "ssl_context", ",", ")", "ssl_context", "=", "create_proxy_ssl_context", "(", "self", ".", "ssl_version", ",", "self", ".", "cert_reqs", ",", "self", ".", "ca_certs", ",", "self", ".", "ca_cert_dir", ",", "self", ".", "ca_cert_data", ",", ")", "# By default urllib3's SSLContext disables `check_hostname` and uses", "# a custom check. For proxies we're good with relying on the default", "# verification.", "ssl_context", ".", "check_hostname", "=", "True", "# If no cert was provided, use only the default options for server", "# certificate validation", "return", "ssl_wrap_socket", "(", "sock", "=", "conn", ",", "ca_certs", "=", "self", ".", "ca_certs", ",", "ca_cert_dir", "=", "self", ".", "ca_cert_dir", ",", "ca_cert_data", "=", "self", ".", "ca_cert_data", ",", "server_hostname", "=", "hostname", ",", "ssl_context", "=", "ssl_context", ",", ")" ]
[ 470, 4 ]
[ 506, 9 ]
python
en
['en', 'error', 'th']
False
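From the user's side this corresponds to an HTTPS proxy with an optional caller-supplied context (urllib3 1.26+); the proxy URL below is an assumption:

import ssl
import urllib3

proxy_ctx = ssl.create_default_context()  # caller-provided proxy context
proxy = urllib3.ProxyManager(
    "https://proxy.example.com:8443/",
    proxy_ssl_context=proxy_ctx,  # omit it and a default proxy context is built
)
resp = proxy.request("GET", "https://example.com/")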
request
(method, url, **kwargs)
Constructs and sends a :class:`Request <Request>`.

:param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
:param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary, list of tuples or bytes to send in the query string for the :class:`Request`.
:param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload. ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')`` or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers to add for the file.
:param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) How many seconds to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple.
:type timeout: float or tuple
:param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use. Defaults to ``True``.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response

Usage::

  >>> import requests
  >>> req = requests.request('GET', 'https://httpbin.org/get')
  >>> req
  <Response [200]>
Constructs and sends a :class:`Request <Request>`.
def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object: ``GET``,
        ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in
        the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the
        :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the
        :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects``
        (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple
        ``('filename', fileobj, 'content_type')`` or a 4-tuple
        ``('filename', fileobj, 'content_type', custom_headers)``, where
        ``'content-type'`` is a string defining the content type of the
        given file and ``custom_headers`` a dict-like object containing
        additional headers to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP
        Auth.
    :param timeout: (optional) How many seconds to wait for the server to
        send data before giving up, as a float, or a :ref:`(connect
        timeout, read timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable
        GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to
        ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of
        the proxy.
    :param verify: (optional) Either a boolean, in which case it controls
        whether we verify the server's TLS certificate, or a string, in
        which case it must be a path to a CA bundle to use. Defaults to
        ``True``.
    :param stream: (optional) if ``False``, the response content will be
        immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem).
        If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'https://httpbin.org/get')
      >>> req
      <Response [200]>
    """

    # By using the 'with' statement we are sure the session is closed, thus we
    # avoid leaving sockets open which can trigger a ResourceWarning in some
    # cases, and look like a memory leak in others.
    with sessions.Session() as session:
        return session.request(method=method, url=url, **kwargs)
[ "def", "request", "(", "method", ",", "url", ",", "*", "*", "kwargs", ")", ":", "# By using the 'with' statement we are sure the session is closed, thus we", "# avoid leaving sockets open which can trigger a ResourceWarning in some", "# cases, and look like a memory leak in others.", "with", "sessions", ".", "Session", "(", ")", "as", "session", ":", "return", "session", ".", "request", "(", "method", "=", "method", ",", "url", "=", "url", ",", "*", "*", "kwargs", ")" ]
[ 15, 0 ]
[ 60, 64 ]
python
en
['en', 'en', 'en']
True
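A slightly fuller usage sketch than the docstring's, exercising a few of the documented keyword arguments (httpbin.org as in the docstring):

import requests

resp = requests.request(
    "POST",
    "https://httpbin.org/post",
    json={"greeting": "hello"},  # serialized as the request body
    timeout=(3.05, 27),          # (connect timeout, read timeout)
    allow_redirects=False,
)
print(resp.status_code, resp.headers.get("Content-Type"))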
get
(url, params=None, **kwargs)
r"""Sends a GET request. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary, list of tuples or bytes to send in the query string for the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :return: :class:`Response <Response>` object :rtype: requests.Response
r"""Sends a GET request.
def get(url, params=None, **kwargs):
    r"""Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('get', url, params=params, **kwargs)
[ "def", "get", "(", "url", ",", "params", "=", "None", ",", "*", "*", "kwargs", ")", ":", "return", "request", "(", "'get'", ",", "url", ",", "params", "=", "params", ",", "*", "*", "kwargs", ")" ]
[ 63, 0 ]
[ 74, 55 ]
python
en
['en', 'co', 'en']
True
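And the equivalent convenience call, with params encoded into the query string:

import requests

resp = requests.get(
    "https://httpbin.org/get",
    params={"q": "python", "page": 2},
    timeout=5,
)
print(resp.url)  # https://httpbin.org/get?q=python&page=2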