identifier (string, 1–155 chars) | parameters (string, 2–6.09k chars) | docstring (string, 11–63.4k chars) | docstring_summary (string, 0–63.4k chars) | function (string, 29–99.8k chars) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (1 class) | docstring_language (string, 2–7 chars) | docstring_language_predictions (string, 18–23 chars) | is_langid_reliable (2 classes) |
---|---|---|---|---|---|---|---|---|---|---|---|
compatible_tags | () |
Return (pyver, abi, arch) tuples compatible with this Python.
|
Return (pyver, abi, arch) tuples compatible with this Python.
|
def compatible_tags():
    """
    Return (pyver, abi, arch) tuples compatible with this Python.
    """
    versions = [VER_SUFFIX]
    major = VER_SUFFIX[0]
    for minor in range(sys.version_info[1] - 1, -1, -1):
        versions.append(''.join([major, str(minor)]))

    abis = []
    for suffix, _, _ in imp.get_suffixes():
        if suffix.startswith('.abi'):
            abis.append(suffix.split('.', 2)[1])
    abis.sort()
    if ABI != 'none':
        abis.insert(0, ABI)
    abis.append('none')
    result = []

    arches = [ARCH]
    if sys.platform == 'darwin':
        m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
        if m:
            name, major, minor, arch = m.groups()
            minor = int(minor)
            matches = [arch]
            if arch in ('i386', 'ppc'):
                matches.append('fat')
            if arch in ('i386', 'ppc', 'x86_64'):
                matches.append('fat3')
            if arch in ('ppc64', 'x86_64'):
                matches.append('fat64')
            if arch in ('i386', 'x86_64'):
                matches.append('intel')
            if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
                matches.append('universal')
            while minor >= 0:
                for match in matches:
                    s = '%s_%s_%s_%s' % (name, major, minor, match)
                    if s != ARCH:   # already there
                        arches.append(s)
                minor -= 1

    # Most specific - our Python version, ABI and arch
    for abi in abis:
        for arch in arches:
            result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
            # manylinux
            if abi != 'none' and sys.platform.startswith('linux'):
                arch = arch.replace('linux_', '')
                parts = _get_glibc_version()
                if len(parts) == 2:
                    if parts >= (2, 5):
                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
                                       'manylinux1_%s' % arch))
                    if parts >= (2, 12):
                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
                                       'manylinux2010_%s' % arch))
                    if parts >= (2, 17):
                        result.append((''.join((IMP_PREFIX, versions[0])), abi,
                                       'manylinux2014_%s' % arch))
                    result.append((''.join((IMP_PREFIX, versions[0])), abi,
                                   'manylinux_%s_%s_%s' % (parts[0], parts[1],
                                                           arch)))

    # where no ABI / arch dependency, but IMP_PREFIX dependency
    for i, version in enumerate(versions):
        result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
        if i == 0:
            result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))

    # no IMP_PREFIX, ABI or arch dependency
    for i, version in enumerate(versions):
        result.append((''.join(('py', version)), 'none', 'any'))
        if i == 0:
            result.append((''.join(('py', version[0])), 'none', 'any'))
    return set(result)
"def",
"compatible_tags",
"(",
")",
":",
"versions",
"=",
"[",
"VER_SUFFIX",
"]",
"major",
"=",
"VER_SUFFIX",
"[",
"0",
"]",
"for",
"minor",
"in",
"range",
"(",
"sys",
".",
"version_info",
"[",
"1",
"]",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"versions",
".",
"append",
"(",
"''",
".",
"join",
"(",
"[",
"major",
",",
"str",
"(",
"minor",
")",
"]",
")",
")",
"abis",
"=",
"[",
"]",
"for",
"suffix",
",",
"_",
",",
"_",
"in",
"imp",
".",
"get_suffixes",
"(",
")",
":",
"if",
"suffix",
".",
"startswith",
"(",
"'.abi'",
")",
":",
"abis",
".",
"append",
"(",
"suffix",
".",
"split",
"(",
"'.'",
",",
"2",
")",
"[",
"1",
"]",
")",
"abis",
".",
"sort",
"(",
")",
"if",
"ABI",
"!=",
"'none'",
":",
"abis",
".",
"insert",
"(",
"0",
",",
"ABI",
")",
"abis",
".",
"append",
"(",
"'none'",
")",
"result",
"=",
"[",
"]",
"arches",
"=",
"[",
"ARCH",
"]",
"if",
"sys",
".",
"platform",
"==",
"'darwin'",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r'(\\w+)_(\\d+)_(\\d+)_(\\w+)$'",
",",
"ARCH",
")",
"if",
"m",
":",
"name",
",",
"major",
",",
"minor",
",",
"arch",
"=",
"m",
".",
"groups",
"(",
")",
"minor",
"=",
"int",
"(",
"minor",
")",
"matches",
"=",
"[",
"arch",
"]",
"if",
"arch",
"in",
"(",
"'i386'",
",",
"'ppc'",
")",
":",
"matches",
".",
"append",
"(",
"'fat'",
")",
"if",
"arch",
"in",
"(",
"'i386'",
",",
"'ppc'",
",",
"'x86_64'",
")",
":",
"matches",
".",
"append",
"(",
"'fat3'",
")",
"if",
"arch",
"in",
"(",
"'ppc64'",
",",
"'x86_64'",
")",
":",
"matches",
".",
"append",
"(",
"'fat64'",
")",
"if",
"arch",
"in",
"(",
"'i386'",
",",
"'x86_64'",
")",
":",
"matches",
".",
"append",
"(",
"'intel'",
")",
"if",
"arch",
"in",
"(",
"'i386'",
",",
"'x86_64'",
",",
"'intel'",
",",
"'ppc'",
",",
"'ppc64'",
")",
":",
"matches",
".",
"append",
"(",
"'universal'",
")",
"while",
"minor",
">=",
"0",
":",
"for",
"match",
"in",
"matches",
":",
"s",
"=",
"'%s_%s_%s_%s'",
"%",
"(",
"name",
",",
"major",
",",
"minor",
",",
"match",
")",
"if",
"s",
"!=",
"ARCH",
":",
"# already there",
"arches",
".",
"append",
"(",
"s",
")",
"minor",
"-=",
"1",
"# Most specific - our Python version, ABI and arch",
"for",
"abi",
"in",
"abis",
":",
"for",
"arch",
"in",
"arches",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"IMP_PREFIX",
",",
"versions",
"[",
"0",
"]",
")",
")",
",",
"abi",
",",
"arch",
")",
")",
"# manylinux",
"if",
"abi",
"!=",
"'none'",
"and",
"sys",
".",
"platform",
".",
"startswith",
"(",
"'linux'",
")",
":",
"arch",
"=",
"arch",
".",
"replace",
"(",
"'linux_'",
",",
"''",
")",
"parts",
"=",
"_get_glibc_version",
"(",
")",
"if",
"len",
"(",
"parts",
")",
"==",
"2",
":",
"if",
"parts",
">=",
"(",
"2",
",",
"5",
")",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"IMP_PREFIX",
",",
"versions",
"[",
"0",
"]",
")",
")",
",",
"abi",
",",
"'manylinux1_%s'",
"%",
"arch",
")",
")",
"if",
"parts",
">=",
"(",
"2",
",",
"12",
")",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"IMP_PREFIX",
",",
"versions",
"[",
"0",
"]",
")",
")",
",",
"abi",
",",
"'manylinux2010_%s'",
"%",
"arch",
")",
")",
"if",
"parts",
">=",
"(",
"2",
",",
"17",
")",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"IMP_PREFIX",
",",
"versions",
"[",
"0",
"]",
")",
")",
",",
"abi",
",",
"'manylinux2014_%s'",
"%",
"arch",
")",
")",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"IMP_PREFIX",
",",
"versions",
"[",
"0",
"]",
")",
")",
",",
"abi",
",",
"'manylinux_%s_%s_%s'",
"%",
"(",
"parts",
"[",
"0",
"]",
",",
"parts",
"[",
"1",
"]",
",",
"arch",
")",
")",
")",
"# where no ABI / arch dependency, but IMP_PREFIX dependency",
"for",
"i",
",",
"version",
"in",
"enumerate",
"(",
"versions",
")",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"IMP_PREFIX",
",",
"version",
")",
")",
",",
"'none'",
",",
"'any'",
")",
")",
"if",
"i",
"==",
"0",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"IMP_PREFIX",
",",
"version",
"[",
"0",
"]",
")",
")",
",",
"'none'",
",",
"'any'",
")",
")",
"# no IMP_PREFIX, ABI or arch dependency",
"for",
"i",
",",
"version",
"in",
"enumerate",
"(",
"versions",
")",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"'py'",
",",
"version",
")",
")",
",",
"'none'",
",",
"'any'",
")",
")",
"if",
"i",
"==",
"0",
":",
"result",
".",
"append",
"(",
"(",
"''",
".",
"join",
"(",
"(",
"'py'",
",",
"version",
"[",
"0",
"]",
")",
")",
",",
"'none'",
",",
"'any'",
")",
")",
"return",
"set",
"(",
"result",
")"
] | [
| [960, 0] | [1037, 22] | python | en | ['en', 'error', 'th'] | False |
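A minimal usage sketch for the function above, assuming distlib is installed and the function is importable from its home module (distlib.wheel). Note that the code shown relies on the legacy `imp` module, so it targets older interpreters.

```python
# Hedged sketch: assumes distlib's wheel module imports cleanly here.
from distlib.wheel import compatible_tags

# Each element is a (pyver, abi, arch) tuple,
# e.g. ('cp39', 'cp39', 'manylinux2014_x86_64').
for pyver, abi, arch in sorted(compatible_tags()):
    print('%s-%s-%s' % (pyver, abi, arch))
```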
Wheel.__init__ | (self, filename=None, sign=False, verify=False) |
Initialise an instance using a (valid) filename.
|
Initialise an instance using a (valid) filename.
|
def __init__(self, filename=None, sign=False, verify=False):
    """
    Initialise an instance using a (valid) filename.
    """
    self.sign = sign
    self.should_verify = verify
    self.buildver = ''
    self.pyver = [PYVER]
    self.abi = ['none']
    self.arch = ['any']
    self.dirname = os.getcwd()
    if filename is None:
        self.name = 'dummy'
        self.version = '0.1'
        self._filename = self.filename
    else:
        m = NAME_VERSION_RE.match(filename)
        if m:
            info = m.groupdict('')
            self.name = info['nm']
            # Reinstate the local version separator
            self.version = info['vn'].replace('_', '-')
            self.buildver = info['bn']
            self._filename = self.filename
        else:
            dirname, filename = os.path.split(filename)
            m = FILENAME_RE.match(filename)
            if not m:
                raise DistlibException('Invalid name or '
                                       'filename: %r' % filename)
            if dirname:
                self.dirname = os.path.abspath(dirname)
            self._filename = filename
            info = m.groupdict('')
            self.name = info['nm']
            self.version = info['vn']
            self.buildver = info['bn']
            self.pyver = info['py'].split('.')
            self.abi = info['bi'].split('.')
            self.arch = info['ar'].split('.')
"def",
"__init__",
"(",
"self",
",",
"filename",
"=",
"None",
",",
"sign",
"=",
"False",
",",
"verify",
"=",
"False",
")",
":",
"self",
".",
"sign",
"=",
"sign",
"self",
".",
"should_verify",
"=",
"verify",
"self",
".",
"buildver",
"=",
"''",
"self",
".",
"pyver",
"=",
"[",
"PYVER",
"]",
"self",
".",
"abi",
"=",
"[",
"'none'",
"]",
"self",
".",
"arch",
"=",
"[",
"'any'",
"]",
"self",
".",
"dirname",
"=",
"os",
".",
"getcwd",
"(",
")",
"if",
"filename",
"is",
"None",
":",
"self",
".",
"name",
"=",
"'dummy'",
"self",
".",
"version",
"=",
"'0.1'",
"self",
".",
"_filename",
"=",
"self",
".",
"filename",
"else",
":",
"m",
"=",
"NAME_VERSION_RE",
".",
"match",
"(",
"filename",
")",
"if",
"m",
":",
"info",
"=",
"m",
".",
"groupdict",
"(",
"''",
")",
"self",
".",
"name",
"=",
"info",
"[",
"'nm'",
"]",
"# Reinstate the local version separator",
"self",
".",
"version",
"=",
"info",
"[",
"'vn'",
"]",
".",
"replace",
"(",
"'_'",
",",
"'-'",
")",
"self",
".",
"buildver",
"=",
"info",
"[",
"'bn'",
"]",
"self",
".",
"_filename",
"=",
"self",
".",
"filename",
"else",
":",
"dirname",
",",
"filename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"filename",
")",
"m",
"=",
"FILENAME_RE",
".",
"match",
"(",
"filename",
")",
"if",
"not",
"m",
":",
"raise",
"DistlibException",
"(",
"'Invalid name or '",
"'filename: %r'",
"%",
"filename",
")",
"if",
"dirname",
":",
"self",
".",
"dirname",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"dirname",
")",
"self",
".",
"_filename",
"=",
"filename",
"info",
"=",
"m",
".",
"groupdict",
"(",
"''",
")",
"self",
".",
"name",
"=",
"info",
"[",
"'nm'",
"]",
"self",
".",
"version",
"=",
"info",
"[",
"'vn'",
"]",
"self",
".",
"buildver",
"=",
"info",
"[",
"'bn'",
"]",
"self",
".",
"pyver",
"=",
"info",
"[",
"'py'",
"]",
".",
"split",
"(",
"'.'",
")",
"self",
".",
"abi",
"=",
"info",
"[",
"'bi'",
"]",
".",
"split",
"(",
"'.'",
")",
"self",
".",
"arch",
"=",
"info",
"[",
"'ar'",
"]",
".",
"split",
"(",
"'.'",
")"
] | [
| [147, 4] | [186, 49] | python | en | ['en', 'error', 'th'] | False |
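A short sketch of the constructor's two accepted inputs; the wheel names below are illustrative, not taken from the dataset.

```python
from distlib.wheel import Wheel  # assumes distlib is installed

w = Wheel('requests-2.25.1-py2.py3-none-any.whl')
print(w.name, w.version)       # requests 2.25.1
print(w.pyver, w.abi, w.arch)  # ['py2', 'py3'] ['none'] ['any']

# A bare name-version string is also accepted; tag defaults then apply.
w2 = Wheel('requests-2.25.1')
print(w2.filename)  # requests-2.25.1-<PYVER>-none-any.whl for this interpreter
```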
Wheel.filename | (self) |
Build and return a filename from the various components.
|
Build and return a filename from the various components.
|
def filename(self):
    """
    Build and return a filename from the various components.
    """
    if self.buildver:
        buildver = '-' + self.buildver
    else:
        buildver = ''
    pyver = '.'.join(self.pyver)
    abi = '.'.join(self.abi)
    arch = '.'.join(self.arch)
    # replace - with _ as a local version separator
    version = self.version.replace('-', '_')
    return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
                                     pyver, abi, arch)
"def",
"filename",
"(",
"self",
")",
":",
"if",
"self",
".",
"buildver",
":",
"buildver",
"=",
"'-'",
"+",
"self",
".",
"buildver",
"else",
":",
"buildver",
"=",
"''",
"pyver",
"=",
"'.'",
".",
"join",
"(",
"self",
".",
"pyver",
")",
"abi",
"=",
"'.'",
".",
"join",
"(",
"self",
".",
"abi",
")",
"arch",
"=",
"'.'",
".",
"join",
"(",
"self",
".",
"arch",
")",
"# replace - with _ as a local version separator",
"version",
"=",
"self",
".",
"version",
".",
"replace",
"(",
"'-'",
",",
"'_'",
")",
"return",
"'%s-%s%s-%s-%s-%s.whl'",
"%",
"(",
"self",
".",
"name",
",",
"version",
",",
"buildver",
",",
"pyver",
",",
"abi",
",",
"arch",
")"
] | [
| [189, 4] | [203, 58] | python | en | ['en', 'error', 'th'] | False |
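Since the property simply reassembles the parsed components, parsing and formatting round-trip; the filename here is made up.

```python
from distlib.wheel import Wheel  # assumes distlib is installed

w = Wheel('mypkg-1.0-2-py3-none-any.whl')   # '2' is a build number
assert w.buildver == '2'
assert w.filename == 'mypkg-1.0-2-py3-none-any.whl'
```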
Wheel.build | (self, paths, tags=None, wheel_version=None) |
Build a wheel from files in specified paths, and use any specified tags
when determining the name of the wheel.
|
Build a wheel from files in specified paths, and use any specified tags
when determining the name of the wheel.
|
def build(self, paths, tags=None, wheel_version=None):
    """
    Build a wheel from files in specified paths, and use any specified tags
    when determining the name of the wheel.
    """
    if tags is None:
        tags = {}

    libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
    if libkey == 'platlib':
        is_pure = 'false'
        default_pyver = [IMPVER]
        default_abi = [ABI]
        default_arch = [ARCH]
    else:
        is_pure = 'true'
        default_pyver = [PYVER]
        default_abi = ['none']
        default_arch = ['any']

    self.pyver = tags.get('pyver', default_pyver)
    self.abi = tags.get('abi', default_abi)
    self.arch = tags.get('arch', default_arch)

    libdir = paths[libkey]

    name_ver = '%s-%s' % (self.name, self.version)
    data_dir = '%s.data' % name_ver
    info_dir = '%s.dist-info' % name_ver

    archive_paths = []

    # First, stuff which is not in site-packages
    for key in ('data', 'headers', 'scripts'):
        if key not in paths:
            continue
        path = paths[key]
        if os.path.isdir(path):
            for root, dirs, files in os.walk(path):
                for fn in files:
                    p = fsdecode(os.path.join(root, fn))
                    rp = os.path.relpath(p, path)
                    ap = to_posix(os.path.join(data_dir, key, rp))
                    archive_paths.append((ap, p))
                    if key == 'scripts' and not p.endswith('.exe'):
                        with open(p, 'rb') as f:
                            data = f.read()
                        data = self.process_shebang(data)
                        with open(p, 'wb') as f:
                            f.write(data)

    # Now, stuff which is in site-packages, other than the
    # distinfo stuff.
    path = libdir
    distinfo = None
    for root, dirs, files in os.walk(path):
        if root == path:
            # At the top level only, save distinfo for later
            # and skip it for now
            for i, dn in enumerate(dirs):
                dn = fsdecode(dn)
                if dn.endswith('.dist-info'):
                    distinfo = os.path.join(root, dn)
                    del dirs[i]
                    break
            assert distinfo, '.dist-info directory expected, not found'

        for fn in files:
            # comment out next suite to leave .pyc files in
            if fsdecode(fn).endswith(('.pyc', '.pyo')):
                continue
            p = os.path.join(root, fn)
            rp = to_posix(os.path.relpath(p, path))
            archive_paths.append((rp, p))

    # Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
    files = os.listdir(distinfo)
    for fn in files:
        if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
            p = fsdecode(os.path.join(distinfo, fn))
            ap = to_posix(os.path.join(info_dir, fn))
            archive_paths.append((ap, p))

    wheel_metadata = [
        'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
        'Generator: distlib %s' % __version__,
        'Root-Is-Purelib: %s' % is_pure,
    ]
    for pyver, abi, arch in self.tags:
        wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
    p = os.path.join(distinfo, 'WHEEL')
    with open(p, 'w') as f:
        f.write('\n'.join(wheel_metadata))
    ap = to_posix(os.path.join(info_dir, 'WHEEL'))
    archive_paths.append((ap, p))

    # sort the entries by archive path. Not needed by any spec, but it
    # keeps the archive listing and RECORD tidier than they would otherwise
    # be. Use the number of path segments to keep directory entries together,
    # and keep the dist-info stuff at the end.
    def sorter(t):
        ap = t[0]
        n = ap.count('/')
        if '.dist-info' in ap:
            n += 10000
        return (n, ap)

    archive_paths = sorted(archive_paths, key=sorter)

    # Now, at last, RECORD.
    # Paths in here are archive paths - nothing else makes sense.
    self.write_records((distinfo, info_dir), libdir, archive_paths)
    # Now, ready to build the zip file
    pathname = os.path.join(self.dirname, self.filename)
    self.build_zip(pathname, archive_paths)
    return pathname
"def",
"build",
"(",
"self",
",",
"paths",
",",
"tags",
"=",
"None",
",",
"wheel_version",
"=",
"None",
")",
":",
"if",
"tags",
"is",
"None",
":",
"tags",
"=",
"{",
"}",
"libkey",
"=",
"list",
"(",
"filter",
"(",
"lambda",
"o",
":",
"o",
"in",
"paths",
",",
"(",
"'purelib'",
",",
"'platlib'",
")",
")",
")",
"[",
"0",
"]",
"if",
"libkey",
"==",
"'platlib'",
":",
"is_pure",
"=",
"'false'",
"default_pyver",
"=",
"[",
"IMPVER",
"]",
"default_abi",
"=",
"[",
"ABI",
"]",
"default_arch",
"=",
"[",
"ARCH",
"]",
"else",
":",
"is_pure",
"=",
"'true'",
"default_pyver",
"=",
"[",
"PYVER",
"]",
"default_abi",
"=",
"[",
"'none'",
"]",
"default_arch",
"=",
"[",
"'any'",
"]",
"self",
".",
"pyver",
"=",
"tags",
".",
"get",
"(",
"'pyver'",
",",
"default_pyver",
")",
"self",
".",
"abi",
"=",
"tags",
".",
"get",
"(",
"'abi'",
",",
"default_abi",
")",
"self",
".",
"arch",
"=",
"tags",
".",
"get",
"(",
"'arch'",
",",
"default_arch",
")",
"libdir",
"=",
"paths",
"[",
"libkey",
"]",
"name_ver",
"=",
"'%s-%s'",
"%",
"(",
"self",
".",
"name",
",",
"self",
".",
"version",
")",
"data_dir",
"=",
"'%s.data'",
"%",
"name_ver",
"info_dir",
"=",
"'%s.dist-info'",
"%",
"name_ver",
"archive_paths",
"=",
"[",
"]",
"# First, stuff which is not in site-packages",
"for",
"key",
"in",
"(",
"'data'",
",",
"'headers'",
",",
"'scripts'",
")",
":",
"if",
"key",
"not",
"in",
"paths",
":",
"continue",
"path",
"=",
"paths",
"[",
"key",
"]",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"for",
"fn",
"in",
"files",
":",
"p",
"=",
"fsdecode",
"(",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"fn",
")",
")",
"rp",
"=",
"os",
".",
"path",
".",
"relpath",
"(",
"p",
",",
"path",
")",
"ap",
"=",
"to_posix",
"(",
"os",
".",
"path",
".",
"join",
"(",
"data_dir",
",",
"key",
",",
"rp",
")",
")",
"archive_paths",
".",
"append",
"(",
"(",
"ap",
",",
"p",
")",
")",
"if",
"key",
"==",
"'scripts'",
"and",
"not",
"p",
".",
"endswith",
"(",
"'.exe'",
")",
":",
"with",
"open",
"(",
"p",
",",
"'rb'",
")",
"as",
"f",
":",
"data",
"=",
"f",
".",
"read",
"(",
")",
"data",
"=",
"self",
".",
"process_shebang",
"(",
"data",
")",
"with",
"open",
"(",
"p",
",",
"'wb'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"data",
")",
"# Now, stuff which is in site-packages, other than the",
"# distinfo stuff.",
"path",
"=",
"libdir",
"distinfo",
"=",
"None",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"path",
")",
":",
"if",
"root",
"==",
"path",
":",
"# At the top level only, save distinfo for later",
"# and skip it for now",
"for",
"i",
",",
"dn",
"in",
"enumerate",
"(",
"dirs",
")",
":",
"dn",
"=",
"fsdecode",
"(",
"dn",
")",
"if",
"dn",
".",
"endswith",
"(",
"'.dist-info'",
")",
":",
"distinfo",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"dn",
")",
"del",
"dirs",
"[",
"i",
"]",
"break",
"assert",
"distinfo",
",",
"'.dist-info directory expected, not found'",
"for",
"fn",
"in",
"files",
":",
"# comment out next suite to leave .pyc files in",
"if",
"fsdecode",
"(",
"fn",
")",
".",
"endswith",
"(",
"(",
"'.pyc'",
",",
"'.pyo'",
")",
")",
":",
"continue",
"p",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"fn",
")",
"rp",
"=",
"to_posix",
"(",
"os",
".",
"path",
".",
"relpath",
"(",
"p",
",",
"path",
")",
")",
"archive_paths",
".",
"append",
"(",
"(",
"rp",
",",
"p",
")",
")",
"# Now distinfo. Assumed to be flat, i.e. os.listdir is enough.",
"files",
"=",
"os",
".",
"listdir",
"(",
"distinfo",
")",
"for",
"fn",
"in",
"files",
":",
"if",
"fn",
"not",
"in",
"(",
"'RECORD'",
",",
"'INSTALLER'",
",",
"'SHARED'",
",",
"'WHEEL'",
")",
":",
"p",
"=",
"fsdecode",
"(",
"os",
".",
"path",
".",
"join",
"(",
"distinfo",
",",
"fn",
")",
")",
"ap",
"=",
"to_posix",
"(",
"os",
".",
"path",
".",
"join",
"(",
"info_dir",
",",
"fn",
")",
")",
"archive_paths",
".",
"append",
"(",
"(",
"ap",
",",
"p",
")",
")",
"wheel_metadata",
"=",
"[",
"'Wheel-Version: %d.%d'",
"%",
"(",
"wheel_version",
"or",
"self",
".",
"wheel_version",
")",
",",
"'Generator: distlib %s'",
"%",
"__version__",
",",
"'Root-Is-Purelib: %s'",
"%",
"is_pure",
",",
"]",
"for",
"pyver",
",",
"abi",
",",
"arch",
"in",
"self",
".",
"tags",
":",
"wheel_metadata",
".",
"append",
"(",
"'Tag: %s-%s-%s'",
"%",
"(",
"pyver",
",",
"abi",
",",
"arch",
")",
")",
"p",
"=",
"os",
".",
"path",
".",
"join",
"(",
"distinfo",
",",
"'WHEEL'",
")",
"with",
"open",
"(",
"p",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"'\\n'",
".",
"join",
"(",
"wheel_metadata",
")",
")",
"ap",
"=",
"to_posix",
"(",
"os",
".",
"path",
".",
"join",
"(",
"info_dir",
",",
"'WHEEL'",
")",
")",
"archive_paths",
".",
"append",
"(",
"(",
"ap",
",",
"p",
")",
")",
"# sort the entries by archive path. Not needed by any spec, but it",
"# keeps the archive listing and RECORD tidier than they would otherwise",
"# be. Use the number of path segments to keep directory entries together,",
"# and keep the dist-info stuff at the end.",
"def",
"sorter",
"(",
"t",
")",
":",
"ap",
"=",
"t",
"[",
"0",
"]",
"n",
"=",
"ap",
".",
"count",
"(",
"'/'",
")",
"if",
"'.dist-info'",
"in",
"ap",
":",
"n",
"+=",
"10000",
"return",
"(",
"n",
",",
"ap",
")",
"archive_paths",
"=",
"sorted",
"(",
"archive_paths",
",",
"key",
"=",
"sorter",
")",
"# Now, at last, RECORD.",
"# Paths in here are archive paths - nothing else makes sense.",
"self",
".",
"write_records",
"(",
"(",
"distinfo",
",",
"info_dir",
")",
",",
"libdir",
",",
"archive_paths",
")",
"# Now, ready to build the zip file",
"pathname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"dirname",
",",
"self",
".",
"filename",
")",
"self",
".",
"build_zip",
"(",
"pathname",
",",
"archive_paths",
")",
"return",
"pathname"
] | [
| [336, 4] | [450, 23] | python | en | ['en', 'error', 'th'] | False |
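A sketch of driving build() with a staged directory layout; all paths and the project name are hypothetical, and the purelib directory must already contain a populated mypkg-0.1.dist-info subdirectory (the method asserts this).

```python
from distlib.wheel import Wheel  # assumes distlib is installed

w = Wheel()
w.name = 'mypkg'      # hypothetical project name
w.version = '0.1'
paths = {
    'purelib': '/tmp/stage/purelib',   # package code + mypkg-0.1.dist-info/
    'scripts': '/tmp/stage/scripts',   # optional; shebangs get rewritten
}
print(w.build(paths))  # the wheel is written into os.getcwd() by default
```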
Wheel.skip_entry | (self, arcname) |
Determine whether an archive entry should be skipped when verifying
or installing.
|
Determine whether an archive entry should be skipped when verifying
or installing.
|
def skip_entry(self, arcname):
    """
    Determine whether an archive entry should be skipped when verifying
    or installing.
    """
    # The signature file won't be in RECORD,
    # and we don't currently do anything with it.
    # We also skip directories, as they won't be in RECORD
    # either. See:
    #
    # https://github.com/pypa/wheel/issues/294
    # https://github.com/pypa/wheel/issues/287
    # https://github.com/pypa/wheel/pull/289
    #
    return arcname.endswith(('/', '/RECORD.jws'))
"def",
"skip_entry",
"(",
"self",
",",
"arcname",
")",
":",
"# The signature file won't be in RECORD,",
"# and we don't currently don't do anything with it",
"# We also skip directories, as they won't be in RECORD",
"# either. See:",
"#",
"# https://github.com/pypa/wheel/issues/294",
"# https://github.com/pypa/wheel/issues/287",
"# https://github.com/pypa/wheel/pull/289",
"#",
"return",
"arcname",
".",
"endswith",
"(",
"(",
"'/'",
",",
"'/RECORD.jws'",
")",
")"
] | [
| [452, 4] | [466, 53] | python | en | ['en', 'error', 'th'] | False |
Wheel.install | (self, paths, maker, **kwargs) |
Install a wheel to the specified paths. If kwarg ``warner`` is
specified, it should be a callable, which will be called with two
tuples indicating the wheel version of this software and the wheel
version in the file, if there is a discrepancy in the versions.
This can be used to issue any warnings or raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are
not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
bytecode will try to use file-hash based invalidation (PEP-552) on
supported interpreter versions (CPython 2.7+).
The return value is a :class:`InstalledDistribution` instance unless
``lib_only`` is True, in which case the return value is ``None``.
|
Install a wheel to the specified paths. If kwarg ``warner`` is
specified, it should be a callable, which will be called with two
tuples indicating the wheel version of this software and the wheel
version in the file, if there is a discrepancy in the versions.
This can be used to issue any warnings or raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are
not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
bytecode will try to use file-hash based invalidation (PEP-552) on
supported interpreter versions (CPython 2.7+). | def install(self, paths, maker, **kwargs):
"""
Install a wheel to the specified paths. If kwarg ``warner`` is
specified, it should be a callable, which will be called with two
tuples indicating the wheel version of this software and the wheel
version in the file, if there is a discrepancy in the versions.
This can be used to issue any warnings to raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are
not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
bytecode will try to use file-hash based invalidation (PEP-552) on
supported interpreter versions (CPython 2.7+).
The return value is a :class:`InstalledDistribution` instance unless
``options.lib_only`` is True, in which case the return value is ``None``.
"""
dry_run = maker.dry_run
warner = kwargs.get('warner')
lib_only = kwargs.get('lib_only', False)
bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
with zf.open(wheel_metadata_name) as bwf:
wf = wrapper(bwf)
message = message_from_file(wf)
wv = message['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
if (file_version != self.wheel_version) and warner:
warner(self.wheel_version, file_version)
if message['Root-Is-Purelib'] == 'true':
libdir = paths['purelib']
else:
libdir = paths['platlib']
records = {}
with zf.open(record_name) as bf:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
data_pfx = posixpath.join(data_dir, '')
info_pfx = posixpath.join(info_dir, '')
script_pfx = posixpath.join(data_dir, 'scripts', '')
# make a new instance rather than a copy of maker's,
# as we mutate it
fileop = FileOperator(dry_run=dry_run)
fileop.record = True # so we can rollback if needed
bc = not sys.dont_write_bytecode # Double negatives. Lovely!
outfiles = [] # for RECORD writing
# for script copying/shebang processing
workdir = tempfile.mkdtemp()
# set target dir later
# we default add_launchers to False, as the
# Python Launcher should be used instead
maker.source_dir = workdir
maker.target_dir = None
try:
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if self.skip_entry(u_arcname):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
'%s' % u_arcname)
if row[1]:
kind, value = row[1].split('=', 1)
with zf.open(arcname) as bf:
data = bf.read()
_, digest = self.get_hash(data, kind)
if digest != value:
raise DistlibException('digest mismatch for '
'%s' % arcname)
if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
logger.debug('lib_only: skipping %s', u_arcname)
continue
is_script = (u_arcname.startswith(script_pfx)
and not u_arcname.endswith('.exe'))
if u_arcname.startswith(data_pfx):
_, where, rp = u_arcname.split('/', 2)
outfile = os.path.join(paths[where], convert_path(rp))
else:
# meant for site-packages.
if u_arcname in (wheel_metadata_name, record_name):
continue
outfile = os.path.join(libdir, convert_path(u_arcname))
if not is_script:
with zf.open(arcname) as bf:
fileop.copy_stream(bf, outfile)
# Issue #147: permission bits aren't preserved. Using
# zf.extract(zinfo, libdir) should have worked, but didn't,
# see https://www.thetopsites.net/article/53834422.shtml
# So ... manually preserve permission bits as given in zinfo
if os.name == 'posix':
# just set the normal permission bits
os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF)
outfiles.append(outfile)
# Double check the digest of the written file
if not dry_run and row[1]:
with open(outfile, 'rb') as bf:
data = bf.read()
_, newdigest = self.get_hash(data, kind)
if newdigest != digest:
raise DistlibException('digest mismatch '
'on write for '
'%s' % outfile)
if bc and outfile.endswith('.py'):
try:
pyc = fileop.byte_compile(outfile,
hashed_invalidation=bc_hashed_invalidation)
outfiles.append(pyc)
except Exception:
# Don't give up if byte-compilation fails,
# but log it and perhaps warn the user
logger.warning('Byte-compilation failed',
exc_info=True)
else:
fn = os.path.basename(convert_path(arcname))
workname = os.path.join(workdir, fn)
with zf.open(arcname) as bf:
fileop.copy_stream(bf, workname)
dn, fn = os.path.split(outfile)
maker.target_dir = dn
filenames = maker.make(fn)
fileop.set_executable_mode(filenames)
outfiles.extend(filenames)
if lib_only:
logger.debug('lib_only: returning None')
dist = None
else:
# Generate scripts
# Try to get pydist.json so we can see if there are
# any commands to generate. If this fails (e.g. because
# of a legacy wheel), log a warning but don't give up.
commands = None
file_version = self.info['Wheel-Version']
if file_version == '1.0':
# Use legacy info
ep = posixpath.join(info_dir, 'entry_points.txt')
try:
with zf.open(ep) as bwf:
epdata = read_exports(bwf)
commands = {}
for key in ('console', 'gui'):
k = '%s_scripts' % key
if k in epdata:
commands['wrap_%s' % key] = d = {}
for v in epdata[k].values():
s = '%s:%s' % (v.prefix, v.suffix)
if v.flags:
s += ' [%s]' % ','.join(v.flags)
d[v.name] = s
except Exception:
logger.warning('Unable to read legacy script '
'metadata, so cannot generate '
'scripts')
else:
try:
with zf.open(metadata_name) as bwf:
wf = wrapper(bwf)
commands = json.load(wf).get('extensions')
if commands:
commands = commands.get('python.commands')
except Exception:
logger.warning('Unable to read JSON metadata, so '
'cannot generate scripts')
if commands:
console_scripts = commands.get('wrap_console', {})
gui_scripts = commands.get('wrap_gui', {})
if console_scripts or gui_scripts:
script_dir = paths.get('scripts', '')
if not os.path.isdir(script_dir):
raise ValueError('Valid script path not '
'specified')
maker.target_dir = script_dir
for k, v in console_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script)
fileop.set_executable_mode(filenames)
if gui_scripts:
options = {'gui': True }
for k, v in gui_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script, options)
fileop.set_executable_mode(filenames)
p = os.path.join(libdir, info_dir)
dist = InstalledDistribution(p)
# Write SHARED
paths = dict(paths) # don't change passed in dict
del paths['purelib']
del paths['platlib']
paths['lib'] = libdir
p = dist.write_shared_locations(paths, dry_run)
if p:
outfiles.append(p)
# Write RECORD
dist.write_installed_files(outfiles, paths['prefix'],
dry_run)
return dist
except Exception: # pragma: no cover
logger.exception('installation failed.')
fileop.rollback()
raise
finally:
shutil.rmtree(workdir) | [
"def",
"install",
"(",
"self",
",",
"paths",
",",
"maker",
",",
"*",
"*",
"kwargs",
")",
":",
"dry_run",
"=",
"maker",
".",
"dry_run",
"warner",
"=",
"kwargs",
".",
"get",
"(",
"'warner'",
")",
"lib_only",
"=",
"kwargs",
".",
"get",
"(",
"'lib_only'",
",",
"False",
")",
"bc_hashed_invalidation",
"=",
"kwargs",
".",
"get",
"(",
"'bytecode_hashed_invalidation'",
",",
"False",
")",
"pathname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"dirname",
",",
"self",
".",
"filename",
")",
"name_ver",
"=",
"'%s-%s'",
"%",
"(",
"self",
".",
"name",
",",
"self",
".",
"version",
")",
"data_dir",
"=",
"'%s.data'",
"%",
"name_ver",
"info_dir",
"=",
"'%s.dist-info'",
"%",
"name_ver",
"metadata_name",
"=",
"posixpath",
".",
"join",
"(",
"info_dir",
",",
"LEGACY_METADATA_FILENAME",
")",
"wheel_metadata_name",
"=",
"posixpath",
".",
"join",
"(",
"info_dir",
",",
"'WHEEL'",
")",
"record_name",
"=",
"posixpath",
".",
"join",
"(",
"info_dir",
",",
"'RECORD'",
")",
"wrapper",
"=",
"codecs",
".",
"getreader",
"(",
"'utf-8'",
")",
"with",
"ZipFile",
"(",
"pathname",
",",
"'r'",
")",
"as",
"zf",
":",
"with",
"zf",
".",
"open",
"(",
"wheel_metadata_name",
")",
"as",
"bwf",
":",
"wf",
"=",
"wrapper",
"(",
"bwf",
")",
"message",
"=",
"message_from_file",
"(",
"wf",
")",
"wv",
"=",
"message",
"[",
"'Wheel-Version'",
"]",
".",
"split",
"(",
"'.'",
",",
"1",
")",
"file_version",
"=",
"tuple",
"(",
"[",
"int",
"(",
"i",
")",
"for",
"i",
"in",
"wv",
"]",
")",
"if",
"(",
"file_version",
"!=",
"self",
".",
"wheel_version",
")",
"and",
"warner",
":",
"warner",
"(",
"self",
".",
"wheel_version",
",",
"file_version",
")",
"if",
"message",
"[",
"'Root-Is-Purelib'",
"]",
"==",
"'true'",
":",
"libdir",
"=",
"paths",
"[",
"'purelib'",
"]",
"else",
":",
"libdir",
"=",
"paths",
"[",
"'platlib'",
"]",
"records",
"=",
"{",
"}",
"with",
"zf",
".",
"open",
"(",
"record_name",
")",
"as",
"bf",
":",
"with",
"CSVReader",
"(",
"stream",
"=",
"bf",
")",
"as",
"reader",
":",
"for",
"row",
"in",
"reader",
":",
"p",
"=",
"row",
"[",
"0",
"]",
"records",
"[",
"p",
"]",
"=",
"row",
"data_pfx",
"=",
"posixpath",
".",
"join",
"(",
"data_dir",
",",
"''",
")",
"info_pfx",
"=",
"posixpath",
".",
"join",
"(",
"info_dir",
",",
"''",
")",
"script_pfx",
"=",
"posixpath",
".",
"join",
"(",
"data_dir",
",",
"'scripts'",
",",
"''",
")",
"# make a new instance rather than a copy of maker's,",
"# as we mutate it",
"fileop",
"=",
"FileOperator",
"(",
"dry_run",
"=",
"dry_run",
")",
"fileop",
".",
"record",
"=",
"True",
"# so we can rollback if needed",
"bc",
"=",
"not",
"sys",
".",
"dont_write_bytecode",
"# Double negatives. Lovely!",
"outfiles",
"=",
"[",
"]",
"# for RECORD writing",
"# for script copying/shebang processing",
"workdir",
"=",
"tempfile",
".",
"mkdtemp",
"(",
")",
"# set target dir later",
"# we default add_launchers to False, as the",
"# Python Launcher should be used instead",
"maker",
".",
"source_dir",
"=",
"workdir",
"maker",
".",
"target_dir",
"=",
"None",
"try",
":",
"for",
"zinfo",
"in",
"zf",
".",
"infolist",
"(",
")",
":",
"arcname",
"=",
"zinfo",
".",
"filename",
"if",
"isinstance",
"(",
"arcname",
",",
"text_type",
")",
":",
"u_arcname",
"=",
"arcname",
"else",
":",
"u_arcname",
"=",
"arcname",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"self",
".",
"skip_entry",
"(",
"u_arcname",
")",
":",
"continue",
"row",
"=",
"records",
"[",
"u_arcname",
"]",
"if",
"row",
"[",
"2",
"]",
"and",
"str",
"(",
"zinfo",
".",
"file_size",
")",
"!=",
"row",
"[",
"2",
"]",
":",
"raise",
"DistlibException",
"(",
"'size mismatch for '",
"'%s'",
"%",
"u_arcname",
")",
"if",
"row",
"[",
"1",
"]",
":",
"kind",
",",
"value",
"=",
"row",
"[",
"1",
"]",
".",
"split",
"(",
"'='",
",",
"1",
")",
"with",
"zf",
".",
"open",
"(",
"arcname",
")",
"as",
"bf",
":",
"data",
"=",
"bf",
".",
"read",
"(",
")",
"_",
",",
"digest",
"=",
"self",
".",
"get_hash",
"(",
"data",
",",
"kind",
")",
"if",
"digest",
"!=",
"value",
":",
"raise",
"DistlibException",
"(",
"'digest mismatch for '",
"'%s'",
"%",
"arcname",
")",
"if",
"lib_only",
"and",
"u_arcname",
".",
"startswith",
"(",
"(",
"info_pfx",
",",
"data_pfx",
")",
")",
":",
"logger",
".",
"debug",
"(",
"'lib_only: skipping %s'",
",",
"u_arcname",
")",
"continue",
"is_script",
"=",
"(",
"u_arcname",
".",
"startswith",
"(",
"script_pfx",
")",
"and",
"not",
"u_arcname",
".",
"endswith",
"(",
"'.exe'",
")",
")",
"if",
"u_arcname",
".",
"startswith",
"(",
"data_pfx",
")",
":",
"_",
",",
"where",
",",
"rp",
"=",
"u_arcname",
".",
"split",
"(",
"'/'",
",",
"2",
")",
"outfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"paths",
"[",
"where",
"]",
",",
"convert_path",
"(",
"rp",
")",
")",
"else",
":",
"# meant for site-packages.",
"if",
"u_arcname",
"in",
"(",
"wheel_metadata_name",
",",
"record_name",
")",
":",
"continue",
"outfile",
"=",
"os",
".",
"path",
".",
"join",
"(",
"libdir",
",",
"convert_path",
"(",
"u_arcname",
")",
")",
"if",
"not",
"is_script",
":",
"with",
"zf",
".",
"open",
"(",
"arcname",
")",
"as",
"bf",
":",
"fileop",
".",
"copy_stream",
"(",
"bf",
",",
"outfile",
")",
"# Issue #147: permission bits aren't preserved. Using",
"# zf.extract(zinfo, libdir) should have worked, but didn't,",
"# see https://www.thetopsites.net/article/53834422.shtml",
"# So ... manually preserve permission bits as given in zinfo",
"if",
"os",
".",
"name",
"==",
"'posix'",
":",
"# just set the normal permission bits",
"os",
".",
"chmod",
"(",
"outfile",
",",
"(",
"zinfo",
".",
"external_attr",
">>",
"16",
")",
"&",
"0x1FF",
")",
"outfiles",
".",
"append",
"(",
"outfile",
")",
"# Double check the digest of the written file",
"if",
"not",
"dry_run",
"and",
"row",
"[",
"1",
"]",
":",
"with",
"open",
"(",
"outfile",
",",
"'rb'",
")",
"as",
"bf",
":",
"data",
"=",
"bf",
".",
"read",
"(",
")",
"_",
",",
"newdigest",
"=",
"self",
".",
"get_hash",
"(",
"data",
",",
"kind",
")",
"if",
"newdigest",
"!=",
"digest",
":",
"raise",
"DistlibException",
"(",
"'digest mismatch '",
"'on write for '",
"'%s'",
"%",
"outfile",
")",
"if",
"bc",
"and",
"outfile",
".",
"endswith",
"(",
"'.py'",
")",
":",
"try",
":",
"pyc",
"=",
"fileop",
".",
"byte_compile",
"(",
"outfile",
",",
"hashed_invalidation",
"=",
"bc_hashed_invalidation",
")",
"outfiles",
".",
"append",
"(",
"pyc",
")",
"except",
"Exception",
":",
"# Don't give up if byte-compilation fails,",
"# but log it and perhaps warn the user",
"logger",
".",
"warning",
"(",
"'Byte-compilation failed'",
",",
"exc_info",
"=",
"True",
")",
"else",
":",
"fn",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"convert_path",
"(",
"arcname",
")",
")",
"workname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"workdir",
",",
"fn",
")",
"with",
"zf",
".",
"open",
"(",
"arcname",
")",
"as",
"bf",
":",
"fileop",
".",
"copy_stream",
"(",
"bf",
",",
"workname",
")",
"dn",
",",
"fn",
"=",
"os",
".",
"path",
".",
"split",
"(",
"outfile",
")",
"maker",
".",
"target_dir",
"=",
"dn",
"filenames",
"=",
"maker",
".",
"make",
"(",
"fn",
")",
"fileop",
".",
"set_executable_mode",
"(",
"filenames",
")",
"outfiles",
".",
"extend",
"(",
"filenames",
")",
"if",
"lib_only",
":",
"logger",
".",
"debug",
"(",
"'lib_only: returning None'",
")",
"dist",
"=",
"None",
"else",
":",
"# Generate scripts",
"# Try to get pydist.json so we can see if there are",
"# any commands to generate. If this fails (e.g. because",
"# of a legacy wheel), log a warning but don't give up.",
"commands",
"=",
"None",
"file_version",
"=",
"self",
".",
"info",
"[",
"'Wheel-Version'",
"]",
"if",
"file_version",
"==",
"'1.0'",
":",
"# Use legacy info",
"ep",
"=",
"posixpath",
".",
"join",
"(",
"info_dir",
",",
"'entry_points.txt'",
")",
"try",
":",
"with",
"zf",
".",
"open",
"(",
"ep",
")",
"as",
"bwf",
":",
"epdata",
"=",
"read_exports",
"(",
"bwf",
")",
"commands",
"=",
"{",
"}",
"for",
"key",
"in",
"(",
"'console'",
",",
"'gui'",
")",
":",
"k",
"=",
"'%s_scripts'",
"%",
"key",
"if",
"k",
"in",
"epdata",
":",
"commands",
"[",
"'wrap_%s'",
"%",
"key",
"]",
"=",
"d",
"=",
"{",
"}",
"for",
"v",
"in",
"epdata",
"[",
"k",
"]",
".",
"values",
"(",
")",
":",
"s",
"=",
"'%s:%s'",
"%",
"(",
"v",
".",
"prefix",
",",
"v",
".",
"suffix",
")",
"if",
"v",
".",
"flags",
":",
"s",
"+=",
"' [%s]'",
"%",
"','",
".",
"join",
"(",
"v",
".",
"flags",
")",
"d",
"[",
"v",
".",
"name",
"]",
"=",
"s",
"except",
"Exception",
":",
"logger",
".",
"warning",
"(",
"'Unable to read legacy script '",
"'metadata, so cannot generate '",
"'scripts'",
")",
"else",
":",
"try",
":",
"with",
"zf",
".",
"open",
"(",
"metadata_name",
")",
"as",
"bwf",
":",
"wf",
"=",
"wrapper",
"(",
"bwf",
")",
"commands",
"=",
"json",
".",
"load",
"(",
"wf",
")",
".",
"get",
"(",
"'extensions'",
")",
"if",
"commands",
":",
"commands",
"=",
"commands",
".",
"get",
"(",
"'python.commands'",
")",
"except",
"Exception",
":",
"logger",
".",
"warning",
"(",
"'Unable to read JSON metadata, so '",
"'cannot generate scripts'",
")",
"if",
"commands",
":",
"console_scripts",
"=",
"commands",
".",
"get",
"(",
"'wrap_console'",
",",
"{",
"}",
")",
"gui_scripts",
"=",
"commands",
".",
"get",
"(",
"'wrap_gui'",
",",
"{",
"}",
")",
"if",
"console_scripts",
"or",
"gui_scripts",
":",
"script_dir",
"=",
"paths",
".",
"get",
"(",
"'scripts'",
",",
"''",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"script_dir",
")",
":",
"raise",
"ValueError",
"(",
"'Valid script path not '",
"'specified'",
")",
"maker",
".",
"target_dir",
"=",
"script_dir",
"for",
"k",
",",
"v",
"in",
"console_scripts",
".",
"items",
"(",
")",
":",
"script",
"=",
"'%s = %s'",
"%",
"(",
"k",
",",
"v",
")",
"filenames",
"=",
"maker",
".",
"make",
"(",
"script",
")",
"fileop",
".",
"set_executable_mode",
"(",
"filenames",
")",
"if",
"gui_scripts",
":",
"options",
"=",
"{",
"'gui'",
":",
"True",
"}",
"for",
"k",
",",
"v",
"in",
"gui_scripts",
".",
"items",
"(",
")",
":",
"script",
"=",
"'%s = %s'",
"%",
"(",
"k",
",",
"v",
")",
"filenames",
"=",
"maker",
".",
"make",
"(",
"script",
",",
"options",
")",
"fileop",
".",
"set_executable_mode",
"(",
"filenames",
")",
"p",
"=",
"os",
".",
"path",
".",
"join",
"(",
"libdir",
",",
"info_dir",
")",
"dist",
"=",
"InstalledDistribution",
"(",
"p",
")",
"# Write SHARED",
"paths",
"=",
"dict",
"(",
"paths",
")",
"# don't change passed in dict",
"del",
"paths",
"[",
"'purelib'",
"]",
"del",
"paths",
"[",
"'platlib'",
"]",
"paths",
"[",
"'lib'",
"]",
"=",
"libdir",
"p",
"=",
"dist",
".",
"write_shared_locations",
"(",
"paths",
",",
"dry_run",
")",
"if",
"p",
":",
"outfiles",
".",
"append",
"(",
"p",
")",
"# Write RECORD",
"dist",
".",
"write_installed_files",
"(",
"outfiles",
",",
"paths",
"[",
"'prefix'",
"]",
",",
"dry_run",
")",
"return",
"dist",
"except",
"Exception",
":",
"# pragma: no cover",
"logger",
".",
"exception",
"(",
"'installation failed.'",
")",
"fileop",
".",
"rollback",
"(",
")",
"raise",
"finally",
":",
"shutil",
".",
"rmtree",
"(",
"workdir",
")"
] | [
| [468, 4] | [703, 38] | python | en | ['en', 'error', 'th'] | False |
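A sketch of the install() call with distlib's ScriptMaker; the target directories and the wheel name are invented for illustration.

```python
from distlib.scripts import ScriptMaker
from distlib.wheel import Wheel

w = Wheel('mypkg-0.1-py3-none-any.whl')   # hypothetical wheel in os.getcwd()
paths = {
    'prefix': '/tmp/target',
    'purelib': '/tmp/target/lib',
    'platlib': '/tmp/target/lib',
    'scripts': '/tmp/target/bin',
    'headers': '/tmp/target/include',
    'data': '/tmp/target/data',
}
maker = ScriptMaker(None, None)  # install() sets source_dir/target_dir itself
dist = w.install(paths, maker)   # InstalledDistribution, or None with lib_only=True
```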
Wheel.is_compatible | (self) |
Determine if a wheel is compatible with the running system.
|
Determine if a wheel is compatible with the running system.
|
def is_compatible(self):
    """
    Determine if a wheel is compatible with the running system.
    """
    return is_compatible(self)
"def",
"is_compatible",
"(",
"self",
")",
":",
"return",
"is_compatible",
"(",
"self",
")"
] | [
| [748, 4] | [752, 34] | python | en | ['en', 'error', 'th'] | False |
Wheel.is_mountable | (self) |
Determine if a wheel is asserted as mountable by its metadata.
|
Determine if a wheel is asserted as mountable by its metadata.
|
def is_mountable(self):
    """
    Determine if a wheel is asserted as mountable by its metadata.
    """
    return True
"def",
"is_mountable",
"(",
"self",
")",
":",
"return",
"True"
] | [
| [754, 4] | [758, 19] | python | en | ['en', 'error', 'th'] | False |
Wheel.update | (self, modifier, dest_dir=None, **kwargs) |
Update the contents of a wheel in a generic way. The modifier should
be a callable which expects a dictionary argument: its keys are
archive-entry paths, and its values are absolute filesystem paths
where the contents of the corresponding archive entries can be found. The
modifier is free to change the contents of the files pointed to, add
new entries and remove entries, before returning. This method will
extract the entire contents of the wheel to a temporary location, call
the modifier, and then use the passed (and possibly updated)
dictionary to write a new wheel. If ``dest_dir`` is specified, the new
wheel is written there -- otherwise, the original wheel is overwritten.
The modifier should return True if it updated the wheel, else False.
This method returns the same value the modifier returns.
|
Update the contents of a wheel in a generic way. The modifier should
be a callable which expects a dictionary argument: its keys are
archive-entry paths, and its values are absolute filesystem paths
where the contents of the corresponding archive entries can be found. The
modifier is free to change the contents of the files pointed to, add
new entries and remove entries, before returning. This method will
extract the entire contents of the wheel to a temporary location, call
the modifier, and then use the passed (and possibly updated)
dictionary to write a new wheel. If ``dest_dir`` is specified, the new
wheel is written there -- otherwise, the original wheel is overwritten. | def update(self, modifier, dest_dir=None, **kwargs):
"""
Update the contents of a wheel in a generic way. The modifier should
be a callable which expects a dictionary argument: its keys are
archive-entry paths, and its values are absolute filesystem paths
where the contents the corresponding archive entries can be found. The
modifier is free to change the contents of the files pointed to, add
new entries and remove entries, before returning. This method will
extract the entire contents of the wheel to a temporary location, call
the modifier, and then use the passed (and possibly updated)
dictionary to write a new wheel. If ``dest_dir`` is specified, the new
wheel is written there -- otherwise, the original wheel is overwritten.
The modifier should return True if it updated the wheel, else False.
This method returns the same value the modifier returns.
"""
def get_version(path_map, info_dir):
version = path = None
key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
if key not in path_map:
key = '%s/PKG-INFO' % info_dir
if key in path_map:
path = path_map[key]
version = Metadata(path=path).version
return version, path
def update_version(version, path):
updated = None
try:
v = NormalizedVersion(version)
i = version.find('-')
if i < 0:
updated = '%s+1' % version
else:
parts = [int(s) for s in version[i + 1:].split('.')]
parts[-1] += 1
updated = '%s+%s' % (version[:i],
'.'.join(str(i) for i in parts))
except UnsupportedVersionError:
logger.debug('Cannot update non-compliant (PEP-440) '
'version %r', version)
if updated:
md = Metadata(path=path)
md.version = updated
legacy = path.endswith(LEGACY_METADATA_FILENAME)
md.write(path=path, legacy=legacy)
logger.debug('Version updated from %r to %r', version,
updated)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
record_name = posixpath.join(info_dir, 'RECORD')
with tempdir() as workdir:
with ZipFile(pathname, 'r') as zf:
path_map = {}
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if u_arcname == record_name:
continue
if '..' in u_arcname:
raise DistlibException('invalid entry in '
'wheel: %r' % u_arcname)
zf.extract(zinfo, workdir)
path = os.path.join(workdir, convert_path(u_arcname))
path_map[u_arcname] = path
# Remember the version.
original_version, _ = get_version(path_map, info_dir)
# Files extracted. Call the modifier.
modified = modifier(path_map, **kwargs)
if modified:
# Something changed - need to build a new wheel.
current_version, path = get_version(path_map, info_dir)
if current_version and (current_version == original_version):
# Add or update local version to signify changes.
update_version(current_version, path)
# Decide where the new wheel goes.
if dest_dir is None:
fd, newpath = tempfile.mkstemp(suffix='.whl',
prefix='wheel-update-',
dir=workdir)
os.close(fd)
else:
if not os.path.isdir(dest_dir):
raise DistlibException('Not a directory: %r' % dest_dir)
newpath = os.path.join(dest_dir, self.filename)
archive_paths = list(path_map.items())
distinfo = os.path.join(workdir, info_dir)
info = distinfo, info_dir
self.write_records(info, workdir, archive_paths)
self.build_zip(newpath, archive_paths)
if dest_dir is None:
shutil.copyfile(newpath, pathname)
return modified | [
"def",
"update",
"(",
"self",
",",
"modifier",
",",
"dest_dir",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"def",
"get_version",
"(",
"path_map",
",",
"info_dir",
")",
":",
"version",
"=",
"path",
"=",
"None",
"key",
"=",
"'%s/%s'",
"%",
"(",
"info_dir",
",",
"LEGACY_METADATA_FILENAME",
")",
"if",
"key",
"not",
"in",
"path_map",
":",
"key",
"=",
"'%s/PKG-INFO'",
"%",
"info_dir",
"if",
"key",
"in",
"path_map",
":",
"path",
"=",
"path_map",
"[",
"key",
"]",
"version",
"=",
"Metadata",
"(",
"path",
"=",
"path",
")",
".",
"version",
"return",
"version",
",",
"path",
"def",
"update_version",
"(",
"version",
",",
"path",
")",
":",
"updated",
"=",
"None",
"try",
":",
"v",
"=",
"NormalizedVersion",
"(",
"version",
")",
"i",
"=",
"version",
".",
"find",
"(",
"'-'",
")",
"if",
"i",
"<",
"0",
":",
"updated",
"=",
"'%s+1'",
"%",
"version",
"else",
":",
"parts",
"=",
"[",
"int",
"(",
"s",
")",
"for",
"s",
"in",
"version",
"[",
"i",
"+",
"1",
":",
"]",
".",
"split",
"(",
"'.'",
")",
"]",
"parts",
"[",
"-",
"1",
"]",
"+=",
"1",
"updated",
"=",
"'%s+%s'",
"%",
"(",
"version",
"[",
":",
"i",
"]",
",",
"'.'",
".",
"join",
"(",
"str",
"(",
"i",
")",
"for",
"i",
"in",
"parts",
")",
")",
"except",
"UnsupportedVersionError",
":",
"logger",
".",
"debug",
"(",
"'Cannot update non-compliant (PEP-440) '",
"'version %r'",
",",
"version",
")",
"if",
"updated",
":",
"md",
"=",
"Metadata",
"(",
"path",
"=",
"path",
")",
"md",
".",
"version",
"=",
"updated",
"legacy",
"=",
"path",
".",
"endswith",
"(",
"LEGACY_METADATA_FILENAME",
")",
"md",
".",
"write",
"(",
"path",
"=",
"path",
",",
"legacy",
"=",
"legacy",
")",
"logger",
".",
"debug",
"(",
"'Version updated from %r to %r'",
",",
"version",
",",
"updated",
")",
"pathname",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"dirname",
",",
"self",
".",
"filename",
")",
"name_ver",
"=",
"'%s-%s'",
"%",
"(",
"self",
".",
"name",
",",
"self",
".",
"version",
")",
"info_dir",
"=",
"'%s.dist-info'",
"%",
"name_ver",
"record_name",
"=",
"posixpath",
".",
"join",
"(",
"info_dir",
",",
"'RECORD'",
")",
"with",
"tempdir",
"(",
")",
"as",
"workdir",
":",
"with",
"ZipFile",
"(",
"pathname",
",",
"'r'",
")",
"as",
"zf",
":",
"path_map",
"=",
"{",
"}",
"for",
"zinfo",
"in",
"zf",
".",
"infolist",
"(",
")",
":",
"arcname",
"=",
"zinfo",
".",
"filename",
"if",
"isinstance",
"(",
"arcname",
",",
"text_type",
")",
":",
"u_arcname",
"=",
"arcname",
"else",
":",
"u_arcname",
"=",
"arcname",
".",
"decode",
"(",
"'utf-8'",
")",
"if",
"u_arcname",
"==",
"record_name",
":",
"continue",
"if",
"'..'",
"in",
"u_arcname",
":",
"raise",
"DistlibException",
"(",
"'invalid entry in '",
"'wheel: %r'",
"%",
"u_arcname",
")",
"zf",
".",
"extract",
"(",
"zinfo",
",",
"workdir",
")",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"workdir",
",",
"convert_path",
"(",
"u_arcname",
")",
")",
"path_map",
"[",
"u_arcname",
"]",
"=",
"path",
"# Remember the version.",
"original_version",
",",
"_",
"=",
"get_version",
"(",
"path_map",
",",
"info_dir",
")",
"# Files extracted. Call the modifier.",
"modified",
"=",
"modifier",
"(",
"path_map",
",",
"*",
"*",
"kwargs",
")",
"if",
"modified",
":",
"# Something changed - need to build a new wheel.",
"current_version",
",",
"path",
"=",
"get_version",
"(",
"path_map",
",",
"info_dir",
")",
"if",
"current_version",
"and",
"(",
"current_version",
"==",
"original_version",
")",
":",
"# Add or update local version to signify changes.",
"update_version",
"(",
"current_version",
",",
"path",
")",
"# Decide where the new wheel goes.",
"if",
"dest_dir",
"is",
"None",
":",
"fd",
",",
"newpath",
"=",
"tempfile",
".",
"mkstemp",
"(",
"suffix",
"=",
"'.whl'",
",",
"prefix",
"=",
"'wheel-update-'",
",",
"dir",
"=",
"workdir",
")",
"os",
".",
"close",
"(",
"fd",
")",
"else",
":",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"dest_dir",
")",
":",
"raise",
"DistlibException",
"(",
"'Not a directory: %r'",
"%",
"dest_dir",
")",
"newpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dest_dir",
",",
"self",
".",
"filename",
")",
"archive_paths",
"=",
"list",
"(",
"path_map",
".",
"items",
"(",
")",
")",
"distinfo",
"=",
"os",
".",
"path",
".",
"join",
"(",
"workdir",
",",
"info_dir",
")",
"info",
"=",
"distinfo",
",",
"info_dir",
"self",
".",
"write_records",
"(",
"info",
",",
"workdir",
",",
"archive_paths",
")",
"self",
".",
"build_zip",
"(",
"newpath",
",",
"archive_paths",
")",
"if",
"dest_dir",
"is",
"None",
":",
"shutil",
".",
"copyfile",
"(",
"newpath",
",",
"pathname",
")",
"return",
"modified"
] | [
| [849, 4] | [948, 23] | python | en | ['en', 'error', 'th'] | False |
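A sketch of the modifier contract described in the docstring; the archive entry name and wheel filename are hypothetical.

```python
from distlib.wheel import Wheel  # assumes distlib is installed

def stamp(path_map, **kwargs):
    # Keys are archive paths, values are extracted filesystem paths.
    key = 'mypkg/stamp.txt'            # hypothetical entry
    if key not in path_map:
        return False                   # nothing changed: no new wheel
    with open(path_map[key], 'a') as f:
        f.write('updated\n')
    return True                        # tells update() to rebuild the wheel

w = Wheel('mypkg-0.1-py3-none-any.whl')
print(w.update(stamp, dest_dir='/tmp/out'))  # True if a new wheel was written
```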
logout_then_login | (request, login_url=None) |
Log out the user if they are logged in. Then redirect to the login page.
|
Log out the user if they are logged in. Then redirect to the login page.
|
def logout_then_login(request, login_url=None):
    """
    Log out the user if they are logged in. Then redirect to the login page.
    """
    login_url = resolve_url(login_url or settings.LOGIN_URL)
    return LogoutView.as_view(next_page=login_url)(request)
"def",
"logout_then_login",
"(",
"request",
",",
"login_url",
"=",
"None",
")",
":",
"login_url",
"=",
"resolve_url",
"(",
"login_url",
"or",
"settings",
".",
"LOGIN_URL",
")",
"return",
"LogoutView",
".",
"as_view",
"(",
"next_page",
"=",
"login_url",
")",
"(",
"request",
")"
] | [
| [165, 0] | [170, 59] | python | en | ['en', 'error', 'th'] | False |
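A standard way to wire the view into a Django URLconf; the route and login URL below are illustrative.

```python
# urls.py (sketch)
from django.contrib.auth.views import logout_then_login
from django.urls import path

urlpatterns = [
    # Log the user out, then send them to the given login page.
    path('logout/', logout_then_login, {'login_url': '/accounts/login/'}),
]
```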
redirect_to_login | (next, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME) |
Redirect the user to the login page, passing the given 'next' page.
|
Redirect the user to the login page, passing the given 'next' page.
|
def redirect_to_login(next, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Redirect the user to the login page, passing the given 'next' page.
    """
    resolved_url = resolve_url(login_url or settings.LOGIN_URL)

    login_url_parts = list(urlparse(resolved_url))
    if redirect_field_name:
        querystring = QueryDict(login_url_parts[4], mutable=True)
        querystring[redirect_field_name] = next
        login_url_parts[4] = querystring.urlencode(safe='/')

    return HttpResponseRedirect(urlunparse(login_url_parts))
"def",
"redirect_to_login",
"(",
"next",
",",
"login_url",
"=",
"None",
",",
"redirect_field_name",
"=",
"REDIRECT_FIELD_NAME",
")",
":",
"resolved_url",
"=",
"resolve_url",
"(",
"login_url",
"or",
"settings",
".",
"LOGIN_URL",
")",
"login_url_parts",
"=",
"list",
"(",
"urlparse",
"(",
"resolved_url",
")",
")",
"if",
"redirect_field_name",
":",
"querystring",
"=",
"QueryDict",
"(",
"login_url_parts",
"[",
"4",
"]",
",",
"mutable",
"=",
"True",
")",
"querystring",
"[",
"redirect_field_name",
"]",
"=",
"next",
"login_url_parts",
"[",
"4",
"]",
"=",
"querystring",
".",
"urlencode",
"(",
"safe",
"=",
"'/'",
")",
"return",
"HttpResponseRedirect",
"(",
"urlunparse",
"(",
"login_url_parts",
")",
")"
] | [
| [173, 0] | [185, 60] | python | en | ['en', 'error', 'th'] | False |
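A sketch of the response this helper produces, inside a configured Django project; the paths are illustrative.

```python
from django.contrib.auth.views import redirect_to_login

response = redirect_to_login('/reports/7/', login_url='/accounts/login/')
print(response.status_code)  # 302
print(response.url)          # /accounts/login/?next=/reports/7/
```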
LoginView.get_redirect_url | (self) | Return the user-originating redirect URL if it's safe. | Return the user-originating redirect URL if it's safe. |
def get_redirect_url(self):
    """Return the user-originating redirect URL if it's safe."""
    redirect_to = self.request.POST.get(
        self.redirect_field_name,
        self.request.GET.get(self.redirect_field_name, '')
    )
    url_is_safe = url_has_allowed_host_and_scheme(
        url=redirect_to,
        allowed_hosts=self.get_success_url_allowed_hosts(),
        require_https=self.request.is_secure(),
    )
    return redirect_to if url_is_safe else ''
"def",
"get_redirect_url",
"(",
"self",
")",
":",
"redirect_to",
"=",
"self",
".",
"request",
".",
"POST",
".",
"get",
"(",
"self",
".",
"redirect_field_name",
",",
"self",
".",
"request",
".",
"GET",
".",
"get",
"(",
"self",
".",
"redirect_field_name",
",",
"''",
")",
")",
"url_is_safe",
"=",
"url_has_allowed_host_and_scheme",
"(",
"url",
"=",
"redirect_to",
",",
"allowed_hosts",
"=",
"self",
".",
"get_success_url_allowed_hosts",
"(",
")",
",",
"require_https",
"=",
"self",
".",
"request",
".",
"is_secure",
"(",
")",
",",
")",
"return",
"redirect_to",
"if",
"url_is_safe",
"else",
"''"
] | [
| [68, 4] | [79, 49] | python | en | ['en', 'en', 'en'] | True |
LoginView.form_valid | (self, form) | Security check complete. Log the user in. | Security check complete. Log the user in. |
def form_valid(self, form):
    """Security check complete. Log the user in."""
    auth_login(self.request, form.get_user())
    return HttpResponseRedirect(self.get_success_url())
"def",
"form_valid",
"(",
"self",
",",
"form",
")",
":",
"auth_login",
"(",
"self",
".",
"request",
",",
"form",
".",
"get_user",
"(",
")",
")",
"return",
"HttpResponseRedirect",
"(",
"self",
".",
"get_success_url",
"(",
")",
")"
] | [
| [89, 4] | [92, 59] | python | en | ['en', 'en', 'en'] | True |
LogoutView.post | (self, request, *args, **kwargs) | Logout may be done via POST. | Logout may be done via POST. |
def post(self, request, *args, **kwargs):
    """Logout may be done via POST."""
    return self.get(request, *args, **kwargs)
"def",
"post",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"get",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | [
124,
4
] | [
126,
49
] | python | en | ['en', 'fil', 'en'] | True |
default_subprocess_runner | (cmd, cwd=None, extra_environ=None) | The default method of calling the wrapper subprocess. | The default method of calling the wrapper subprocess. | def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
"""The default method of calling the wrapper subprocess."""
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
check_call(cmd, cwd=cwd, env=env) | [
"def",
"default_subprocess_runner",
"(",
"cmd",
",",
"cwd",
"=",
"None",
",",
"extra_environ",
"=",
"None",
")",
":",
"env",
"=",
"os",
".",
"environ",
".",
"copy",
"(",
")",
"if",
"extra_environ",
":",
"env",
".",
"update",
"(",
"extra_environ",
")",
"check_call",
"(",
"cmd",
",",
"cwd",
"=",
"cwd",
",",
"env",
"=",
"env",
")"
] | [
59,
0
] | [
65,
37
] | python | en | ['en', 'en', 'en'] | True |
quiet_subprocess_runner | (cmd, cwd=None, extra_environ=None) | A method of calling the wrapper subprocess while suppressing output. | A method of calling the wrapper subprocess while suppressing output. | def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None):
"""A method of calling the wrapper subprocess while suppressing output."""
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
check_output(cmd, cwd=cwd, env=env, stderr=STDOUT) | [
"def",
"quiet_subprocess_runner",
"(",
"cmd",
",",
"cwd",
"=",
"None",
",",
"extra_environ",
"=",
"None",
")",
":",
"env",
"=",
"os",
".",
"environ",
".",
"copy",
"(",
")",
"if",
"extra_environ",
":",
"env",
".",
"update",
"(",
"extra_environ",
")",
"check_output",
"(",
"cmd",
",",
"cwd",
"=",
"cwd",
",",
"env",
"=",
"env",
",",
"stderr",
"=",
"STDOUT",
")"
] | [
68,
0
] | [
74,
54
] | python | en | ['en', 'en', 'en'] | True |
norm_and_check | (source_tree, requested) | Normalise and check a backend path.
Ensure that the requested backend path is specified as a relative path,
and resolves to a location under the given source tree.
Return an absolute version of the requested path.
| Normalise and check a backend path. | def norm_and_check(source_tree, requested):
"""Normalise and check a backend path.
Ensure that the requested backend path is specified as a relative path,
and resolves to a location under the given source tree.
Return an absolute version of the requested path.
"""
if os.path.isabs(requested):
raise ValueError("paths must be relative")
abs_source = os.path.abspath(source_tree)
abs_requested = os.path.normpath(os.path.join(abs_source, requested))
# We have to use commonprefix for Python 2.7 compatibility. So we
# normalise case to avoid problems because commonprefix is a character
# based comparison :-(
norm_source = os.path.normcase(abs_source)
norm_requested = os.path.normcase(abs_requested)
if os.path.commonprefix([norm_source, norm_requested]) != norm_source:
raise ValueError("paths must be inside source tree")
return abs_requested | [
"def",
"norm_and_check",
"(",
"source_tree",
",",
"requested",
")",
":",
"if",
"os",
".",
"path",
".",
"isabs",
"(",
"requested",
")",
":",
"raise",
"ValueError",
"(",
"\"paths must be relative\"",
")",
"abs_source",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"source_tree",
")",
"abs_requested",
"=",
"os",
".",
"path",
".",
"normpath",
"(",
"os",
".",
"path",
".",
"join",
"(",
"abs_source",
",",
"requested",
")",
")",
"# We have to use commonprefix for Python 2.7 compatibility. So we",
"# normalise case to avoid problems because commonprefix is a character",
"# based comparison :-(",
"norm_source",
"=",
"os",
".",
"path",
".",
"normcase",
"(",
"abs_source",
")",
"norm_requested",
"=",
"os",
".",
"path",
".",
"normcase",
"(",
"abs_requested",
")",
"if",
"os",
".",
"path",
".",
"commonprefix",
"(",
"[",
"norm_source",
",",
"norm_requested",
"]",
")",
"!=",
"norm_source",
":",
"raise",
"ValueError",
"(",
"\"paths must be inside source tree\"",
")",
"return",
"abs_requested"
] | [
77,
0
] | [
98,
24
] | python | en | ['en', 'en', 'en'] | True |
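
A behaviour sketch for norm_and_check; the module path in the import and the example paths are assumptions:

    from pep517.wrappers import norm_and_check  # module path assumed

    # A relative path that stays inside the tree resolves to an absolute path.
    print(norm_and_check("/project", "backend"))   # /project/backend on POSIX

    # Absolute paths and paths escaping the source tree are rejected.
    try:
        norm_and_check("/project", "../outside")
    except ValueError as exc:
        print(exc)                                 # paths must be inside source tree
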
Pep517HookCaller.subprocess_runner | (self, runner) | A context manager for temporarily overriding the default subprocess
runner.
| A context manager for temporarily overriding the default subprocess
runner.
| def subprocess_runner(self, runner):
"""A context manager for temporarily overriding the default subprocess
runner.
"""
prev = self._subprocess_runner
self._subprocess_runner = runner
try:
yield
finally:
self._subprocess_runner = prev | [
"def",
"subprocess_runner",
"(",
"self",
",",
"runner",
")",
":",
"prev",
"=",
"self",
".",
"_subprocess_runner",
"self",
".",
"_subprocess_runner",
"=",
"runner",
"try",
":",
"yield",
"finally",
":",
"self",
".",
"_subprocess_runner",
"=",
"prev"
] | [
145,
4
] | [
154,
42
] | python | en | ['en', 'en', 'en'] | True |
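
A sketch of the context manager above in use, temporarily swapping in quiet_subprocess_runner so hook output is captured rather than echoed; the source path and backend name are assumptions:

    from pep517.wrappers import Pep517HookCaller, quiet_subprocess_runner

    hooks = Pep517HookCaller("/path/to/source", build_backend="setuptools.build_meta")
    with hooks.subprocess_runner(quiet_subprocess_runner):
        # Runs the hook in a subprocess with stdout/stderr suppressed.
        reqs = hooks.get_requires_for_build_wheel()
    print(reqs)  # the previous runner is restored once the with-block exits
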
Pep517HookCaller.get_requires_for_build_wheel | (self, config_settings=None) | Identify packages required for building a wheel
Returns a list of dependency specifications, e.g.::
["wheel >= 0.25", "setuptools"]
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
| Identify packages required for building a wheel | def get_requires_for_build_wheel(self, config_settings=None):
"""Identify packages required for building a wheel
Returns a list of dependency specifications, e.g.::
["wheel >= 0.25", "setuptools"]
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
"""
return self._call_hook('get_requires_for_build_wheel', {
'config_settings': config_settings
}) | [
"def",
"get_requires_for_build_wheel",
"(",
"self",
",",
"config_settings",
"=",
"None",
")",
":",
"return",
"self",
".",
"_call_hook",
"(",
"'get_requires_for_build_wheel'",
",",
"{",
"'config_settings'",
":",
"config_settings",
"}",
")"
] | [
156,
4
] | [
169,
10
] | python | en | ['en', 'en', 'en'] | True |
Pep517HookCaller.prepare_metadata_for_build_wheel | (
self, metadata_directory, config_settings=None,
_allow_fallback=True) | Prepare a ``*.dist-info`` folder with metadata for this project.
Returns the name of the newly created folder.
If the build backend defines a hook with this name, it will be called
in a subprocess. If not, the backend will be asked to build a wheel,
and the dist-info extracted from that (unless _allow_fallback is
False).
| Prepare a ``*.dist-info`` folder with metadata for this project. | def prepare_metadata_for_build_wheel(
self, metadata_directory, config_settings=None,
_allow_fallback=True):
"""Prepare a ``*.dist-info`` folder with metadata for this project.
Returns the name of the newly created folder.
If the build backend defines a hook with this name, it will be called
in a subprocess. If not, the backend will be asked to build a wheel,
and the dist-info extracted from that (unless _allow_fallback is
False).
"""
return self._call_hook('prepare_metadata_for_build_wheel', {
'metadata_directory': abspath(metadata_directory),
'config_settings': config_settings,
'_allow_fallback': _allow_fallback,
}) | [
"def",
"prepare_metadata_for_build_wheel",
"(",
"self",
",",
"metadata_directory",
",",
"config_settings",
"=",
"None",
",",
"_allow_fallback",
"=",
"True",
")",
":",
"return",
"self",
".",
"_call_hook",
"(",
"'prepare_metadata_for_build_wheel'",
",",
"{",
"'metadata_directory'",
":",
"abspath",
"(",
"metadata_directory",
")",
",",
"'config_settings'",
":",
"config_settings",
",",
"'_allow_fallback'",
":",
"_allow_fallback",
",",
"}",
")"
] | [
171,
4
] | [
187,
10
] | python | en | ['en', 'en', 'en'] | True |
Pep517HookCaller.build_wheel | (
self, wheel_directory, config_settings=None,
metadata_directory=None) | Build a wheel from this project.
Returns the name of the newly created file.
In general, this will call the 'build_wheel' hook in the backend.
However, if that was previously called by
'prepare_metadata_for_build_wheel', and the same metadata_directory is
used, the previously built wheel will be copied to wheel_directory.
| Build a wheel from this project. | def build_wheel(
self, wheel_directory, config_settings=None,
metadata_directory=None):
"""Build a wheel from this project.
Returns the name of the newly created file.
In general, this will call the 'build_wheel' hook in the backend.
However, if that was previously called by
'prepare_metadata_for_build_wheel', and the same metadata_directory is
used, the previously built wheel will be copied to wheel_directory.
"""
if metadata_directory is not None:
metadata_directory = abspath(metadata_directory)
return self._call_hook('build_wheel', {
'wheel_directory': abspath(wheel_directory),
'config_settings': config_settings,
'metadata_directory': metadata_directory,
}) | [
"def",
"build_wheel",
"(",
"self",
",",
"wheel_directory",
",",
"config_settings",
"=",
"None",
",",
"metadata_directory",
"=",
"None",
")",
":",
"if",
"metadata_directory",
"is",
"not",
"None",
":",
"metadata_directory",
"=",
"abspath",
"(",
"metadata_directory",
")",
"return",
"self",
".",
"_call_hook",
"(",
"'build_wheel'",
",",
"{",
"'wheel_directory'",
":",
"abspath",
"(",
"wheel_directory",
")",
",",
"'config_settings'",
":",
"config_settings",
",",
"'metadata_directory'",
":",
"metadata_directory",
",",
"}",
")"
] | [
189,
4
] | [
207,
10
] | python | en | ['en', 'en', 'en'] | True |
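
An end-to-end sketch of the metadata-then-wheel flow the hooks above describe; the source path and backend are assumptions:

    import os
    import tempfile

    from pep517.wrappers import Pep517HookCaller

    hooks = Pep517HookCaller("/path/to/source", build_backend="setuptools.build_meta")
    with tempfile.TemporaryDirectory() as tmp:
        dist_info = hooks.prepare_metadata_for_build_wheel(tmp)
        # Passing the same metadata_directory lets the backend reuse a wheel it
        # already built during the metadata step, as the docstring notes.
        wheel = hooks.build_wheel(tmp, metadata_directory=os.path.join(tmp, dist_info))
        print(wheel)  # e.g. pkg-1.0-py3-none-any.whl
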
Pep517HookCaller.get_requires_for_build_editable | (self, config_settings=None) | Identify packages required for building an editable wheel
Returns a list of dependency specifications, e.g.::
["wheel >= 0.25", "setuptools"]
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
| Identify packages required for building an editable wheel | def get_requires_for_build_editable(self, config_settings=None):
"""Identify packages required for building an editable wheel
Returns a list of dependency specifications, e.g.::
["wheel >= 0.25", "setuptools"]
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
"""
return self._call_hook('get_requires_for_build_editable', {
'config_settings': config_settings
}) | [
"def",
"get_requires_for_build_editable",
"(",
"self",
",",
"config_settings",
"=",
"None",
")",
":",
"return",
"self",
".",
"_call_hook",
"(",
"'get_requires_for_build_editable'",
",",
"{",
"'config_settings'",
":",
"config_settings",
"}",
")"
] | [
209,
4
] | [
222,
10
] | python | en | ['en', 'en', 'en'] | True |
Pep517HookCaller.prepare_metadata_for_build_editable | (
self, metadata_directory, config_settings=None,
_allow_fallback=True) | Prepare a ``*.dist-info`` folder with metadata for this project.
Returns the name of the newly created folder.
If the build backend defines a hook with this name, it will be called
in a subprocess. If not, the backend will be asked to build an editable
wheel, and the dist-info extracted from that (unless _allow_fallback is
False).
| Prepare a ``*.dist-info`` folder with metadata for this project. | def prepare_metadata_for_build_editable(
self, metadata_directory, config_settings=None,
_allow_fallback=True):
"""Prepare a ``*.dist-info`` folder with metadata for this project.
Returns the name of the newly created folder.
If the build backend defines a hook with this name, it will be called
in a subprocess. If not, the backend will be asked to build an editable
wheel, and the dist-info extracted from that (unless _allow_fallback is
False).
"""
return self._call_hook('prepare_metadata_for_build_editable', {
'metadata_directory': abspath(metadata_directory),
'config_settings': config_settings,
'_allow_fallback': _allow_fallback,
}) | [
"def",
"prepare_metadata_for_build_editable",
"(",
"self",
",",
"metadata_directory",
",",
"config_settings",
"=",
"None",
",",
"_allow_fallback",
"=",
"True",
")",
":",
"return",
"self",
".",
"_call_hook",
"(",
"'prepare_metadata_for_build_editable'",
",",
"{",
"'metadata_directory'",
":",
"abspath",
"(",
"metadata_directory",
")",
",",
"'config_settings'",
":",
"config_settings",
",",
"'_allow_fallback'",
":",
"_allow_fallback",
",",
"}",
")"
] | [
224,
4
] | [
240,
10
] | python | en | ['en', 'en', 'en'] | True |
Pep517HookCaller.build_editable | (
self, wheel_directory, config_settings=None,
metadata_directory=None) | Build an editable wheel from this project.
Returns the name of the newly created file.
In general, this will call the 'build_editable' hook in the backend.
However, if that was previously called by
'prepare_metadata_for_build_editable', and the same metadata_directory
is used, the previously built wheel will be copied to wheel_directory.
| Build an editable wheel from this project. | def build_editable(
self, wheel_directory, config_settings=None,
metadata_directory=None):
"""Build an editable wheel from this project.
Returns the name of the newly created file.
In general, this will call the 'build_editable' hook in the backend.
However, if that was previously called by
'prepare_metadata_for_build_editable', and the same metadata_directory
is used, the previously built wheel will be copied to wheel_directory.
"""
if metadata_directory is not None:
metadata_directory = abspath(metadata_directory)
return self._call_hook('build_editable', {
'wheel_directory': abspath(wheel_directory),
'config_settings': config_settings,
'metadata_directory': metadata_directory,
}) | [
"def",
"build_editable",
"(",
"self",
",",
"wheel_directory",
",",
"config_settings",
"=",
"None",
",",
"metadata_directory",
"=",
"None",
")",
":",
"if",
"metadata_directory",
"is",
"not",
"None",
":",
"metadata_directory",
"=",
"abspath",
"(",
"metadata_directory",
")",
"return",
"self",
".",
"_call_hook",
"(",
"'build_editable'",
",",
"{",
"'wheel_directory'",
":",
"abspath",
"(",
"wheel_directory",
")",
",",
"'config_settings'",
":",
"config_settings",
",",
"'metadata_directory'",
":",
"metadata_directory",
",",
"}",
")"
] | [
242,
4
] | [
260,
10
] | python | en | ['en', 'en', 'en'] | True |
Pep517HookCaller.get_requires_for_build_sdist | (self, config_settings=None) | Identify packages required for building an sdist
Returns a list of dependency specifications, e.g.::
["setuptools >= 26"]
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
| Identify packages required for building an sdist | def get_requires_for_build_sdist(self, config_settings=None):
"""Identify packages required for building an sdist
Returns a list of dependency specifications, e.g.::
["setuptools >= 26"]
This does not include requirements specified in pyproject.toml.
It returns the result of calling the equivalently named hook in a
subprocess.
"""
return self._call_hook('get_requires_for_build_sdist', {
'config_settings': config_settings
}) | [
"def",
"get_requires_for_build_sdist",
"(",
"self",
",",
"config_settings",
"=",
"None",
")",
":",
"return",
"self",
".",
"_call_hook",
"(",
"'get_requires_for_build_sdist'",
",",
"{",
"'config_settings'",
":",
"config_settings",
"}",
")"
] | [
262,
4
] | [
275,
10
] | python | en | ['en', 'en', 'en'] | True |
Pep517HookCaller.build_sdist | (self, sdist_directory, config_settings=None) | Build an sdist from this project.
Returns the name of the newly created file.
This calls the 'build_sdist' backend hook in a subprocess.
| Build an sdist from this project. | def build_sdist(self, sdist_directory, config_settings=None):
"""Build an sdist from this project.
Returns the name of the newly created file.
This calls the 'build_sdist' backend hook in a subprocess.
"""
return self._call_hook('build_sdist', {
'sdist_directory': abspath(sdist_directory),
'config_settings': config_settings,
}) | [
"def",
"build_sdist",
"(",
"self",
",",
"sdist_directory",
",",
"config_settings",
"=",
"None",
")",
":",
"return",
"self",
".",
"_call_hook",
"(",
"'build_sdist'",
",",
"{",
"'sdist_directory'",
":",
"abspath",
"(",
"sdist_directory",
")",
",",
"'config_settings'",
":",
"config_settings",
",",
"}",
")"
] | [
277,
4
] | [
287,
10
] | python | en | ['en', 'en', 'en'] | True |
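
A minimal sdist sketch combining the two sdist hooks above; the paths are assumptions:

    from pep517.wrappers import Pep517HookCaller

    hooks = Pep517HookCaller("/path/to/source", build_backend="setuptools.build_meta")
    print(hooks.get_requires_for_build_sdist())  # often [] for setuptools projects
    print(hooks.build_sdist("dist"))             # e.g. pkg-1.0.tar.gz, written to dist/
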
UpdateOwnProfile.has_object_permission | (self, request, view, obj) | Check user is trying to edit their own profile | Check user is trying to edit their own profile | def has_object_permission(self, request, view, obj):
"""Check user is trying to edit their own profile"""
if request.method in permissions.SAFE_METHODS:
return True
return obj.id == request.user.id | [
"def",
"has_object_permission",
"(",
"self",
",",
"request",
",",
"view",
",",
"obj",
")",
":",
"if",
"request",
".",
"method",
"in",
"permissions",
".",
"SAFE_METHODS",
":",
"return",
"True",
"return",
"obj",
".",
"id",
"==",
"request",
".",
"user",
".",
"id"
] | [
6,
4
] | [
11,
40
] | python | en | ['en', 'en', 'en'] | True |
UpdateOwnStatus.has_object_permission | (self, request, view, obj) | Check the user is trying to update their own status | Check the user is trying to update their own status | def has_object_permission(self, request, view, obj):
"""Check the user is trying to update their own status"""
if request.method in permissions.SAFE_METHODS:
return True
return obj.id == request.user.id | [
"def",
"has_object_permission",
"(",
"self",
",",
"request",
",",
"view",
",",
"obj",
")",
":",
"if",
"request",
".",
"method",
"in",
"permissions",
".",
"SAFE_METHODS",
":",
"return",
"True",
"return",
"obj",
".",
"id",
"==",
"request",
".",
"user",
".",
"id"
] | [
17,
4
] | [
22,
40
] | python | en | ['en', 'en', 'en'] | True |
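
A hypothetical Django REST Framework viewset wiring in the permission class above; the models, serializers, and module layout are assumptions for illustration:

    from rest_framework import viewsets
    from rest_framework.authentication import TokenAuthentication

    from profiles import models, permissions, serializers  # app layout assumed

    class UserProfileViewSet(viewsets.ModelViewSet):
        queryset = models.UserProfile.objects.all()
        serializer_class = serializers.UserProfileSerializer
        authentication_classes = (TokenAuthentication,)
        # SAFE_METHODS pass through; writes require obj.id == request.user.id.
        permission_classes = (permissions.UpdateOwnProfile,)
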
ModificationTrackingDict.copy | (self) | Create a flat copy of the dict. | Create a flat copy of the dict. | def copy(self):
"""Create a flat copy of the dict."""
missing = object()
result = object.__new__(self.__class__)
for name in self.__slots__:
val = getattr(self, name, missing)
if val is not missing:
setattr(result, name, val)
return result | [
"def",
"copy",
"(",
"self",
")",
":",
"missing",
"=",
"object",
"(",
")",
"result",
"=",
"object",
".",
"__new__",
"(",
"self",
".",
"__class__",
")",
"for",
"name",
"in",
"self",
".",
"__slots__",
":",
"val",
"=",
"getattr",
"(",
"self",
",",
"name",
",",
"missing",
")",
"if",
"val",
"is",
"not",
"missing",
":",
"setattr",
"(",
"result",
",",
"name",
",",
"val",
")",
"return",
"result"
] | [
108,
4
] | [
116,
21
] | python | en | ['en', 'en', 'en'] | True |
Session.should_save | (self) | True if the session should be saved.
.. versionchanged:: 0.6
By default the session is now only saved if the session is
modified, not if it is new like it was before.
| True if the session should be saved. | def should_save(self):
"""True if the session should be saved.
.. versionchanged:: 0.6
By default the session is now only saved if the session is
modified, not if it is new like it was before.
"""
return self.modified | [
"def",
"should_save",
"(",
"self",
")",
":",
"return",
"self",
".",
"modified"
] | [
143,
4
] | [
150,
28
] | python | en | ['en', 'en', 'en'] | True |
SessionStore.is_valid_key | (self, key) | Check if a key has the correct format. | Check if a key has the correct format. | def is_valid_key(self, key):
"""Check if a key has the correct format."""
return _sha1_re.match(key) is not None | [
"def",
"is_valid_key",
"(",
"self",
",",
"key",
")",
":",
"return",
"_sha1_re",
".",
"match",
"(",
"key",
")",
"is",
"not",
"None"
] | [
167,
4
] | [
169,
46
] | python | en | ['en', 'en', 'en'] | True |
SessionStore.generate_key | (self, salt=None) | Simple function that generates a new session key. | Simple function that generates a new session key. | def generate_key(self, salt=None):
"""Simple function that generates a new session key."""
return generate_key(salt) | [
"def",
"generate_key",
"(",
"self",
",",
"salt",
"=",
"None",
")",
":",
"return",
"generate_key",
"(",
"salt",
")"
] | [
171,
4
] | [
173,
33
] | python | en | ['en', 'en', 'en'] | True |
SessionStore.new | (self) | Generate a new session. | Generate a new session. | def new(self):
"""Generate a new session."""
return self.session_class({}, self.generate_key(), True) | [
"def",
"new",
"(",
"self",
")",
":",
"return",
"self",
".",
"session_class",
"(",
"{",
"}",
",",
"self",
".",
"generate_key",
"(",
")",
",",
"True",
")"
] | [
175,
4
] | [
177,
64
] | python | en | ['en', 'en', 'en'] | True |
SessionStore.save | (self, session) | Save a session. | Save a session. | def save(self, session):
"""Save a session.""" | [
"def",
"save",
"(",
"self",
",",
"session",
")",
":"
] | [
179,
4
] | [
180,
29
] | python | en | ['en', 'en', 'en'] | True |
SessionStore.save_if_modified | (self, session) | Save if a session class wants an update. | Save if a session class wants an update. | def save_if_modified(self, session):
"""Save if a session class wants an update."""
if session.should_save:
self.save(session) | [
"def",
"save_if_modified",
"(",
"self",
",",
"session",
")",
":",
"if",
"session",
".",
"should_save",
":",
"self",
".",
"save",
"(",
"session",
")"
] | [
182,
4
] | [
185,
30
] | python | en | ['en', 'en', 'en'] | True |
SessionStore.delete | (self, session) | Delete a session. | Delete a session. | def delete(self, session):
"""Delete a session.""" | [
"def",
"delete",
"(",
"self",
",",
"session",
")",
":"
] | [
187,
4
] | [
188,
31
] | python | en | ['en', 'it', 'en'] | True |
SessionStore.get | (self, sid) | Get a session for this sid or a new session object. This method
has to check if the session key is valid and create a new session if
that wasn't the case.
| Get a session for this sid or a new session object. This method
has to check if the session key is valid and create a new session if
that wasn't the case.
| def get(self, sid):
"""Get a session for this sid or a new session object. This method
has to check if the session key is valid and create a new session if
that wasn't the case.
"""
return self.session_class({}, sid, True) | [
"def",
"get",
"(",
"self",
",",
"sid",
")",
":",
"return",
"self",
".",
"session_class",
"(",
"{",
"}",
",",
"sid",
",",
"True",
")"
] | [
190,
4
] | [
195,
48
] | python | en | ['en', 'en', 'en'] | True |
FilesystemSessionStore.list | (self) | Lists all sessions in the store.
.. versionadded:: 0.6
| Lists all sessions in the store. | def list(self):
"""Lists all sessions in the store.
.. versionadded:: 0.6
"""
before, after = self.filename_template.split("%s", 1)
filename_re = re.compile(
r"%s(.{5,})%s$" % (re.escape(before), re.escape(after))
)
result = []
for filename in os.listdir(self.path):
#: this is a session that is still being saved.
if filename.endswith(_fs_transaction_suffix):
continue
match = filename_re.match(filename)
if match is not None:
result.append(match.group(1))
return result | [
"def",
"list",
"(",
"self",
")",
":",
"before",
",",
"after",
"=",
"self",
".",
"filename_template",
".",
"split",
"(",
"\"%s\"",
",",
"1",
")",
"filename_re",
"=",
"re",
".",
"compile",
"(",
"r\"%s(.{5,})%s$\"",
"%",
"(",
"re",
".",
"escape",
"(",
"before",
")",
",",
"re",
".",
"escape",
"(",
"after",
")",
")",
")",
"result",
"=",
"[",
"]",
"for",
"filename",
"in",
"os",
".",
"listdir",
"(",
"self",
".",
"path",
")",
":",
"#: this is a session that is still being saved.",
"if",
"filename",
".",
"endswith",
"(",
"_fs_transaction_suffix",
")",
":",
"continue",
"match",
"=",
"filename_re",
".",
"match",
"(",
"filename",
")",
"if",
"match",
"is",
"not",
"None",
":",
"result",
".",
"append",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
"return",
"result"
] | [
293,
4
] | [
310,
21
] | python | en | ['en', 'en', 'en'] | True |
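
A round-trip sketch of the session-store API collected above; the import path matches pre-1.0 Werkzeug releases and the storage directory is an assumption:

    from werkzeug.contrib.sessions import FilesystemSessionStore  # pre-1.0 Werkzeug

    store = FilesystemSessionStore("/tmp/app-sessions")
    session = store.new()             # fresh Session with a generated sid
    session["user_id"] = 42           # sets modified, so should_save becomes True
    store.save_if_modified(session)   # persisted only because it was modified
    print(store.list())               # sids of every saved session
    print(store.get(session.sid))     # reloads by sid; invalid sids yield a new session
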
DashboardsServiceTransport.__init__ | (
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) | Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
| Instantiate the transport. | def __init__(
self,
*,
host: str = DEFAULT_HOST,
credentials: ga_credentials.Credentials = None,
credentials_file: Optional[str] = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
host += ":443"
self._host = host
scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES}
# Save the scopes.
self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
raise core_exceptions.DuplicateCredentialArgs(
"'credentials_file' and 'credentials' are mutually exclusive"
)
if credentials_file is not None:
credentials, _ = google.auth.load_credentials_from_file(
credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = google.auth.default(
**scopes_kwargs, quota_project_id=quota_project_id
)
# If the credentials are service account credentials, then always try to use self signed JWT.
if (
always_use_jwt_access
and isinstance(credentials, service_account.Credentials)
and hasattr(service_account.Credentials, "with_always_use_jwt_access")
):
credentials = credentials.with_always_use_jwt_access(True)
# Save the credentials.
self._credentials = credentials | [
"def",
"__init__",
"(",
"self",
",",
"*",
",",
"host",
":",
"str",
"=",
"DEFAULT_HOST",
",",
"credentials",
":",
"ga_credentials",
".",
"Credentials",
"=",
"None",
",",
"credentials_file",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"scopes",
":",
"Optional",
"[",
"Sequence",
"[",
"str",
"]",
"]",
"=",
"None",
",",
"quota_project_id",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"client_info",
":",
"gapic_v1",
".",
"client_info",
".",
"ClientInfo",
"=",
"DEFAULT_CLIENT_INFO",
",",
"always_use_jwt_access",
":",
"Optional",
"[",
"bool",
"]",
"=",
"False",
",",
"*",
"*",
"kwargs",
",",
")",
"->",
"None",
":",
"# Save the hostname. Default to port 443 (HTTPS) if none is specified.",
"if",
"\":\"",
"not",
"in",
"host",
":",
"host",
"+=",
"\":443\"",
"self",
".",
"_host",
"=",
"host",
"scopes_kwargs",
"=",
"{",
"\"scopes\"",
":",
"scopes",
",",
"\"default_scopes\"",
":",
"self",
".",
"AUTH_SCOPES",
"}",
"# Save the scopes.",
"self",
".",
"_scopes",
"=",
"scopes",
"# If no credentials are provided, then determine the appropriate",
"# defaults.",
"if",
"credentials",
"and",
"credentials_file",
":",
"raise",
"core_exceptions",
".",
"DuplicateCredentialArgs",
"(",
"\"'credentials_file' and 'credentials' are mutually exclusive\"",
")",
"if",
"credentials_file",
"is",
"not",
"None",
":",
"credentials",
",",
"_",
"=",
"google",
".",
"auth",
".",
"load_credentials_from_file",
"(",
"credentials_file",
",",
"*",
"*",
"scopes_kwargs",
",",
"quota_project_id",
"=",
"quota_project_id",
")",
"elif",
"credentials",
"is",
"None",
":",
"credentials",
",",
"_",
"=",
"google",
".",
"auth",
".",
"default",
"(",
"*",
"*",
"scopes_kwargs",
",",
"quota_project_id",
"=",
"quota_project_id",
")",
"# If the credentials are service account credentials, then always try to use self signed JWT.",
"if",
"(",
"always_use_jwt_access",
"and",
"isinstance",
"(",
"credentials",
",",
"service_account",
".",
"Credentials",
")",
"and",
"hasattr",
"(",
"service_account",
".",
"Credentials",
",",
"\"with_always_use_jwt_access\"",
")",
")",
":",
"credentials",
"=",
"credentials",
".",
"with_always_use_jwt_access",
"(",
"True",
")",
"# Save the credentials.",
"self",
".",
"_credentials",
"=",
"credentials"
] | [
53,
4
] | [
125,
39
] | python | en | ['en', 'en', 'en'] | True |
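
A construction sketch for the transport above using explicit service-account credentials; the concrete gRPC subclass name and key path are assumptions:

    from google.oauth2 import service_account
    from google.cloud.monitoring_dashboard_v1.services.dashboards_service.transports import (
        DashboardsServiceGrpcTransport,  # concrete subclass of the base transport
    )

    creds = service_account.Credentials.from_service_account_file("key.json")
    transport = DashboardsServiceGrpcTransport(
        host="monitoring.googleapis.com",  # ":443" is appended automatically
        credentials=creds,
    )
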
DashboardsServiceTransport.close | (self) | Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
| Closes resources associated with the transport. | def close(self):
"""Closes resources associated with the transport.
.. warning::
Only call this method if the transport is NOT shared
with other clients - this may cause errors in other clients!
"""
raise NotImplementedError() | [
"def",
"close",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
")"
] | [
147,
4
] | [
154,
35
] | python | en | ['en', 'en', 'en'] | True |
pega_salada_sobremesa_suco | (items) | Funcao auxiliar que popula os atributos salada, sobremesa e suco do cardapio da refeicao fornecida. | Funcao auxiliar que popula os atributos salada, sobremesa e suco do cardapio da refeicao fornecida. | def pega_salada_sobremesa_suco(items):
""" Funcao auxiliar que popula os atributos salada, sobremesa e suco do cardapio da refeicao fornecida."""
alimentos = ["salada", "suco", "sobremesa"]
cardapio = {}
for alim in alimentos:
tag = alim.upper() + ":" # tag para procurar o cardapio dos alimentos acima dentro do vetor items
valor = [s.replace(tag, "") for s in items if tag in s][0] # pega o valor do alimento e ja tira a tag (e.g. "SOBREMESA:")
cardapio[alim] = valor.capitalize() # lowercase eh melhor para exibir.
items = [s for s in items if tag not in s]
return cardapio, items | [
"def",
"pega_salada_sobremesa_suco",
"(",
"items",
")",
":",
"alimentos",
"=",
"[",
"\"salada\"",
",",
"\"suco\"",
",",
"\"sobremesa\"",
"]",
"cardapio",
"=",
"{",
"}",
"for",
"alim",
"in",
"alimentos",
":",
"tag",
"=",
"alim",
".",
"upper",
"(",
")",
"+",
"\":\"",
"# tag para procurar o cardapio dos alimentos acima dentro do vetor items",
"valor",
"=",
"[",
"s",
".",
"replace",
"(",
"tag",
",",
"\"\"",
")",
"for",
"s",
"in",
"items",
"if",
"tag",
"in",
"s",
"]",
"[",
"0",
"]",
"# pega o valor do alimento e ja tira a tag (e.g. \"SOBREMESA:\")",
"cardapio",
"[",
"alim",
"]",
"=",
"valor",
".",
"capitalize",
"(",
")",
"# lowercase eh melhor para exibir.",
"items",
"=",
"[",
"s",
"for",
"s",
"in",
"items",
"if",
"tag",
"not",
"in",
"s",
"]",
"return",
"cardapio",
",",
"items"
] | [
20,
0
] | [
32,
26
] | python | pt | ['pt', 'pt', 'pt'] | True |
get_refeicao | (tipo, soup) |
Faz o parsing do cardapio de uma refeicao (e.g. almoco, jantar), dado o elemento em HTML contendo as informacoes.
:param tipo: string que representa qual refeicao é (e.g. "Almoço", "Jantar")
:param soup: objeto do BeautifulSoup que contem o HTML e sera utilizado para fazer o parsing.
:return: dict com cardapio de uma refeicao.
|
Faz o parsing do cardapio de uma refeicao (e.g. almoco, jantar), dado o elemento em HTML contendo as informacoes. | def get_refeicao(tipo, soup):
"""
Faz o parsing do cardapio de uma refeicao (e.g. almoco, jantar), dado o elemento em HTML contendo as informacoes.
:param tipo: string que representa qual refeicao é (e.g. "Almoço", "Jantar")
:param soup: objeto do BeautifulSoup que contem o HTML e sera utilizado para fazer o parsing.
:return: dict com cardapio de uma refeicao.
"""
items = [s for s in soup.get_text().split("\n") if s] # retira todas as tags de HTML, e armazena todos os elementos (strings nao vazias) em uma lista.
cardapio = {}
# print(items)
observacoes = []
obs = items.pop()
# print(obs)
while("Observações:" not in obs):
observacoes.append(obs)
try:
obs = items.pop()
except:
return None
observacoes.append(obs)
observacoes.reverse()
observacao_final = reduce(lambda x, y: x+y, observacoes)
cardapio["observacoes"] = observacao_final.replace("Observações: ", "").replace("Observações: ", "").capitalize()
suco = items.pop()
cardapio["suco"] = suco.replace("SUCO:", "").capitalize()
sobremesa = items.pop()
cardapio["sobremesa"] = sobremesa.replace("SOBREMESA:", "").capitalize()
salada = items.pop()
cardapio["salada"] = salada.replace("SALADA:", "").capitalize()
pp = items.pop()
if "PTS" in pp:
cardapio["pts"] = pp.capitalize()
pp = items.pop()
else:
cardapio["pts"] = "-"
if "PRATO PRINCIPAL:" not in pp:
cardapio["guarnicao"] = pp.replace("GUARNIÇÃO: ", "").capitalize()
pp = items.pop()
else:
cardapio["guarnicao"] = "-"
last = items.pop()
if last is not None and "ARROZ" in last:
cardapio["arroz_feijao"] = last.capitalize()
else:
cardapio["arroz_feijao"] = "Arroz e feijão"
prato_principal = pp.replace("PRATO PRINCIPAL: ", "").replace("PRATO PRINCIPAL: ", "").capitalize()
cardapio["prato_principal"] = prato_principal
# pprint(cardapio)
limpa_especificos(cardapio)
return Refeicao(tipo=tipo, **cardapio) | [
"def",
"get_refeicao",
"(",
"tipo",
",",
"soup",
")",
":",
"items",
"=",
"[",
"s",
"for",
"s",
"in",
"soup",
".",
"get_text",
"(",
")",
".",
"split",
"(",
"\"\\n\"",
")",
"if",
"s",
"]",
"# retira todas as tags de HTML, e armazena todos os elementos (strings nao vazias) em uma lista.",
"cardapio",
"=",
"{",
"}",
"# print(items)",
"observacoes",
"=",
"[",
"]",
"obs",
"=",
"items",
".",
"pop",
"(",
")",
"# print(obs)",
"while",
"(",
"\"Observações:\" n",
"t i",
" o",
"s):",
"",
"",
"observacoes",
".",
"append",
"(",
"obs",
")",
"try",
":",
"obs",
"=",
"items",
".",
"pop",
"(",
")",
"except",
":",
"return",
"None",
"observacoes",
".",
"append",
"(",
"obs",
")",
"observacoes",
".",
"reverse",
"(",
")",
"observacao_final",
"=",
"reduce",
"(",
"lambda",
"x",
",",
"y",
":",
"x",
"+",
"y",
",",
"observacoes",
")",
"cardapio",
"[",
"\"observacoes\"",
"]",
"=",
"observacao_final",
".",
"replace",
"(",
"\"Observações: \", ",
"\"",
").",
"r",
"e",
"place(\"",
"O",
"bservações: \", \"\"",
")",
"ca",
"p",
"i",
"talize()",
"",
"",
"suco",
"=",
"items",
".",
"pop",
"(",
")",
"cardapio",
"[",
"\"suco\"",
"]",
"=",
"suco",
".",
"replace",
"(",
"\"SUCO:\"",
",",
"\"\"",
")",
".",
"capitalize",
"(",
")",
"sobremesa",
"=",
"items",
".",
"pop",
"(",
")",
"cardapio",
"[",
"\"sobremesa\"",
"]",
"=",
"sobremesa",
".",
"replace",
"(",
"\"SOBREMESA:\"",
",",
"\"\"",
")",
".",
"capitalize",
"(",
")",
"salada",
"=",
"items",
".",
"pop",
"(",
")",
"cardapio",
"[",
"\"salada\"",
"]",
"=",
"salada",
".",
"replace",
"(",
"\"SALADA:\"",
",",
"\"\"",
")",
".",
"capitalize",
"(",
")",
"pp",
"=",
"items",
".",
"pop",
"(",
")",
"if",
"\"PTS\"",
"in",
"pp",
":",
"cardapio",
"[",
"\"pts\"",
"]",
"=",
"pp",
".",
"capitalize",
"(",
")",
"pp",
"=",
"items",
".",
"pop",
"(",
")",
"else",
":",
"cardapio",
"[",
"\"pts\"",
"]",
"=",
"\"-\"",
"if",
"\"PRATO PRINCIPAL:\"",
"not",
"in",
"pp",
":",
"cardapio",
"[",
"\"guarnicao\"",
"]",
"=",
"pp",
".",
"replace",
"(",
"\"GUARNIÇÃO: \", ",
"\"",
").",
"c",
"a",
"pitalize()",
"",
"",
"pp",
"=",
"items",
".",
"pop",
"(",
")",
"else",
":",
"cardapio",
"[",
"\"guarnicao\"",
"]",
"=",
"\"-\"",
"last",
"=",
"items",
".",
"pop",
"(",
")",
"if",
"last",
"is",
"not",
"None",
"and",
"\"ARROZ\"",
"in",
"last",
":",
"cardapio",
"[",
"\"arroz_feijao\"",
"]",
"=",
"last",
".",
"capitalize",
"(",
")",
"else",
":",
"cardapio",
"[",
"\"arroz_feijao\"",
"]",
"=",
"\"Arroz e feijão\"",
"prato_principal",
"=",
"pp",
".",
"replace",
"(",
"\"PRATO PRINCIPAL: \"",
",",
"\"\"",
")",
".",
"replace",
"(",
"\"PRATO PRINCIPAL: \"",
",",
"\"\"",
")",
".",
"capitalize",
"(",
")",
"cardapio",
"[",
"\"prato_principal\"",
"]",
"=",
"prato_principal",
"# pprint(cardapio)",
"limpa_especificos",
"(",
"cardapio",
")",
"return",
"Refeicao",
"(",
"tipo",
"=",
"tipo",
",",
"*",
"*",
"cardapio",
")"
] | [
37,
0
] | [
120,
42
] | python | en | ['en', 'error', 'th'] | False |
cardapio_por_data | (data_string) |
Dada a string de uma data, fornece o objeto da classe Cardapio correspondente ao cardapio dessa data.
:param data_string: string da data do cardapio desejado.
:return: objeto da classe Cardapio que contem o cardapio requisitado.
|
Dada a string de uma data, fornece o objeto da classe Cardapio correspondente ao cardapio dessa data. | def cardapio_por_data(data_string):
"""
Dada a string de uma data, fornece o objeto da classe Cardapio correspondente ao cardapio dessa data.
:param data_string: string da data do cardapio desejado.
:return: objeto da classe Cardapio que contem o cardapio requisitado.
"""
res = requests.get(URL_TEMPLATE+data_string) # faz o request para a pagina da prefeitura
html_doc = res.content # pega a pagina HTML
soup = BeautifulSoup(html_doc, 'html.parser')
meals = soup.find_all(class_="fundo_cardapio") # pega todos os elementos da classe fundo_cardapio. Dos 5 encontrados, 4 sao os cardapios das 4 refeicoes.
tipos_refeicoes = list(TipoRefeicao) # lista os tipos de refeicao para iterarmos sobre eles.
chaves_para_tipos = {
'Almoço': 'almoco',
'Almoço vegetariano': 'almoco_vegetariano',
'Jantar': 'jantar',
'Jantar vegetariano': 'jantar_vegetariano'
}
refeicoes = {}
if len(meals) < 4:
return None
for i, m in enumerate(meals[1:]):
v = list(chaves_para_tipos.values())[i]
ref = get_refeicao(tipos_refeicoes[i].value, m)
if ref is None:
return None
refeicoes[v] = ref
return Cardapio(data=data_string, **refeicoes) | [
"def",
"cardapio_por_data",
"(",
"data_string",
")",
":",
"res",
"=",
"requests",
".",
"get",
"(",
"URL_TEMPLATE",
"+",
"data_string",
")",
"# faz o request para a pagina da prefeitura",
"html_doc",
"=",
"res",
".",
"content",
"# pega a pagina HTML",
"soup",
"=",
"BeautifulSoup",
"(",
"html_doc",
",",
"'html.parser'",
")",
"meals",
"=",
"soup",
".",
"find_all",
"(",
"class_",
"=",
"\"fundo_cardapio\"",
")",
"# pega todos os elementos da classe fundo_cardapio. Dos 5 encontrados, 4 sao os cardapios das 4 refeicoes.",
"tipos_refeicoes",
"=",
"list",
"(",
"TipoRefeicao",
")",
"# lista os tipos de refeicao para iterarmos sobre eles.",
"chaves_para_tipos",
"=",
"{",
"'Almoço':",
" ",
"almoco',",
"",
"'Almoço vegetariano':",
" ",
"almoco_vegetariano',",
"",
"'Jantar'",
":",
"'jantar'",
",",
"'Jantar vegetariano'",
":",
"'jantar_vegetariano'",
"}",
"refeicoes",
"=",
"{",
"}",
"if",
"len",
"(",
"meals",
")",
"<",
"4",
":",
"return",
"None",
"for",
"i",
",",
"m",
"in",
"enumerate",
"(",
"meals",
"[",
"1",
":",
"]",
")",
":",
"v",
"=",
"list",
"(",
"chaves_para_tipos",
".",
"values",
"(",
")",
")",
"[",
"i",
"]",
"ref",
"=",
"get_refeicao",
"(",
"tipos_refeicoes",
"[",
"i",
"]",
".",
"value",
",",
"m",
")",
"if",
"ref",
"is",
"None",
":",
"return",
"None",
"refeicoes",
"[",
"v",
"]",
"=",
"ref",
"return",
"Cardapio",
"(",
"data",
"=",
"data_string",
",",
"*",
"*",
"refeicoes",
")"
] | [
124,
0
] | [
161,
50
] | python | en | ['en', 'error', 'th'] | False |
cardapio_para_datas | (data_strings) |
Dada uma lista com strings das datas, essa funcao retorna os objetos Cardapio desses cardapios em uma lista.
:param data_strings: lista com strings das datas dos cardapios desejados.
:return: lista com objetos da classe Cardapio que contem os cardapios das datas fornecidas.
|
Dada uma lista com strings das datas, essa funcao retorna os objetos Cardapio desses cardapios em uma lista. | def cardapio_para_datas(data_strings):
"""
Dada uma lista com strings das datas, essa funcao retorna os objetos Cardapio desses cardapios em uma lista.
:param data_strings: lista com strings das datas dos cardapios desejados.
:return: lista com objetos da classe Cardapio que contem os cardapios das datas fornecidas.
"""
cardapios = []
for data in data_strings:
c = cardapio_por_data(data)
if c is not None and len(c.__dict__.keys()) > 2:
print("Cardapio para data: {}.".format(data))
# print(c)
cardapios.append(limpa_nao_informado(c))
print("len(cardapios) = {}".format(len(cardapios)))
return cardapios | [
"def",
"cardapio_para_datas",
"(",
"data_strings",
")",
":",
"cardapios",
"=",
"[",
"]",
"for",
"data",
"in",
"data_strings",
":",
"c",
"=",
"cardapio_por_data",
"(",
"data",
")",
"if",
"c",
"is",
"not",
"None",
"and",
"len",
"(",
"c",
".",
"__dict__",
".",
"keys",
"(",
")",
")",
">",
"2",
":",
"print",
"(",
"\"Cardapio para data: {}.\"",
".",
"format",
"(",
"data",
")",
")",
"# print(c)",
"cardapios",
".",
"append",
"(",
"limpa_nao_informado",
"(",
"c",
")",
")",
"print",
"(",
"\"len(cardapios) = {}\"",
".",
"format",
"(",
"len",
"(",
"cardapios",
")",
")",
")",
"return",
"cardapios"
] | [
168,
0
] | [
185,
20
] | python | en | ['en', 'error', 'th'] | False |
get_next_cardapios | (date_string, next) |
Fornece os cardapios dos *next* dias a partir da data fornecida em *date_string*.
Ponto de entrada principal. A view que sera chamada no app sera essa.
:param date_string: data inicial.
:param next: inteiro que representa a quantidade de cardapios desejados a partir da data inicial.
:return: lista com os objetos Cardapio contendo os cardapios das datas requisitadas.
|
Fornece os cardapios dos *next* dias a partir da data fornecida em *date_string*. | def get_next_cardapios(date_string, next):
"""
Fornece os cardapios dos *next* dias a partir da data fornecida em *date_string*.
Ponto de entrada principal. A view que sera chamada no app sera essa.
:param date_string: data inicial.
:param next: inteiro que representa a quantidade de cardapios desejados a partir da data inicial.
:return: lista com os objetos Cardapio contendo os cardapios das datas requisitadas.
"""
date_strings = date_services.next_weekdays(next, start_date_string=date_string)
# print("EXECUTOU get_next_cardapio")
return cardapio_para_datas(date_strings) | [
"def",
"get_next_cardapios",
"(",
"date_string",
",",
"next",
")",
":",
"date_strings",
"=",
"date_services",
".",
"next_weekdays",
"(",
"next",
",",
"start_date_string",
"=",
"date_string",
")",
"# print(\"EXECUTOU get_next_cardapio\")",
"return",
"cardapio_para_datas",
"(",
"date_strings",
")"
] | [
189,
0
] | [
204,
44
] | python | en | ['en', 'error', 'th'] | False |
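
An illustrative call of get_next_cardapios; the module name in the import, the date-string format (passed straight into URL_TEMPLATE), and the attribute layout (as suggested by the constructors above) are assumptions:

    from cardapio_services import get_next_cardapios  # module name assumed

    # Fetch the menus for the next 5 weekdays starting from the given date.
    cardapios = get_next_cardapios("2023-08-14", 5)
    for c in cardapios:
        print(c.data, c.almoco.prato_principal)
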
check_error_file | (file_check) |
Function to check if a file exists
:param file_check: File to check
:type file_check: str
|
Function to check if a file exists | def check_error_file(file_check):
"""
Function to check if a file exists
:param file_check: File to check
:type file_check: str
"""
try:
open(file_check)
except TypeError:
raise TypeError("File cannot be empty or file is invalid: " + str(file_check)) | [
"def",
"check_error_file",
"(",
"file_check",
")",
":",
"try",
":",
"open",
"(",
"file_check",
")",
"except",
"TypeError",
":",
"raise",
"TypeError",
"(",
"\"File cannot be empty or file is invalid: \"",
"+",
"str",
"(",
"file_check",
")",
")"
] | [
18,
0
] | [
30,
86
] | python | en | ['en', 'error', 'th'] | False |
check_len_lists | (list1, list2) |
Function to check if 2 lists have the same length
:param list1: First list
:type list1: list
:param list2: Second list
:type list2: list
|
Function to check if 2 lists have the same length | def check_len_lists(list1, list2):
"""
Function to check if 2 lists have the same length
:param list1: First list
:type list1: list
:param list2: Second list
:type list2: list
"""
if len(list1) != len(list2):
print("Error: Number of files in train list and rank list must be equal!")
sys.exit() | [
"def",
"check_len_lists",
"(",
"list1",
",",
"list2",
")",
":",
"if",
"len",
"(",
"list1",
")",
"!=",
"len",
"(",
"list2",
")",
":",
"print",
"(",
"\"Error: Number of files in train list and rank list must be equal!\"",
")",
"sys",
".",
"exit",
"(",
")"
] | [
33,
0
] | [
47,
18
] | python | en | ['en', 'error', 'th'] | False |
timed | (f) |
Function to calculate the time of execution
:param f: Function name without ()
:type f: function name
:return: Time of execution
:rtype: float
|
Function to calculate the time of execution | def timed(f):
"""
Function to calculate the time of execution
:param f: Function name without ()
:type f: function name
:return: Time of execution
:rtype: float
"""
start = time.time()
f()
elapsed = time.time() - start
return elapsed | [
"def",
"timed",
"(",
"f",
")",
":",
"start",
"=",
"time",
".",
"time",
"(",
")",
"f",
"(",
")",
"elapsed",
"=",
"time",
".",
"time",
"(",
")",
"-",
"start",
"return",
"elapsed"
] | [
50,
0
] | [
64,
18
] | python | en | ['en', 'error', 'th'] | False |
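
A usage sketch for timed(), assuming the function from the record above is in scope; note the function object is passed uncalled and wall-clock seconds come back as a float:

    import time

    def slow():
        time.sleep(0.1)  # stand-in for real work

    elapsed = timed(slow)            # runs slow() once
    print("took %.3f s" % elapsed)   # roughly 0.100
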
print_header | (header_info, test_info=None) |
Function to print the header with information of the files
:param header_info: Dictionary with information about dataset or train file
:type header_info: dict
:param test_info: Dictionary with information about test file
:type test_info: dict
|
Function to print the header with information of the files | def print_header(header_info, test_info=None):
"""
Function to print the header with information of the files
:param header_info: Dictionary with information about dataset or train file
:type header_info: dict
:param test_info: Dictionary with information about test file
:type test_info: dict
"""
print("[Case Recommender: %s]\n" % header_info['title'])
print("train data:: %d users and %d items (%d interactions) | sparsity:: %.2f%%" %
(header_info['n_users'], header_info['n_items'], header_info['n_interactions'], header_info['sparsity']))
if test_info is not None:
print("test data:: %d users and %d items (%d interactions) | sparsity:: %.2f%%\n" %
(test_info['n_users'], test_info['n_items'], test_info['n_interactions'], test_info['sparsity'])) | [
"def",
"print_header",
"(",
"header_info",
",",
"test_info",
"=",
"None",
")",
":",
"print",
"(",
"\"[Case Recommender: %s]\\n\"",
"%",
"header_info",
"[",
"'title'",
"]",
")",
"print",
"(",
"\"train data:: %d users and %d items (%d interactions) | sparsity:: %.2f%%\"",
"%",
"(",
"header_info",
"[",
"'n_users'",
"]",
",",
"header_info",
"[",
"'n_items'",
"]",
",",
"header_info",
"[",
"'n_interactions'",
"]",
",",
"header_info",
"[",
"'sparsity'",
"]",
")",
")",
"if",
"test_info",
"is",
"not",
"None",
":",
"print",
"(",
"\"test data:: %d users and %d items (%d interactions) | sparsity:: %.2f%%\\n\"",
"%",
"(",
"test_info",
"[",
"'n_users'",
"]",
",",
"test_info",
"[",
"'n_items'",
"]",
",",
"test_info",
"[",
"'n_interactions'",
"]",
",",
"test_info",
"[",
"'sparsity'",
"]",
")",
")"
] | [
67,
0
] | [
85,
111
] | python | en | ['en', 'error', 'th'] | False |
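
Illustrative dictionaries with the exact keys print_header() reads; every number below is made up:

    train_info = {"title": "ItemKNN Algorithm", "n_users": 943, "n_items": 1682,
                  "n_interactions": 80000, "sparsity": 94.96}
    test_info = {"n_users": 943, "n_items": 1410,
                 "n_interactions": 20000, "sparsity": 98.49}
    print_header(train_info, test_info)
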
user_passes_test | (test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME) |
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
|
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
| def user_passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
"""
Decorator for views that checks that the user passes the given test,
redirecting to the log-in page if necessary. The test should be a callable
that takes the user object and returns True if the user passes.
"""
def decorator(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
if test_func(request.user):
return view_func(request, *args, **kwargs)
path = request.build_absolute_uri()
resolved_login_url = resolve_url(login_url or settings.LOGIN_URL)
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
login_scheme, login_netloc = urlparse(resolved_login_url)[:2]
current_scheme, current_netloc = urlparse(path)[:2]
if ((not login_scheme or login_scheme == current_scheme) and
(not login_netloc or login_netloc == current_netloc)):
path = request.get_full_path()
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(
path, resolved_login_url, redirect_field_name)
return _wrapped_view
return decorator | [
"def",
"user_passes_test",
"(",
"test_func",
",",
"login_url",
"=",
"None",
",",
"redirect_field_name",
"=",
"REDIRECT_FIELD_NAME",
")",
":",
"def",
"decorator",
"(",
"view_func",
")",
":",
"@",
"wraps",
"(",
"view_func",
")",
"def",
"_wrapped_view",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"test_func",
"(",
"request",
".",
"user",
")",
":",
"return",
"view_func",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"path",
"=",
"request",
".",
"build_absolute_uri",
"(",
")",
"resolved_login_url",
"=",
"resolve_url",
"(",
"login_url",
"or",
"settings",
".",
"LOGIN_URL",
")",
"# If the login url is the same scheme and net location then just",
"# use the path as the \"next\" url.",
"login_scheme",
",",
"login_netloc",
"=",
"urlparse",
"(",
"resolved_login_url",
")",
"[",
":",
"2",
"]",
"current_scheme",
",",
"current_netloc",
"=",
"urlparse",
"(",
"path",
")",
"[",
":",
"2",
"]",
"if",
"(",
"(",
"not",
"login_scheme",
"or",
"login_scheme",
"==",
"current_scheme",
")",
"and",
"(",
"not",
"login_netloc",
"or",
"login_netloc",
"==",
"current_netloc",
")",
")",
":",
"path",
"=",
"request",
".",
"get_full_path",
"(",
")",
"from",
"django",
".",
"contrib",
".",
"auth",
".",
"views",
"import",
"redirect_to_login",
"return",
"redirect_to_login",
"(",
"path",
",",
"resolved_login_url",
",",
"redirect_field_name",
")",
"return",
"_wrapped_view",
"return",
"decorator"
] | [
9,
0
] | [
34,
20
] | python | en | ['en', 'error', 'th'] | False |
login_required | (function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None) |
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
|
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
| def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
"""
Decorator for views that checks that the user is logged in, redirecting
to the log-in page if necessary.
"""
actual_decorator = user_passes_test(
lambda u: u.is_authenticated,
login_url=login_url,
redirect_field_name=redirect_field_name
)
if function:
return actual_decorator(function)
return actual_decorator | [
"def",
"login_required",
"(",
"function",
"=",
"None",
",",
"redirect_field_name",
"=",
"REDIRECT_FIELD_NAME",
",",
"login_url",
"=",
"None",
")",
":",
"actual_decorator",
"=",
"user_passes_test",
"(",
"lambda",
"u",
":",
"u",
".",
"is_authenticated",
",",
"login_url",
"=",
"login_url",
",",
"redirect_field_name",
"=",
"redirect_field_name",
")",
"if",
"function",
":",
"return",
"actual_decorator",
"(",
"function",
")",
"return",
"actual_decorator"
] | [
37,
0
] | [
49,
27
] | python | en | ['en', 'error', 'th'] | False |
permission_required | (perm, login_url=None, raise_exception=False) |
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
If the raise_exception parameter is given the PermissionDenied exception
is raised.
|
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
If the raise_exception parameter is given the PermissionDenied exception
is raised.
| def permission_required(perm, login_url=None, raise_exception=False):
"""
Decorator for views that checks whether a user has a particular permission
enabled, redirecting to the log-in page if necessary.
If the raise_exception parameter is given the PermissionDenied exception
is raised.
"""
def check_perms(user):
if isinstance(perm, str):
perms = (perm,)
else:
perms = perm
# First check if the user has the permission (even anon users)
if user.has_perms(perms):
return True
# In case the 403 handler should be called raise the exception
if raise_exception:
raise PermissionDenied
# As the last resort, show the login form
return False
return user_passes_test(check_perms, login_url=login_url) | [
"def",
"permission_required",
"(",
"perm",
",",
"login_url",
"=",
"None",
",",
"raise_exception",
"=",
"False",
")",
":",
"def",
"check_perms",
"(",
"user",
")",
":",
"if",
"isinstance",
"(",
"perm",
",",
"str",
")",
":",
"perms",
"=",
"(",
"perm",
",",
")",
"else",
":",
"perms",
"=",
"perm",
"# First check if the user has the permission (even anon users)",
"if",
"user",
".",
"has_perms",
"(",
"perms",
")",
":",
"return",
"True",
"# In case the 403 handler should be called raise the exception",
"if",
"raise_exception",
":",
"raise",
"PermissionDenied",
"# As the last resort, show the login form",
"return",
"False",
"return",
"user_passes_test",
"(",
"check_perms",
",",
"login_url",
"=",
"login_url",
")"
] | [
52,
0
] | [
72,
61
] | python | en | ['en', 'error', 'th'] | False |
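
Hypothetical views combining the decorators above; the login URL, permission string, and response bodies are illustrative:

    from django.contrib.auth.decorators import login_required, permission_required
    from django.http import HttpResponse

    @login_required(login_url="/accounts/login/")
    def dashboard(request):
        return HttpResponse("dashboard")

    @permission_required("polls.add_choice", raise_exception=True)
    def add_choice(request):
        # raise_exception=True yields a 403 instead of a redirect to login.
        return HttpResponse("ok")
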
ServiceAccountCredentials._to_json | (self, strip, to_serialize=None) | Utility function that creates JSON repr. of a credentials object.
Over-ride is needed since PKCS#12 keys will not in general be JSON
serializable.
Args:
strip: array, An array of names of members to exclude from the
JSON.
to_serialize: dict, (Optional) The properties for this object
that will be serialized. This allows callers to
modify before serializing.
Returns:
string, a JSON representation of this instance, suitable to pass to
from_json().
| Utility function that creates JSON repr. of a credentials object. | def _to_json(self, strip, to_serialize=None):
"""Utility function that creates JSON repr. of a credentials object.
Over-ride is needed since PKCS#12 keys will not in general be JSON
serializable.
Args:
strip: array, An array of names of members to exclude from the
JSON.
to_serialize: dict, (Optional) The properties for this object
that will be serialized. This allows callers to
modify before serializing.
Returns:
string, a JSON representation of this instance, suitable to pass to
from_json().
"""
if to_serialize is None:
to_serialize = copy.copy(self.__dict__)
pkcs12_val = to_serialize.get(_PKCS12_KEY)
if pkcs12_val is not None:
to_serialize[_PKCS12_KEY] = base64.b64encode(pkcs12_val)
return super(ServiceAccountCredentials, self)._to_json(
strip, to_serialize=to_serialize) | [
"def",
"_to_json",
"(",
"self",
",",
"strip",
",",
"to_serialize",
"=",
"None",
")",
":",
"if",
"to_serialize",
"is",
"None",
":",
"to_serialize",
"=",
"copy",
".",
"copy",
"(",
"self",
".",
"__dict__",
")",
"pkcs12_val",
"=",
"to_serialize",
".",
"get",
"(",
"_PKCS12_KEY",
")",
"if",
"pkcs12_val",
"is",
"not",
"None",
":",
"to_serialize",
"[",
"_PKCS12_KEY",
"]",
"=",
"base64",
".",
"b64encode",
"(",
"pkcs12_val",
")",
"return",
"super",
"(",
"ServiceAccountCredentials",
",",
"self",
")",
".",
"_to_json",
"(",
"strip",
",",
"to_serialize",
"=",
"to_serialize",
")"
] | [
117,
4
] | [
140,
45
] | python | en | ['en', 'en', 'en'] | True |
ServiceAccountCredentials._from_parsed_json_keyfile | (cls, keyfile_dict, scopes,
token_uri=None, revoke_uri=None) | Helper for factory constructors from JSON keyfile.
Args:
keyfile_dict: dict-like object, The parsed dictionary-like object
containing the contents of the JSON keyfile.
scopes: List or string, Scopes to use when acquiring an
access token.
token_uri: string, URI for OAuth 2.0 provider token endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile contents.
Raises:
ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
KeyError, if one of the expected keys is not present in
the keyfile.
| Helper for factory constructors from JSON keyfile. | def _from_parsed_json_keyfile(cls, keyfile_dict, scopes,
token_uri=None, revoke_uri=None):
"""Helper for factory constructors from JSON keyfile.
Args:
keyfile_dict: dict-like object, The parsed dictionary-like object
containing the contents of the JSON keyfile.
scopes: List or string, Scopes to use when acquiring an
access token.
token_uri: string, URI for OAuth 2.0 provider token endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile contents.
Raises:
ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
KeyError, if one of the expected keys is not present in
the keyfile.
"""
creds_type = keyfile_dict.get('type')
if creds_type != client.SERVICE_ACCOUNT:
raise ValueError('Unexpected credentials type', creds_type,
'Expected', client.SERVICE_ACCOUNT)
service_account_email = keyfile_dict['client_email']
private_key_pkcs8_pem = keyfile_dict['private_key']
private_key_id = keyfile_dict['private_key_id']
client_id = keyfile_dict['client_id']
if not token_uri:
token_uri = keyfile_dict.get('token_uri',
oauth2client.GOOGLE_TOKEN_URI)
if not revoke_uri:
revoke_uri = keyfile_dict.get('revoke_uri',
oauth2client.GOOGLE_REVOKE_URI)
signer = crypt.Signer.from_string(private_key_pkcs8_pem)
credentials = cls(service_account_email, signer, scopes=scopes,
private_key_id=private_key_id,
client_id=client_id, token_uri=token_uri,
revoke_uri=revoke_uri)
credentials._private_key_pkcs8_pem = private_key_pkcs8_pem
return credentials | [
"def",
"_from_parsed_json_keyfile",
"(",
"cls",
",",
"keyfile_dict",
",",
"scopes",
",",
"token_uri",
"=",
"None",
",",
"revoke_uri",
"=",
"None",
")",
":",
"creds_type",
"=",
"keyfile_dict",
".",
"get",
"(",
"'type'",
")",
"if",
"creds_type",
"!=",
"client",
".",
"SERVICE_ACCOUNT",
":",
"raise",
"ValueError",
"(",
"'Unexpected credentials type'",
",",
"creds_type",
",",
"'Expected'",
",",
"client",
".",
"SERVICE_ACCOUNT",
")",
"service_account_email",
"=",
"keyfile_dict",
"[",
"'client_email'",
"]",
"private_key_pkcs8_pem",
"=",
"keyfile_dict",
"[",
"'private_key'",
"]",
"private_key_id",
"=",
"keyfile_dict",
"[",
"'private_key_id'",
"]",
"client_id",
"=",
"keyfile_dict",
"[",
"'client_id'",
"]",
"if",
"not",
"token_uri",
":",
"token_uri",
"=",
"keyfile_dict",
".",
"get",
"(",
"'token_uri'",
",",
"oauth2client",
".",
"GOOGLE_TOKEN_URI",
")",
"if",
"not",
"revoke_uri",
":",
"revoke_uri",
"=",
"keyfile_dict",
".",
"get",
"(",
"'revoke_uri'",
",",
"oauth2client",
".",
"GOOGLE_REVOKE_URI",
")",
"signer",
"=",
"crypt",
".",
"Signer",
".",
"from_string",
"(",
"private_key_pkcs8_pem",
")",
"credentials",
"=",
"cls",
"(",
"service_account_email",
",",
"signer",
",",
"scopes",
"=",
"scopes",
",",
"private_key_id",
"=",
"private_key_id",
",",
"client_id",
"=",
"client_id",
",",
"token_uri",
"=",
"token_uri",
",",
"revoke_uri",
"=",
"revoke_uri",
")",
"credentials",
".",
"_private_key_pkcs8_pem",
"=",
"private_key_pkcs8_pem",
"return",
"credentials"
] | [
143,
4
] | [
190,
26
] | python | en | ['en', 'en', 'en'] | True |
ServiceAccountCredentials.from_json_keyfile_name | (cls, filename, scopes='',
token_uri=None, revoke_uri=None) | Factory constructor from JSON keyfile by name.
Args:
filename: string, The location of the keyfile.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for OAuth 2.0 provider token endpoint.
If unset and not present in the key file, defaults
to Google's endpoints.
revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
If unset and not present in the key file, defaults
to Google's endpoints.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
KeyError, if one of the expected keys is not present in
the keyfile.
| Factory constructor from JSON keyfile by name. | def from_json_keyfile_name(cls, filename, scopes='',
token_uri=None, revoke_uri=None):
"""Factory constructor from JSON keyfile by name.
Args:
filename: string, The location of the keyfile.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for OAuth 2.0 provider token endpoint.
If unset and not present in the key file, defaults
to Google's endpoints.
revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
If unset and not present in the key file, defaults
to Google's endpoints.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
KeyError, if one of the expected keys is not present in
the keyfile.
"""
with open(filename, 'r') as file_obj:
client_credentials = json.load(file_obj)
return cls._from_parsed_json_keyfile(client_credentials, scopes,
token_uri=token_uri,
revoke_uri=revoke_uri) | [
"def",
"from_json_keyfile_name",
"(",
"cls",
",",
"filename",
",",
"scopes",
"=",
"''",
",",
"token_uri",
"=",
"None",
",",
"revoke_uri",
"=",
"None",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'r'",
")",
"as",
"file_obj",
":",
"client_credentials",
"=",
"json",
".",
"load",
"(",
"file_obj",
")",
"return",
"cls",
".",
"_from_parsed_json_keyfile",
"(",
"client_credentials",
",",
"scopes",
",",
"token_uri",
"=",
"token_uri",
",",
"revoke_uri",
"=",
"revoke_uri",
")"
] | [
193,
4
] | [
222,
67
] | python | en | ['en', 'en', 'en'] | True |
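A hedged usage sketch for the file-based factory; `'service-account.json'` is a hypothetical path to a key file downloaded from the Google Cloud console.

```python
from oauth2client.service_account import ServiceAccountCredentials

scopes = ['https://www.googleapis.com/auth/devstorage.read_only']
credentials = ServiceAccountCredentials.from_json_keyfile_name(
    'service-account.json', scopes=scopes)
print(credentials.service_account_email)
```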
ServiceAccountCredentials.from_json_keyfile_dict | (cls, keyfile_dict, scopes='',
token_uri=None, revoke_uri=None) | Factory constructor from parsed JSON keyfile.
Args:
keyfile_dict: dict-like object, The parsed dictionary-like object
containing the contents of the JSON keyfile.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for OAuth 2.0 provider token endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
KeyError, if one of the expected keys is not present in
the keyfile.
| Factory constructor from parsed JSON keyfile. | def from_json_keyfile_dict(cls, keyfile_dict, scopes='',
token_uri=None, revoke_uri=None):
"""Factory constructor from parsed JSON keyfile.
Args:
keyfile_dict: dict-like object, The parsed dictionary-like object
containing the contents of the JSON keyfile.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for OAuth 2.0 provider token endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint.
If unset and not present in keyfile_dict, defaults
to Google's endpoints.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`.
KeyError, if one of the expected keys is not present in
the keyfile.
"""
return cls._from_parsed_json_keyfile(keyfile_dict, scopes,
token_uri=token_uri,
revoke_uri=revoke_uri) | [
"def",
"from_json_keyfile_dict",
"(",
"cls",
",",
"keyfile_dict",
",",
"scopes",
"=",
"''",
",",
"token_uri",
"=",
"None",
",",
"revoke_uri",
"=",
"None",
")",
":",
"return",
"cls",
".",
"_from_parsed_json_keyfile",
"(",
"keyfile_dict",
",",
"scopes",
",",
"token_uri",
"=",
"token_uri",
",",
"revoke_uri",
"=",
"revoke_uri",
")"
] | [
225,
4
] | [
252,
67
] | python | en | ['en', 'en', 'en'] | True |
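The dict-based factory is handy when the key material arrives from a secret manager or environment variable rather than a file on disk. A sketch; `SERVICE_ACCOUNT_JSON` is a hypothetical variable name.

```python
import json
import os

from oauth2client.service_account import ServiceAccountCredentials

keyfile_dict = json.loads(os.environ['SERVICE_ACCOUNT_JSON'])
credentials = ServiceAccountCredentials.from_json_keyfile_dict(
    keyfile_dict, scopes=['https://www.googleapis.com/auth/drive.readonly'])
```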
ServiceAccountCredentials._from_p12_keyfile_contents | (cls, service_account_email,
private_key_pkcs12,
private_key_password=None, scopes='',
token_uri=oauth2client.GOOGLE_TOKEN_URI,
revoke_uri=oauth2client.GOOGLE_REVOKE_URI) | Factory constructor from a PKCS#12 keyfile.
Args:
service_account_email: string, The email associated with the
service account.
private_key_pkcs12: string, The contents of a PKCS#12 keyfile.
private_key_password: string, (Optional) Password for PKCS#12
private key. Defaults to ``notasecret``.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for token endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0
provider can be used.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
NotImplementedError if pyOpenSSL is not installed / not the
active crypto library.
| Factory constructor from a PKCS#12 keyfile. | def _from_p12_keyfile_contents(cls, service_account_email,
private_key_pkcs12,
private_key_password=None, scopes='',
token_uri=oauth2client.GOOGLE_TOKEN_URI,
revoke_uri=oauth2client.GOOGLE_REVOKE_URI):
"""Factory constructor from JSON keyfile.
Args:
service_account_email: string, The email associated with the
service account.
private_key_pkcs12: string, The contents of a PKCS#12 keyfile.
private_key_password: string, (Optional) Password for PKCS#12
private key. Defaults to ``notasecret``.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for token endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0
provider can be used.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
NotImplementedError if pyOpenSSL is not installed / not the
active crypto library.
"""
if private_key_password is None:
private_key_password = _PASSWORD_DEFAULT
if crypt.Signer is not crypt.OpenSSLSigner:
raise NotImplementedError(_PKCS12_ERROR)
signer = crypt.Signer.from_string(private_key_pkcs12,
private_key_password)
credentials = cls(service_account_email, signer, scopes=scopes,
token_uri=token_uri, revoke_uri=revoke_uri)
credentials._private_key_pkcs12 = private_key_pkcs12
credentials._private_key_password = private_key_password
return credentials | [
"def",
"_from_p12_keyfile_contents",
"(",
"cls",
",",
"service_account_email",
",",
"private_key_pkcs12",
",",
"private_key_password",
"=",
"None",
",",
"scopes",
"=",
"''",
",",
"token_uri",
"=",
"oauth2client",
".",
"GOOGLE_TOKEN_URI",
",",
"revoke_uri",
"=",
"oauth2client",
".",
"GOOGLE_REVOKE_URI",
")",
":",
"if",
"private_key_password",
"is",
"None",
":",
"private_key_password",
"=",
"_PASSWORD_DEFAULT",
"if",
"crypt",
".",
"Signer",
"is",
"not",
"crypt",
".",
"OpenSSLSigner",
":",
"raise",
"NotImplementedError",
"(",
"_PKCS12_ERROR",
")",
"signer",
"=",
"crypt",
".",
"Signer",
".",
"from_string",
"(",
"private_key_pkcs12",
",",
"private_key_password",
")",
"credentials",
"=",
"cls",
"(",
"service_account_email",
",",
"signer",
",",
"scopes",
"=",
"scopes",
",",
"token_uri",
"=",
"token_uri",
",",
"revoke_uri",
"=",
"revoke_uri",
")",
"credentials",
".",
"_private_key_pkcs12",
"=",
"private_key_pkcs12",
"credentials",
".",
"_private_key_password",
"=",
"private_key_password",
"return",
"credentials"
] | [
255,
4
] | [
295,
26
] | python | en | ['en', 'en', 'en'] | True |
ServiceAccountCredentials.from_p12_keyfile | (cls, service_account_email, filename,
private_key_password=None, scopes='',
token_uri=oauth2client.GOOGLE_TOKEN_URI,
revoke_uri=oauth2client.GOOGLE_REVOKE_URI) | Factory constructor from a PKCS#12 keyfile.
Args:
service_account_email: string, The email associated with the
service account.
filename: string, The location of the PKCS#12 keyfile.
private_key_password: string, (Optional) Password for PKCS#12
private key. Defaults to ``notasecret``.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for token endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0
provider can be used.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
NotImplementedError if pyOpenSSL is not installed / not the
active crypto library.
| Factory constructor from a PKCS#12 keyfile. | def from_p12_keyfile(cls, service_account_email, filename,
private_key_password=None, scopes='',
token_uri=oauth2client.GOOGLE_TOKEN_URI,
revoke_uri=oauth2client.GOOGLE_REVOKE_URI):
"""Factory constructor from JSON keyfile.
Args:
service_account_email: string, The email associated with the
service account.
filename: string, The location of the PKCS#12 keyfile.
private_key_password: string, (Optional) Password for PKCS#12
private key. Defaults to ``notasecret``.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for token endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0
provider can be used.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
NotImplementedError if pyOpenSSL is not installed / not the
active crypto library.
"""
with open(filename, 'rb') as file_obj:
private_key_pkcs12 = file_obj.read()
return cls._from_p12_keyfile_contents(
service_account_email, private_key_pkcs12,
private_key_password=private_key_password, scopes=scopes,
token_uri=token_uri, revoke_uri=revoke_uri) | [
"def",
"from_p12_keyfile",
"(",
"cls",
",",
"service_account_email",
",",
"filename",
",",
"private_key_password",
"=",
"None",
",",
"scopes",
"=",
"''",
",",
"token_uri",
"=",
"oauth2client",
".",
"GOOGLE_TOKEN_URI",
",",
"revoke_uri",
"=",
"oauth2client",
".",
"GOOGLE_REVOKE_URI",
")",
":",
"with",
"open",
"(",
"filename",
",",
"'rb'",
")",
"as",
"file_obj",
":",
"private_key_pkcs12",
"=",
"file_obj",
".",
"read",
"(",
")",
"return",
"cls",
".",
"_from_p12_keyfile_contents",
"(",
"service_account_email",
",",
"private_key_pkcs12",
",",
"private_key_password",
"=",
"private_key_password",
",",
"scopes",
"=",
"scopes",
",",
"token_uri",
"=",
"token_uri",
",",
"revoke_uri",
"=",
"revoke_uri",
")"
] | [
298,
4
] | [
333,
55
] | python | en | ['en', 'en', 'en'] | True |
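A hedged sketch for the PKCS#12 variant. It only works when pyOpenSSL is installed and active as the crypto backend; otherwise the call raises NotImplementedError, as documented above. Both the email and the `'key.p12'` path are placeholders.

```python
from oauth2client.service_account import ServiceAccountCredentials

credentials = ServiceAccountCredentials.from_p12_keyfile(
    'my-sa@my-project.iam.gserviceaccount.com',
    'key.p12',
    scopes=['https://www.googleapis.com/auth/cloud-platform'])
```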
ServiceAccountCredentials.from_p12_keyfile_buffer | (cls, service_account_email, file_buffer,
private_key_password=None, scopes='',
token_uri=oauth2client.GOOGLE_TOKEN_URI,
revoke_uri=oauth2client.GOOGLE_REVOKE_URI) | Factory constructor from a PKCS#12 keyfile.
Args:
service_account_email: string, The email associated with the
service account.
file_buffer: stream, A buffer that implements ``read()``
and contains the PKCS#12 key contents.
private_key_password: string, (Optional) Password for PKCS#12
private key. Defaults to ``notasecret``.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for token endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0
provider can be used.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
NotImplementedError if pyOpenSSL is not installed / not the
active crypto library.
| Factory constructor from a PKCS#12 keyfile. | def from_p12_keyfile_buffer(cls, service_account_email, file_buffer,
private_key_password=None, scopes='',
token_uri=oauth2client.GOOGLE_TOKEN_URI,
revoke_uri=oauth2client.GOOGLE_REVOKE_URI):
"""Factory constructor from JSON keyfile.
Args:
service_account_email: string, The email associated with the
service account.
file_buffer: stream, A buffer that implements ``read()``
and contains the PKCS#12 key contents.
private_key_password: string, (Optional) Password for PKCS#12
private key. Defaults to ``notasecret``.
scopes: List or string, (Optional) Scopes to use when acquiring an
access token.
token_uri: string, URI for token endpoint. For convenience defaults
to Google's endpoints but any OAuth 2.0 provider can be
used.
revoke_uri: string, URI for revoke endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0
provider can be used.
Returns:
ServiceAccountCredentials, a credentials object created from
the keyfile.
Raises:
NotImplementedError if pyOpenSSL is not installed / not the
active crypto library.
"""
private_key_pkcs12 = file_buffer.read()
return cls._from_p12_keyfile_contents(
service_account_email, private_key_pkcs12,
private_key_password=private_key_password, scopes=scopes,
token_uri=token_uri, revoke_uri=revoke_uri) | [
"def",
"from_p12_keyfile_buffer",
"(",
"cls",
",",
"service_account_email",
",",
"file_buffer",
",",
"private_key_password",
"=",
"None",
",",
"scopes",
"=",
"''",
",",
"token_uri",
"=",
"oauth2client",
".",
"GOOGLE_TOKEN_URI",
",",
"revoke_uri",
"=",
"oauth2client",
".",
"GOOGLE_REVOKE_URI",
")",
":",
"private_key_pkcs12",
"=",
"file_buffer",
".",
"read",
"(",
")",
"return",
"cls",
".",
"_from_p12_keyfile_contents",
"(",
"service_account_email",
",",
"private_key_pkcs12",
",",
"private_key_password",
"=",
"private_key_password",
",",
"scopes",
"=",
"scopes",
",",
"token_uri",
"=",
"token_uri",
",",
"revoke_uri",
"=",
"revoke_uri",
")"
] | [
336,
4
] | [
370,
55
] | python | en | ['en', 'en', 'en'] | True |
ServiceAccountCredentials._generate_assertion | (self) | Generate the assertion that will be used in the request. | Generate the assertion that will be used in the request. | def _generate_assertion(self):
"""Generate the assertion that will be used in the request."""
now = int(time.time())
payload = {
'aud': self.token_uri,
'scope': self._scopes,
'iat': now,
'exp': now + self.MAX_TOKEN_LIFETIME_SECS,
'iss': self._service_account_email,
}
payload.update(self._kwargs)
return crypt.make_signed_jwt(self._signer, payload,
key_id=self._private_key_id) | [
"def",
"_generate_assertion",
"(",
"self",
")",
":",
"now",
"=",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
"payload",
"=",
"{",
"'aud'",
":",
"self",
".",
"token_uri",
",",
"'scope'",
":",
"self",
".",
"_scopes",
",",
"'iat'",
":",
"now",
",",
"'exp'",
":",
"now",
"+",
"self",
".",
"MAX_TOKEN_LIFETIME_SECS",
",",
"'iss'",
":",
"self",
".",
"_service_account_email",
",",
"}",
"payload",
".",
"update",
"(",
"self",
".",
"_kwargs",
")",
"return",
"crypt",
".",
"make_signed_jwt",
"(",
"self",
".",
"_signer",
",",
"payload",
",",
"key_id",
"=",
"self",
".",
"_private_key_id",
")"
] | [
372,
4
] | [
384,
65
] | python | en | ['en', 'en', 'en'] | True |
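For reference, the JWT claim set that `_generate_assertion` signs has this shape, reconstructed from the code above. Values are illustrative; MAX_TOKEN_LIFETIME_SECS is one hour in oauth2client.

```python
import time

now = int(time.time())
payload = {
    'aud': 'https://oauth2.googleapis.com/token',               # self.token_uri
    'scope': 'https://www.googleapis.com/auth/cloud-platform',  # self._scopes (a string)
    'iat': now,
    'exp': now + 3600,        # now + MAX_TOKEN_LIFETIME_SECS
    'iss': 'my-sa@my-project.iam.gserviceaccount.com',          # service account email
}
```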
ServiceAccountCredentials.sign_blob | (self, blob) | Cryptographically sign a blob (of bytes).
Implements abstract method
:meth:`oauth2client.client.AssertionCredentials.sign_blob`.
Args:
blob: bytes, Message to be signed.
Returns:
tuple, A pair of the private key ID used to sign the blob and
the signed contents.
| Cryptographically sign a blob (of bytes). | def sign_blob(self, blob):
"""Cryptographically sign a blob (of bytes).
Implements abstract method
:meth:`oauth2client.client.AssertionCredentials.sign_blob`.
Args:
blob: bytes, Message to be signed.
Returns:
tuple, A pair of the private key ID used to sign the blob and
the signed contents.
"""
return self._private_key_id, self._signer.sign(blob) | [
"def",
"sign_blob",
"(",
"self",
",",
"blob",
")",
":",
"return",
"self",
".",
"_private_key_id",
",",
"self",
".",
"_signer",
".",
"sign",
"(",
"blob",
")"
] | [
386,
4
] | [
399,
60
] | python | en | ['en', 'en', 'en'] | True |
ServiceAccountCredentials.service_account_email | (self) | Get the email for the current service account.
Returns:
string, The email associated with the service account.
| Get the email for the current service account. | def service_account_email(self):
"""Get the email for the current service account.
Returns:
string, The email associated with the service account.
"""
return self._service_account_email | [
"def",
"service_account_email",
"(",
"self",
")",
":",
"return",
"self",
".",
"_service_account_email"
] | [
402,
4
] | [
408,
42
] | python | en | ['en', 'en', 'en'] | True |
ServiceAccountCredentials.from_json | (cls, json_data) | Deserialize a JSON-serialized instance.
Inverse to :meth:`to_json`.
Args:
json_data: dict or string, Serialized JSON (as a string or an
already parsed dictionary) representing a credential.
Returns:
ServiceAccountCredentials from the serialized data.
| Deserialize a JSON-serialized instance. | def from_json(cls, json_data):
"""Deserialize a JSON-serialized instance.
Inverse to :meth:`to_json`.
Args:
json_data: dict or string, Serialized JSON (as a string or an
already parsed dictionary) representing a credential.
Returns:
ServiceAccountCredentials from the serialized data.
"""
if not isinstance(json_data, dict):
json_data = json.loads(_helpers._from_bytes(json_data))
private_key_pkcs8_pem = None
pkcs12_val = json_data.get(_PKCS12_KEY)
password = None
if pkcs12_val is None:
private_key_pkcs8_pem = json_data['_private_key_pkcs8_pem']
signer = crypt.Signer.from_string(private_key_pkcs8_pem)
else:
# NOTE: This assumes that private_key_pkcs8_pem is not also
# in the serialized data. This would be a very incorrect
# state.
pkcs12_val = base64.b64decode(pkcs12_val)
password = json_data['_private_key_password']
signer = crypt.Signer.from_string(pkcs12_val, password)
credentials = cls(
json_data['_service_account_email'],
signer,
scopes=json_data['_scopes'],
private_key_id=json_data['_private_key_id'],
client_id=json_data['client_id'],
user_agent=json_data['_user_agent'],
**json_data['_kwargs']
)
if private_key_pkcs8_pem is not None:
credentials._private_key_pkcs8_pem = private_key_pkcs8_pem
if pkcs12_val is not None:
credentials._private_key_pkcs12 = pkcs12_val
if password is not None:
credentials._private_key_password = password
credentials.invalid = json_data['invalid']
credentials.access_token = json_data['access_token']
credentials.token_uri = json_data['token_uri']
credentials.revoke_uri = json_data['revoke_uri']
token_expiry = json_data.get('token_expiry', None)
if token_expiry is not None:
credentials.token_expiry = datetime.datetime.strptime(
token_expiry, client.EXPIRY_FORMAT)
return credentials | [
"def",
"from_json",
"(",
"cls",
",",
"json_data",
")",
":",
"if",
"not",
"isinstance",
"(",
"json_data",
",",
"dict",
")",
":",
"json_data",
"=",
"json",
".",
"loads",
"(",
"_helpers",
".",
"_from_bytes",
"(",
"json_data",
")",
")",
"private_key_pkcs8_pem",
"=",
"None",
"pkcs12_val",
"=",
"json_data",
".",
"get",
"(",
"_PKCS12_KEY",
")",
"password",
"=",
"None",
"if",
"pkcs12_val",
"is",
"None",
":",
"private_key_pkcs8_pem",
"=",
"json_data",
"[",
"'_private_key_pkcs8_pem'",
"]",
"signer",
"=",
"crypt",
".",
"Signer",
".",
"from_string",
"(",
"private_key_pkcs8_pem",
")",
"else",
":",
"# NOTE: This assumes that private_key_pkcs8_pem is not also",
"# in the serialized data. This would be very incorrect",
"# state.",
"pkcs12_val",
"=",
"base64",
".",
"b64decode",
"(",
"pkcs12_val",
")",
"password",
"=",
"json_data",
"[",
"'_private_key_password'",
"]",
"signer",
"=",
"crypt",
".",
"Signer",
".",
"from_string",
"(",
"pkcs12_val",
",",
"password",
")",
"credentials",
"=",
"cls",
"(",
"json_data",
"[",
"'_service_account_email'",
"]",
",",
"signer",
",",
"scopes",
"=",
"json_data",
"[",
"'_scopes'",
"]",
",",
"private_key_id",
"=",
"json_data",
"[",
"'_private_key_id'",
"]",
",",
"client_id",
"=",
"json_data",
"[",
"'client_id'",
"]",
",",
"user_agent",
"=",
"json_data",
"[",
"'_user_agent'",
"]",
",",
"*",
"*",
"json_data",
"[",
"'_kwargs'",
"]",
")",
"if",
"private_key_pkcs8_pem",
"is",
"not",
"None",
":",
"credentials",
".",
"_private_key_pkcs8_pem",
"=",
"private_key_pkcs8_pem",
"if",
"pkcs12_val",
"is",
"not",
"None",
":",
"credentials",
".",
"_private_key_pkcs12",
"=",
"pkcs12_val",
"if",
"password",
"is",
"not",
"None",
":",
"credentials",
".",
"_private_key_password",
"=",
"password",
"credentials",
".",
"invalid",
"=",
"json_data",
"[",
"'invalid'",
"]",
"credentials",
".",
"access_token",
"=",
"json_data",
"[",
"'access_token'",
"]",
"credentials",
".",
"token_uri",
"=",
"json_data",
"[",
"'token_uri'",
"]",
"credentials",
".",
"revoke_uri",
"=",
"json_data",
"[",
"'revoke_uri'",
"]",
"token_expiry",
"=",
"json_data",
".",
"get",
"(",
"'token_expiry'",
",",
"None",
")",
"if",
"token_expiry",
"is",
"not",
"None",
":",
"credentials",
".",
"token_expiry",
"=",
"datetime",
".",
"datetime",
".",
"strptime",
"(",
"token_expiry",
",",
"client",
".",
"EXPIRY_FORMAT",
")",
"return",
"credentials"
] | [
422,
4
] | [
474,
26
] | python | en | ['en', 'cs', 'en'] | True |
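A round-trip sketch: the inherited to_json() and this from_json() override are inverses, so credentials can be cached as a JSON string and restored later. Assumes `credentials` was built as in one of the earlier sketches.

```python
from oauth2client.service_account import ServiceAccountCredentials

serialized = credentials.to_json()
restored = ServiceAccountCredentials.from_json(serialized)
assert restored.service_account_email == credentials.service_account_email
```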
ServiceAccountCredentials.create_with_claims | (self, claims) | Create credentials that specify additional claims.
Args:
claims: dict, key-value pairs for claims.
Returns:
ServiceAccountCredentials, a copy of the current service account
credentials with updated claims to use when obtaining access
tokens.
| Create credentials that specify additional claims. | def create_with_claims(self, claims):
"""Create credentials that specify additional claims.
Args:
claims: dict, key-value pairs for claims.
Returns:
ServiceAccountCredentials, a copy of the current service account
credentials with updated claims to use when obtaining access
tokens.
"""
new_kwargs = dict(self._kwargs)
new_kwargs.update(claims)
result = self.__class__(self._service_account_email,
self._signer,
scopes=self._scopes,
private_key_id=self._private_key_id,
client_id=self.client_id,
user_agent=self._user_agent,
**new_kwargs)
result.token_uri = self.token_uri
result.revoke_uri = self.revoke_uri
result._private_key_pkcs8_pem = self._private_key_pkcs8_pem
result._private_key_pkcs12 = self._private_key_pkcs12
result._private_key_password = self._private_key_password
return result | [
"def",
"create_with_claims",
"(",
"self",
",",
"claims",
")",
":",
"new_kwargs",
"=",
"dict",
"(",
"self",
".",
"_kwargs",
")",
"new_kwargs",
".",
"update",
"(",
"claims",
")",
"result",
"=",
"self",
".",
"__class__",
"(",
"self",
".",
"_service_account_email",
",",
"self",
".",
"_signer",
",",
"scopes",
"=",
"self",
".",
"_scopes",
",",
"private_key_id",
"=",
"self",
".",
"_private_key_id",
",",
"client_id",
"=",
"self",
".",
"client_id",
",",
"user_agent",
"=",
"self",
".",
"_user_agent",
",",
"*",
"*",
"new_kwargs",
")",
"result",
".",
"token_uri",
"=",
"self",
".",
"token_uri",
"result",
".",
"revoke_uri",
"=",
"self",
".",
"revoke_uri",
"result",
".",
"_private_key_pkcs8_pem",
"=",
"self",
".",
"_private_key_pkcs8_pem",
"result",
".",
"_private_key_pkcs12",
"=",
"self",
".",
"_private_key_pkcs12",
"result",
".",
"_private_key_password",
"=",
"self",
".",
"_private_key_password",
"return",
"result"
] | [
494,
4
] | [
519,
21
] | python | en | ['en', 'en', 'en'] | True |
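A short sketch of create_with_claims(): it clones the credentials and merges the extra claims into every future assertion payload. The 'sub' claim shown here is exactly what create_delegated() (next record) adds for domain-wide delegation. Assumes `credentials` from an earlier sketch; the email is a placeholder.

```python
delegated = credentials.create_with_claims({'sub': 'user@example.com'})
assert delegated is not credentials  # a copy; the original is untouched
```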
ServiceAccountCredentials.create_delegated | (self, sub) | Create credentials that act as domain-wide delegation of authority.
Use the ``sub`` parameter as the subject to delegate on behalf of
that user.
For example::
>>> account_sub = '[email protected]'
>>> delegate_creds = creds.create_delegated(account_sub)
Args:
sub: string, An email address that this service account will
act on behalf of (via domain-wide delegation).
Returns:
ServiceAccountCredentials, a copy of the current service account
updated to act on behalf of ``sub``.
| Create credentials that act as domain-wide delegation of authority. | def create_delegated(self, sub):
"""Create credentials that act as domain-wide delegation of authority.
Use the ``sub`` parameter as the subject to delegate on behalf of
that user.
For example::
>>> account_sub = '[email protected]'
>>> delegate_creds = creds.create_delegated(account_sub)
Args:
sub: string, An email address that this service account will
act on behalf of (via domain-wide delegation).
Returns:
ServiceAccountCredentials, a copy of the current service account
updated to act on behalf of ``sub``.
"""
return self.create_with_claims({'sub': sub}) | [
"def",
"create_delegated",
"(",
"self",
",",
"sub",
")",
":",
"return",
"self",
".",
"create_with_claims",
"(",
"{",
"'sub'",
":",
"sub",
"}",
")"
] | [
521,
4
] | [
540,
52
] | python | en | ['en', 'en', 'en'] | True |
_JWTAccessCredentials.authorize | (self, http) | Authorize an httplib2.Http instance with a JWT assertion.
Unless specified, the 'aud' of the assertion will be the base
uri of the request.
Args:
http: An instance of ``httplib2.Http`` or something that acts
like it.
Returns:
A modified instance of http that was passed in.
Example::
h = httplib2.Http()
h = credentials.authorize(h)
| Authorize an httplib2.Http instance with a JWT assertion. | def authorize(self, http):
"""Authorize an httplib2.Http instance with a JWT assertion.
Unless specified, the 'aud' of the assertion will be the base
uri of the request.
Args:
http: An instance of ``httplib2.Http`` or something that acts
like it.
Returns:
A modified instance of http that was passed in.
Example::
h = httplib2.Http()
h = credentials.authorize(h)
"""
transport.wrap_http_for_jwt_access(self, http)
return http | [
"def",
"authorize",
"(",
"self",
",",
"http",
")",
":",
"transport",
".",
"wrap_http_for_jwt_access",
"(",
"self",
",",
"http",
")",
"return",
"http"
] | [
583,
4
] | [
599,
19
] | python | en | ['en', 'lb', 'en'] | True |
_JWTAccessCredentials.get_access_token | (self, http=None, additional_claims=None) | Create a signed jwt.
Args:
http: unused
additional_claims: dict, additional claims to add to
the payload of the JWT.
Returns:
An AccessTokenInfo with the signed jwt
| Create a signed jwt. | def get_access_token(self, http=None, additional_claims=None):
"""Create a signed jwt.
Args:
http: unused
additional_claims: dict, additional claims to add to
the payload of the JWT.
Returns:
An AccessTokenInfo with the signed jwt
"""
if additional_claims is None:
if self.access_token is None or self.access_token_expired:
self.refresh(None)
return client.AccessTokenInfo(
access_token=self.access_token, expires_in=self._expires_in())
else:
# Create a one-time token
token, unused_expiry = self._create_token(additional_claims)
return client.AccessTokenInfo(
access_token=token, expires_in=self._MAX_TOKEN_LIFETIME_SECS) | [
"def",
"get_access_token",
"(",
"self",
",",
"http",
"=",
"None",
",",
"additional_claims",
"=",
"None",
")",
":",
"if",
"additional_claims",
"is",
"None",
":",
"if",
"self",
".",
"access_token",
"is",
"None",
"or",
"self",
".",
"access_token_expired",
":",
"self",
".",
"refresh",
"(",
"None",
")",
"return",
"client",
".",
"AccessTokenInfo",
"(",
"access_token",
"=",
"self",
".",
"access_token",
",",
"expires_in",
"=",
"self",
".",
"_expires_in",
"(",
")",
")",
"else",
":",
"# Create a 1 time token",
"token",
",",
"unused_expiry",
"=",
"self",
".",
"_create_token",
"(",
"additional_claims",
")",
"return",
"client",
".",
"AccessTokenInfo",
"(",
"access_token",
"=",
"token",
",",
"expires_in",
"=",
"self",
".",
"_MAX_TOKEN_LIFETIME_SECS",
")"
] | [
601,
4
] | [
620,
75
] | python | en | ['en', 'en', 'en'] | True |
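A hedged sketch of both paths through get_access_token(): without additional_claims the cached token is refreshed and reused; with additional_claims a one-off JWT is minted locally. `jwt_creds` stands in for an instance of this (private) _JWTAccessCredentials-style class.

```python
info = jwt_creds.get_access_token()
print(info.access_token, info.expires_in)

# One-time token with an explicit audience claim (illustrative URL).
one_off = jwt_creds.get_access_token(
    additional_claims={'aud': 'https://example.googleapis.com/'})
```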
_JWTAccessCredentials.revoke | (self, http) | Cannot revoke JWTAccessCredentials tokens. | Cannot revoke JWTAccessCredentials tokens. | def revoke(self, http):
"""Cannot revoke JWTAccessCredentials tokens."""
pass | [
"def",
"revoke",
"(",
"self",
",",
"http",
")",
":",
"pass"
] | [
622,
4
] | [
624,
12
] | python | en | ['en', 'ca', 'en'] | True |
_JWTAccessCredentials.refresh | (self, http) | Refreshes the access_token.
The HTTP object is unused since no request needs to be made to
get a new token; it can just be generated locally.
Args:
http: unused HTTP object
| Refreshes the access_token. | def refresh(self, http):
"""Refreshes the access_token.
The HTTP object is unused since no request needs to be made to
get a new token; it can just be generated locally.
Args:
http: unused HTTP object
"""
self._refresh(None) | [
"def",
"refresh",
"(",
"self",
",",
"http",
")",
":",
"self",
".",
"_refresh",
"(",
"None",
")"
] | [
650,
4
] | [
659,
27
] | python | en | ['en', 'en', 'en'] | True |
_JWTAccessCredentials._refresh | (self, http) | Refreshes the access_token.
Args:
http: unused HTTP object
| Refreshes the access_token. | def _refresh(self, http):
"""Refreshes the access_token.
Args:
http: unused HTTP object
"""
self.access_token, self.token_expiry = self._create_token() | [
"def",
"_refresh",
"(",
"self",
",",
"http",
")",
":",
"self",
".",
"access_token",
",",
"self",
".",
"token_expiry",
"=",
"self",
".",
"_create_token",
"(",
")"
] | [
661,
4
] | [
667,
67
] | python | en | ['en', 'en', 'en'] | True |
Marker.evaluate | (self, environment: Optional[Dict[str, str]] = None) | Evaluate a marker.
Return the boolean from evaluating the given marker against the
environment. environment is an optional argument to override all or
part of the determined environment.
The environment is determined from the current Python process.
| Evaluate a marker. | def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
environment. environment is an optional argument to override all or
part of the determined environment.
The environment is determined from the current Python process.
"""
current_environment = default_environment()
if environment is not None:
current_environment.update(environment)
return _evaluate_markers(self._markers, current_environment) | [
"def",
"evaluate",
"(",
"self",
",",
"environment",
":",
"Optional",
"[",
"Dict",
"[",
"str",
",",
"str",
"]",
"]",
"=",
"None",
")",
"->",
"bool",
":",
"current_environment",
"=",
"default_environment",
"(",
")",
"if",
"environment",
"is",
"not",
"None",
":",
"current_environment",
".",
"update",
"(",
"environment",
")",
"return",
"_evaluate_markers",
"(",
"self",
".",
"_markers",
",",
"current_environment",
")"
] | [
290,
4
] | [
303,
68
] | python | en | ['en', 'en', 'en'] | True |
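A usage sketch for Marker.evaluate() from the `packaging` project:

```python
from packaging.markers import Marker

marker = Marker('python_version >= "3.8" and sys_platform == "linux"')
print(marker.evaluate())                           # against this interpreter
print(marker.evaluate({'sys_platform': 'win32'}))  # override part of the env
```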
lint | (session) | Run linters.
Returns a failure if the linters find linting errors or sufficiently
serious code quality issues.
| Run linters. | def lint(session):
"""Run linters.
Returns a failure if the linters find linting errors or sufficiently
serious code quality issues.
"""
session.install("flake8", BLACK_VERSION)
session.run(
"black", "--check", *BLACK_PATHS,
)
session.run("flake8", "google", "tests") | [
"def",
"lint",
"(",
"session",
")",
":",
"session",
".",
"install",
"(",
"\"flake8\"",
",",
"BLACK_VERSION",
")",
"session",
".",
"run",
"(",
"\"black\"",
",",
"\"--check\"",
",",
"*",
"BLACK_PATHS",
",",
")",
"session",
".",
"run",
"(",
"\"flake8\"",
",",
"\"google\"",
",",
"\"tests\"",
")"
] | [
51,
0
] | [
61,
44
] | python | en | ['es', 'ms', 'en'] | False |
blacken | (session) | Run black. Format code to a uniform standard. | Run black. Format code to a uniform standard. | def blacken(session):
"""Run black. Format code to uniform standard."""
session.install(BLACK_VERSION)
session.run(
"black", *BLACK_PATHS,
) | [
"def",
"blacken",
"(",
"session",
")",
":",
"session",
".",
"install",
"(",
"BLACK_VERSION",
")",
"session",
".",
"run",
"(",
"\"black\"",
",",
"*",
"BLACK_PATHS",
",",
")"
] | [
65,
0
] | [
70,
5
] | python | en | ['en', 'lb', 'en'] | True |
lint_setup_py | (session) | Verify that setup.py is valid (including RST check). | Verify that setup.py is valid (including RST check). | def lint_setup_py(session):
"""Verify that setup.py is valid (including RST check)."""
session.install("docutils", "pygments")
session.run("python", "setup.py", "check", "--restructuredtext", "--strict") | [
"def",
"lint_setup_py",
"(",
"session",
")",
":",
"session",
".",
"install",
"(",
"\"docutils\"",
",",
"\"pygments\"",
")",
"session",
".",
"run",
"(",
"\"python\"",
",",
"\"setup.py\"",
",",
"\"check\"",
",",
"\"--restructuredtext\"",
",",
"\"--strict\"",
")"
] | [
74,
0
] | [
77,
80
] | python | en | ['en', 'en', 'en'] | True |
unit | (session) | Run the unit test suite. | Run the unit test suite. | def unit(session):
"""Run the unit test suite."""
default(session) | [
"def",
"unit",
"(",
"session",
")",
":",
"default",
"(",
"session",
")"
] | [
115,
0
] | [
117,
20
] | python | en | ['en', 'fr', 'en'] | True |
system | (session) | Run the system test suite. | Run the system test suite. | def system(session):
"""Run the system test suite."""
constraints_path = str(
CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
# Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
# Install pyopenssl for mTLS testing.
if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
# Sanity check: only run tests if found.
if not system_test_exists and not system_test_folder_exists:
session.skip("System tests were not found")
# Use pre-release gRPC for system tests.
session.install("--pre", "grpcio")
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
session.install("-e", ".", "-c", constraints_path)
# Run py.test against the system tests.
if system_test_exists:
session.run(
"py.test",
"--quiet",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_path,
*session.posargs,
)
if system_test_folder_exists:
session.run(
"py.test",
"--quiet",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_folder_path,
*session.posargs,
) | [
"def",
"system",
"(",
"session",
")",
":",
"constraints_path",
"=",
"str",
"(",
"CURRENT_DIRECTORY",
"/",
"\"testing\"",
"/",
"f\"constraints-{session.python}.txt\"",
")",
"system_test_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"\"tests\"",
",",
"\"system.py\"",
")",
"system_test_folder_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"\"tests\"",
",",
"\"system\"",
")",
"# Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"\"RUN_SYSTEM_TESTS\"",
",",
"\"true\"",
")",
"==",
"\"false\"",
":",
"session",
".",
"skip",
"(",
"\"RUN_SYSTEM_TESTS is set to false, skipping\"",
")",
"# Install pyopenssl for mTLS testing.",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"\"GOOGLE_API_USE_CLIENT_CERTIFICATE\"",
",",
"\"false\"",
")",
"==",
"\"true\"",
":",
"session",
".",
"install",
"(",
"\"pyopenssl\"",
")",
"system_test_exists",
"=",
"os",
".",
"path",
".",
"exists",
"(",
"system_test_path",
")",
"system_test_folder_exists",
"=",
"os",
".",
"path",
".",
"exists",
"(",
"system_test_folder_path",
")",
"# Sanity check: only run tests if found.",
"if",
"not",
"system_test_exists",
"and",
"not",
"system_test_folder_exists",
":",
"session",
".",
"skip",
"(",
"\"System tests were not found\"",
")",
"# Use pre-release gRPC for system tests.",
"session",
".",
"install",
"(",
"\"--pre\"",
",",
"\"grpcio\"",
")",
"# Install all test dependencies, then install this package into the",
"# virtualenv's dist-packages.",
"session",
".",
"install",
"(",
"\"mock\"",
",",
"\"pytest\"",
",",
"\"google-cloud-testutils\"",
",",
"\"-c\"",
",",
"constraints_path",
")",
"session",
".",
"install",
"(",
"\"-e\"",
",",
"\".\"",
",",
"\"-c\"",
",",
"constraints_path",
")",
"# Run py.test against the system tests.",
"if",
"system_test_exists",
":",
"session",
".",
"run",
"(",
"\"py.test\"",
",",
"\"--quiet\"",
",",
"f\"--junitxml=system_{session.python}_sponge_log.xml\"",
",",
"system_test_path",
",",
"*",
"session",
".",
"posargs",
",",
")",
"if",
"system_test_folder_exists",
":",
"session",
".",
"run",
"(",
"\"py.test\"",
",",
"\"--quiet\"",
",",
"f\"--junitxml=system_{session.python}_sponge_log.xml\"",
",",
"system_test_folder_path",
",",
"*",
"session",
".",
"posargs",
",",
")"
] | [
121,
0
] | [
166,
9
] | python | en | ['en', 'en', 'en'] | True |
cover | (session) | Run the final coverage report.
This outputs the coverage report aggregating coverage from the unit
test runs (not system test runs), and then erases coverage data.
| Run the final coverage report. | def cover(session):
"""Run the final coverage report.
This outputs the coverage report aggregating coverage from the unit
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
session.run("coverage", "report", "--show-missing", "--fail-under=96")
session.run("coverage", "erase") | [
"def",
"cover",
"(",
"session",
")",
":",
"session",
".",
"install",
"(",
"\"coverage\"",
",",
"\"pytest-cov\"",
")",
"session",
".",
"run",
"(",
"\"coverage\"",
",",
"\"report\"",
",",
"\"--show-missing\"",
",",
"\"--fail-under=96\"",
")",
"session",
".",
"run",
"(",
"\"coverage\"",
",",
"\"erase\"",
")"
] | [
170,
0
] | [
179,
36
] | python | en | ['en', 'it', 'en'] | True |
docs | (session) | Build the docs for this library. | Build the docs for this library. | def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
session.install("sphinx==4.0.1", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
"sphinx-build",
# "-W", # warnings as errors
"-T", # show full traceback on exception
"-N", # no colors
"-b",
"html",
"-d",
os.path.join("docs", "_build", "doctrees", ""),
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
) | [
"def",
"docs",
"(",
"session",
")",
":",
"session",
".",
"install",
"(",
"\"-e\"",
",",
"\".\"",
")",
"session",
".",
"install",
"(",
"\"sphinx==4.0.1\"",
",",
"\"alabaster\"",
",",
"\"recommonmark\"",
")",
"shutil",
".",
"rmtree",
"(",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"_build\"",
")",
",",
"ignore_errors",
"=",
"True",
")",
"session",
".",
"run",
"(",
"\"sphinx-build\"",
",",
"# \"-W\", # warnings as errors",
"\"-T\"",
",",
"# show full traceback on exception",
"\"-N\"",
",",
"# no colors",
"\"-b\"",
",",
"\"html\"",
",",
"\"-d\"",
",",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"_build\"",
",",
"\"doctrees\"",
",",
"\"\"",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"\"",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"_build\"",
",",
"\"html\"",
",",
"\"\"",
")",
",",
")"
] | [
183,
0
] | [
201,
5
] | python | en | ['en', 'en', 'en'] | True |
docfx | (session) | Build the docfx yaml files for this library. | Build the docfx yaml files for this library. | def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
session.install(
"sphinx==4.0.1", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml"
)
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
"sphinx-build",
"-T", # show full traceback on exception
"-N", # no colors
"-D",
(
"extensions=sphinx.ext.autodoc,"
"sphinx.ext.autosummary,"
"docfx_yaml.extension,"
"sphinx.ext.intersphinx,"
"sphinx.ext.coverage,"
"sphinx.ext.napoleon,"
"sphinx.ext.todo,"
"sphinx.ext.viewcode,"
"recommonmark"
),
"-b",
"html",
"-d",
os.path.join("docs", "_build", "doctrees", ""),
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
) | [
"def",
"docfx",
"(",
"session",
")",
":",
"session",
".",
"install",
"(",
"\"-e\"",
",",
"\".\"",
")",
"session",
".",
"install",
"(",
"\"sphinx==4.0.1\"",
",",
"\"alabaster\"",
",",
"\"recommonmark\"",
",",
"\"gcp-sphinx-docfx-yaml\"",
")",
"shutil",
".",
"rmtree",
"(",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"_build\"",
")",
",",
"ignore_errors",
"=",
"True",
")",
"session",
".",
"run",
"(",
"\"sphinx-build\"",
",",
"\"-T\"",
",",
"# show full traceback on exception",
"\"-N\"",
",",
"# no colors",
"\"-D\"",
",",
"(",
"\"extensions=sphinx.ext.autodoc,\"",
"\"sphinx.ext.autosummary,\"",
"\"docfx_yaml.extension,\"",
"\"sphinx.ext.intersphinx,\"",
"\"sphinx.ext.coverage,\"",
"\"sphinx.ext.napoleon,\"",
"\"sphinx.ext.todo,\"",
"\"sphinx.ext.viewcode,\"",
"\"recommonmark\"",
")",
",",
"\"-b\"",
",",
"\"html\"",
",",
"\"-d\"",
",",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"_build\"",
",",
"\"doctrees\"",
",",
"\"\"",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"\"",
")",
",",
"os",
".",
"path",
".",
"join",
"(",
"\"docs\"",
",",
"\"_build\"",
",",
"\"html\"",
",",
"\"\"",
")",
",",
")"
] | [
205,
0
] | [
236,
5
] | python | en | ['en', 'en', 'en'] | True |
get_spontaneous_environment | (*args) | Return a new spontaneous environment. A spontaneous environment is an
unnamed and inaccessible (in theory) environment that is used for
templates generated from a string and not from the file system.
| Return a new spontaneous environment. A spontaneous environment is an
unnamed and inaccessible (in theory) environment that is used for
templates generated from a string and not from the file system.
| def get_spontaneous_environment(*args):
"""Return a new spontaneous environment. A spontaneous environment is an
unnamed and inaccessible (in theory) environment that is used for
templates generated from a string and not from the file system.
"""
try:
env = _spontaneous_environments.get(args)
except TypeError:
return Environment(*args)
if env is not None:
return env
_spontaneous_environments[args] = env = Environment(*args)
env.shared = True
return env | [
"def",
"get_spontaneous_environment",
"(",
"*",
"args",
")",
":",
"try",
":",
"env",
"=",
"_spontaneous_environments",
".",
"get",
"(",
"args",
")",
"except",
"TypeError",
":",
"return",
"Environment",
"(",
"*",
"args",
")",
"if",
"env",
"is",
"not",
"None",
":",
"return",
"env",
"_spontaneous_environments",
"[",
"args",
"]",
"=",
"env",
"=",
"Environment",
"(",
"*",
"args",
")",
"env",
".",
"shared",
"=",
"True",
"return",
"env"
] | [
43,
0
] | [
56,
14
] | python | en | ['en', 'ht', 'en'] | True |
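This helper backs templates built directly from strings in Jinja2 2.x. A rough sketch of the caching behaviour through the public API; the identity assertion reflects the shared spontaneous environment in the 2.x code shown above and may not hold in later Jinja2 versions.

```python
from jinja2 import Template

t1 = Template('Hello {{ name }}!')
t2 = Template('Hello {{ name }}!')
# Identical constructor args hash to the same spontaneous Environment,
# which the function above marks with env.shared = True.
assert t1.environment is t2.environment
print(t1.render(name='world'))
```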
create_cache | (size) | Return the cache class for the given size. | Return the cache class for the given size. | def create_cache(size):
"""Return the cache class for the given size."""
if size == 0:
return None
if size < 0:
return {}
return LRUCache(size) | [
"def",
"create_cache",
"(",
"size",
")",
":",
"if",
"size",
"==",
"0",
":",
"return",
"None",
"if",
"size",
"<",
"0",
":",
"return",
"{",
"}",
"return",
"LRUCache",
"(",
"size",
")"
] | [
59,
0
] | [
65,
25
] | python | en | ['en', 'en', 'en'] | True |
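The three cache policies selected by create_cache(size), shown directly. The import path is the private jinja2.environment module from the 2.x series, so treat it as illustrative rather than public API.

```python
from jinja2.environment import create_cache

print(create_cache(0))    # None     -> caching disabled
print(create_cache(-1))   # {}       -> unbounded dict cache
print(create_cache(400))  # LRUCache -> bounded, least-recently-used eviction
                          #             (400 is the default capacity in Jinja2 2.8+)
```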
copy_cache | (cache) | Create an empty copy of the given cache. | Create an empty copy of the given cache. | def copy_cache(cache):
"""Create an empty copy of the given cache."""
if cache is None:
return None
elif type(cache) is dict:
return {}
return LRUCache(cache.capacity) | [
"def",
"copy_cache",
"(",
"cache",
")",
":",
"if",
"cache",
"is",
"None",
":",
"return",
"None",
"elif",
"type",
"(",
"cache",
")",
"is",
"dict",
":",
"return",
"{",
"}",
"return",
"LRUCache",
"(",
"cache",
".",
"capacity",
")"
] | [
68,
0
] | [
74,
35
] | python | en | ['en', 'en', 'en'] | True |
load_extensions | (environment, extensions) | Load the extensions from the list and bind them to the environment.
Returns a dict of instantiated extensions.
| Load the extensions from the list and bind them to the environment.
Returns a dict of instantiated extensions.
| def load_extensions(environment, extensions):
"""Load the extensions from the list and bind it to the environment.
Returns a dict of instantiated environments.
"""
result = {}
for extension in extensions:
if isinstance(extension, string_types):
extension = import_string(extension)
result[extension.identifier] = extension(environment)
return result | [
"def",
"load_extensions",
"(",
"environment",
",",
"extensions",
")",
":",
"result",
"=",
"{",
"}",
"for",
"extension",
"in",
"extensions",
":",
"if",
"isinstance",
"(",
"extension",
",",
"string_types",
")",
":",
"extension",
"=",
"import_string",
"(",
"extension",
")",
"result",
"[",
"extension",
".",
"identifier",
"]",
"=",
"extension",
"(",
"environment",
")",
"return",
"result"
] | [
77,
0
] | [
86,
17
] | python | en | ['en', 'en', 'en'] | True |
_environment_sanity_check | (environment) | Perform a sanity check on the environment. | Perform a sanity check on the environment. | def _environment_sanity_check(environment):
"""Perform a sanity check on the environment."""
assert issubclass(environment.undefined, Undefined), 'undefined must ' \
'be a subclass of undefined because filters depend on it.'
assert environment.block_start_string != \
environment.variable_start_string != \
environment.comment_start_string, 'block, variable and comment ' \
'start strings must be different'
assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
'newline_sequence set to unknown line ending string.'
return environment | [
"def",
"_environment_sanity_check",
"(",
"environment",
")",
":",
"assert",
"issubclass",
"(",
"environment",
".",
"undefined",
",",
"Undefined",
")",
",",
"'undefined must '",
"'be a subclass of undefined because filters depend on it.'",
"assert",
"environment",
".",
"block_start_string",
"!=",
"environment",
".",
"variable_start_string",
"!=",
"environment",
".",
"comment_start_string",
",",
"'block, variable and comment '",
"'start strings must be different'",
"assert",
"environment",
".",
"newline_sequence",
"in",
"(",
"'\\r'",
",",
"'\\r\\n'",
",",
"'\\n'",
")",
",",
"'newline_sequence set to unknown line ending string.'",
"return",
"environment"
] | [
99,
0
] | [
109,
22
] | python | en | ['en', 'en', 'en'] | True |
Environment.add_extension | (self, extension) | Adds an extension after the environment was created.
.. versionadded:: 2.5
| Adds an extension after the environment was created. | def add_extension(self, extension):
"""Adds an extension after the environment was created.
.. versionadded:: 2.5
"""
self.extensions.update(load_extensions(self, [extension])) | [
"def",
"add_extension",
"(",
"self",
",",
"extension",
")",
":",
"self",
".",
"extensions",
".",
"update",
"(",
"load_extensions",
"(",
"self",
",",
"[",
"extension",
"]",
")",
")"
] | [
339,
4
] | [
344,
66
] | python | en | ['en', 'en', 'en'] | True |
Environment.extend | (self, **attributes) | Add the items to the instance of the environment if they do not exist
yet. This is used by :ref:`extensions <writing-extensions>` to register
callbacks and configuration values without breaking inheritance.
| Add the items to the instance of the environment if they do not exist
yet. This is used by :ref:`extensions <writing-extensions>` to register
callbacks and configuration values without breaking inheritance.
| def extend(self, **attributes):
"""Add the items to the instance of the environment if they do not exist
yet. This is used by :ref:`extensions <writing-extensions>` to register
callbacks and configuration values without breaking inheritance.
"""
for key, value in iteritems(attributes):
if not hasattr(self, key):
setattr(self, key, value) | [
"def",
"extend",
"(",
"self",
",",
"*",
"*",
"attributes",
")",
":",
"for",
"key",
",",
"value",
"in",
"iteritems",
"(",
"attributes",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"key",
")",
":",
"setattr",
"(",
"self",
",",
"key",
",",
"value",
")"
] | [
346,
4
] | [
353,
41
] | python | en | ['en', 'en', 'en'] | True |
Environment.overlay | (self, block_start_string=missing, block_end_string=missing,
variable_start_string=missing, variable_end_string=missing,
comment_start_string=missing, comment_end_string=missing,
line_statement_prefix=missing, line_comment_prefix=missing,
trim_blocks=missing, lstrip_blocks=missing,
extensions=missing, optimized=missing,
undefined=missing, finalize=missing, autoescape=missing,
loader=missing, cache_size=missing, auto_reload=missing,
bytecode_cache=missing) | Create a new overlay environment that shares all the data with the
current environment except for cache and the overridden attributes.
Extensions cannot be removed for an overlayed environment. An overlayed
environment automatically gets all the extensions of the environment it
is linked to plus optional extra extensions.
Creating overlays should happen after the initial environment was set
up completely. Not all attributes are truly linked, some are just
copied over so modifications on the original environment may not shine
through.
| Create a new overlay environment that shares all the data with the
current environment except for cache and the overridden attributes.
Extensions cannot be removed for an overlayed environment. An overlayed
environment automatically gets all the extensions of the environment it
is linked to plus optional extra extensions. | def overlay(self, block_start_string=missing, block_end_string=missing,
variable_start_string=missing, variable_end_string=missing,
comment_start_string=missing, comment_end_string=missing,
line_statement_prefix=missing, line_comment_prefix=missing,
trim_blocks=missing, lstrip_blocks=missing,
extensions=missing, optimized=missing,
undefined=missing, finalize=missing, autoescape=missing,
loader=missing, cache_size=missing, auto_reload=missing,
bytecode_cache=missing):
"""Create a new overlay environment that shares all the data with the
current environment except for cache and the overridden attributes.
Extensions cannot be removed for an overlayed environment. An overlayed
environment automatically gets all the extensions of the environment it
is linked to plus optional extra extensions.
Creating overlays should happen after the initial environment was set
up completely. Not all attributes are truly linked, some are just
copied over so modifications on the original environment may not shine
through.
"""
args = dict(locals())
del args['self'], args['cache_size'], args['extensions']
rv = object.__new__(self.__class__)
rv.__dict__.update(self.__dict__)
rv.overlayed = True
rv.linked_to = self
for key, value in iteritems(args):
if value is not missing:
setattr(rv, key, value)
if cache_size is not missing:
rv.cache = create_cache(cache_size)
else:
rv.cache = copy_cache(self.cache)
rv.extensions = {}
for key, value in iteritems(self.extensions):
rv.extensions[key] = value.bind(rv)
if extensions is not missing:
rv.extensions.update(load_extensions(rv, extensions))
return _environment_sanity_check(rv) | [
"def",
"overlay",
"(",
"self",
",",
"block_start_string",
"=",
"missing",
",",
"block_end_string",
"=",
"missing",
",",
"variable_start_string",
"=",
"missing",
",",
"variable_end_string",
"=",
"missing",
",",
"comment_start_string",
"=",
"missing",
",",
"comment_end_string",
"=",
"missing",
",",
"line_statement_prefix",
"=",
"missing",
",",
"line_comment_prefix",
"=",
"missing",
",",
"trim_blocks",
"=",
"missing",
",",
"lstrip_blocks",
"=",
"missing",
",",
"extensions",
"=",
"missing",
",",
"optimized",
"=",
"missing",
",",
"undefined",
"=",
"missing",
",",
"finalize",
"=",
"missing",
",",
"autoescape",
"=",
"missing",
",",
"loader",
"=",
"missing",
",",
"cache_size",
"=",
"missing",
",",
"auto_reload",
"=",
"missing",
",",
"bytecode_cache",
"=",
"missing",
")",
":",
"args",
"=",
"dict",
"(",
"locals",
"(",
")",
")",
"del",
"args",
"[",
"'self'",
"]",
",",
"args",
"[",
"'cache_size'",
"]",
",",
"args",
"[",
"'extensions'",
"]",
"rv",
"=",
"object",
".",
"__new__",
"(",
"self",
".",
"__class__",
")",
"rv",
".",
"__dict__",
".",
"update",
"(",
"self",
".",
"__dict__",
")",
"rv",
".",
"overlayed",
"=",
"True",
"rv",
".",
"linked_to",
"=",
"self",
"for",
"key",
",",
"value",
"in",
"iteritems",
"(",
"args",
")",
":",
"if",
"value",
"is",
"not",
"missing",
":",
"setattr",
"(",
"rv",
",",
"key",
",",
"value",
")",
"if",
"cache_size",
"is",
"not",
"missing",
":",
"rv",
".",
"cache",
"=",
"create_cache",
"(",
"cache_size",
")",
"else",
":",
"rv",
".",
"cache",
"=",
"copy_cache",
"(",
"self",
".",
"cache",
")",
"rv",
".",
"extensions",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"iteritems",
"(",
"self",
".",
"extensions",
")",
":",
"rv",
".",
"extensions",
"[",
"key",
"]",
"=",
"value",
".",
"bind",
"(",
"rv",
")",
"if",
"extensions",
"is",
"not",
"missing",
":",
"rv",
".",
"extensions",
".",
"update",
"(",
"load_extensions",
"(",
"rv",
",",
"extensions",
")",
")",
"return",
"_environment_sanity_check",
"(",
"rv",
")"
] | [
355,
4
] | [
398,
44
] | python | en | ['en', 'en', 'en'] | True |
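A usage sketch for overlay(): derive a tweaked environment that keeps sharing loaders, filters, and extensions with the original.

```python
from jinja2 import Environment

base = Environment()
strict = base.overlay(trim_blocks=True, lstrip_blocks=True)
assert strict.linked_to is base and strict.overlayed
assert strict.filters is base.filters  # shared by reference via __dict__.update
```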
Environment.iter_extensions | (self) | Iterates over the extensions by priority. | Iterates over the extensions by priority. | def iter_extensions(self):
"""Iterates over the extensions by priority."""
return iter(sorted(self.extensions.values(),
key=lambda x: x.priority)) | [
"def",
"iter_extensions",
"(",
"self",
")",
":",
"return",
"iter",
"(",
"sorted",
"(",
"self",
".",
"extensions",
".",
"values",
"(",
")",
",",
"key",
"=",
"lambda",
"x",
":",
"x",
".",
"priority",
")",
")"
] | [
402,
4
] | [
405,
53
] | python | en | ['en', 'en', 'en'] | True |
Environment.getitem | (self, obj, argument) | Get an item or attribute of an object but prefer the item. | Get an item or attribute of an object but prefer the item. | def getitem(self, obj, argument):
"""Get an item or attribute of an object but prefer the item."""
try:
return obj[argument]
except (AttributeError, TypeError, LookupError):
if isinstance(argument, string_types):
try:
attr = str(argument)
except Exception:
pass
else:
try:
return getattr(obj, attr)
except AttributeError:
pass
return self.undefined(obj=obj, name=argument) | [
"def",
"getitem",
"(",
"self",
",",
"obj",
",",
"argument",
")",
":",
"try",
":",
"return",
"obj",
"[",
"argument",
"]",
"except",
"(",
"AttributeError",
",",
"TypeError",
",",
"LookupError",
")",
":",
"if",
"isinstance",
"(",
"argument",
",",
"string_types",
")",
":",
"try",
":",
"attr",
"=",
"str",
"(",
"argument",
")",
"except",
"Exception",
":",
"pass",
"else",
":",
"try",
":",
"return",
"getattr",
"(",
"obj",
",",
"attr",
")",
"except",
"AttributeError",
":",
"pass",
"return",
"self",
".",
"undefined",
"(",
"obj",
"=",
"obj",
",",
"name",
"=",
"argument",
")"
] | [
407,
4
] | [
422,
57
] | python | en | ['en', 'en', 'en'] | True |
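A short sketch of the item-first lookup `getitem` performs (the dictionary and key names are illustrative):

    from jinja2 import Environment

    env = Environment()
    data = {'title': 'Hello'}

    print(env.getitem(data, 'title'))      # 'Hello' -- item access wins
    result = env.getitem(data, 'missing')  # falls through to env.undefined
    print(type(result).__name__)           # 'Undefined'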
Environment.getattr | (self, obj, attribute) | Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
| Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
| def getattr(self, obj, attribute):
"""Get an item or attribute of an object but prefer the attribute.
Unlike :meth:`getitem` the attribute *must* be a bytestring.
"""
try:
return getattr(obj, attribute)
except AttributeError:
pass
try:
return obj[attribute]
except (TypeError, LookupError, AttributeError):
return self.undefined(obj=obj, name=attribute) | [
"def",
"getattr",
"(",
"self",
",",
"obj",
",",
"attribute",
")",
":",
"try",
":",
"return",
"getattr",
"(",
"obj",
",",
"attribute",
")",
"except",
"AttributeError",
":",
"pass",
"try",
":",
"return",
"obj",
"[",
"attribute",
"]",
"except",
"(",
"TypeError",
",",
"LookupError",
",",
"AttributeError",
")",
":",
"return",
"self",
".",
"undefined",
"(",
"obj",
"=",
"obj",
",",
"name",
"=",
"attribute",
")"
] | [
424,
4
] | [
435,
58
] | python | en | ['en', 'en', 'en'] | True |
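The attribute-first counterpart, sketched under the same assumptions:

    from jinja2 import Environment

    class Box(object):
        color = 'red'

    env = Environment()
    print(env.getattr(Box(), 'color'))              # 'red' via attribute
    print(env.getattr({'color': 'blue'}, 'color'))  # 'blue' via item fallback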
Environment.call_filter | (self, name, value, args=None, kwargs=None,
context=None, eval_ctx=None) | Invokes a filter on a value the same way the compiler does it.
Note that on Python 3 this might return a coroutine in case the
filter is running from an environment in async mode and the filter
supports async execution. It's your responsibility to await this
if needed.
.. versionadded:: 2.7
| Invokes a filter on a value the same way the compiler does it. | def call_filter(self, name, value, args=None, kwargs=None,
context=None, eval_ctx=None):
"""Invokes a filter on a value the same way the compiler does it.
Note that on Python 3 this might return a coroutine in case the
filter is running from an environment in async mode and the filter
supports async execution. It's your responsibility to await this
if needed.
.. versionadded:: 2.7
"""
func = self.filters.get(name)
if func is None:
fail_for_missing_callable('no filter named %r', name)
args = [value] + list(args or ())
if getattr(func, 'contextfilter', False):
if context is None:
raise TemplateRuntimeError('Attempted to invoke context '
'filter without context')
args.insert(0, context)
elif getattr(func, 'evalcontextfilter', False):
if eval_ctx is None:
if context is not None:
eval_ctx = context.eval_ctx
else:
eval_ctx = EvalContext(self)
args.insert(0, eval_ctx)
elif getattr(func, 'environmentfilter', False):
args.insert(0, self)
return func(*args, **(kwargs or {})) | [
"def",
"call_filter",
"(",
"self",
",",
"name",
",",
"value",
",",
"args",
"=",
"None",
",",
"kwargs",
"=",
"None",
",",
"context",
"=",
"None",
",",
"eval_ctx",
"=",
"None",
")",
":",
"func",
"=",
"self",
".",
"filters",
".",
"get",
"(",
"name",
")",
"if",
"func",
"is",
"None",
":",
"fail_for_missing_callable",
"(",
"'no filter named %r'",
",",
"name",
")",
"args",
"=",
"[",
"value",
"]",
"+",
"list",
"(",
"args",
"or",
"(",
")",
")",
"if",
"getattr",
"(",
"func",
",",
"'contextfilter'",
",",
"False",
")",
":",
"if",
"context",
"is",
"None",
":",
"raise",
"TemplateRuntimeError",
"(",
"'Attempted to invoke context '",
"'filter without context'",
")",
"args",
".",
"insert",
"(",
"0",
",",
"context",
")",
"elif",
"getattr",
"(",
"func",
",",
"'evalcontextfilter'",
",",
"False",
")",
":",
"if",
"eval_ctx",
"is",
"None",
":",
"if",
"context",
"is",
"not",
"None",
":",
"eval_ctx",
"=",
"context",
".",
"eval_ctx",
"else",
":",
"eval_ctx",
"=",
"EvalContext",
"(",
"self",
")",
"args",
".",
"insert",
"(",
"0",
",",
"eval_ctx",
")",
"elif",
"getattr",
"(",
"func",
",",
"'environmentfilter'",
",",
"False",
")",
":",
"args",
".",
"insert",
"(",
"0",
",",
"self",
")",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"(",
"kwargs",
"or",
"{",
"}",
")",
")"
] | [
437,
4
] | [
466,
44
] | python | en | ['en', 'en', 'en'] | True |
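A sketch of invoking built-in filters through the same dispatch compiled template code uses; `upper` is a plain filter while `join` is an eval-context filter, so this also exercises the argument-injection branches above:

    from jinja2 import Environment

    env = Environment()
    print(env.call_filter('upper', 'hello'))           # 'HELLO'
    # `join` is an evalcontextfilter; call_filter builds an EvalContext
    # itself when neither context nor eval_ctx is supplied.
    print(env.call_filter('join', ['a', 'b'], ['-']))  # 'a-b'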
Environment.call_test | (self, name, value, args=None, kwargs=None) | Invokes a test on a value the same way the compiler does it.
.. versionadded:: 2.7
| Invokes a test on a value the same way the compiler does it. | def call_test(self, name, value, args=None, kwargs=None):
"""Invokes a test on a value the same way the compiler does it.
.. versionadded:: 2.7
"""
func = self.tests.get(name)
if func is None:
fail_for_missing_callable('no test named %r', name)
return func(value, *(args or ()), **(kwargs or {})) | [
"def",
"call_test",
"(",
"self",
",",
"name",
",",
"value",
",",
"args",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"func",
"=",
"self",
".",
"tests",
".",
"get",
"(",
"name",
")",
"if",
"func",
"is",
"None",
":",
"fail_for_missing_callable",
"(",
"'no test named %r'",
",",
"name",
")",
"return",
"func",
"(",
"value",
",",
"*",
"(",
"args",
"or",
"(",
")",
")",
",",
"*",
"*",
"(",
"kwargs",
"or",
"{",
"}",
")",
")"
] | [
468,
4
] | [
476,
59
] | python | en | ['en', 'en', 'en'] | True |
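The analogous sketch for tests:

    from jinja2 import Environment

    env = Environment()
    print(env.call_test('even', 42))              # True
    print(env.call_test('divisibleby', 42, [7]))  # True, since 42 % 7 == 0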
Environment.parse | (self, source, name=None, filename=None) | Parse the sourcecode and return the abstract syntax tree. This
tree of nodes is used by the compiler to convert the template into
executable source- or bytecode. This is useful for debugging or to
extract information from templates.
If you are :ref:`developing Jinja2 extensions <writing-extensions>`
this gives you a good overview of the node tree generated.
| Parse the sourcecode and return the abstract syntax tree. This
tree of nodes is used by the compiler to convert the template into
executable source- or bytecode. This is useful for debugging or to
extract information from templates. | def parse(self, source, name=None, filename=None):
"""Parse the sourcecode and return the abstract syntax tree. This
tree of nodes is used by the compiler to convert the template into
executable source- or bytecode. This is useful for debugging or to
extract information from templates.
If you are :ref:`developing Jinja2 extensions <writing-extensions>`
this gives you a good overview of the node tree generated.
"""
try:
return self._parse(source, name, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source) | [
"def",
"parse",
"(",
"self",
",",
"source",
",",
"name",
"=",
"None",
",",
"filename",
"=",
"None",
")",
":",
"try",
":",
"return",
"self",
".",
"_parse",
"(",
"source",
",",
"name",
",",
"filename",
")",
"except",
"TemplateSyntaxError",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"self",
".",
"handle_exception",
"(",
"exc_info",
",",
"source_hint",
"=",
"source",
")"
] | [
479,
4
] | [
492,
59
] | python | en | ['en', 'en', 'en'] | True |
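A sketch of inspecting the node tree `parse` returns, here with the `jinja2.meta` helpers:

    from jinja2 import Environment, meta

    env = Environment()
    ast = env.parse('Hello {{ name }}!')
    # The AST can be walked to extract information from the template.
    print(meta.find_undeclared_variables(ast))  # {'name'}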
Environment._parse | (self, source, name, filename) | Internal parsing function used by `parse` and `compile`. | Internal parsing function used by `parse` and `compile`. | def _parse(self, source, name, filename):
"""Internal parsing function used by `parse` and `compile`."""
return Parser(self, source, name, encode_filename(filename)).parse() | [
"def",
"_parse",
"(",
"self",
",",
"source",
",",
"name",
",",
"filename",
")",
":",
"return",
"Parser",
"(",
"self",
",",
"source",
",",
"name",
",",
"encode_filename",
"(",
"filename",
")",
")",
".",
"parse",
"(",
")"
] | [
494,
4
] | [
496,
76
] | python | en | ['en', 'en', 'en'] | True |
Environment.lex | (self, source, name=None, filename=None) | Lex the given sourcecode and return a generator that yields
tokens as tuples in the form ``(lineno, token_type, value)``.
This can be useful for :ref:`extension development <writing-extensions>`
and debugging templates.
This does not perform preprocessing. If you want the preprocessing
of the extensions to be applied you have to filter source through
the :meth:`preprocess` method.
| Lex the given sourcecode and return a generator that yields
tokens as tuples in the form ``(lineno, token_type, value)``.
This can be useful for :ref:`extension development <writing-extensions>`
and debugging templates. | def lex(self, source, name=None, filename=None):
"""Lex the given sourcecode and return a generator that yields
tokens as tuples in the form ``(lineno, token_type, value)``.
This can be useful for :ref:`extension development <writing-extensions>`
and debugging templates.
This does not perform preprocessing. If you want the preprocessing
of the extensions to be applied you have to filter source through
the :meth:`preprocess` method.
"""
source = text_type(source)
try:
return self.lexer.tokeniter(source, name, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source) | [
"def",
"lex",
"(",
"self",
",",
"source",
",",
"name",
"=",
"None",
",",
"filename",
"=",
"None",
")",
":",
"source",
"=",
"text_type",
"(",
"source",
")",
"try",
":",
"return",
"self",
".",
"lexer",
".",
"tokeniter",
"(",
"source",
",",
"name",
",",
"filename",
")",
"except",
"TemplateSyntaxError",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"self",
".",
"handle_exception",
"(",
"exc_info",
",",
"source_hint",
"=",
"source",
")"
] | [
498,
4
] | [
513,
59
] | python | en | ['en', 'en', 'en'] | True |
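A sketch of iterating the raw token stream; the exact sequence (including whitespace tokens inside expressions) depends on the source:

    from jinja2 import Environment

    env = Environment()
    for lineno, token_type, value in env.lex('Hello {{ name }}'):
        print(lineno, token_type, repr(value))
    # e.g. (1, 'data', 'Hello '), (1, 'variable_begin', '{{'), ...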
Environment.preprocess | (self, source, name=None, filename=None) | Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
because there you usually only want the actual source tokenized.
| Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
because there you usually only want the actual source tokenized.
| def preprocess(self, source, name=None, filename=None):
"""Preprocesses the source with all extensions. This is automatically
called for all parsing and compiling methods but *not* for :meth:`lex`
because there you usually only want the actual source tokenized.
"""
return reduce(lambda s, e: e.preprocess(s, name, filename),
self.iter_extensions(), text_type(source)) | [
"def",
"preprocess",
"(",
"self",
",",
"source",
",",
"name",
"=",
"None",
",",
"filename",
"=",
"None",
")",
":",
"return",
"reduce",
"(",
"lambda",
"s",
",",
"e",
":",
"e",
".",
"preprocess",
"(",
"s",
",",
"name",
",",
"filename",
")",
",",
"self",
".",
"iter_extensions",
"(",
")",
",",
"text_type",
"(",
"source",
")",
")"
] | [
515,
4
] | [
521,
64
] | python | en | ['en', 'en', 'en'] | True |
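A sketch showing that with no source-rewriting extensions installed, `preprocess` is effectively the identity:

    from jinja2 import Environment

    env = Environment()
    src = 'Hello {{ name }}'
    # Each extension's preprocess() runs over the source in priority
    # order; with no extensions loaded the text comes back unchanged.
    print(env.preprocess(src) == src)  # True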
Environment._tokenize | (self, source, name, filename=None, state=None) | Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
| Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
| def _tokenize(self, source, name, filename=None, state=None):
"""Called by the parser to do the preprocessing and filtering
for all the extensions. Returns a :class:`~jinja2.lexer.TokenStream`.
"""
source = self.preprocess(source, name, filename)
stream = self.lexer.tokenize(source, name, filename, state)
for ext in self.iter_extensions():
stream = ext.filter_stream(stream)
if not isinstance(stream, TokenStream):
stream = TokenStream(stream, name, filename)
return stream | [
"def",
"_tokenize",
"(",
"self",
",",
"source",
",",
"name",
",",
"filename",
"=",
"None",
",",
"state",
"=",
"None",
")",
":",
"source",
"=",
"self",
".",
"preprocess",
"(",
"source",
",",
"name",
",",
"filename",
")",
"stream",
"=",
"self",
".",
"lexer",
".",
"tokenize",
"(",
"source",
",",
"name",
",",
"filename",
",",
"state",
")",
"for",
"ext",
"in",
"self",
".",
"iter_extensions",
"(",
")",
":",
"stream",
"=",
"ext",
".",
"filter_stream",
"(",
"stream",
")",
"if",
"not",
"isinstance",
"(",
"stream",
",",
"TokenStream",
")",
":",
"stream",
"=",
"TokenStream",
"(",
"stream",
",",
"name",
",",
"filename",
")",
"return",
"stream"
] | [
523,
4
] | [
533,
21
] | python | en | ['en', 'en', 'en'] | True |
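A sketch of the hook `_tokenize` exists for: an extension rewriting the token stream via `filter_stream`. The extension below is hypothetical, upper-casing raw `data` tokens; note how a plain generator is accepted because `_tokenize` wraps non-`TokenStream` results itself:

    from jinja2 import Environment
    from jinja2.ext import Extension
    from jinja2.lexer import Token

    class UpperDataExtension(Extension):
        # Hypothetical extension: upper-case every raw ``data`` token.
        def filter_stream(self, stream):
            for token in stream:
                if token.type == 'data':
                    yield Token(token.lineno, 'data', token.value.upper())
                else:
                    yield token

    env = Environment(extensions=[UpperDataExtension])
    print(env.from_string('hello {{ name }}').render(name='world'))
    # 'HELLO world' -- literal text rewritten, the variable left alone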
Environment._generate | (self, source, name, filename, defer_init=False) | Internal hook that can be overridden to hook a different generate
method in.
.. versionadded:: 2.5
| Internal hook that can be overridden to hook a different generate
method in. | def _generate(self, source, name, filename, defer_init=False):
"""Internal hook that can be overridden to hook a different generate
method in.
.. versionadded:: 2.5
"""
return generate(source, self, name, filename, defer_init=defer_init,
optimized=self.optimized) | [
"def",
"_generate",
"(",
"self",
",",
"source",
",",
"name",
",",
"filename",
",",
"defer_init",
"=",
"False",
")",
":",
"return",
"generate",
"(",
"source",
",",
"self",
",",
"name",
",",
"filename",
",",
"defer_init",
"=",
"defer_init",
",",
"optimized",
"=",
"self",
".",
"optimized",
")"
] | [
535,
4
] | [
542,
49
] | python | en | ['en', 'en', 'en'] | True |
Environment._compile | (self, source, filename) | Internal hook that can be overridden to hook a different compile
method in.
.. versionadded:: 2.5
| Internal hook that can be overridden to hook a different compile
method in. | def _compile(self, source, filename):
"""Internal hook that can be overridden to hook a different compile
method in.
.. versionadded:: 2.5
"""
return compile(source, filename, 'exec') | [
"def",
"_compile",
"(",
"self",
",",
"source",
",",
"filename",
")",
":",
"return",
"compile",
"(",
"source",
",",
"filename",
",",
"'exec'",
")"
] | [
544,
4
] | [
550,
48
] | python | en | ['en', 'en', 'en'] | True |
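Since `_generate` and `_compile` are documented override hooks, a subclass can intercept compilation; a minimal sketch (the logging behavior is purely illustrative):

    from jinja2 import Environment

    class LoggingEnvironment(Environment):
        # Inspect the generated Python source before byte-compiling it;
        # otherwise defer to the parent implementation.
        def _compile(self, source, filename):
            print('compiling %s (%d chars)' % (filename, len(source)))
            return Environment._compile(self, source, filename)

    env = LoggingEnvironment()
    env.from_string('{{ 1 + 1 }}').render()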
Environment.compile | (self, source, name=None, filename=None, raw=False,
defer_init=False) | Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
The `filename` parameter is the estimated filename of the template on
the file system. If the template came from a database or memory this
can be omitted.
The return value of this method is a Python code object. If the `raw`
parameter is `True`, the return value will be a string with Python
code equivalent to the bytecode returned otherwise. This method is
mainly used internally.
`defer_init` is used internally to aid the module code generator. This
causes the generated code to be able to import without the global
environment variable to be set.
.. versionadded:: 2.4
`defer_init` parameter added.
| Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
The `filename` parameter is the estimated filename of the template on
the file system. If the template came from a database or memory this
can be omitted. | def compile(self, source, name=None, filename=None, raw=False,
defer_init=False):
"""Compile a node or template source code. The `name` parameter is
the load name of the template after it was joined using
:meth:`join_path` if necessary, not the filename on the file system.
The `filename` parameter is the estimated filename of the template on
the file system. If the template came from a database or memory this
can be omitted.
The return value of this method is a Python code object. If the `raw`
parameter is `True`, the return value will be a string with Python
code equivalent to the bytecode returned otherwise. This method is
mainly used internally.
`defer_init` is used internally to aid the module code generator. This
causes the generated code to be able to import without the global
environment variable to be set.
.. versionadded:: 2.4
`defer_init` parameter added.
"""
source_hint = None
try:
if isinstance(source, string_types):
source_hint = source
source = self._parse(source, name, filename)
source = self._generate(source, name, filename,
defer_init=defer_init)
if raw:
return source
if filename is None:
filename = '<template>'
else:
filename = encode_filename(filename)
return self._compile(source, filename)
except TemplateSyntaxError:
exc_info = sys.exc_info()
self.handle_exception(exc_info, source_hint=source_hint) | [
"def",
"compile",
"(",
"self",
",",
"source",
",",
"name",
"=",
"None",
",",
"filename",
"=",
"None",
",",
"raw",
"=",
"False",
",",
"defer_init",
"=",
"False",
")",
":",
"source_hint",
"=",
"None",
"try",
":",
"if",
"isinstance",
"(",
"source",
",",
"string_types",
")",
":",
"source_hint",
"=",
"source",
"source",
"=",
"self",
".",
"_parse",
"(",
"source",
",",
"name",
",",
"filename",
")",
"source",
"=",
"self",
".",
"_generate",
"(",
"source",
",",
"name",
",",
"filename",
",",
"defer_init",
"=",
"defer_init",
")",
"if",
"raw",
":",
"return",
"source",
"if",
"filename",
"is",
"None",
":",
"filename",
"=",
"'<template>'",
"else",
":",
"filename",
"=",
"encode_filename",
"(",
"filename",
")",
"return",
"self",
".",
"_compile",
"(",
"source",
",",
"filename",
")",
"except",
"TemplateSyntaxError",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"self",
".",
"handle_exception",
"(",
"exc_info",
",",
"source_hint",
"=",
"source_hint",
")"
] | [
553,
4
] | [
590,
64
] | python | en | ['en', 'en', 'en'] | True |
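A sketch of both return modes of `compile`:

    from jinja2 import Environment

    env = Environment()
    # raw=True yields the generated Python source as a string, which is
    # handy for debugging what the compiler emits for a template.
    print(env.compile('Hello {{ name }}!', raw=True))

    code = env.compile('Hello {{ name }}!', name='greeting')
    print(type(code).__name__)  # 'code' -- a Python code object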