identifier (stringlengths 1–155) | parameters (stringlengths 2–6.09k) | docstring (stringlengths 11–63.4k) | docstring_summary (stringlengths 0–63.4k) | function (stringlengths 29–99.8k) | function_tokens (sequence) | start_point (sequence) | end_point (sequence) | language (stringclasses, 1 value) | docstring_language (stringlengths 2–7) | docstring_language_predictions (stringlengths 18–23) | is_langid_reliable (stringclasses, 2 values) |
---|---|---|---|---|---|---|---|---|---|---|---|
get_all_distribution_names | (url=None) |
Return all distribution names known by an index.
:param url: The URL of the index.
:return: A list of all known distribution names.
|
Return all distribution names known by an index.
:param url: The URL of the index.
:return: A list of all known distribution names.
| def get_all_distribution_names(url=None):
"""
Return all distribution names known by an index.
:param url: The URL of the index.
:return: A list of all known distribution names.
"""
if url is None:
url = DEFAULT_INDEX
client = ServerProxy(url, timeout=3.0)
try:
return client.list_packages()
finally:
client('close')() | [
"def",
"get_all_distribution_names",
"(",
"url",
"=",
"None",
")",
":",
"if",
"url",
"is",
"None",
":",
"url",
"=",
"DEFAULT_INDEX",
"client",
"=",
"ServerProxy",
"(",
"url",
",",
"timeout",
"=",
"3.0",
")",
"try",
":",
"return",
"client",
".",
"list_packages",
"(",
")",
"finally",
":",
"client",
"(",
"'close'",
")",
"(",
")"
] | [
40,
0
] | [
52,
25
] | python | en | ['en', 'error', 'th'] | False |
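The row above records `get_all_distribution_names(url=None)`, which asks an index for every project name it knows via XML-RPC. A minimal usage sketch, assuming the function is importable from `distlib.locators` and that the target index still serves the XML-RPC `list_packages` endpoint:

```python
# Usage sketch; needs network access and an index that still answers XML-RPC calls.
from distlib.locators import get_all_distribution_names

names = get_all_distribution_names()  # defaults to DEFAULT_INDEX
print(len(names), "distributions known to the index")
```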
Locator.__init__ | (self, scheme='default') |
Initialise an instance.
:param scheme: Because locators look for most recent versions, they
need to know the version scheme to use. This specifies
the current PEP-recommended scheme - use ``'legacy'``
if you need to support existing distributions on PyPI.
|
Initialise an instance.
:param scheme: Because locators look for most recent versions, they
need to know the version scheme to use. This specifies
the current PEP-recommended scheme - use ``'legacy'``
if you need to support existing distributions on PyPI.
| def __init__(self, scheme='default'):
"""
Initialise an instance.
:param scheme: Because locators look for most recent versions, they
need to know the version scheme to use. This specifies
the current PEP-recommended scheme - use ``'legacy'``
if you need to support existing distributions on PyPI.
"""
self._cache = {}
self.scheme = scheme
# Because of bugs in some of the handlers on some of the platforms,
# we use our own opener rather than just using urlopen.
self.opener = build_opener(RedirectHandler())
# If get_project() is called from locate(), the matcher instance
# is set from the requirement passed to locate(). See issue #18 for
# why this can be useful to know.
self.matcher = None
self.errors = queue.Queue() | [
"def",
"__init__",
"(",
"self",
",",
"scheme",
"=",
"'default'",
")",
":",
"self",
".",
"_cache",
"=",
"{",
"}",
"self",
".",
"scheme",
"=",
"scheme",
"# Because of bugs in some of the handlers on some of the platforms,",
"# we use our own opener rather than just using urlopen.",
"self",
".",
"opener",
"=",
"build_opener",
"(",
"RedirectHandler",
"(",
")",
")",
"# If get_project() is called from locate(), the matcher instance",
"# is set from the requirement passed to locate(). See issue #18 for",
"# why this can be useful to know.",
"self",
".",
"matcher",
"=",
"None",
"self",
".",
"errors",
"=",
"queue",
".",
"Queue",
"(",
")"
] | [
101,
4
] | [
118,
35
] | python | en | ['en', 'error', 'th'] | False |
Locator.get_errors | (self) |
Return any errors which have occurred.
|
Return any errors which have occurred.
| def get_errors(self):
"""
Return any errors which have occurred.
"""
result = []
while not self.errors.empty(): # pragma: no cover
try:
e = self.errors.get(False)
result.append(e)
except self.errors.Empty:
continue
self.errors.task_done()
return result | [
"def",
"get_errors",
"(",
"self",
")",
":",
"result",
"=",
"[",
"]",
"while",
"not",
"self",
".",
"errors",
".",
"empty",
"(",
")",
":",
"# pragma: no cover",
"try",
":",
"e",
"=",
"self",
".",
"errors",
".",
"get",
"(",
"False",
")",
"result",
".",
"append",
"(",
"e",
")",
"except",
"self",
".",
"errors",
".",
"Empty",
":",
"continue",
"self",
".",
"errors",
".",
"task_done",
"(",
")",
"return",
"result"
] | [
120,
4
] | [
132,
21
] | python | en | ['en', 'error', 'th'] | False |
Locator.clear_errors | (self) |
Clear any errors which may have been logged.
|
Clear any errors which may have been logged.
| def clear_errors(self):
"""
Clear any errors which may have been logged.
"""
# Just get the errors and throw them away
self.get_errors() | [
"def",
"clear_errors",
"(",
"self",
")",
":",
"# Just get the errors and throw them away",
"self",
".",
"get_errors",
"(",
")"
] | [
134,
4
] | [
139,
25
] | python | en | ['en', 'error', 'th'] | False |
Locator._get_project | (self, name) |
For a given project, get a dictionary mapping available versions to Distribution
instances.
This should be implemented in subclasses.
If called from a locate() request, self.matcher will be set to a
matcher for the requirement to satisfy, otherwise it will be None.
|
For a given project, get a dictionary mapping available versions to Distribution
instances. | def _get_project(self, name):
"""
For a given project, get a dictionary mapping available versions to Distribution
instances.
This should be implemented in subclasses.
If called from a locate() request, self.matcher will be set to a
matcher for the requirement to satisfy, otherwise it will be None.
"""
raise NotImplementedError('Please implement in the subclass') | [
"def",
"_get_project",
"(",
"self",
",",
"name",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Please implement in the subclass'",
")"
] | [
152,
4
] | [
162,
69
] | python | en | ['en', 'error', 'th'] | False |
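Because `_get_project` is the hook subclasses must implement, a sketch of a minimal subclass may help. The layout of the returned mapping (version keys alongside the 'urls' and 'digests' bookkeeping keys) is inferred from `get_project`, `locate` and `_update_version_data` elsewhere in this class, so treat it as an assumption rather than a specification:

```python
# Sketch of a Locator subclass; the result layout ('urls'/'digests' keys) is
# inferred from the surrounding methods and is an assumption, not a spec.
from distlib.locators import Locator


class StaticLocator(Locator):
    """Serves a fixed, in-memory mapping of release URLs (illustrative only)."""

    def __init__(self, releases, **kwargs):
        # releases: e.g. {'foo': {'1.0': 'https://host.invalid/foo-1.0.tar.gz'}}
        super(StaticLocator, self).__init__(**kwargs)
        self._releases = releases

    def _get_project(self, name):
        result = {'urls': {}, 'digests': {}}
        for version, url in self._releases.get(name, {}).items():
            info = {'name': name, 'version': version,
                    'filename': url.rsplit('/', 1)[-1], 'url': url}
            self._update_version_data(result, info)
        return result

    def get_distribution_names(self):
        return set(self._releases)
```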
Locator.get_distribution_names | (self) |
Return all the distribution names known to this locator.
|
Return all the distribution names known to this locator.
| def get_distribution_names(self):
"""
Return all the distribution names known to this locator.
"""
raise NotImplementedError('Please implement in the subclass') | [
"def",
"get_distribution_names",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Please implement in the subclass'",
")"
] | [
164,
4
] | [
168,
69
] | python | en | ['en', 'error', 'th'] | False |
Locator.get_project | (self, name) |
For a given project, get a dictionary mapping available versions to Distribution
instances.
This calls _get_project to do all the work, and just implements a caching layer on top.
|
For a given project, get a dictionary mapping available versions to Distribution
instances. | def get_project(self, name):
"""
For a given project, get a dictionary mapping available versions to Distribution
instances.
This calls _get_project to do all the work, and just implements a caching layer on top.
"""
if self._cache is None: # pragma: no cover
result = self._get_project(name)
elif name in self._cache:
result = self._cache[name]
else:
self.clear_errors()
result = self._get_project(name)
self._cache[name] = result
return result | [
"def",
"get_project",
"(",
"self",
",",
"name",
")",
":",
"if",
"self",
".",
"_cache",
"is",
"None",
":",
"# pragma: no cover",
"result",
"=",
"self",
".",
"_get_project",
"(",
"name",
")",
"elif",
"name",
"in",
"self",
".",
"_cache",
":",
"result",
"=",
"self",
".",
"_cache",
"[",
"name",
"]",
"else",
":",
"self",
".",
"clear_errors",
"(",
")",
"result",
"=",
"self",
".",
"_get_project",
"(",
"name",
")",
"self",
".",
"_cache",
"[",
"name",
"]",
"=",
"result",
"return",
"result"
] | [
170,
4
] | [
185,
21
] | python | en | ['en', 'error', 'th'] | False |
Locator.score_url | (self, url) |
Give an url a score which can be used to choose preferred URLs
for a given project release.
|
Give an url a score which can be used to choose preferred URLs
for a given project release.
| def score_url(self, url):
"""
Give an url a score which can be used to choose preferred URLs
for a given project release.
"""
t = urlparse(url)
basename = posixpath.basename(t.path)
compatible = True
is_wheel = basename.endswith('.whl')
is_downloadable = basename.endswith(self.downloadable_extensions)
if is_wheel:
compatible = is_compatible(Wheel(basename), self.wheel_tags)
return (t.scheme == 'https', 'pypi.org' in t.netloc,
is_downloadable, is_wheel, compatible, basename) | [
"def",
"score_url",
"(",
"self",
",",
"url",
")",
":",
"t",
"=",
"urlparse",
"(",
"url",
")",
"basename",
"=",
"posixpath",
".",
"basename",
"(",
"t",
".",
"path",
")",
"compatible",
"=",
"True",
"is_wheel",
"=",
"basename",
".",
"endswith",
"(",
"'.whl'",
")",
"is_downloadable",
"=",
"basename",
".",
"endswith",
"(",
"self",
".",
"downloadable_extensions",
")",
"if",
"is_wheel",
":",
"compatible",
"=",
"is_compatible",
"(",
"Wheel",
"(",
"basename",
")",
",",
"self",
".",
"wheel_tags",
")",
"return",
"(",
"t",
".",
"scheme",
"==",
"'https'",
",",
"'pypi.org'",
"in",
"t",
".",
"netloc",
",",
"is_downloadable",
",",
"is_wheel",
",",
"compatible",
",",
"basename",
")"
] | [
187,
4
] | [
200,
64
] | python | en | ['en', 'error', 'th'] | False |
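The score is just a tuple ordered so that https, pypi.org hosting, a downloadable extension, being a wheel and wheel compatibility win comparisons in that order; Python's tuple ordering does the rest. A small illustration with made-up URLs (any `Locator` instance works, `default_locator` is used here for convenience):

```python
# Made-up URLs; default_locator is distlib's module-level AggregatingLocator.
from distlib.locators import default_locator as loc

a = "https://pypi.org/packages/source/f/foo/foo-1.0.tar.gz"
b = "http://mirror.invalid/foo/foo-1.0.zip"
print(loc.score_url(a) > loc.score_url(b))  # True: https + pypi.org outrank the http mirror
print(loc.prefer_url(b, a))                 # the pypi.org https URL wins
```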
Locator.prefer_url | (self, url1, url2) |
Choose one of two URLs where both are candidates for distribution
archives for the same version of a distribution (for example,
.tar.gz vs. zip).
The current implementation favours https:// URLs over http://, archives
from PyPI over those from other locations, wheel compatibility (if a
wheel) and then the archive name.
|
Choose one of two URLs where both are candidates for distribution
archives for the same version of a distribution (for example,
.tar.gz vs. zip). | def prefer_url(self, url1, url2):
"""
Choose one of two URLs where both are candidates for distribution
archives for the same version of a distribution (for example,
.tar.gz vs. zip).
The current implementation favours https:// URLs over http://, archives
from PyPI over those from other locations, wheel compatibility (if a
wheel) and then the archive name.
"""
result = url2
if url1:
s1 = self.score_url(url1)
s2 = self.score_url(url2)
if s1 > s2:
result = url1
if result != url2:
logger.debug('Not replacing %r with %r', url1, url2)
else:
logger.debug('Replacing %r with %r', url1, url2)
return result | [
"def",
"prefer_url",
"(",
"self",
",",
"url1",
",",
"url2",
")",
":",
"result",
"=",
"url2",
"if",
"url1",
":",
"s1",
"=",
"self",
".",
"score_url",
"(",
"url1",
")",
"s2",
"=",
"self",
".",
"score_url",
"(",
"url2",
")",
"if",
"s1",
">",
"s2",
":",
"result",
"=",
"url1",
"if",
"result",
"!=",
"url2",
":",
"logger",
".",
"debug",
"(",
"'Not replacing %r with %r'",
",",
"url1",
",",
"url2",
")",
"else",
":",
"logger",
".",
"debug",
"(",
"'Replacing %r with %r'",
",",
"url1",
",",
"url2",
")",
"return",
"result"
] | [
202,
4
] | [
222,
21
] | python | en | ['en', 'error', 'th'] | False |
Locator.split_filename | (self, filename, project_name) |
Attempt to split a filename in project name, version and Python version.
|
Attempt to split a filename in project name, version and Python version.
| def split_filename(self, filename, project_name):
"""
Attempt to split a filename in project name, version and Python version.
"""
return split_filename(filename, project_name) | [
"def",
"split_filename",
"(",
"self",
",",
"filename",
",",
"project_name",
")",
":",
"return",
"split_filename",
"(",
"filename",
",",
"project_name",
")"
] | [
224,
4
] | [
228,
53
] | python | en | ['en', 'error', 'th'] | False |
Locator.convert_url_to_download_info | (self, url, project_name) |
See if a URL is a candidate for a download URL for a project (the URL
has typically been scraped from an HTML page).
If it is, a dictionary is returned with keys "name", "version",
"filename" and "url"; otherwise, None is returned.
|
See if a URL is a candidate for a download URL for a project (the URL
has typically been scraped from an HTML page). | def convert_url_to_download_info(self, url, project_name):
"""
See if a URL is a candidate for a download URL for a project (the URL
has typically been scraped from an HTML page).
If it is, a dictionary is returned with keys "name", "version",
"filename" and "url"; otherwise, None is returned.
"""
def same_project(name1, name2):
return normalize_name(name1) == normalize_name(name2)
result = None
scheme, netloc, path, params, query, frag = urlparse(url)
if frag.lower().startswith('egg='): # pragma: no cover
logger.debug('%s: version hint in fragment: %r',
project_name, frag)
m = HASHER_HASH.match(frag)
if m:
algo, digest = m.groups()
else:
algo, digest = None, None
origpath = path
if path and path[-1] == '/': # pragma: no cover
path = path[:-1]
if path.endswith('.whl'):
try:
wheel = Wheel(path)
if not is_compatible(wheel, self.wheel_tags):
logger.debug('Wheel not compatible: %s', path)
else:
if project_name is None:
include = True
else:
include = same_project(wheel.name, project_name)
if include:
result = {
'name': wheel.name,
'version': wheel.version,
'filename': wheel.filename,
'url': urlunparse((scheme, netloc, origpath,
params, query, '')),
'python-version': ', '.join(
['.'.join(list(v[2:])) for v in wheel.pyver]),
}
except Exception as e: # pragma: no cover
logger.warning('invalid path for wheel: %s', path)
elif not path.endswith(self.downloadable_extensions): # pragma: no cover
logger.debug('Not downloadable: %s', path)
else: # downloadable extension
path = filename = posixpath.basename(path)
for ext in self.downloadable_extensions:
if path.endswith(ext):
path = path[:-len(ext)]
t = self.split_filename(path, project_name)
if not t: # pragma: no cover
logger.debug('No match for project/version: %s', path)
else:
name, version, pyver = t
if not project_name or same_project(project_name, name):
result = {
'name': name,
'version': version,
'filename': filename,
'url': urlunparse((scheme, netloc, origpath,
params, query, '')),
#'packagetype': 'sdist',
}
if pyver: # pragma: no cover
result['python-version'] = pyver
break
if result and algo:
result['%s_digest' % algo] = digest
return result | [
"def",
"convert_url_to_download_info",
"(",
"self",
",",
"url",
",",
"project_name",
")",
":",
"def",
"same_project",
"(",
"name1",
",",
"name2",
")",
":",
"return",
"normalize_name",
"(",
"name1",
")",
"==",
"normalize_name",
"(",
"name2",
")",
"result",
"=",
"None",
"scheme",
",",
"netloc",
",",
"path",
",",
"params",
",",
"query",
",",
"frag",
"=",
"urlparse",
"(",
"url",
")",
"if",
"frag",
".",
"lower",
"(",
")",
".",
"startswith",
"(",
"'egg='",
")",
":",
"# pragma: no cover",
"logger",
".",
"debug",
"(",
"'%s: version hint in fragment: %r'",
",",
"project_name",
",",
"frag",
")",
"m",
"=",
"HASHER_HASH",
".",
"match",
"(",
"frag",
")",
"if",
"m",
":",
"algo",
",",
"digest",
"=",
"m",
".",
"groups",
"(",
")",
"else",
":",
"algo",
",",
"digest",
"=",
"None",
",",
"None",
"origpath",
"=",
"path",
"if",
"path",
"and",
"path",
"[",
"-",
"1",
"]",
"==",
"'/'",
":",
"# pragma: no cover",
"path",
"=",
"path",
"[",
":",
"-",
"1",
"]",
"if",
"path",
".",
"endswith",
"(",
"'.whl'",
")",
":",
"try",
":",
"wheel",
"=",
"Wheel",
"(",
"path",
")",
"if",
"not",
"is_compatible",
"(",
"wheel",
",",
"self",
".",
"wheel_tags",
")",
":",
"logger",
".",
"debug",
"(",
"'Wheel not compatible: %s'",
",",
"path",
")",
"else",
":",
"if",
"project_name",
"is",
"None",
":",
"include",
"=",
"True",
"else",
":",
"include",
"=",
"same_project",
"(",
"wheel",
".",
"name",
",",
"project_name",
")",
"if",
"include",
":",
"result",
"=",
"{",
"'name'",
":",
"wheel",
".",
"name",
",",
"'version'",
":",
"wheel",
".",
"version",
",",
"'filename'",
":",
"wheel",
".",
"filename",
",",
"'url'",
":",
"urlunparse",
"(",
"(",
"scheme",
",",
"netloc",
",",
"origpath",
",",
"params",
",",
"query",
",",
"''",
")",
")",
",",
"'python-version'",
":",
"', '",
".",
"join",
"(",
"[",
"'.'",
".",
"join",
"(",
"list",
"(",
"v",
"[",
"2",
":",
"]",
")",
")",
"for",
"v",
"in",
"wheel",
".",
"pyver",
"]",
")",
",",
"}",
"except",
"Exception",
"as",
"e",
":",
"# pragma: no cover",
"logger",
".",
"warning",
"(",
"'invalid path for wheel: %s'",
",",
"path",
")",
"elif",
"not",
"path",
".",
"endswith",
"(",
"self",
".",
"downloadable_extensions",
")",
":",
"# pragma: no cover",
"logger",
".",
"debug",
"(",
"'Not downloadable: %s'",
",",
"path",
")",
"else",
":",
"# downloadable extension",
"path",
"=",
"filename",
"=",
"posixpath",
".",
"basename",
"(",
"path",
")",
"for",
"ext",
"in",
"self",
".",
"downloadable_extensions",
":",
"if",
"path",
".",
"endswith",
"(",
"ext",
")",
":",
"path",
"=",
"path",
"[",
":",
"-",
"len",
"(",
"ext",
")",
"]",
"t",
"=",
"self",
".",
"split_filename",
"(",
"path",
",",
"project_name",
")",
"if",
"not",
"t",
":",
"# pragma: no cover",
"logger",
".",
"debug",
"(",
"'No match for project/version: %s'",
",",
"path",
")",
"else",
":",
"name",
",",
"version",
",",
"pyver",
"=",
"t",
"if",
"not",
"project_name",
"or",
"same_project",
"(",
"project_name",
",",
"name",
")",
":",
"result",
"=",
"{",
"'name'",
":",
"name",
",",
"'version'",
":",
"version",
",",
"'filename'",
":",
"filename",
",",
"'url'",
":",
"urlunparse",
"(",
"(",
"scheme",
",",
"netloc",
",",
"origpath",
",",
"params",
",",
"query",
",",
"''",
")",
")",
",",
"#'packagetype': 'sdist',",
"}",
"if",
"pyver",
":",
"# pragma: no cover",
"result",
"[",
"'python-version'",
"]",
"=",
"pyver",
"break",
"if",
"result",
"and",
"algo",
":",
"result",
"[",
"'%s_digest'",
"%",
"algo",
"]",
"=",
"digest",
"return",
"result"
] | [
230,
4
] | [
302,
21
] | python | en | ['en', 'error', 'th'] | False |
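To get a feel for what the method above produces, a hedged sketch follows; the wheel URL is invented and the expected key set simply mirrors the dictionary literals built in the code above:

```python
# The URL is fabricated for illustration; the call itself needs no network access.
from distlib.locators import default_locator as loc

url = "https://files.example.invalid/packages/foo-1.2.0-py3-none-any.whl"
info = loc.convert_url_to_download_info(url, "foo")
# If the wheel tags are compatible with the running interpreter, info holds the
# 'name', 'version', 'filename', 'url' (and 'python-version') keys seen above;
# otherwise it is None.
print(info)
```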
Locator._get_digest | (self, info) |
Get a digest from a dictionary by looking at a "digests" dictionary
or keys of the form 'algo_digest'.
Returns a 2-tuple (algo, digest) if found, else None. Currently
looks only for SHA256, then MD5.
|
Get a digest from a dictionary by looking at a "digests" dictionary
or keys of the form 'algo_digest'. | def _get_digest(self, info):
"""
Get a digest from a dictionary by looking at a "digests" dictionary
or keys of the form 'algo_digest'.
Returns a 2-tuple (algo, digest) if found, else None. Currently
looks only for SHA256, then MD5.
"""
result = None
if 'digests' in info:
digests = info['digests']
for algo in ('sha256', 'md5'):
if algo in digests:
result = (algo, digests[algo])
break
if not result:
for algo in ('sha256', 'md5'):
key = '%s_digest' % algo
if key in info:
result = (algo, info[key])
break
return result | [
"def",
"_get_digest",
"(",
"self",
",",
"info",
")",
":",
"result",
"=",
"None",
"if",
"'digests'",
"in",
"info",
":",
"digests",
"=",
"info",
"[",
"'digests'",
"]",
"for",
"algo",
"in",
"(",
"'sha256'",
",",
"'md5'",
")",
":",
"if",
"algo",
"in",
"digests",
":",
"result",
"=",
"(",
"algo",
",",
"digests",
"[",
"algo",
"]",
")",
"break",
"if",
"not",
"result",
":",
"for",
"algo",
"in",
"(",
"'sha256'",
",",
"'md5'",
")",
":",
"key",
"=",
"'%s_digest'",
"%",
"algo",
"if",
"key",
"in",
"info",
":",
"result",
"=",
"(",
"algo",
",",
"info",
"[",
"key",
"]",
")",
"break",
"return",
"result"
] | [
304,
4
] | [
325,
21
] | python | en | ['en', 'error', 'th'] | False |
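A short illustration of the lookup order (the nested "digests" mapping first, then flat '<algo>_digest' keys, preferring sha256 over md5 in both cases); the dictionaries are invented but shaped like PyPI metadata:

```python
# Invented metadata dictionaries, shaped like entries from PyPI's JSON API.
from distlib.locators import default_locator as loc

nested = {"digests": {"md5": "abc...", "sha256": "def..."}}
flat = {"md5_digest": "abc..."}

print(loc._get_digest(nested))  # ('sha256', 'def...') -- sha256 preferred
print(loc._get_digest(flat))    # ('md5', 'abc...')
print(loc._get_digest({}))      # None
```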
Locator._update_version_data | (self, result, info) |
Update a result dictionary (the final result from _get_project) with a
dictionary for a specific version, which typically holds information
gleaned from a filename or URL for an archive for the distribution.
|
Update a result dictionary (the final result from _get_project) with a
dictionary for a specific version, which typically holds information
gleaned from a filename or URL for an archive for the distribution.
| def _update_version_data(self, result, info):
"""
Update a result dictionary (the final result from _get_project) with a
dictionary for a specific version, which typically holds information
gleaned from a filename or URL for an archive for the distribution.
"""
name = info.pop('name')
version = info.pop('version')
if version in result:
dist = result[version]
md = dist.metadata
else:
dist = make_dist(name, version, scheme=self.scheme)
md = dist.metadata
dist.digest = digest = self._get_digest(info)
url = info['url']
result['digests'][url] = digest
if md.source_url != info['url']:
md.source_url = self.prefer_url(md.source_url, url)
result['urls'].setdefault(version, set()).add(url)
dist.locator = self
result[version] = dist | [
"def",
"_update_version_data",
"(",
"self",
",",
"result",
",",
"info",
")",
":",
"name",
"=",
"info",
".",
"pop",
"(",
"'name'",
")",
"version",
"=",
"info",
".",
"pop",
"(",
"'version'",
")",
"if",
"version",
"in",
"result",
":",
"dist",
"=",
"result",
"[",
"version",
"]",
"md",
"=",
"dist",
".",
"metadata",
"else",
":",
"dist",
"=",
"make_dist",
"(",
"name",
",",
"version",
",",
"scheme",
"=",
"self",
".",
"scheme",
")",
"md",
"=",
"dist",
".",
"metadata",
"dist",
".",
"digest",
"=",
"digest",
"=",
"self",
".",
"_get_digest",
"(",
"info",
")",
"url",
"=",
"info",
"[",
"'url'",
"]",
"result",
"[",
"'digests'",
"]",
"[",
"url",
"]",
"=",
"digest",
"if",
"md",
".",
"source_url",
"!=",
"info",
"[",
"'url'",
"]",
":",
"md",
".",
"source_url",
"=",
"self",
".",
"prefer_url",
"(",
"md",
".",
"source_url",
",",
"url",
")",
"result",
"[",
"'urls'",
"]",
".",
"setdefault",
"(",
"version",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"url",
")",
"dist",
".",
"locator",
"=",
"self",
"result",
"[",
"version",
"]",
"=",
"dist"
] | [
327,
4
] | [
348,
30
] | python | en | ['en', 'error', 'th'] | False |
Locator.locate | (self, requirement, prereleases=False) |
Find the most recent distribution which matches the given
requirement.
:param requirement: A requirement of the form 'foo (1.0)' or perhaps
'foo (>= 1.0, < 2.0, != 1.3)'
:param prereleases: If ``True``, allow pre-release versions
to be located. Otherwise, pre-release versions
are not returned.
:return: A :class:`Distribution` instance, or ``None`` if no such
distribution could be located.
|
Find the most recent distribution which matches the given
requirement. | def locate(self, requirement, prereleases=False):
"""
Find the most recent distribution which matches the given
requirement.
:param requirement: A requirement of the form 'foo (1.0)' or perhaps
'foo (>= 1.0, < 2.0, != 1.3)'
:param prereleases: If ``True``, allow pre-release versions
to be located. Otherwise, pre-release versions
are not returned.
:return: A :class:`Distribution` instance, or ``None`` if no such
distribution could be located.
"""
result = None
r = parse_requirement(requirement)
if r is None: # pragma: no cover
raise DistlibException('Not a valid requirement: %r' % requirement)
scheme = get_scheme(self.scheme)
self.matcher = matcher = scheme.matcher(r.requirement)
logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
versions = self.get_project(r.name)
if len(versions) > 2: # urls and digests keys are present
# sometimes, versions are invalid
slist = []
vcls = matcher.version_class
for k in versions:
if k in ('urls', 'digests'):
continue
try:
if not matcher.match(k):
pass # logger.debug('%s did not match %r', matcher, k)
else:
if prereleases or not vcls(k).is_prerelease:
slist.append(k)
# else:
# logger.debug('skipping pre-release '
# 'version %s of %s', k, matcher.name)
except Exception: # pragma: no cover
logger.warning('error matching %s with %r', matcher, k)
pass # slist.append(k)
if len(slist) > 1:
slist = sorted(slist, key=scheme.key)
if slist:
logger.debug('sorted list: %s', slist)
version = slist[-1]
result = versions[version]
if result:
if r.extras:
result.extras = r.extras
result.download_urls = versions.get('urls', {}).get(version, set())
d = {}
sd = versions.get('digests', {})
for url in result.download_urls:
if url in sd: # pragma: no cover
d[url] = sd[url]
result.digests = d
self.matcher = None
return result | [
"def",
"locate",
"(",
"self",
",",
"requirement",
",",
"prereleases",
"=",
"False",
")",
":",
"result",
"=",
"None",
"r",
"=",
"parse_requirement",
"(",
"requirement",
")",
"if",
"r",
"is",
"None",
":",
"# pragma: no cover",
"raise",
"DistlibException",
"(",
"'Not a valid requirement: %r'",
"%",
"requirement",
")",
"scheme",
"=",
"get_scheme",
"(",
"self",
".",
"scheme",
")",
"self",
".",
"matcher",
"=",
"matcher",
"=",
"scheme",
".",
"matcher",
"(",
"r",
".",
"requirement",
")",
"logger",
".",
"debug",
"(",
"'matcher: %s (%s)'",
",",
"matcher",
",",
"type",
"(",
"matcher",
")",
".",
"__name__",
")",
"versions",
"=",
"self",
".",
"get_project",
"(",
"r",
".",
"name",
")",
"if",
"len",
"(",
"versions",
")",
">",
"2",
":",
"# urls and digests keys are present",
"# sometimes, versions are invalid",
"slist",
"=",
"[",
"]",
"vcls",
"=",
"matcher",
".",
"version_class",
"for",
"k",
"in",
"versions",
":",
"if",
"k",
"in",
"(",
"'urls'",
",",
"'digests'",
")",
":",
"continue",
"try",
":",
"if",
"not",
"matcher",
".",
"match",
"(",
"k",
")",
":",
"pass",
"# logger.debug('%s did not match %r', matcher, k)",
"else",
":",
"if",
"prereleases",
"or",
"not",
"vcls",
"(",
"k",
")",
".",
"is_prerelease",
":",
"slist",
".",
"append",
"(",
"k",
")",
"# else:",
"# logger.debug('skipping pre-release '",
"# 'version %s of %s', k, matcher.name)",
"except",
"Exception",
":",
"# pragma: no cover",
"logger",
".",
"warning",
"(",
"'error matching %s with %r'",
",",
"matcher",
",",
"k",
")",
"pass",
"# slist.append(k)",
"if",
"len",
"(",
"slist",
")",
">",
"1",
":",
"slist",
"=",
"sorted",
"(",
"slist",
",",
"key",
"=",
"scheme",
".",
"key",
")",
"if",
"slist",
":",
"logger",
".",
"debug",
"(",
"'sorted list: %s'",
",",
"slist",
")",
"version",
"=",
"slist",
"[",
"-",
"1",
"]",
"result",
"=",
"versions",
"[",
"version",
"]",
"if",
"result",
":",
"if",
"r",
".",
"extras",
":",
"result",
".",
"extras",
"=",
"r",
".",
"extras",
"result",
".",
"download_urls",
"=",
"versions",
".",
"get",
"(",
"'urls'",
",",
"{",
"}",
")",
".",
"get",
"(",
"version",
",",
"set",
"(",
")",
")",
"d",
"=",
"{",
"}",
"sd",
"=",
"versions",
".",
"get",
"(",
"'digests'",
",",
"{",
"}",
")",
"for",
"url",
"in",
"result",
".",
"download_urls",
":",
"if",
"url",
"in",
"sd",
":",
"# pragma: no cover",
"d",
"[",
"url",
"]",
"=",
"sd",
"[",
"url",
"]",
"result",
".",
"digests",
"=",
"d",
"self",
".",
"matcher",
"=",
"None",
"return",
"result"
] | [
350,
4
] | [
407,
21
] | python | en | ['en', 'error', 'th'] | False |
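A usage sketch for `locate`; it assumes network access and uses distlib's module-level `default_locator`, with a requirement string in one of the forms mentioned in the docstring:

```python
# Needs network access; the requirement string is only an example.
from distlib.locators import default_locator

dist = default_locator.locate("requests (>= 2.0, < 3.0)")
if dist is not None:
    print(dist.name, dist.version)
    print(sorted(dist.download_urls))  # filled in by locate() above
    print(dist.digests)                # url -> (algo, digest) where known
```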
PyPIRPCLocator.__init__ | (self, url, **kwargs) |
Initialise an instance.
:param url: The URL to use for XML-RPC.
:param kwargs: Passed to the superclass constructor.
|
Initialise an instance. | def __init__(self, url, **kwargs):
"""
Initialise an instance.
:param url: The URL to use for XML-RPC.
:param kwargs: Passed to the superclass constructor.
"""
super(PyPIRPCLocator, self).__init__(**kwargs)
self.base_url = url
self.client = ServerProxy(url, timeout=3.0) | [
"def",
"__init__",
"(",
"self",
",",
"url",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"PyPIRPCLocator",
",",
"self",
")",
".",
"__init__",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"base_url",
"=",
"url",
"self",
".",
"client",
"=",
"ServerProxy",
"(",
"url",
",",
"timeout",
"=",
"3.0",
")"
] | [
415,
4
] | [
424,
51
] | python | en | ['en', 'error', 'th'] | False |
PyPIRPCLocator.get_distribution_names | (self) |
Return all the distribution names known to this locator.
|
Return all the distribution names known to this locator.
| def get_distribution_names(self):
"""
Return all the distribution names known to this locator.
"""
return set(self.client.list_packages()) | [
"def",
"get_distribution_names",
"(",
"self",
")",
":",
"return",
"set",
"(",
"self",
".",
"client",
".",
"list_packages",
"(",
")",
")"
] | [
426,
4
] | [
430,
47
] | python | en | ['en', 'error', 'th'] | False |
PyPIJSONLocator.get_distribution_names | (self) |
Return all the distribution names known to this locator.
|
Return all the distribution names known to this locator.
| def get_distribution_names(self):
"""
Return all the distribution names known to this locator.
"""
raise NotImplementedError('Not available from this locator') | [
"def",
"get_distribution_names",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Not available from this locator'",
")"
] | [
467,
4
] | [
471,
68
] | python | en | ['en', 'error', 'th'] | False |
Page.__init__ | (self, data, url) |
Initialise an instance with the Unicode page contents and the URL they
came from.
|
Initialise an instance with the Unicode page contents and the URL they
came from.
| def __init__(self, data, url):
"""
Initialise an instance with the Unicode page contents and the URL they
came from.
"""
self.data = data
self.base_url = self.url = url
m = self._base.search(self.data)
if m:
self.base_url = m.group(1) | [
"def",
"__init__",
"(",
"self",
",",
"data",
",",
"url",
")",
":",
"self",
".",
"data",
"=",
"data",
"self",
".",
"base_url",
"=",
"self",
".",
"url",
"=",
"url",
"m",
"=",
"self",
".",
"_base",
".",
"search",
"(",
"self",
".",
"data",
")",
"if",
"m",
":",
"self",
".",
"base_url",
"=",
"m",
".",
"group",
"(",
"1",
")"
] | [
543,
4
] | [
552,
38
] | python | en | ['en', 'error', 'th'] | False |
Page.links | (self) |
Return the URLs of all the links on a page together with information
about their "rel" attribute, for determining which ones to treat as
downloads and which ones to queue for further scraping.
|
Return the URLs of all the links on a page together with information
about their "rel" attribute, for determining which ones to treat as
downloads and which ones to queue for further scraping.
| def links(self):
"""
Return the URLs of all the links on a page together with information
about their "rel" attribute, for determining which ones to treat as
downloads and which ones to queue for further scraping.
"""
def clean(url):
"Tidy up an URL."
scheme, netloc, path, params, query, frag = urlparse(url)
return urlunparse((scheme, netloc, quote(path),
params, query, frag))
result = set()
for match in self._href.finditer(self.data):
d = match.groupdict('')
rel = (d['rel1'] or d['rel2'] or d['rel3'] or
d['rel4'] or d['rel5'] or d['rel6'])
url = d['url1'] or d['url2'] or d['url3']
url = urljoin(self.base_url, url)
url = unescape(url)
url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url)
result.add((url, rel))
# We sort the result, hoping to bring the most recent versions
# to the front
result = sorted(result, key=lambda t: t[0], reverse=True)
return result | [
"def",
"links",
"(",
"self",
")",
":",
"def",
"clean",
"(",
"url",
")",
":",
"\"Tidy up an URL.\"",
"scheme",
",",
"netloc",
",",
"path",
",",
"params",
",",
"query",
",",
"frag",
"=",
"urlparse",
"(",
"url",
")",
"return",
"urlunparse",
"(",
"(",
"scheme",
",",
"netloc",
",",
"quote",
"(",
"path",
")",
",",
"params",
",",
"query",
",",
"frag",
")",
")",
"result",
"=",
"set",
"(",
")",
"for",
"match",
"in",
"self",
".",
"_href",
".",
"finditer",
"(",
"self",
".",
"data",
")",
":",
"d",
"=",
"match",
".",
"groupdict",
"(",
"''",
")",
"rel",
"=",
"(",
"d",
"[",
"'rel1'",
"]",
"or",
"d",
"[",
"'rel2'",
"]",
"or",
"d",
"[",
"'rel3'",
"]",
"or",
"d",
"[",
"'rel4'",
"]",
"or",
"d",
"[",
"'rel5'",
"]",
"or",
"d",
"[",
"'rel6'",
"]",
")",
"url",
"=",
"d",
"[",
"'url1'",
"]",
"or",
"d",
"[",
"'url2'",
"]",
"or",
"d",
"[",
"'url3'",
"]",
"url",
"=",
"urljoin",
"(",
"self",
".",
"base_url",
",",
"url",
")",
"url",
"=",
"unescape",
"(",
"url",
")",
"url",
"=",
"self",
".",
"_clean_re",
".",
"sub",
"(",
"lambda",
"m",
":",
"'%%%2x'",
"%",
"ord",
"(",
"m",
".",
"group",
"(",
"0",
")",
")",
",",
"url",
")",
"result",
".",
"add",
"(",
"(",
"url",
",",
"rel",
")",
")",
"# We sort the result, hoping to bring the most recent versions",
"# to the front",
"result",
"=",
"sorted",
"(",
"result",
",",
"key",
"=",
"lambda",
"t",
":",
"t",
"[",
"0",
"]",
",",
"reverse",
"=",
"True",
")",
"return",
"result"
] | [
557,
4
] | [
582,
21
] | python | en | ['en', 'error', 'th'] | False |
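`Page` is a regex-driven link extractor, so a tiny inline HTML snippet is enough to see the `(url, rel)` tuples it yields; the HTML and URLs below are made up:

```python
# Made-up HTML and URLs; Page is the class documented above.
from distlib.locators import Page

html = ('<a href="foo-1.0.tar.gz" rel="download">foo 1.0</a>'
        '<a href="https://example.invalid/foo" rel="homepage">home</a>')
page = Page(html, "https://example.invalid/simple/foo/")
for url, rel in page.links:
    print(rel, url)  # URLs come back absolute, joined against the page's base URL
```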
SimpleScrapingLocator.__init__ | (self, url, timeout=None, num_workers=10, **kwargs) |
Initialise an instance.
:param url: The root URL to use for scraping.
:param timeout: The timeout, in seconds, to be applied to requests.
This defaults to ``None`` (no timeout specified).
:param num_workers: The number of worker threads you want to do I/O,
This defaults to 10.
:param kwargs: Passed to the superclass.
|
Initialise an instance.
:param url: The root URL to use for scraping.
:param timeout: The timeout, in seconds, to be applied to requests.
This defaults to ``None`` (no timeout specified).
:param num_workers: The number of worker threads you want to do I/O,
This defaults to 10.
:param kwargs: Passed to the superclass.
| def __init__(self, url, timeout=None, num_workers=10, **kwargs):
"""
Initialise an instance.
:param url: The root URL to use for scraping.
:param timeout: The timeout, in seconds, to be applied to requests.
This defaults to ``None`` (no timeout specified).
:param num_workers: The number of worker threads you want to do I/O,
This defaults to 10.
:param kwargs: Passed to the superclass.
"""
super(SimpleScrapingLocator, self).__init__(**kwargs)
self.base_url = ensure_slash(url)
self.timeout = timeout
self._page_cache = {}
self._seen = set()
self._to_fetch = queue.Queue()
self._bad_hosts = set()
self.skip_externals = False
self.num_workers = num_workers
self._lock = threading.RLock()
# See issue #45: we need to be resilient when the locator is used
# in a thread, e.g. with concurrent.futures. We can't use self._lock
# as it is for coordinating our internal threads - the ones created
# in _prepare_threads.
self._gplock = threading.RLock()
self.platform_check = False | [
"def",
"__init__",
"(",
"self",
",",
"url",
",",
"timeout",
"=",
"None",
",",
"num_workers",
"=",
"10",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"SimpleScrapingLocator",
",",
"self",
")",
".",
"__init__",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"base_url",
"=",
"ensure_slash",
"(",
"url",
")",
"self",
".",
"timeout",
"=",
"timeout",
"self",
".",
"_page_cache",
"=",
"{",
"}",
"self",
".",
"_seen",
"=",
"set",
"(",
")",
"self",
".",
"_to_fetch",
"=",
"queue",
".",
"Queue",
"(",
")",
"self",
".",
"_bad_hosts",
"=",
"set",
"(",
")",
"self",
".",
"skip_externals",
"=",
"False",
"self",
".",
"num_workers",
"=",
"num_workers",
"self",
".",
"_lock",
"=",
"threading",
".",
"RLock",
"(",
")",
"# See issue #45: we need to be resilient when the locator is used",
"# in a thread, e.g. with concurrent.futures. We can't use self._lock",
"# as it is for coordinating our internal threads - the ones created",
"# in _prepare_threads.",
"self",
".",
"_gplock",
"=",
"threading",
".",
"RLock",
"(",
")",
"self",
".",
"platform_check",
"=",
"False"
] | [
599,
4
] | [
624,
35
] | python | en | ['en', 'error', 'th'] | False |
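Pointing the scraper at PyPI's simple index is the typical use; this sketch assumes network access and keeps the defaults documented above:

```python
# Needs network access; the URL is PyPI's simple index.
from distlib.locators import SimpleScrapingLocator

locator = SimpleScrapingLocator("https://pypi.org/simple/", timeout=10.0)
versions = locator.get_project("pip")  # version -> Distribution, plus 'urls'/'digests' keys
print(sorted(k for k in versions if k not in ("urls", "digests"))[-5:])  # string sort, illustration only
```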
SimpleScrapingLocator._prepare_threads | (self) |
Threads are created only when get_project is called, and terminate
before it returns. They are there primarily to parallelise I/O (i.e.
fetching web pages).
|
Threads are created only when get_project is called, and terminate
before it returns. They are there primarily to parallelise I/O (i.e.
fetching web pages).
| def _prepare_threads(self):
"""
Threads are created only when get_project is called, and terminate
before it returns. They are there primarily to parallelise I/O (i.e.
fetching web pages).
"""
self._threads = []
for i in range(self.num_workers):
t = threading.Thread(target=self._fetch)
t.setDaemon(True)
t.start()
self._threads.append(t) | [
"def",
"_prepare_threads",
"(",
"self",
")",
":",
"self",
".",
"_threads",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"num_workers",
")",
":",
"t",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"self",
".",
"_fetch",
")",
"t",
".",
"setDaemon",
"(",
"True",
")",
"t",
".",
"start",
"(",
")",
"self",
".",
"_threads",
".",
"append",
"(",
"t",
")"
] | [
626,
4
] | [
637,
35
] | python | en | ['en', 'error', 'th'] | False |
SimpleScrapingLocator._wait_threads | (self) |
Tell all the threads to terminate (by sending a sentinel value) and
wait for them to do so.
|
Tell all the threads to terminate (by sending a sentinel value) and
wait for them to do so.
| def _wait_threads(self):
"""
Tell all the threads to terminate (by sending a sentinel value) and
wait for them to do so.
"""
# Note that you need two loops, since you can't say which
# thread will get each sentinel
for t in self._threads:
self._to_fetch.put(None) # sentinel
for t in self._threads:
t.join()
self._threads = [] | [
"def",
"_wait_threads",
"(",
"self",
")",
":",
"# Note that you need two loops, since you can't say which",
"# thread will get each sentinel",
"for",
"t",
"in",
"self",
".",
"_threads",
":",
"self",
".",
"_to_fetch",
".",
"put",
"(",
"None",
")",
"# sentinel",
"for",
"t",
"in",
"self",
".",
"_threads",
":",
"t",
".",
"join",
"(",
")",
"self",
".",
"_threads",
"=",
"[",
"]"
] | [
639,
4
] | [
650,
26
] | python | en | ['en', 'error', 'th'] | False |
SimpleScrapingLocator._is_platform_dependent | (self, url) |
Does an URL refer to a platform-specific download?
|
Does an URL refer to a platform-specific download?
| def _is_platform_dependent(self, url):
"""
Does an URL refer to a platform-specific download?
"""
return self.platform_dependent.search(url) | [
"def",
"_is_platform_dependent",
"(",
"self",
",",
"url",
")",
":",
"return",
"self",
".",
"platform_dependent",
".",
"search",
"(",
"url",
")"
] | [
673,
4
] | [
677,
50
] | python | en | ['en', 'error', 'th'] | False |
SimpleScrapingLocator._process_download | (self, url) |
See if an URL is a suitable download for a project.
If it is, register information in the result dictionary (for
_get_project) about the specific version it's for.
Note that the return value isn't actually used other than as a boolean
value.
|
See if an URL is a suitable download for a project. | def _process_download(self, url):
"""
See if an URL is a suitable download for a project.
If it is, register information in the result dictionary (for
_get_project) about the specific version it's for.
Note that the return value isn't actually used other than as a boolean
value.
"""
if self.platform_check and self._is_platform_dependent(url):
info = None
else:
info = self.convert_url_to_download_info(url, self.project_name)
logger.debug('process_download: %s -> %s', url, info)
if info:
with self._lock: # needed because self.result is shared
self._update_version_data(self.result, info)
return info | [
"def",
"_process_download",
"(",
"self",
",",
"url",
")",
":",
"if",
"self",
".",
"platform_check",
"and",
"self",
".",
"_is_platform_dependent",
"(",
"url",
")",
":",
"info",
"=",
"None",
"else",
":",
"info",
"=",
"self",
".",
"convert_url_to_download_info",
"(",
"url",
",",
"self",
".",
"project_name",
")",
"logger",
".",
"debug",
"(",
"'process_download: %s -> %s'",
",",
"url",
",",
"info",
")",
"if",
"info",
":",
"with",
"self",
".",
"_lock",
":",
"# needed because self.result is shared",
"self",
".",
"_update_version_data",
"(",
"self",
".",
"result",
",",
"info",
")",
"return",
"info"
] | [
679,
4
] | [
697,
19
] | python | en | ['en', 'error', 'th'] | False |
SimpleScrapingLocator._should_queue | (self, link, referrer, rel) |
Determine whether a link URL from a referring page and with a
particular "rel" attribute should be queued for scraping.
|
Determine whether a link URL from a referring page and with a
particular "rel" attribute should be queued for scraping.
| def _should_queue(self, link, referrer, rel):
"""
Determine whether a link URL from a referring page and with a
particular "rel" attribute should be queued for scraping.
"""
scheme, netloc, path, _, _, _ = urlparse(link)
if path.endswith(self.source_extensions + self.binary_extensions +
self.excluded_extensions):
result = False
elif self.skip_externals and not link.startswith(self.base_url):
result = False
elif not referrer.startswith(self.base_url):
result = False
elif rel not in ('homepage', 'download'):
result = False
elif scheme not in ('http', 'https', 'ftp'):
result = False
elif self._is_platform_dependent(link):
result = False
else:
host = netloc.split(':', 1)[0]
if host.lower() == 'localhost':
result = False
else:
result = True
logger.debug('should_queue: %s (%s) from %s -> %s', link, rel,
referrer, result)
return result | [
"def",
"_should_queue",
"(",
"self",
",",
"link",
",",
"referrer",
",",
"rel",
")",
":",
"scheme",
",",
"netloc",
",",
"path",
",",
"_",
",",
"_",
",",
"_",
"=",
"urlparse",
"(",
"link",
")",
"if",
"path",
".",
"endswith",
"(",
"self",
".",
"source_extensions",
"+",
"self",
".",
"binary_extensions",
"+",
"self",
".",
"excluded_extensions",
")",
":",
"result",
"=",
"False",
"elif",
"self",
".",
"skip_externals",
"and",
"not",
"link",
".",
"startswith",
"(",
"self",
".",
"base_url",
")",
":",
"result",
"=",
"False",
"elif",
"not",
"referrer",
".",
"startswith",
"(",
"self",
".",
"base_url",
")",
":",
"result",
"=",
"False",
"elif",
"rel",
"not",
"in",
"(",
"'homepage'",
",",
"'download'",
")",
":",
"result",
"=",
"False",
"elif",
"scheme",
"not",
"in",
"(",
"'http'",
",",
"'https'",
",",
"'ftp'",
")",
":",
"result",
"=",
"False",
"elif",
"self",
".",
"_is_platform_dependent",
"(",
"link",
")",
":",
"result",
"=",
"False",
"else",
":",
"host",
"=",
"netloc",
".",
"split",
"(",
"':'",
",",
"1",
")",
"[",
"0",
"]",
"if",
"host",
".",
"lower",
"(",
")",
"==",
"'localhost'",
":",
"result",
"=",
"False",
"else",
":",
"result",
"=",
"True",
"logger",
".",
"debug",
"(",
"'should_queue: %s (%s) from %s -> %s'",
",",
"link",
",",
"rel",
",",
"referrer",
",",
"result",
")",
"return",
"result"
] | [
699,
4
] | [
726,
21
] | python | en | ['en', 'error', 'th'] | False |
SimpleScrapingLocator._fetch | (self) |
Get a URL to fetch from the work queue, get the HTML page, examine its
links for download candidates and candidates for further scraping.
This is a handy method to run in a thread.
|
Get a URL to fetch from the work queue, get the HTML page, examine its
links for download candidates and candidates for further scraping. | def _fetch(self):
"""
Get a URL to fetch from the work queue, get the HTML page, examine its
links for download candidates and candidates for further scraping.
This is a handy method to run in a thread.
"""
while True:
url = self._to_fetch.get()
try:
if url:
page = self.get_page(url)
if page is None: # e.g. after an error
continue
for link, rel in page.links:
if link not in self._seen:
try:
self._seen.add(link)
if (not self._process_download(link) and
self._should_queue(link, url, rel)):
logger.debug('Queueing %s from %s', link, url)
self._to_fetch.put(link)
except MetadataInvalidError: # e.g. invalid versions
pass
except Exception as e: # pragma: no cover
self.errors.put(text_type(e))
finally:
# always do this, to avoid hangs :-)
self._to_fetch.task_done()
if not url:
#logger.debug('Sentinel seen, quitting.')
break | [
"def",
"_fetch",
"(",
"self",
")",
":",
"while",
"True",
":",
"url",
"=",
"self",
".",
"_to_fetch",
".",
"get",
"(",
")",
"try",
":",
"if",
"url",
":",
"page",
"=",
"self",
".",
"get_page",
"(",
"url",
")",
"if",
"page",
"is",
"None",
":",
"# e.g. after an error",
"continue",
"for",
"link",
",",
"rel",
"in",
"page",
".",
"links",
":",
"if",
"link",
"not",
"in",
"self",
".",
"_seen",
":",
"try",
":",
"self",
".",
"_seen",
".",
"add",
"(",
"link",
")",
"if",
"(",
"not",
"self",
".",
"_process_download",
"(",
"link",
")",
"and",
"self",
".",
"_should_queue",
"(",
"link",
",",
"url",
",",
"rel",
")",
")",
":",
"logger",
".",
"debug",
"(",
"'Queueing %s from %s'",
",",
"link",
",",
"url",
")",
"self",
".",
"_to_fetch",
".",
"put",
"(",
"link",
")",
"except",
"MetadataInvalidError",
":",
"# e.g. invalid versions",
"pass",
"except",
"Exception",
"as",
"e",
":",
"# pragma: no cover",
"self",
".",
"errors",
".",
"put",
"(",
"text_type",
"(",
"e",
")",
")",
"finally",
":",
"# always do this, to avoid hangs :-)",
"self",
".",
"_to_fetch",
".",
"task_done",
"(",
")",
"if",
"not",
"url",
":",
"#logger.debug('Sentinel seen, quitting.')",
"break"
] | [
728,
4
] | [
759,
21
] | python | en | ['en', 'error', 'th'] | False |
SimpleScrapingLocator.get_page | (self, url) |
Get the HTML for an URL, possibly from an in-memory cache.
XXX TODO Note: this cache is never actually cleared. It's assumed that
the data won't get stale over the lifetime of a locator instance (not
necessarily true for the default_locator).
|
Get the HTML for an URL, possibly from an in-memory cache. | def get_page(self, url):
"""
Get the HTML for an URL, possibly from an in-memory cache.
XXX TODO Note: this cache is never actually cleared. It's assumed that
the data won't get stale over the lifetime of a locator instance (not
necessarily true for the default_locator).
"""
# http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api
scheme, netloc, path, _, _, _ = urlparse(url)
if scheme == 'file' and os.path.isdir(url2pathname(path)):
url = urljoin(ensure_slash(url), 'index.html')
if url in self._page_cache:
result = self._page_cache[url]
logger.debug('Returning %s from cache: %s', url, result)
else:
host = netloc.split(':', 1)[0]
result = None
if host in self._bad_hosts:
logger.debug('Skipping %s due to bad host %s', url, host)
else:
req = Request(url, headers={'Accept-encoding': 'identity'})
try:
logger.debug('Fetching %s', url)
resp = self.opener.open(req, timeout=self.timeout)
logger.debug('Fetched %s', url)
headers = resp.info()
content_type = headers.get('Content-Type', '')
if HTML_CONTENT_TYPE.match(content_type):
final_url = resp.geturl()
data = resp.read()
encoding = headers.get('Content-Encoding')
if encoding:
decoder = self.decoders[encoding] # fail if not found
data = decoder(data)
encoding = 'utf-8'
m = CHARSET.search(content_type)
if m:
encoding = m.group(1)
try:
data = data.decode(encoding)
except UnicodeError: # pragma: no cover
data = data.decode('latin-1') # fallback
result = Page(data, final_url)
self._page_cache[final_url] = result
except HTTPError as e:
if e.code != 404:
logger.exception('Fetch failed: %s: %s', url, e)
except URLError as e: # pragma: no cover
logger.exception('Fetch failed: %s: %s', url, e)
with self._lock:
self._bad_hosts.add(host)
except Exception as e: # pragma: no cover
logger.exception('Fetch failed: %s: %s', url, e)
finally:
self._page_cache[url] = result # even if None (failure)
return result | [
"def",
"get_page",
"(",
"self",
",",
"url",
")",
":",
"# http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api",
"scheme",
",",
"netloc",
",",
"path",
",",
"_",
",",
"_",
",",
"_",
"=",
"urlparse",
"(",
"url",
")",
"if",
"scheme",
"==",
"'file'",
"and",
"os",
".",
"path",
".",
"isdir",
"(",
"url2pathname",
"(",
"path",
")",
")",
":",
"url",
"=",
"urljoin",
"(",
"ensure_slash",
"(",
"url",
")",
",",
"'index.html'",
")",
"if",
"url",
"in",
"self",
".",
"_page_cache",
":",
"result",
"=",
"self",
".",
"_page_cache",
"[",
"url",
"]",
"logger",
".",
"debug",
"(",
"'Returning %s from cache: %s'",
",",
"url",
",",
"result",
")",
"else",
":",
"host",
"=",
"netloc",
".",
"split",
"(",
"':'",
",",
"1",
")",
"[",
"0",
"]",
"result",
"=",
"None",
"if",
"host",
"in",
"self",
".",
"_bad_hosts",
":",
"logger",
".",
"debug",
"(",
"'Skipping %s due to bad host %s'",
",",
"url",
",",
"host",
")",
"else",
":",
"req",
"=",
"Request",
"(",
"url",
",",
"headers",
"=",
"{",
"'Accept-encoding'",
":",
"'identity'",
"}",
")",
"try",
":",
"logger",
".",
"debug",
"(",
"'Fetching %s'",
",",
"url",
")",
"resp",
"=",
"self",
".",
"opener",
".",
"open",
"(",
"req",
",",
"timeout",
"=",
"self",
".",
"timeout",
")",
"logger",
".",
"debug",
"(",
"'Fetched %s'",
",",
"url",
")",
"headers",
"=",
"resp",
".",
"info",
"(",
")",
"content_type",
"=",
"headers",
".",
"get",
"(",
"'Content-Type'",
",",
"''",
")",
"if",
"HTML_CONTENT_TYPE",
".",
"match",
"(",
"content_type",
")",
":",
"final_url",
"=",
"resp",
".",
"geturl",
"(",
")",
"data",
"=",
"resp",
".",
"read",
"(",
")",
"encoding",
"=",
"headers",
".",
"get",
"(",
"'Content-Encoding'",
")",
"if",
"encoding",
":",
"decoder",
"=",
"self",
".",
"decoders",
"[",
"encoding",
"]",
"# fail if not found",
"data",
"=",
"decoder",
"(",
"data",
")",
"encoding",
"=",
"'utf-8'",
"m",
"=",
"CHARSET",
".",
"search",
"(",
"content_type",
")",
"if",
"m",
":",
"encoding",
"=",
"m",
".",
"group",
"(",
"1",
")",
"try",
":",
"data",
"=",
"data",
".",
"decode",
"(",
"encoding",
")",
"except",
"UnicodeError",
":",
"# pragma: no cover",
"data",
"=",
"data",
".",
"decode",
"(",
"'latin-1'",
")",
"# fallback",
"result",
"=",
"Page",
"(",
"data",
",",
"final_url",
")",
"self",
".",
"_page_cache",
"[",
"final_url",
"]",
"=",
"result",
"except",
"HTTPError",
"as",
"e",
":",
"if",
"e",
".",
"code",
"!=",
"404",
":",
"logger",
".",
"exception",
"(",
"'Fetch failed: %s: %s'",
",",
"url",
",",
"e",
")",
"except",
"URLError",
"as",
"e",
":",
"# pragma: no cover",
"logger",
".",
"exception",
"(",
"'Fetch failed: %s: %s'",
",",
"url",
",",
"e",
")",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"_bad_hosts",
".",
"add",
"(",
"host",
")",
"except",
"Exception",
"as",
"e",
":",
"# pragma: no cover",
"logger",
".",
"exception",
"(",
"'Fetch failed: %s: %s'",
",",
"url",
",",
"e",
")",
"finally",
":",
"self",
".",
"_page_cache",
"[",
"url",
"]",
"=",
"result",
"# even if None (failure)",
"return",
"result"
] | [
761,
4
] | [
818,
21
] | python | en | ['en', 'error', 'th'] | False |
SimpleScrapingLocator.get_distribution_names | (self) |
Return all the distribution names known to this locator.
|
Return all the distribution names known to this locator.
| def get_distribution_names(self):
"""
Return all the distribution names known to this locator.
"""
result = set()
page = self.get_page(self.base_url)
if not page:
raise DistlibException('Unable to get %s' % self.base_url)
for match in self._distname_re.finditer(page.data):
result.add(match.group(1))
return result | [
"def",
"get_distribution_names",
"(",
"self",
")",
":",
"result",
"=",
"set",
"(",
")",
"page",
"=",
"self",
".",
"get_page",
"(",
"self",
".",
"base_url",
")",
"if",
"not",
"page",
":",
"raise",
"DistlibException",
"(",
"'Unable to get %s'",
"%",
"self",
".",
"base_url",
")",
"for",
"match",
"in",
"self",
".",
"_distname_re",
".",
"finditer",
"(",
"page",
".",
"data",
")",
":",
"result",
".",
"add",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
"return",
"result"
] | [
822,
4
] | [
832,
21
] | python | en | ['en', 'error', 'th'] | False |
DirectoryLocator.__init__ | (self, path, **kwargs) |
Initialise an instance.
:param path: The root of the directory tree to search.
:param kwargs: Passed to the superclass constructor,
except for:
* recursive - if True (the default), subdirectories are
recursed into. If False, only the top-level directory
is searched,
|
Initialise an instance.
:param path: The root of the directory tree to search.
:param kwargs: Passed to the superclass constructor,
except for:
* recursive - if True (the default), subdirectories are
recursed into. If False, only the top-level directory
is searched,
| def __init__(self, path, **kwargs):
"""
Initialise an instance.
:param path: The root of the directory tree to search.
:param kwargs: Passed to the superclass constructor,
except for:
* recursive - if True (the default), subdirectories are
recursed into. If False, only the top-level directory
is searched,
"""
self.recursive = kwargs.pop('recursive', True)
super(DirectoryLocator, self).__init__(**kwargs)
path = os.path.abspath(path)
if not os.path.isdir(path): # pragma: no cover
raise DistlibException('Not a directory: %r' % path)
self.base_dir = path | [
"def",
"__init__",
"(",
"self",
",",
"path",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"recursive",
"=",
"kwargs",
".",
"pop",
"(",
"'recursive'",
",",
"True",
")",
"super",
"(",
"DirectoryLocator",
",",
"self",
")",
".",
"__init__",
"(",
"*",
"*",
"kwargs",
")",
"path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"path",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"# pragma: no cover",
"raise",
"DistlibException",
"(",
"'Not a directory: %r'",
"%",
"path",
")",
"self",
".",
"base_dir",
"=",
"path"
] | [
839,
4
] | [
854,
28
] | python | en | ['en', 'error', 'th'] | False |
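A sketch of using the directory locator over a local folder of archives; the path is hypothetical and must exist, since the constructor raises otherwise:

```python
# Hypothetical path; it should contain .tar.gz/.zip/.whl archives.
from distlib.locators import DirectoryLocator

locator = DirectoryLocator("/srv/local-packages", recursive=False)
print(locator.get_distribution_names())    # names parsed from archive filenames
print(sorted(locator.get_project("foo")))  # versions found, plus 'urls'/'digests' keys
```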
DirectoryLocator.should_include | (self, filename, parent) |
Should a filename be considered as a candidate for a distribution
archive? As well as the filename, the directory which contains it
is provided, though not used by the current implementation.
|
Should a filename be considered as a candidate for a distribution
archive? As well as the filename, the directory which contains it
is provided, though not used by the current implementation.
| def should_include(self, filename, parent):
"""
Should a filename be considered as a candidate for a distribution
archive? As well as the filename, the directory which contains it
is provided, though not used by the current implementation.
"""
return filename.endswith(self.downloadable_extensions) | [
"def",
"should_include",
"(",
"self",
",",
"filename",
",",
"parent",
")",
":",
"return",
"filename",
".",
"endswith",
"(",
"self",
".",
"downloadable_extensions",
")"
] | [
856,
4
] | [
862,
62
] | python | en | ['en', 'error', 'th'] | False |
DirectoryLocator.get_distribution_names | (self) |
Return all the distribution names known to this locator.
|
Return all the distribution names known to this locator.
| def get_distribution_names(self):
"""
Return all the distribution names known to this locator.
"""
result = set()
for root, dirs, files in os.walk(self.base_dir):
for fn in files:
if self.should_include(fn, root):
fn = os.path.join(root, fn)
url = urlunparse(('file', '',
pathname2url(os.path.abspath(fn)),
'', '', ''))
info = self.convert_url_to_download_info(url, None)
if info:
result.add(info['name'])
if not self.recursive:
break
return result | [
"def",
"get_distribution_names",
"(",
"self",
")",
":",
"result",
"=",
"set",
"(",
")",
"for",
"root",
",",
"dirs",
",",
"files",
"in",
"os",
".",
"walk",
"(",
"self",
".",
"base_dir",
")",
":",
"for",
"fn",
"in",
"files",
":",
"if",
"self",
".",
"should_include",
"(",
"fn",
",",
"root",
")",
":",
"fn",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"fn",
")",
"url",
"=",
"urlunparse",
"(",
"(",
"'file'",
",",
"''",
",",
"pathname2url",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"fn",
")",
")",
",",
"''",
",",
"''",
",",
"''",
")",
")",
"info",
"=",
"self",
".",
"convert_url_to_download_info",
"(",
"url",
",",
"None",
")",
"if",
"info",
":",
"result",
".",
"add",
"(",
"info",
"[",
"'name'",
"]",
")",
"if",
"not",
"self",
".",
"recursive",
":",
"break",
"return",
"result"
] | [
880,
4
] | [
897,
21
] | python | en | ['en', 'error', 'th'] | False |
JSONLocator.get_distribution_names | (self) |
Return all the distribution names known to this locator.
|
Return all the distribution names known to this locator.
| def get_distribution_names(self):
"""
Return all the distribution names known to this locator.
"""
raise NotImplementedError('Not available from this locator') | [
"def",
"get_distribution_names",
"(",
"self",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Not available from this locator'",
")"
] | [
906,
4
] | [
910,
68
] | python | en | ['en', 'error', 'th'] | False |
DistPathLocator.__init__ | (self, distpath, **kwargs) |
Initialise an instance.
:param distpath: A :class:`DistributionPath` instance to search.
|
Initialise an instance. | def __init__(self, distpath, **kwargs):
"""
Initialise an instance.
:param distpath: A :class:`DistributionPath` instance to search.
"""
super(DistPathLocator, self).__init__(**kwargs)
assert isinstance(distpath, DistributionPath)
self.distpath = distpath | [
"def",
"__init__",
"(",
"self",
",",
"distpath",
",",
"*",
"*",
"kwargs",
")",
":",
"super",
"(",
"DistPathLocator",
",",
"self",
")",
".",
"__init__",
"(",
"*",
"*",
"kwargs",
")",
"assert",
"isinstance",
"(",
"distpath",
",",
"DistributionPath",
")",
"self",
".",
"distpath",
"=",
"distpath"
] | [
942,
4
] | [
950,
32
] | python | en | ['en', 'error', 'th'] | False |
AggregatingLocator.__init__ | (self, *locators, **kwargs) |
Initialise an instance.
:param locators: The list of locators to search.
:param kwargs: Passed to the superclass constructor,
except for:
* merge - if False (the default), the first successful
search from any of the locators is returned. If True,
the results from all locators are merged (this can be
slow).
|
Initialise an instance. | def __init__(self, *locators, **kwargs):
"""
Initialise an instance.
:param locators: The list of locators to search.
:param kwargs: Passed to the superclass constructor,
except for:
* merge - if False (the default), the first successful
search from any of the locators is returned. If True,
the results from all locators are merged (this can be
slow).
"""
self.merge = kwargs.pop('merge', False)
self.locators = locators
super(AggregatingLocator, self).__init__(**kwargs) | [
"def",
"__init__",
"(",
"self",
",",
"*",
"locators",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"merge",
"=",
"kwargs",
".",
"pop",
"(",
"'merge'",
",",
"False",
")",
"self",
".",
"locators",
"=",
"locators",
"super",
"(",
"AggregatingLocator",
",",
"self",
")",
".",
"__init__",
"(",
"*",
"*",
"kwargs",
")"
] | [
969,
4
] | [
983,
58
] | python | en | ['en', 'error', 'th'] | False |
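A minimal usage sketch for the constructor above. It assumes the companion locator classes (JSONLocator, SimpleScrapingLocator) and Locator.get_project are available from distlib.locators as in upstream distlib; the index URL and project name are only illustrative.

from distlib.locators import AggregatingLocator, JSONLocator, SimpleScrapingLocator

# With merge=False (the default) the first locator that returns a non-empty
# result for a project wins; merge=True would combine results from all of them.
locator = AggregatingLocator(
    JSONLocator(),
    SimpleScrapingLocator('https://pypi.org/simple/', timeout=10.0),
    scheme='legacy')
versions = locator.get_project('pip')   # mapping of versions to distribution metadata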
AggregatingLocator.get_distribution_names | (self) |
Return all the distribution names known to this locator.
|
Return all the distribution names known to this locator.
| def get_distribution_names(self):
"""
Return all the distribution names known to this locator.
"""
result = set()
for locator in self.locators:
try:
result |= locator.get_distribution_names()
except NotImplementedError:
pass
return result | [
"def",
"get_distribution_names",
"(",
"self",
")",
":",
"result",
"=",
"set",
"(",
")",
"for",
"locator",
"in",
"self",
".",
"locators",
":",
"try",
":",
"result",
"|=",
"locator",
".",
"get_distribution_names",
"(",
")",
"except",
"NotImplementedError",
":",
"pass",
"return",
"result"
] | [
1041,
4
] | [
1051,
21
] | python | en | ['en', 'error', 'th'] | False |
DependencyFinder.__init__ | (self, locator=None) |
Initialise an instance, using the specified locator
to locate distributions.
|
Initialise an instance, using the specified locator
to locate distributions.
| def __init__(self, locator=None):
"""
Initialise an instance, using the specified locator
to locate distributions.
"""
self.locator = locator or default_locator
self.scheme = get_scheme(self.locator.scheme) | [
"def",
"__init__",
"(",
"self",
",",
"locator",
"=",
"None",
")",
":",
"self",
".",
"locator",
"=",
"locator",
"or",
"default_locator",
"self",
".",
"scheme",
"=",
"get_scheme",
"(",
"self",
".",
"locator",
".",
"scheme",
")"
] | [
1070,
4
] | [
1076,
53
] | python | en | ['en', 'error', 'th'] | False |
DependencyFinder.add_distribution | (self, dist) |
Add a distribution to the finder. This will update internal information
about who provides what.
:param dist: The distribution to add.
|
Add a distribution to the finder. This will update internal information
about who provides what.
:param dist: The distribution to add.
| def add_distribution(self, dist):
"""
Add a distribution to the finder. This will update internal information
about who provides what.
:param dist: The distribution to add.
"""
logger.debug('adding distribution %s', dist)
name = dist.key
self.dists_by_name[name] = dist
self.dists[(name, dist.version)] = dist
for p in dist.provides:
name, version = parse_name_and_version(p)
logger.debug('Add to provided: %s, %s, %s', name, version, dist)
self.provided.setdefault(name, set()).add((version, dist)) | [
"def",
"add_distribution",
"(",
"self",
",",
"dist",
")",
":",
"logger",
".",
"debug",
"(",
"'adding distribution %s'",
",",
"dist",
")",
"name",
"=",
"dist",
".",
"key",
"self",
".",
"dists_by_name",
"[",
"name",
"]",
"=",
"dist",
"self",
".",
"dists",
"[",
"(",
"name",
",",
"dist",
".",
"version",
")",
"]",
"=",
"dist",
"for",
"p",
"in",
"dist",
".",
"provides",
":",
"name",
",",
"version",
"=",
"parse_name_and_version",
"(",
"p",
")",
"logger",
".",
"debug",
"(",
"'Add to provided: %s, %s, %s'",
",",
"name",
",",
"version",
",",
"dist",
")",
"self",
".",
"provided",
".",
"setdefault",
"(",
"name",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"(",
"version",
",",
"dist",
")",
")"
] | [
1078,
4
] | [
1091,
70
] | python | en | ['en', 'error', 'th'] | False |
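The setdefault pattern in add_distribution builds an index from provided names to (version, distribution) pairs. The toy values below are made up purely to show the resulting shape.

provided = {}
for name, version, dist in [('foo', '1.0', '<dist foo 1.0>'), ('foo', '1.1', '<dist foo 1.1>')]:
    # mirrors: self.provided.setdefault(name, set()).add((version, dist))
    provided.setdefault(name, set()).add((version, dist))
assert provided == {'foo': {('1.0', '<dist foo 1.0>'), ('1.1', '<dist foo 1.1>')}}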
DependencyFinder.remove_distribution | (self, dist) |
Remove a distribution from the finder. This will update internal
information about who provides what.
:param dist: The distribution to remove.
|
Remove a distribution from the finder. This will update internal
information about who provides what.
:param dist: The distribution to remove.
| def remove_distribution(self, dist):
"""
Remove a distribution from the finder. This will update internal
information about who provides what.
:param dist: The distribution to remove.
"""
logger.debug('removing distribution %s', dist)
name = dist.key
del self.dists_by_name[name]
del self.dists[(name, dist.version)]
for p in dist.provides:
name, version = parse_name_and_version(p)
logger.debug('Remove from provided: %s, %s, %s', name, version, dist)
s = self.provided[name]
s.remove((version, dist))
if not s:
del self.provided[name] | [
"def",
"remove_distribution",
"(",
"self",
",",
"dist",
")",
":",
"logger",
".",
"debug",
"(",
"'removing distribution %s'",
",",
"dist",
")",
"name",
"=",
"dist",
".",
"key",
"del",
"self",
".",
"dists_by_name",
"[",
"name",
"]",
"del",
"self",
".",
"dists",
"[",
"(",
"name",
",",
"dist",
".",
"version",
")",
"]",
"for",
"p",
"in",
"dist",
".",
"provides",
":",
"name",
",",
"version",
"=",
"parse_name_and_version",
"(",
"p",
")",
"logger",
".",
"debug",
"(",
"'Remove from provided: %s, %s, %s'",
",",
"name",
",",
"version",
",",
"dist",
")",
"s",
"=",
"self",
".",
"provided",
"[",
"name",
"]",
"s",
".",
"remove",
"(",
"(",
"version",
",",
"dist",
")",
")",
"if",
"not",
"s",
":",
"del",
"self",
".",
"provided",
"[",
"name",
"]"
] | [
1093,
4
] | [
1109,
39
] | python | en | ['en', 'error', 'th'] | False |
DependencyFinder.get_matcher | (self, reqt) |
Get a version matcher for a requirement.
:param reqt: The requirement
:type reqt: str
:return: A version matcher (an instance of
:class:`distlib.version.Matcher`).
|
Get a version matcher for a requirement.
:param reqt: The requirement
:type reqt: str
:return: A version matcher (an instance of
:class:`distlib.version.Matcher`).
| def get_matcher(self, reqt):
"""
Get a version matcher for a requirement.
:param reqt: The requirement
:type reqt: str
:return: A version matcher (an instance of
:class:`distlib.version.Matcher`).
"""
try:
matcher = self.scheme.matcher(reqt)
except UnsupportedVersionError: # pragma: no cover
# XXX compat-mode if cannot read the version
name = reqt.split()[0]
matcher = self.scheme.matcher(name)
return matcher | [
"def",
"get_matcher",
"(",
"self",
",",
"reqt",
")",
":",
"try",
":",
"matcher",
"=",
"self",
".",
"scheme",
".",
"matcher",
"(",
"reqt",
")",
"except",
"UnsupportedVersionError",
":",
"# pragma: no cover",
"# XXX compat-mode if cannot read the version",
"name",
"=",
"reqt",
".",
"split",
"(",
")",
"[",
"0",
"]",
"matcher",
"=",
"self",
".",
"scheme",
".",
"matcher",
"(",
"name",
")",
"return",
"matcher"
] | [
1111,
4
] | [
1125,
22
] | python | en | ['en', 'error', 'th'] | False |
DependencyFinder.find_providers | (self, reqt) |
Find the distributions which can fulfill a requirement.
:param reqt: The requirement.
:type reqt: str
:return: A set of distributions which can fulfill the requirement.
|
Find the distributions which can fulfill a requirement. | def find_providers(self, reqt):
"""
Find the distributions which can fulfill a requirement.
:param reqt: The requirement.
:type reqt: str
:return: A set of distributions which can fulfill the requirement.
"""
matcher = self.get_matcher(reqt)
name = matcher.key # case-insensitive
result = set()
provided = self.provided
if name in provided:
for version, provider in provided[name]:
try:
match = matcher.match(version)
except UnsupportedVersionError:
match = False
if match:
result.add(provider)
break
return result | [
"def",
"find_providers",
"(",
"self",
",",
"reqt",
")",
":",
"matcher",
"=",
"self",
".",
"get_matcher",
"(",
"reqt",
")",
"name",
"=",
"matcher",
".",
"key",
"# case-insensitive",
"result",
"=",
"set",
"(",
")",
"provided",
"=",
"self",
".",
"provided",
"if",
"name",
"in",
"provided",
":",
"for",
"version",
",",
"provider",
"in",
"provided",
"[",
"name",
"]",
":",
"try",
":",
"match",
"=",
"matcher",
".",
"match",
"(",
"version",
")",
"except",
"UnsupportedVersionError",
":",
"match",
"=",
"False",
"if",
"match",
":",
"result",
".",
"add",
"(",
"provider",
")",
"break",
"return",
"result"
] | [
1127,
4
] | [
1149,
21
] | python | en | ['en', 'error', 'th'] | False |
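find_providers leans on the version matcher returned by get_matcher. Below is a hedged sketch of that matcher API, assuming distlib.version.get_scheme and the Matcher attributes (.key, .match) behave as in upstream distlib; the requirement string is only an example.

from distlib.version import get_scheme

matcher = get_scheme('default').matcher('foo (>= 1.0)')
print(matcher.key)           # 'foo' - the case-normalised name used as the index key
print(matcher.match('1.2'))  # expected True
print(matcher.match('0.9'))  # expected False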
DependencyFinder.try_to_replace | (self, provider, other, problems) |
Attempt to replace one provider with another. This is typically used
when resolving dependencies from multiple sources, e.g. A requires
(B >= 1.0) while C requires (B >= 1.1).
For successful replacement, ``provider`` must meet all the requirements
which ``other`` fulfills.
:param provider: The provider we are trying to replace with.
:param other: The provider we're trying to replace.
:param problems: If False is returned, this will contain what
problems prevented replacement. This is currently
a tuple of the literal string 'cantreplace',
``provider``, ``other`` and the set of requirements
that ``provider`` couldn't fulfill.
:return: True if we can replace ``other`` with ``provider``, else
False.
|
Attempt to replace one provider with another. This is typically used
when resolving dependencies from multiple sources, e.g. A requires
(B >= 1.0) while C requires (B >= 1.1). | def try_to_replace(self, provider, other, problems):
"""
Attempt to replace one provider with another. This is typically used
when resolving dependencies from multiple sources, e.g. A requires
(B >= 1.0) while C requires (B >= 1.1).
For successful replacement, ``provider`` must meet all the requirements
which ``other`` fulfills.
:param provider: The provider we are trying to replace with.
:param other: The provider we're trying to replace.
:param problems: If False is returned, this will contain what
problems prevented replacement. This is currently
a tuple of the literal string 'cantreplace',
``provider``, ``other`` and the set of requirements
that ``provider`` couldn't fulfill.
:return: True if we can replace ``other`` with ``provider``, else
False.
"""
rlist = self.reqts[other]
unmatched = set()
for s in rlist:
matcher = self.get_matcher(s)
if not matcher.match(provider.version):
unmatched.add(s)
if unmatched:
# can't replace other with provider
problems.add(('cantreplace', provider, other,
frozenset(unmatched)))
result = False
else:
# can replace other with provider
self.remove_distribution(other)
del self.reqts[other]
for s in rlist:
self.reqts.setdefault(provider, set()).add(s)
self.add_distribution(provider)
result = True
return result | [
"def",
"try_to_replace",
"(",
"self",
",",
"provider",
",",
"other",
",",
"problems",
")",
":",
"rlist",
"=",
"self",
".",
"reqts",
"[",
"other",
"]",
"unmatched",
"=",
"set",
"(",
")",
"for",
"s",
"in",
"rlist",
":",
"matcher",
"=",
"self",
".",
"get_matcher",
"(",
"s",
")",
"if",
"not",
"matcher",
".",
"match",
"(",
"provider",
".",
"version",
")",
":",
"unmatched",
".",
"add",
"(",
"s",
")",
"if",
"unmatched",
":",
"# can't replace other with provider",
"problems",
".",
"add",
"(",
"(",
"'cantreplace'",
",",
"provider",
",",
"other",
",",
"frozenset",
"(",
"unmatched",
")",
")",
")",
"result",
"=",
"False",
"else",
":",
"# can replace other with provider",
"self",
".",
"remove_distribution",
"(",
"other",
")",
"del",
"self",
".",
"reqts",
"[",
"other",
"]",
"for",
"s",
"in",
"rlist",
":",
"self",
".",
"reqts",
".",
"setdefault",
"(",
"provider",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"s",
")",
"self",
".",
"add_distribution",
"(",
"provider",
")",
"result",
"=",
"True",
"return",
"result"
] | [
1151,
4
] | [
1189,
21
] | python | en | ['en', 'error', 'th'] | False |
DependencyFinder.find | (self, requirement, meta_extras=None, prereleases=False) |
Find a distribution and all distributions it depends on.
:param requirement: The requirement specifying the distribution to
find, or a Distribution instance.
:param meta_extras: A list of meta extras such as :test:, :build: and
so on.
:param prereleases: If ``True``, allow pre-release versions to be
returned - otherwise, don't return prereleases
unless they're all that's available.
Return a set of :class:`Distribution` instances and a set of
problems.
The distributions returned should be such that they have the
:attr:`required` attribute set to ``True`` if they were
from the ``requirement`` passed to ``find()``, and they have the
:attr:`build_time_dependency` attribute set to ``True`` unless they
are post-installation dependencies of the ``requirement``.
The problems should be a tuple consisting of the string
``'unsatisfied'`` and the requirement which couldn't be satisfied
by any distribution known to the locator.
|
Find a distribution and all distributions it depends on. | def find(self, requirement, meta_extras=None, prereleases=False):
"""
Find a distribution and all distributions it depends on.
:param requirement: The requirement specifying the distribution to
find, or a Distribution instance.
:param meta_extras: A list of meta extras such as :test:, :build: and
so on.
:param prereleases: If ``True``, allow pre-release versions to be
returned - otherwise, don't return prereleases
unless they're all that's available.
Return a set of :class:`Distribution` instances and a set of
problems.
The distributions returned should be such that they have the
:attr:`required` attribute set to ``True`` if they were
from the ``requirement`` passed to ``find()``, and they have the
:attr:`build_time_dependency` attribute set to ``True`` unless they
are post-installation dependencies of the ``requirement``.
The problems should be a tuple consisting of the string
``'unsatisfied'`` and the requirement which couldn't be satisfied
by any distribution known to the locator.
"""
self.provided = {}
self.dists = {}
self.dists_by_name = {}
self.reqts = {}
meta_extras = set(meta_extras or [])
if ':*:' in meta_extras:
meta_extras.remove(':*:')
# :meta: and :run: are implicitly included
meta_extras |= set([':test:', ':build:', ':dev:'])
if isinstance(requirement, Distribution):
dist = odist = requirement
logger.debug('passed %s as requirement', odist)
else:
dist = odist = self.locator.locate(requirement,
prereleases=prereleases)
if dist is None:
raise DistlibException('Unable to locate %r' % requirement)
logger.debug('located %s', odist)
dist.requested = True
problems = set()
todo = set([dist])
install_dists = set([odist])
while todo:
dist = todo.pop()
name = dist.key # case-insensitive
if name not in self.dists_by_name:
self.add_distribution(dist)
else:
#import pdb; pdb.set_trace()
other = self.dists_by_name[name]
if other != dist:
self.try_to_replace(dist, other, problems)
ireqts = dist.run_requires | dist.meta_requires
sreqts = dist.build_requires
ereqts = set()
if meta_extras and dist in install_dists:
for key in ('test', 'build', 'dev'):
e = ':%s:' % key
if e in meta_extras:
ereqts |= getattr(dist, '%s_requires' % key)
all_reqts = ireqts | sreqts | ereqts
for r in all_reqts:
providers = self.find_providers(r)
if not providers:
logger.debug('No providers found for %r', r)
provider = self.locator.locate(r, prereleases=prereleases)
# If no provider is found and we didn't consider
# prereleases, consider them now.
if provider is None and not prereleases:
provider = self.locator.locate(r, prereleases=True)
if provider is None:
logger.debug('Cannot satisfy %r', r)
problems.add(('unsatisfied', r))
else:
n, v = provider.key, provider.version
if (n, v) not in self.dists:
todo.add(provider)
providers.add(provider)
if r in ireqts and dist in install_dists:
install_dists.add(provider)
logger.debug('Adding %s to install_dists',
provider.name_and_version)
for p in providers:
name = p.key
if name not in self.dists_by_name:
self.reqts.setdefault(p, set()).add(r)
else:
other = self.dists_by_name[name]
if other != p:
# see if other can be replaced by p
self.try_to_replace(p, other, problems)
dists = set(self.dists.values())
for dist in dists:
dist.build_time_dependency = dist not in install_dists
if dist.build_time_dependency:
logger.debug('%s is a build-time dependency only.',
dist.name_and_version)
logger.debug('find done for %s', odist)
return dists, problems | [
"def",
"find",
"(",
"self",
",",
"requirement",
",",
"meta_extras",
"=",
"None",
",",
"prereleases",
"=",
"False",
")",
":",
"self",
".",
"provided",
"=",
"{",
"}",
"self",
".",
"dists",
"=",
"{",
"}",
"self",
".",
"dists_by_name",
"=",
"{",
"}",
"self",
".",
"reqts",
"=",
"{",
"}",
"meta_extras",
"=",
"set",
"(",
"meta_extras",
"or",
"[",
"]",
")",
"if",
"':*:'",
"in",
"meta_extras",
":",
"meta_extras",
".",
"remove",
"(",
"':*:'",
")",
"# :meta: and :run: are implicitly included",
"meta_extras",
"|=",
"set",
"(",
"[",
"':test:'",
",",
"':build:'",
",",
"':dev:'",
"]",
")",
"if",
"isinstance",
"(",
"requirement",
",",
"Distribution",
")",
":",
"dist",
"=",
"odist",
"=",
"requirement",
"logger",
".",
"debug",
"(",
"'passed %s as requirement'",
",",
"odist",
")",
"else",
":",
"dist",
"=",
"odist",
"=",
"self",
".",
"locator",
".",
"locate",
"(",
"requirement",
",",
"prereleases",
"=",
"prereleases",
")",
"if",
"dist",
"is",
"None",
":",
"raise",
"DistlibException",
"(",
"'Unable to locate %r'",
"%",
"requirement",
")",
"logger",
".",
"debug",
"(",
"'located %s'",
",",
"odist",
")",
"dist",
".",
"requested",
"=",
"True",
"problems",
"=",
"set",
"(",
")",
"todo",
"=",
"set",
"(",
"[",
"dist",
"]",
")",
"install_dists",
"=",
"set",
"(",
"[",
"odist",
"]",
")",
"while",
"todo",
":",
"dist",
"=",
"todo",
".",
"pop",
"(",
")",
"name",
"=",
"dist",
".",
"key",
"# case-insensitive",
"if",
"name",
"not",
"in",
"self",
".",
"dists_by_name",
":",
"self",
".",
"add_distribution",
"(",
"dist",
")",
"else",
":",
"#import pdb; pdb.set_trace()",
"other",
"=",
"self",
".",
"dists_by_name",
"[",
"name",
"]",
"if",
"other",
"!=",
"dist",
":",
"self",
".",
"try_to_replace",
"(",
"dist",
",",
"other",
",",
"problems",
")",
"ireqts",
"=",
"dist",
".",
"run_requires",
"|",
"dist",
".",
"meta_requires",
"sreqts",
"=",
"dist",
".",
"build_requires",
"ereqts",
"=",
"set",
"(",
")",
"if",
"meta_extras",
"and",
"dist",
"in",
"install_dists",
":",
"for",
"key",
"in",
"(",
"'test'",
",",
"'build'",
",",
"'dev'",
")",
":",
"e",
"=",
"':%s:'",
"%",
"key",
"if",
"e",
"in",
"meta_extras",
":",
"ereqts",
"|=",
"getattr",
"(",
"dist",
",",
"'%s_requires'",
"%",
"key",
")",
"all_reqts",
"=",
"ireqts",
"|",
"sreqts",
"|",
"ereqts",
"for",
"r",
"in",
"all_reqts",
":",
"providers",
"=",
"self",
".",
"find_providers",
"(",
"r",
")",
"if",
"not",
"providers",
":",
"logger",
".",
"debug",
"(",
"'No providers found for %r'",
",",
"r",
")",
"provider",
"=",
"self",
".",
"locator",
".",
"locate",
"(",
"r",
",",
"prereleases",
"=",
"prereleases",
")",
"# If no provider is found and we didn't consider",
"# prereleases, consider them now.",
"if",
"provider",
"is",
"None",
"and",
"not",
"prereleases",
":",
"provider",
"=",
"self",
".",
"locator",
".",
"locate",
"(",
"r",
",",
"prereleases",
"=",
"True",
")",
"if",
"provider",
"is",
"None",
":",
"logger",
".",
"debug",
"(",
"'Cannot satisfy %r'",
",",
"r",
")",
"problems",
".",
"add",
"(",
"(",
"'unsatisfied'",
",",
"r",
")",
")",
"else",
":",
"n",
",",
"v",
"=",
"provider",
".",
"key",
",",
"provider",
".",
"version",
"if",
"(",
"n",
",",
"v",
")",
"not",
"in",
"self",
".",
"dists",
":",
"todo",
".",
"add",
"(",
"provider",
")",
"providers",
".",
"add",
"(",
"provider",
")",
"if",
"r",
"in",
"ireqts",
"and",
"dist",
"in",
"install_dists",
":",
"install_dists",
".",
"add",
"(",
"provider",
")",
"logger",
".",
"debug",
"(",
"'Adding %s to install_dists'",
",",
"provider",
".",
"name_and_version",
")",
"for",
"p",
"in",
"providers",
":",
"name",
"=",
"p",
".",
"key",
"if",
"name",
"not",
"in",
"self",
".",
"dists_by_name",
":",
"self",
".",
"reqts",
".",
"setdefault",
"(",
"p",
",",
"set",
"(",
")",
")",
".",
"add",
"(",
"r",
")",
"else",
":",
"other",
"=",
"self",
".",
"dists_by_name",
"[",
"name",
"]",
"if",
"other",
"!=",
"p",
":",
"# see if other can be replaced by p",
"self",
".",
"try_to_replace",
"(",
"p",
",",
"other",
",",
"problems",
")",
"dists",
"=",
"set",
"(",
"self",
".",
"dists",
".",
"values",
"(",
")",
")",
"for",
"dist",
"in",
"dists",
":",
"dist",
".",
"build_time_dependency",
"=",
"dist",
"not",
"in",
"install_dists",
"if",
"dist",
".",
"build_time_dependency",
":",
"logger",
".",
"debug",
"(",
"'%s is a build-time dependency only.'",
",",
"dist",
".",
"name_and_version",
")",
"logger",
".",
"debug",
"(",
"'find done for %s'",
",",
"odist",
")",
"return",
"dists",
",",
"problems"
] | [
1191,
4
] | [
1299,
30
] | python | en | ['en', 'error', 'th'] | False |
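An end-to-end usage sketch for find(). It assumes DependencyFinder and default_locator are importable from distlib.locators and that the package index is reachable over the network; the requirement string is only an example.

from distlib.locators import DependencyFinder, default_locator

finder = DependencyFinder(default_locator)
dists, problems = finder.find('requests (>= 2.0)', prereleases=False)
for d in sorted(dists, key=lambda d: d.key):
    flag = ' (build-time only)' if d.build_time_dependency else ''
    print(d.name_and_version + flag)
for problem in problems:     # e.g. ('unsatisfied', <requirement>) tuples
    print(problem)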
update_or_create_token | (token, vegetariano, notificacao_almoco="11:00", notificacao_jantar="17:00") |
Registra device token ou atualiza os seus parametros "last_used" e/ou "vegetariano".
Acho que tenho que deixas os valores default do almoco e janta como os originais, para manter as notificacoes dos usuarios que nao atualizarem o app.
:param token: token a ser registrado ou atualizado.
:param vegetariano: preferencia de cardapio do usuario.
:return: True caso nao haja erros durante o processo.
|
Registra device token ou atualiza os seus parametros "last_used" e/ou "vegetariano". | def update_or_create_token(token, vegetariano, notificacao_almoco="11:00", notificacao_jantar="17:00"):
"""
Registra device token ou atualiza os seus parametros "last_used" e/ou "vegetariano".
Acho que tenho que deixas os valores default do almoco e janta como os originais, para manter as notificacoes dos usuarios que nao atualizarem o app.
:param token: token a ser registrado ou atualizado.
:param vegetariano: preferencia de cardapio do usuario.
:return: True caso nao haja erros durante o processo.
"""
new_dict = {"last_used": date.today().strftime("%y-%m-%d"), "vegetariano": vegetariano }
if notificacao_almoco is not None:
new_dict["almoco"] = notificacao_almoco
if notificacao_jantar is not None:
new_dict["jantar"] = notificacao_jantar
db = setup_firebase()
db.child('tokens').child(token).set(new_dict)
print("Device token {} registrado com sucesso.".format(token))
return True | [
"def",
"update_or_create_token",
"(",
"token",
",",
"vegetariano",
",",
"notificacao_almoco",
"=",
"\"11:00\"",
",",
"notificacao_jantar",
"=",
"\"17:00\"",
")",
":",
"new_dict",
"=",
"{",
"\"last_used\"",
":",
"date",
".",
"today",
"(",
")",
".",
"strftime",
"(",
"\"%y-%m-%d\"",
")",
",",
"\"vegetariano\"",
":",
"vegetariano",
"}",
"if",
"notificacao_almoco",
"is",
"not",
"None",
":",
"new_dict",
"[",
"\"almoco\"",
"]",
"=",
"notificacao_almoco",
"if",
"notificacao_jantar",
"is",
"not",
"None",
":",
"new_dict",
"[",
"\"jantar\"",
"]",
"=",
"notificacao_jantar",
"db",
"=",
"setup_firebase",
"(",
")",
"db",
".",
"child",
"(",
"'tokens'",
")",
".",
"child",
"(",
"token",
")",
".",
"set",
"(",
"new_dict",
")",
"print",
"(",
"\"Device token {} registrado com sucesso.\"",
".",
"format",
"(",
"token",
")",
")",
"return",
"True"
] | [
74,
0
] | [
102,
15
] | python | en | ['en', 'error', 'th'] | False |
delete_token | (token) |
Remove device token do BD (firebase).
:param token: token a ser removido.
:return: True caso nao haja erros durante o processo.
|
Remove device token do BD (firebase). | def delete_token(token):
"""
Remove device token do BD (firebase).
:param token: token a ser removido.
:return: True caso nao haja erros durante o processo.
"""
db = setup_firebase()
db.child('tokens').child(token).remove()
print("Device token {} removido com sucesso.".format(token))
return True | [
"def",
"delete_token",
"(",
"token",
")",
":",
"db",
"=",
"setup_firebase",
"(",
")",
"db",
".",
"child",
"(",
"'tokens'",
")",
".",
"child",
"(",
"token",
")",
".",
"remove",
"(",
")",
"print",
"(",
"\"Device token {} removido com sucesso.\"",
".",
"format",
"(",
"token",
")",
")",
"return",
"True"
] | [
107,
0
] | [
119,
15
] | python | en | ['en', 'error', 'th'] | False |
same_time_with_margin | (hora) |
Compara um horario fornecido com o horario atual para decidir se uma notificacao deve ser enviada para o token correspondente a esse horario. Ele leva em consideracao um possivel atraso de ate 3min no heroku scheduler.
:param hora: horario a ser verificado
:return: booleano dizendo esta proximo o suficiente a hora atual para receber notificacao no momento.
|
Compara um horario fornecido com o horario atual para decidir se uma notificacao deve ser enviada para o token correspondente a esse horario. Ele leva em consideracao um possivel atraso de ate 3min no heroku scheduler. | def same_time_with_margin(hora):
"""
Compara um horario fornecido com o horario atual para decidir se uma notificacao deve ser enviada para o token correspondente a esse horario. Ele leva em consideracao um possivel atraso de ate 3min no heroku scheduler.
:param hora: horario a ser verificado
:return: booleano dizendo esta proximo o suficiente a hora atual para receber notificacao no momento.
"""
if hora is None:
return False
tz = pytz.timezone('America/Sao_Paulo')
today = datetime.now(tz)
minutes_today = int(today.hour * 60 + today.minute)
hours, minutes = map(int, hora.split(':'))
minutes_notification = hours * 60 + minutes
# print(hora, "{}:{}".format(today.hour, today.minute))
return abs(minutes_today - minutes_notification) <= 5 | [
"def",
"same_time_with_margin",
"(",
"hora",
")",
":",
"if",
"hora",
"is",
"None",
":",
"return",
"False",
"tz",
"=",
"pytz",
".",
"timezone",
"(",
"'America/Sao_Paulo'",
")",
"today",
"=",
"datetime",
".",
"now",
"(",
"tz",
")",
"minutes_today",
"=",
"int",
"(",
"today",
".",
"hour",
"*",
"60",
"+",
"today",
".",
"minute",
")",
"hours",
",",
"minutes",
"=",
"map",
"(",
"int",
",",
"hora",
".",
"split",
"(",
"':'",
")",
")",
"minutes_notification",
"=",
"hours",
"*",
"60",
"+",
"minutes",
"# print(hora, \"{}:{}\".format(today.hour, today.minute))",
"return",
"abs",
"(",
"minutes_today",
"-",
"minutes_notification",
")",
"<=",
"5"
] | [
126,
0
] | [
147,
57
] | python | en | ['en', 'error', 'th'] | False |
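A self-contained illustration of the minute-of-day comparison above (the code uses a 5-minute window, slightly wider than the 3-minute scheduler delay mentioned in the docstring). The timestamps are made up; the real function takes the current time in the America/Sao_Paulo timezone.

from datetime import datetime

def within_margin(hora, now, margin=5):
    hours, minutes = map(int, hora.split(':'))
    return abs((now.hour * 60 + now.minute) - (hours * 60 + minutes)) <= margin

now = datetime(2020, 3, 2, 11, 3)        # scheduler woke up at 11:03
print(within_margin('11:00', now))       # True  (3 minutes apart)
print(within_margin('11:10', now))       # False (7 minutes apart)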
get_device_tokens | (refeicao) |
Pega os device tokens no firebase, separa os tradicionais dos vegetarianos e filtra os tokens que devem receber notificacao no momento. Lembrando que o heroku scheduler vai rodar o script de notificacao a cada 10min.
:return: uma tupla (toks_trad, toks_veg) contendo os tokens tradicionais e vegetarianos.
|
Pega os device tokens no firebase, separa os tradicionais dos vegetarianos e filtra os tokens que devem receber notificacao no momento. Lembrando que o heroku scheduler vai rodar o script de notificacao a cada 10min. | def get_device_tokens(refeicao):
"""
Pega os device tokens no firebase, separa os tradicionais dos vegetarianos e filtra os tokens que devem receber notificacao no momento. Lembrando que o heroku scheduler vai rodar o script de notificacao a cada 10min.
:return: uma tupla (toks_trad, toks_veg) contendo os tokens tradicionais e vegetarianos.
"""
# obtems device tokens dos usuarios registrados
db = setup_firebase()
tokens = db.child('tokens').get().val()
# pprint("All tokens: {}".format(tokens))
if refeicao == "almoço":
refeicao = "almoco" # consertando inconsistencia nos nomes de chaves e da mensagem...
try:
# filtra os tokens que devem receber notificacao no momento.
tokens = [(t, d) for t, d in tokens.items() if refeicao in d and same_time_with_margin(d[refeicao])]
except KeyError:
# o usuario nao quer receber notificacao nessa refeicao.
print("KeyError em get_device_token!")
return [], []
# pprint("Tokens filtrados pelo horario da notificacao: {}".format(tokens))
# separa usuarios vegetarianos
tokens_tradicional = [t for t, d in tokens if d["vegetariano"] == False]
tokens_vegetariano = [t for t, d in tokens if d["vegetariano"]]
# print("Tokens tradicionais: ", tokens_tradicional)
# print("Tokens vegetarianos: ", tokens_vegetariano)
return tokens_tradicional, tokens_vegetariano | [
"def",
"get_device_tokens",
"(",
"refeicao",
")",
":",
"# obtems device tokens dos usuarios registrados",
"db",
"=",
"setup_firebase",
"(",
")",
"tokens",
"=",
"db",
".",
"child",
"(",
"'tokens'",
")",
".",
"get",
"(",
")",
".",
"val",
"(",
")",
"# pprint(\"All tokens: {}\".format(tokens))",
"if",
"refeicao",
"==",
"\"almoço\":",
"",
"refeicao",
"=",
"\"almoco\"",
"# consertando inconsistencia nos nomes de chaves e da mensagem...",
"try",
":",
"# filtra os tokens que devem receber notificacao no momento.",
"tokens",
"=",
"[",
"(",
"t",
",",
"d",
")",
"for",
"t",
",",
"d",
"in",
"tokens",
".",
"items",
"(",
")",
"if",
"refeicao",
"in",
"d",
"and",
"same_time_with_margin",
"(",
"d",
"[",
"refeicao",
"]",
")",
"]",
"except",
"KeyError",
":",
"# o usuario nao quer receber notificacao nessa refeicao.",
"print",
"(",
"\"KeyError em get_device_token!\"",
")",
"return",
"[",
"]",
",",
"[",
"]",
"# pprint(\"Tokens filtrados pelo horario da notificacao: {}\".format(tokens))",
"# separa usuarios vegetarianos",
"tokens_tradicional",
"=",
"[",
"t",
"for",
"t",
",",
"d",
"in",
"tokens",
"if",
"d",
"[",
"\"vegetariano\"",
"]",
"==",
"False",
"]",
"tokens_vegetariano",
"=",
"[",
"t",
"for",
"t",
",",
"d",
"in",
"tokens",
"if",
"d",
"[",
"\"vegetariano\"",
"]",
"]",
"# print(\"Tokens tradicionais: \", tokens_tradicional)",
"# print(\"Tokens vegetarianos: \", tokens_vegetariano)",
"return",
"tokens_tradicional",
",",
"tokens_vegetariano"
] | [
151,
0
] | [
187,
49
] | python | en | ['en', 'error', 'th'] | False |
get_notification_objects | (msg_tradicional, msg_vegetariano, tokens_tradicional, tokens_vegetariano) |
Instancia os objetos de notificacao utilizados pelo modulo apns2. Para entender mais, olhar a documentacao dessa biblioteca.
:param msg_tradicional: mensagem da notificacao do cardapio tradicional.
:param msg_vegetariano: mensagem da notificacao do cardapio vegetariano.
:return: objetos de notificacao a serem enviados.
| def get_notification_objects(msg_tradicional, msg_vegetariano, tokens_tradicional, tokens_vegetariano):
"""
Instancia os objetos de notificacao utilizados pelo modulo apns2. Para entender mais, olhar a documentacao dessa biblioteca.
:param msg_tradicional: mensagem da notificacao do cardapio tradicional.
:param msg_vegetariano: mensagem da notificacao do cardapio vegetariano.
:return: objetos de notificacao a serem enviados.
"""
# cria 2 payloads diferentes para tradicional e vegetariano
payload_tradicional = Payload(alert=msg_tradicional, sound="default", badge=1)
payload_vegetariano = Payload(alert=msg_vegetariano, sound="default", badge=1)
# adiciona os objetos Notification (olhar codigo do apns2) para serem enviados em batch.
notifications = []
for t in tokens_tradicional:
notifications.append(Notification(t, payload_tradicional))
for t in tokens_vegetariano:
notifications.append(Notification(t, payload_vegetariano))
return notifications | [
"def",
"get_notification_objects",
"(",
"msg_tradicional",
",",
"msg_vegetariano",
",",
"tokens_tradicional",
",",
"tokens_vegetariano",
")",
":",
"# cria 2 payloads diferentes para tradicional e vegetariano",
"payload_tradicional",
"=",
"Payload",
"(",
"alert",
"=",
"msg_tradicional",
",",
"sound",
"=",
"\"default\"",
",",
"badge",
"=",
"1",
")",
"payload_vegetariano",
"=",
"Payload",
"(",
"alert",
"=",
"msg_vegetariano",
",",
"sound",
"=",
"\"default\"",
",",
"badge",
"=",
"1",
")",
"# adiciona os objetos Notification (olhar codigo do apns2) para serem enviados em batch.",
"notifications",
"=",
"[",
"]",
"for",
"t",
"in",
"tokens_tradicional",
":",
"notifications",
".",
"append",
"(",
"Notification",
"(",
"t",
",",
"payload_tradicional",
")",
")",
"for",
"t",
"in",
"tokens_vegetariano",
":",
"notifications",
".",
"append",
"(",
"Notification",
"(",
"t",
",",
"payload_vegetariano",
")",
")",
"return",
"notifications"
] | [
191,
0
] | [
215,
24
] | python | en | ['en', 'error', 'th'] | False |
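A hedged sketch of sending the batch built above with the apns2 library. The import paths (apns2.client, apns2.payload), key path, device token, and topic are assumptions or placeholders; Payload, Notification, APNsClient and send_notification_batch mirror the calls shown in these rows.

from apns2.client import APNsClient, Notification
from apns2.payload import Payload

payload = Payload(alert='Hoje tem lasanha no almoço.', sound='default', badge=1)
notifications = [Notification('<device-token>', payload)]
client = APNsClient('path/to/apns_key.pem', use_sandbox=True, use_alternative_port=False)
client.send_notification_batch(notifications, 'com.example.app')   # topic is a placeholder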
setup_apns_client | (use_sandbox) |
Configura um cliente do servico apns2. Para mais informacoes, olhar a documentacao e o codigo dessa biblioteca.
:param use_sandbox:
:return: um objeto do tipo APNsClient para enviar push notifications.
|
Configura um cliente do servico apns2. Para mais informacoes, olhar a documentacao e o codigo dessa biblioteca.
:param use_sandbox:
:return: um objeto do tipo APNsClient para enviar push notifications.
| def setup_apns_client(use_sandbox):
"""
Configura um cliente do servico apns2. Para mais informacoes, olhar a documentacao e o codigo dessa biblioteca.
:param use_sandbox:
:return: um objeto do tipo APNsClient para enviar push notifications.
"""
try:
apns_key = environment_vars.APNS_PROD_KEY_CONTENT
f = open('./apns_key.pem', 'w')
f.write(apns_key)
f.close()
except Exception as e:
os.remove('./apns_key.pem')
print("Erro ao escrever no arquivo apns_key.pem: ", e)
if os.path.exists('./apns_key.pem'):
print("Executando no heroku")
file_path = "./apns_key.pem"
else: # local development. Usar o certificado armazenado localmente para development.
print("Usando chave de development localmente...")
file_path = './../Certificates/bandex_push_notifications_dev_key.pem'
client = APNsClient(file_path, use_sandbox=use_sandbox, use_alternative_port=False)
return client | [
"def",
"setup_apns_client",
"(",
"use_sandbox",
")",
":",
"try",
":",
"apns_key",
"=",
"environment_vars",
".",
"APNS_PROD_KEY_CONTENT",
"f",
"=",
"open",
"(",
"'./apns_key.pem'",
",",
"'w'",
")",
"f",
".",
"write",
"(",
"apns_key",
")",
"f",
".",
"close",
"(",
")",
"except",
"Exception",
"as",
"e",
":",
"os",
".",
"remove",
"(",
"'./apns_key.pem'",
")",
"print",
"(",
"\"Erro ao escrever no arquivo apns_key.pem: \"",
",",
"e",
")",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"'./apns_key.pem'",
")",
":",
"print",
"(",
"\"Executando no heroku\"",
")",
"file_path",
"=",
"\"./apns_key.pem\"",
"else",
":",
"# local development. Usar o certificado armazenado localmente para development.",
"print",
"(",
"\"Usando chave de development localmente...\"",
")",
"file_path",
"=",
"'./../Certificates/bandex_push_notifications_dev_key.pem'",
"client",
"=",
"APNsClient",
"(",
"file_path",
",",
"use_sandbox",
"=",
"use_sandbox",
",",
"use_alternative_port",
"=",
"False",
")",
"return",
"client"
] | [
218,
0
] | [
242,
17
] | python | en | ['en', 'error', 'th'] | False |
push_next_notification | (msg_tradicional, msg_vegetariano, refeicao) |
Utiliza a biblioteca apns2 para enviar push notifications para os usuarios registrados.
:param msg_tradicional: string de notificacao para o cardapio tradicional.
:param msg_vegetariano: string de notificacao para o cardapio vegetariano.
:return: None
|
Utiliza a biblioteca apns2 para enviar push notifications para os usuarios registrados. | def push_next_notification(msg_tradicional, msg_vegetariano, refeicao):
"""
Utiliza a biblioteca apns2 para enviar push notifications para os usuarios registrados.
:param msg_tradicional: string de notificacao para o cardapio tradicional.
:param msg_vegetariano: string de notificacao para o cardapio vegetariano.
:return: None
"""
# separa tradicional e vegetariano e filtra tokens que querem receber notificacao agora para a refeicao fornecida.
tokens_tradicional, tokens_vegetariano = get_device_tokens(refeicao)
# instancia objetos de notificacao a serem enviados usando a biblioteca apns2.
notifications = get_notification_objects(msg_tradicional, msg_vegetariano, tokens_tradicional, tokens_vegetariano)
topic = 'com.Gustavo.Avena.BandecoUnicamp'
# IMPORTANTE: define se estamos em development ou production! Com use_sandbox = True, os devices em modo teste recebem notificacoes. MUITO cuidado ao mexer aqui.
use_sandbox = False if os.environ.get('PRODUCTION_ENVIRONMENT') != None else True
client = setup_apns_client(use_sandbox)
response = client.send_notification_batch(notifications, topic)
# pprint(response)
successful = [t for t, d in response.items() if d == "Success"]
failed = [t for t, d in response.items() if d != "Success"]
tz = pytz.timezone('America/Sao_Paulo')
today = datetime.now(tz)
env_name = "[TESTING] " if use_sandbox else ""
print("{}Push notifications sent successfully on {} to {} devices.".format(env_name, today.strftime("%A, %b %d, %H:%M:%S"), len(successful)))
print("Notifications failed for {} devices.".format(len(failed)))
if(len(failed) > 0):
pprint([d for t, d in response.items() if d != "Success"]) | [
"def",
"push_next_notification",
"(",
"msg_tradicional",
",",
"msg_vegetariano",
",",
"refeicao",
")",
":",
"# separa tradicional e vegetariano e filtra tokens que querem receber notificacao agora para a refeicao fornecida.",
"tokens_tradicional",
",",
"tokens_vegetariano",
"=",
"get_device_tokens",
"(",
"refeicao",
")",
"# instancia objetos de notificacao a serem enviados usando a biblioteca apns2.",
"notifications",
"=",
"get_notification_objects",
"(",
"msg_tradicional",
",",
"msg_vegetariano",
",",
"tokens_tradicional",
",",
"tokens_vegetariano",
")",
"topic",
"=",
"'com.Gustavo.Avena.BandecoUnicamp'",
"# IMPORTANTE: define se estamos em development ou production! Com use_sandbox = True, os devices em modo teste recebem notificacoes. MUITO cuidado ao mexer aqui.",
"use_sandbox",
"=",
"False",
"if",
"os",
".",
"environ",
".",
"get",
"(",
"'PRODUCTION_ENVIRONMENT'",
")",
"!=",
"None",
"else",
"True",
"client",
"=",
"setup_apns_client",
"(",
"use_sandbox",
")",
"response",
"=",
"client",
".",
"send_notification_batch",
"(",
"notifications",
",",
"topic",
")",
"# pprint(response)",
"successful",
"=",
"[",
"t",
"for",
"t",
",",
"d",
"in",
"response",
".",
"items",
"(",
")",
"if",
"d",
"==",
"\"Success\"",
"]",
"failed",
"=",
"[",
"t",
"for",
"t",
",",
"d",
"in",
"response",
".",
"items",
"(",
")",
"if",
"d",
"!=",
"\"Success\"",
"]",
"tz",
"=",
"pytz",
".",
"timezone",
"(",
"'America/Sao_Paulo'",
")",
"today",
"=",
"datetime",
".",
"now",
"(",
"tz",
")",
"env_name",
"=",
"\"[TESTING] \"",
"if",
"use_sandbox",
"else",
"\"\"",
"print",
"(",
"\"{}Push notifications sent successfully on {} to {} devices.\"",
".",
"format",
"(",
"env_name",
",",
"today",
".",
"strftime",
"(",
"\"%A, %b %d, %H:%M:%S\"",
")",
",",
"len",
"(",
"successful",
")",
")",
")",
"print",
"(",
"\"Notifications failed for {} devices.\"",
".",
"format",
"(",
"len",
"(",
"failed",
")",
")",
")",
"if",
"(",
"len",
"(",
"failed",
")",
">",
"0",
")",
":",
"pprint",
"(",
"[",
"d",
"for",
"t",
",",
"d",
"in",
"response",
".",
"items",
"(",
")",
"if",
"d",
"!=",
"\"Success\"",
"]",
")"
] | [
244,
0
] | [
285,
66
] | python | en | ['en', 'error', 'th'] | False |
cardapio_valido | () |
Pega o proximo cardapio disponivel e confere se é do dia de hoje. Retorna None quando nao há cardapio disponivel ou
quando for fim de semana ou feriado.
:return: o proximo cardapio, se for valido, ou None caso contrário.
|
Pega o proximo cardapio disponivel e confere se é do dia de hoje. Retorna None quando nao há cardapio disponivel ou
quando for fim de semana ou feriado. | def cardapio_valido():
"""
Pega o proximo cardapio disponivel e confere se é do dia de hoje. Retorna None quando nao há cardapio disponivel ou
quando for fim de semana ou feriado.
:return: o proximo cardapio, se for valido, ou None caso contrário.
"""
cardapios = get_all_cardapios()
if len(cardapios) == 0:
return None
prox = cardapios[0]
today = date.today().strftime("%Y-%m-%d")
if prox.data == today:
return prox
else:
print(today)
print(prox.data)
return None | [
"def",
"cardapio_valido",
"(",
")",
":",
"cardapios",
"=",
"get_all_cardapios",
"(",
")",
"if",
"len",
"(",
"cardapios",
")",
"==",
"0",
":",
"return",
"None",
"prox",
"=",
"cardapios",
"[",
"0",
"]",
"today",
"=",
"date",
".",
"today",
"(",
")",
".",
"strftime",
"(",
"\"%Y-%m-%d\"",
")",
"if",
"prox",
".",
"data",
"==",
"today",
":",
"return",
"prox",
"else",
":",
"print",
"(",
"today",
")",
"print",
"(",
"prox",
".",
"data",
")",
"return",
"None"
] | [
290,
0
] | [
311,
19
] | python | en | ['en', 'error', 'th'] | False |
mandar_proxima_refeicao | (refeicao) |
Recebendo a refeicao (almoço ou jantar) a ser enviada, esse metodo cria a string da notificação e chama o método para envia-la
caso exista um cardapio valido (dia útil).
:param refeicao: string com valor "almoço" ou "jantar", indicando qual a refeicao a ser informada.
|
Recebendo a refeicao (almoço ou jantar) a ser enviada, esse metodo cria a string da notificação e chama o método para envia-la
caso exista um cardapio valido (dia útil). | def mandar_proxima_refeicao(refeicao):
"""
Recebendo a refeicao (almoço ou jantar) a ser enviada, esse metodo cria a string da notificação e chama o método para envia-la
caso exista um cardapio valido (dia útil).
:param refeicao: string com valor "almoço" ou "jantar", indicando qual a refeicao a ser informada.
"""
if not segunda_a_sexta():
print("Nao deve haver notificação no sábado ou domingo.")
return None
cardapio = cardapio_valido()
template = "Hoje tem {} no {}."
if cardapio != None:
if refeicao == "almoço":
tradicional = template.format(cardapio.almoco.prato_principal.lower(), refeicao)
vegetariano = template.format(cardapio.almoco_vegetariano.prato_principal.lower(), refeicao)
elif refeicao == "jantar":
tradicional = template.format(cardapio.jantar.prato_principal.lower(), refeicao)
vegetariano = template.format(cardapio.jantar_vegetariano.prato_principal.lower(), refeicao)
else:
print("Erro ao determinar refeicao.")
return
push_next_notification(tradicional, vegetariano, refeicao)
else:
print("Agora não há um cardápio válido.") | [
"def",
"mandar_proxima_refeicao",
"(",
"refeicao",
")",
":",
"if",
"not",
"segunda_a_sexta",
"(",
")",
":",
"print",
"(",
"\"Nao deve haver notificação no sábado ou domingo.\")",
"",
"return",
"None",
"cardapio",
"=",
"cardapio_valido",
"(",
")",
"template",
"=",
"\"Hoje tem {} no {}.\"",
"if",
"cardapio",
"!=",
"None",
":",
"if",
"refeicao",
"==",
"\"almoço\":",
"",
"tradicional",
"=",
"template",
".",
"format",
"(",
"cardapio",
".",
"almoco",
".",
"prato_principal",
".",
"lower",
"(",
")",
",",
"refeicao",
")",
"vegetariano",
"=",
"template",
".",
"format",
"(",
"cardapio",
".",
"almoco_vegetariano",
".",
"prato_principal",
".",
"lower",
"(",
")",
",",
"refeicao",
")",
"elif",
"refeicao",
"==",
"\"jantar\"",
":",
"tradicional",
"=",
"template",
".",
"format",
"(",
"cardapio",
".",
"jantar",
".",
"prato_principal",
".",
"lower",
"(",
")",
",",
"refeicao",
")",
"vegetariano",
"=",
"template",
".",
"format",
"(",
"cardapio",
".",
"jantar_vegetariano",
".",
"prato_principal",
".",
"lower",
"(",
")",
",",
"refeicao",
")",
"else",
":",
"print",
"(",
"\"Erro ao determinar refeicao.\"",
")",
"return",
"push_next_notification",
"(",
"tradicional",
",",
"vegetariano",
",",
"refeicao",
")",
"else",
":",
"print",
"(",
"\"Agora não há um cardápio válido.\")",
""
] | [
314,
0
] | [
345,
53
] | python | en | ['en', 'error', 'th'] | False |
testar_notificacao | () |
Funcao para testar o envio de notificacoes.
TODO: refatorar isso para mandar notificacoes para todos os tokens em development, independente do horario da notificacao.
|
Funcao para testar o envio de notificacoes. | def testar_notificacao():
"""
Funcao para testar o envio de notificacoes.
TODO: refatorar isso para mandar notificacoes para todos os tokens em development, independente do horario da notificacao.
"""
template = "Hoje tem {} no {}."
cardapios = get_all_cardapios()
cardapio = cardapios[0]
tradicional = template.format(cardapio.almoco.prato_principal.lower(), "almoço")
vegetariano = template.format(cardapio.almoco_vegetariano.prato_principal.lower(), "almoço")
tz = pytz.timezone('America/Sao_Paulo')
today = datetime.now(tz)
hour = today.hour
if hour >= 7 and hour <= 13:
refeicao = "almoço"
elif hour >= 14 and hour <= 19:
refeicao = "jantar"
push_next_notification(tradicional, vegetariano, refeicao) | [
"def",
"testar_notificacao",
"(",
")",
":",
"template",
"=",
"\"Hoje tem {} no {}.\"",
"cardapios",
"=",
"get_all_cardapios",
"(",
")",
"cardapio",
"=",
"cardapios",
"[",
"0",
"]",
"tradicional",
"=",
"template",
".",
"format",
"(",
"cardapio",
".",
"almoco",
".",
"prato_principal",
".",
"lower",
"(",
")",
",",
"\"almoço\")",
"",
"vegetariano",
"=",
"template",
".",
"format",
"(",
"cardapio",
".",
"almoco_vegetariano",
".",
"prato_principal",
".",
"lower",
"(",
")",
",",
"\"almoço\")",
"",
"tz",
"=",
"pytz",
".",
"timezone",
"(",
"'America/Sao_Paulo'",
")",
"today",
"=",
"datetime",
".",
"now",
"(",
"tz",
")",
"hour",
"=",
"today",
".",
"hour",
"if",
"hour",
">=",
"7",
"and",
"hour",
"<=",
"13",
":",
"refeicao",
"=",
"\"almoço\"",
"elif",
"hour",
">=",
"14",
"and",
"hour",
"<=",
"19",
":",
"refeicao",
"=",
"\"jantar\"",
"push_next_notification",
"(",
"tradicional",
",",
"vegetariano",
",",
"refeicao",
")"
] | [
349,
0
] | [
374,
62
] | python | en | ['en', 'error', 'th'] | False |
PostGISGeometryColumns.table_name_col | (cls) |
Return the name of the metadata column used to store the feature table
name.
|
Return the name of the metadata column used to store the feature table
name.
| def table_name_col(cls):
"""
Return the name of the metadata column used to store the feature table
name.
"""
return 'f_table_name' | [
"def",
"table_name_col",
"(",
"cls",
")",
":",
"return",
"'f_table_name'"
] | [
35,
4
] | [
40,
29
] | python | en | ['en', 'error', 'th'] | False |
PostGISGeometryColumns.geom_col_name | (cls) |
Return the name of the metadata column used to store the feature
geometry column.
|
Return the name of the metadata column used to store the feature
geometry column.
| def geom_col_name(cls):
"""
Return the name of the metadata column used to store the feature
geometry column.
"""
return 'f_geometry_column' | [
"def",
"geom_col_name",
"(",
"cls",
")",
":",
"return",
"'f_geometry_column'"
] | [
43,
4
] | [
48,
34
] | python | en | ['en', 'error', 'th'] | False |
Deserializer | (object_list, *, using=DEFAULT_DB_ALIAS, ignorenonexistent=False, **options) |
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor
|
Deserialize simple Python objects back into Django ORM instances. | def Deserializer(object_list, *, using=DEFAULT_DB_ALIAS, ignorenonexistent=False, **options):
"""
Deserialize simple Python objects back into Django ORM instances.
It's expected that you pass the Python objects themselves (instead of a
stream or a string) to the constructor
"""
handle_forward_references = options.pop('handle_forward_references', False)
field_names_cache = {} # Model: <list of field_names>
for d in object_list:
# Look up the model and starting build a dict of data for it.
try:
Model = _get_model(d["model"])
except base.DeserializationError:
if ignorenonexistent:
continue
else:
raise
data = {}
if 'pk' in d:
try:
data[Model._meta.pk.attname] = Model._meta.pk.to_python(d.get('pk'))
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), None)
m2m_data = {}
deferred_fields = {}
if Model not in field_names_cache:
field_names_cache[Model] = {f.name for f in Model._meta.get_fields()}
field_names = field_names_cache[Model]
# Handle each field
for (field_name, field_value) in d["fields"].items():
if ignorenonexistent and field_name not in field_names:
# skip fields no longer on model
continue
field = Model._meta.get_field(field_name)
# Handle M2M relations
if field.remote_field and isinstance(field.remote_field, models.ManyToManyRel):
try:
values = base.deserialize_m2m_values(field, field_value, using, handle_forward_references)
except base.M2MDeserializationError as e:
raise base.DeserializationError.WithData(e.original_exc, d['model'], d.get('pk'), e.pk)
if values == base.DEFER_FIELD:
deferred_fields[field] = field_value
else:
m2m_data[field.name] = values
# Handle FK fields
elif field.remote_field and isinstance(field.remote_field, models.ManyToOneRel):
try:
value = base.deserialize_fk_value(field, field_value, using, handle_forward_references)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
if value == base.DEFER_FIELD:
deferred_fields[field] = field_value
else:
data[field.attname] = value
# Handle all other fields
else:
try:
data[field.name] = field.to_python(field_value)
except Exception as e:
raise base.DeserializationError.WithData(e, d['model'], d.get('pk'), field_value)
obj = base.build_instance(Model, data, using)
yield base.DeserializedObject(obj, m2m_data, deferred_fields) | [
"def",
"Deserializer",
"(",
"object_list",
",",
"*",
",",
"using",
"=",
"DEFAULT_DB_ALIAS",
",",
"ignorenonexistent",
"=",
"False",
",",
"*",
"*",
"options",
")",
":",
"handle_forward_references",
"=",
"options",
".",
"pop",
"(",
"'handle_forward_references'",
",",
"False",
")",
"field_names_cache",
"=",
"{",
"}",
"# Model: <list of field_names>",
"for",
"d",
"in",
"object_list",
":",
"# Look up the model and starting build a dict of data for it.",
"try",
":",
"Model",
"=",
"_get_model",
"(",
"d",
"[",
"\"model\"",
"]",
")",
"except",
"base",
".",
"DeserializationError",
":",
"if",
"ignorenonexistent",
":",
"continue",
"else",
":",
"raise",
"data",
"=",
"{",
"}",
"if",
"'pk'",
"in",
"d",
":",
"try",
":",
"data",
"[",
"Model",
".",
"_meta",
".",
"pk",
".",
"attname",
"]",
"=",
"Model",
".",
"_meta",
".",
"pk",
".",
"to_python",
"(",
"d",
".",
"get",
"(",
"'pk'",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"base",
".",
"DeserializationError",
".",
"WithData",
"(",
"e",
",",
"d",
"[",
"'model'",
"]",
",",
"d",
".",
"get",
"(",
"'pk'",
")",
",",
"None",
")",
"m2m_data",
"=",
"{",
"}",
"deferred_fields",
"=",
"{",
"}",
"if",
"Model",
"not",
"in",
"field_names_cache",
":",
"field_names_cache",
"[",
"Model",
"]",
"=",
"{",
"f",
".",
"name",
"for",
"f",
"in",
"Model",
".",
"_meta",
".",
"get_fields",
"(",
")",
"}",
"field_names",
"=",
"field_names_cache",
"[",
"Model",
"]",
"# Handle each field",
"for",
"(",
"field_name",
",",
"field_value",
")",
"in",
"d",
"[",
"\"fields\"",
"]",
".",
"items",
"(",
")",
":",
"if",
"ignorenonexistent",
"and",
"field_name",
"not",
"in",
"field_names",
":",
"# skip fields no longer on model",
"continue",
"field",
"=",
"Model",
".",
"_meta",
".",
"get_field",
"(",
"field_name",
")",
"# Handle M2M relations",
"if",
"field",
".",
"remote_field",
"and",
"isinstance",
"(",
"field",
".",
"remote_field",
",",
"models",
".",
"ManyToManyRel",
")",
":",
"try",
":",
"values",
"=",
"base",
".",
"deserialize_m2m_values",
"(",
"field",
",",
"field_value",
",",
"using",
",",
"handle_forward_references",
")",
"except",
"base",
".",
"M2MDeserializationError",
"as",
"e",
":",
"raise",
"base",
".",
"DeserializationError",
".",
"WithData",
"(",
"e",
".",
"original_exc",
",",
"d",
"[",
"'model'",
"]",
",",
"d",
".",
"get",
"(",
"'pk'",
")",
",",
"e",
".",
"pk",
")",
"if",
"values",
"==",
"base",
".",
"DEFER_FIELD",
":",
"deferred_fields",
"[",
"field",
"]",
"=",
"field_value",
"else",
":",
"m2m_data",
"[",
"field",
".",
"name",
"]",
"=",
"values",
"# Handle FK fields",
"elif",
"field",
".",
"remote_field",
"and",
"isinstance",
"(",
"field",
".",
"remote_field",
",",
"models",
".",
"ManyToOneRel",
")",
":",
"try",
":",
"value",
"=",
"base",
".",
"deserialize_fk_value",
"(",
"field",
",",
"field_value",
",",
"using",
",",
"handle_forward_references",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"base",
".",
"DeserializationError",
".",
"WithData",
"(",
"e",
",",
"d",
"[",
"'model'",
"]",
",",
"d",
".",
"get",
"(",
"'pk'",
")",
",",
"field_value",
")",
"if",
"value",
"==",
"base",
".",
"DEFER_FIELD",
":",
"deferred_fields",
"[",
"field",
"]",
"=",
"field_value",
"else",
":",
"data",
"[",
"field",
".",
"attname",
"]",
"=",
"value",
"# Handle all other fields",
"else",
":",
"try",
":",
"data",
"[",
"field",
".",
"name",
"]",
"=",
"field",
".",
"to_python",
"(",
"field_value",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"base",
".",
"DeserializationError",
".",
"WithData",
"(",
"e",
",",
"d",
"[",
"'model'",
"]",
",",
"d",
".",
"get",
"(",
"'pk'",
")",
",",
"field_value",
")",
"obj",
"=",
"base",
".",
"build_instance",
"(",
"Model",
",",
"data",
",",
"using",
")",
"yield",
"base",
".",
"DeserializedObject",
"(",
"obj",
",",
"m2m_data",
",",
"deferred_fields",
")"
] | [
79,
0
] | [
148,
69
] | python | en | ['en', 'error', 'th'] | False |
_get_model | (model_identifier) | Look up a model from an "app_label.model_name" string. | Look up a model from an "app_label.model_name" string. | def _get_model(model_identifier):
"""Look up a model from an "app_label.model_name" string."""
try:
return apps.get_model(model_identifier)
except (LookupError, TypeError):
raise base.DeserializationError("Invalid model identifier: '%s'" % model_identifier) | [
"def",
"_get_model",
"(",
"model_identifier",
")",
":",
"try",
":",
"return",
"apps",
".",
"get_model",
"(",
"model_identifier",
")",
"except",
"(",
"LookupError",
",",
"TypeError",
")",
":",
"raise",
"base",
".",
"DeserializationError",
"(",
"\"Invalid model identifier: '%s'\"",
"%",
"model_identifier",
")"
] | [
151,
0
] | [
156,
92
] | python | en | ['en', 'en', 'en'] | True |
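A hedged round-trip sketch for the 'python' deserializer above. 'myapp.person' is a hypothetical model label, and the snippet assumes a configured Django project (settings loaded, django.setup() called).

from django.core import serializers

object_list = [{
    'model': 'myapp.person',                      # app_label.model_name, looked up via _get_model()
    'pk': 1,
    'fields': {'name': 'Ada', 'groups': []},      # M2M fields are given as lists of pks
}]
for deserialized in serializers.deserialize('python', object_list, ignorenonexistent=True):
    deserialized.save()                           # saves the instance, then its m2m_data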
Kernel.pred_bias | (self) |
閾値の枚数からの推定
:return: 閾値の百分率
|
閾値の枚数からの推定
:return: 閾値の百分率
| def pred_bias(self):
"""
閾値の枚数からの推定
:return: 閾値の百分率
"""
default = 90
bias = 0.0
IMG_NUM = adjuster = self.params['crawler']['target_num']
if IMG_NUM >= 100:
bias += 2.0
adjuster -= IMG_NUM
while True:
if adjuster <= 0 or bias >= 8.0:
break
else:
bias += 0.3
adjuster -= 20
elif IMG_NUM < 100:
while True:
if adjuster <= 0:
break
else:
bias -= 0.5
adjuster -= 5
default += bias
return default | [
"def",
"pred_bias",
"(",
"self",
")",
":",
"default",
"=",
"90",
"bias",
"=",
"0.0",
"IMG_NUM",
"=",
"adjuster",
"=",
"self",
".",
"params",
"[",
"'crawler'",
"]",
"[",
"'target_num'",
"]",
"if",
"IMG_NUM",
">=",
"100",
":",
"bias",
"+=",
"2.0",
"adjuster",
"-=",
"IMG_NUM",
"while",
"True",
":",
"if",
"adjuster",
"<=",
"0",
"or",
"bias",
">=",
"8.0",
":",
"break",
"else",
":",
"bias",
"+=",
"0.3",
"adjuster",
"-=",
"20",
"elif",
"IMG_NUM",
"<",
"100",
":",
"while",
"True",
":",
"if",
"adjuster",
"<=",
"0",
":",
"break",
"else",
":",
"bias",
"-=",
"0.5",
"adjuster",
"-=",
"5",
"default",
"+=",
"bias",
"return",
"default"
] | [
96,
4
] | [
121,
22
] | python | en | ['en', 'error', 'th'] | False |
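A standalone re-implementation used only to trace the arithmetic of pred_bias(). Note that for target_num >= 100 the adjuster is reduced by target_num straight away, so the inner loop never runs and the result is always 92.0.

def pred_bias(target_num):
    default, bias, adjuster = 90, 0.0, target_num
    if target_num >= 100:
        bias += 2.0
        adjuster -= target_num          # adjuster drops to 0 here
        while adjuster > 0 and bias < 8.0:
            bias += 0.3
            adjuster -= 20
    else:
        while adjuster > 0:
            bias -= 0.5
            adjuster -= 5
    return default + bias

print(pred_bias(300))   # 92.0
print(pred_bias(50))    # 85.0 (ten 0.5-point decrements)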
Highmap.__init__ | (self, **kwargs) |
This is the base class for all the charts. The following keywords are
accepted:
:keyword: **display_container** - default: ``True``
|
This is the base class for all the charts. The following keywords are
accepted:
:keyword: **display_container** - default: ``True``
| def __init__(self, **kwargs):
"""
This is the base class for all the charts. The following keywords are
accepted:
:keyword: **display_container** - default: ``True``
"""
# Set the model
self.model = self.__class__.__name__ #: The chart model,
self.div_name = kwargs.get("renderTo", "container")
# An Instance of Jinja2 template
self.template_page_highcharts = template_page
self.template_content_highcharts = template_content
# Set Javascript src
self.JSsource = [
'https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js',
'https://code.highcharts.com/maps/6/highmaps.js',
'https://code.highcharts.com/6/highcharts.js',
'https://code.highcharts.com/maps/6/modules/map.js',
'https://code.highcharts.com/maps/6/modules/data.js',
'https://code.highcharts.com/maps/6/modules/exporting.js'
]
# set CSS src
self.CSSsource = [
'https://www.highcharts.com/highslide/highslide.css',
]
# Set data
self.data = []
self.data_temp = []
self.data_is_coordinate = False
# Data from jsonp
self.jsonp_data_flag = False
# Set drilldown data
self.drilldown_data = []
self.drilldown_data_temp = []
# Map
self.mapdata_flag = False
self.map = None
# Jsonp map
self.jsonp_map_flag = kwargs.get('jsonp_map_flag', False)
# Javascript
self.jscript_head_flag = False
self.jscript_head = kwargs.get('jscript_head', None)
self.jscript_end_flag = False
self.jscript_end = kwargs.get('jscript_end', None)
# Accepted keywords
self.div_style = kwargs.get('style', '')
self.drilldown_flag = kwargs.get('drilldown_flag', False)
# None keywords attribute that should be modified by methods
# We should change all these to _attr
self._htmlcontent = '' #: written by buildhtml
self.htmlheader = ''
# Place holder for the graph (the HTML div)
# Written by ``buildcontainer``
self.container = u''
# Header for javascript code
self.containerheader = u''
# Loading message
self.loading = 'Loading....'
# Bind Base Classes to self
self.options = {
"chart": ChartOptions(),
#"colorAxis": # cannot input until there is data, do it later
"colors": ColorsOptions(),
"credits": CreditsOptions(),
#"data": #NotImplemented
"drilldown": DrilldownOptions(),
"exporting": ExportingOptions(),
"labels": LabelsOptions(),
"legend": LegendOptions(),
"loading": LoadingOptions(),
"mapNavigation": MapNavigationOptions(),
"navigation": NavigationOptions(),
"plotOptions": PlotOptions(),
"series": SeriesData(),
"subtitle": SubtitleOptions(),
"title": TitleOptions(),
"tooltip": TooltipOptions(),
"xAxis": xAxisOptions(),
"yAxis": yAxisOptions(),
}
self.setOptions = {
"global": GlobalOptions(),
"lang": LangOptions(),
}
self.__load_defaults__()
# Process kwargs
allowed_kwargs = [
"width",
"height",
"renderTo",
"backgroundColor",
"events",
"marginBottom",
"marginTop",
"marginRight",
"marginLeft"
]
for keyword in allowed_kwargs:
if keyword in kwargs:
self.options['chart'].update_dict(**{keyword:kwargs[keyword]})
# Some Extra Vals to store:
self.data_set_count = 0
self.drilldown_data_set_count = 0 | [
"def",
"__init__",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"# Set the model",
"self",
".",
"model",
"=",
"self",
".",
"__class__",
".",
"__name__",
"#: The chart model,",
"self",
".",
"div_name",
"=",
"kwargs",
".",
"get",
"(",
"\"renderTo\"",
",",
"\"container\"",
")",
"# An Instance of Jinja2 template",
"self",
".",
"template_page_highcharts",
"=",
"template_page",
"self",
".",
"template_content_highcharts",
"=",
"template_content",
"# Set Javascript src",
"self",
".",
"JSsource",
"=",
"[",
"'https://ajax.googleapis.com/ajax/libs/jquery/1.9.1/jquery.min.js'",
",",
"'https://code.highcharts.com/maps/6/highmaps.js'",
",",
"'https://code.highcharts.com/6/highcharts.js'",
",",
"'https://code.highcharts.com/maps/6/modules/map.js'",
",",
"'https://code.highcharts.com/maps/6/modules/data.js'",
",",
"'https://code.highcharts.com/maps/6/modules/exporting.js'",
"]",
"# set CSS src",
"self",
".",
"CSSsource",
"=",
"[",
"'https://www.highcharts.com/highslide/highslide.css'",
",",
"]",
"# Set data",
"self",
".",
"data",
"=",
"[",
"]",
"self",
".",
"data_temp",
"=",
"[",
"]",
"self",
".",
"data_is_coordinate",
"=",
"False",
"# Data from jsonp",
"self",
".",
"jsonp_data_flag",
"=",
"False",
"# Set drilldown data",
"self",
".",
"drilldown_data",
"=",
"[",
"]",
"self",
".",
"drilldown_data_temp",
"=",
"[",
"]",
"# Map",
"self",
".",
"mapdata_flag",
"=",
"False",
"self",
".",
"map",
"=",
"None",
"# Jsonp map",
"self",
".",
"jsonp_map_flag",
"=",
"kwargs",
".",
"get",
"(",
"'jsonp_map_flag'",
",",
"False",
")",
"# Javascript",
"self",
".",
"jscript_head_flag",
"=",
"False",
"self",
".",
"jscript_head",
"=",
"kwargs",
".",
"get",
"(",
"'jscript_head'",
",",
"None",
")",
"self",
".",
"jscript_end_flag",
"=",
"False",
"self",
".",
"jscript_end",
"=",
"kwargs",
".",
"get",
"(",
"'jscript_end'",
",",
"None",
")",
"# Accepted keywords",
"self",
".",
"div_style",
"=",
"kwargs",
".",
"get",
"(",
"'style'",
",",
"''",
")",
"self",
".",
"drilldown_flag",
"=",
"kwargs",
".",
"get",
"(",
"'drilldown_flag'",
",",
"False",
")",
"# None keywords attribute that should be modified by methods",
"# We should change all these to _attr",
"self",
".",
"_htmlcontent",
"=",
"''",
"#: written by buildhtml",
"self",
".",
"htmlheader",
"=",
"''",
"# Place holder for the graph (the HTML div)",
"# Written by ``buildcontainer``",
"self",
".",
"container",
"=",
"u''",
"# Header for javascript code",
"self",
".",
"containerheader",
"=",
"u''",
"# Loading message",
"self",
".",
"loading",
"=",
"'Loading....'",
"# Bind Base Classes to self",
"self",
".",
"options",
"=",
"{",
"\"chart\"",
":",
"ChartOptions",
"(",
")",
",",
"#\"colorAxis\": # cannot input until there is data, do it later",
"\"colors\"",
":",
"ColorsOptions",
"(",
")",
",",
"\"credits\"",
":",
"CreditsOptions",
"(",
")",
",",
"#\"data\": #NotImplemented",
"\"drilldown\"",
":",
"DrilldownOptions",
"(",
")",
",",
"\"exporting\"",
":",
"ExportingOptions",
"(",
")",
",",
"\"labels\"",
":",
"LabelsOptions",
"(",
")",
",",
"\"legend\"",
":",
"LegendOptions",
"(",
")",
",",
"\"loading\"",
":",
"LoadingOptions",
"(",
")",
",",
"\"mapNavigation\"",
":",
"MapNavigationOptions",
"(",
")",
",",
"\"navigation\"",
":",
"NavigationOptions",
"(",
")",
",",
"\"plotOptions\"",
":",
"PlotOptions",
"(",
")",
",",
"\"series\"",
":",
"SeriesData",
"(",
")",
",",
"\"subtitle\"",
":",
"SubtitleOptions",
"(",
")",
",",
"\"title\"",
":",
"TitleOptions",
"(",
")",
",",
"\"tooltip\"",
":",
"TooltipOptions",
"(",
")",
",",
"\"xAxis\"",
":",
"xAxisOptions",
"(",
")",
",",
"\"yAxis\"",
":",
"yAxisOptions",
"(",
")",
",",
"}",
"self",
".",
"setOptions",
"=",
"{",
"\"global\"",
":",
"GlobalOptions",
"(",
")",
",",
"\"lang\"",
":",
"LangOptions",
"(",
")",
",",
"}",
"self",
".",
"__load_defaults__",
"(",
")",
"# Process kwargs",
"allowed_kwargs",
"=",
"[",
"\"width\"",
",",
"\"height\"",
",",
"\"renderTo\"",
",",
"\"backgroundColor\"",
",",
"\"events\"",
",",
"\"marginBottom\"",
",",
"\"marginTop\"",
",",
"\"marginRight\"",
",",
"\"marginLeft\"",
"]",
"for",
"keyword",
"in",
"allowed_kwargs",
":",
"if",
"keyword",
"in",
"kwargs",
":",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"update_dict",
"(",
"*",
"*",
"{",
"keyword",
":",
"kwargs",
"[",
"keyword",
"]",
"}",
")",
"# Some Extra Vals to store:",
"self",
".",
"data_set_count",
"=",
"0",
"self",
".",
"drilldown_data_set_count",
"=",
"0"
] | [
51,
4
] | [
169,
41
] | python | en | ['en', 'error', 'th'] | False |
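A minimal construction sketch for the initializer documented above. The import path is an assumption about the python-highcharts package (it is not stated in this record), and the size and container id are illustrative.

from highcharts import Highmap   # assumed import path for the python-highcharts package

# width, height and renderTo appear in allowed_kwargs, so __init__ forwards them
# into options['chart']; renderTo also becomes the id of the container <div>.
chart = Highmap(width=800, height=500, renderTo='us_map_container')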
Highmap.add_JSsource | (self, new_src) | add additional js script source(s) | add additional js script source(s) | def add_JSsource(self, new_src):
"""add additional js script source(s)"""
if isinstance(new_src, list):
for h in new_src:
self.JSsource.append(h)
elif isinstance(new_src, basestring):
self.JSsource.append(new_src)
else:
raise OptionTypeError("Option: %s Not Allowed For Series Type: %s" % type(new_src)) | [
"def",
"add_JSsource",
"(",
"self",
",",
"new_src",
")",
":",
"if",
"isinstance",
"(",
"new_src",
",",
"list",
")",
":",
"for",
"h",
"in",
"new_src",
":",
"self",
".",
"JSsource",
".",
"append",
"(",
"h",
")",
"elif",
"isinstance",
"(",
"new_src",
",",
"basestring",
")",
":",
"self",
".",
"JSsource",
".",
"append",
"(",
"new_src",
")",
"else",
":",
"raise",
"OptionTypeError",
"(",
"\"Option: %s Not Allowed For Series Type: %s\"",
"%",
"type",
"(",
"new_src",
")",
")"
] | [
178,
4
] | [
186,
95
] | python | en | ['en', 'co', 'en'] | True |
Highmap.add_CSSsource | (self, new_src) | add additional css source(s) | add additional css source(s) | def add_CSSsource(self, new_src):
"""add additional css source(s)"""
if isinstance(new_src, list):
for h in new_src:
self.CSSsource.append(h)
elif isinstance(new_src, basestring):
self.CSSsource.append(new_src)
else:
raise OptionTypeError("Option: %s Not Allowed For Series Type: %s" % type(new_src)) | [
"def",
"add_CSSsource",
"(",
"self",
",",
"new_src",
")",
":",
"if",
"isinstance",
"(",
"new_src",
",",
"list",
")",
":",
"for",
"h",
"in",
"new_src",
":",
"self",
".",
"CSSsource",
".",
"append",
"(",
"h",
")",
"elif",
"isinstance",
"(",
"new_src",
",",
"basestring",
")",
":",
"self",
".",
"CSSsource",
".",
"append",
"(",
"new_src",
")",
"else",
":",
"raise",
"OptionTypeError",
"(",
"\"Option: %s Not Allowed For Series Type: %s\"",
"%",
"type",
"(",
"new_src",
")",
")"
] | [
189,
4
] | [
197,
95
] | python | en | ['en', 'en', 'en'] | True |
Highmap.add_data_set | (self, data, series_type="map", name=None, is_coordinate = False, **kwargs) | set data for series option in highmaps | set data for series option in highmaps | def add_data_set(self, data, series_type="map", name=None, is_coordinate = False, **kwargs):
"""set data for series option in highmaps """
self.data_set_count += 1
if not name:
name = "Series %d" % self.data_set_count
kwargs.update({'name':name})
if is_coordinate:
self.data_is_coordinate = True
self.add_JSsource('https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.3.6/proj4.js')
if self.map and not self.data_temp:
series_data = Series([], series_type='map', **{'mapData': self.map})
series_data.__options__().update(SeriesOptions(series_type='map', **{'mapData': self.map}).__options__())
self.data_temp.append(series_data)
if self.map and 'mapData' in kwargs.keys():
kwargs.update({'mapData': self.map})
series_data = Series(data, series_type=series_type, **kwargs)
series_data.__options__().update(SeriesOptions(series_type=series_type, **kwargs).__options__())
self.data_temp.append(series_data) | [
"def",
"add_data_set",
"(",
"self",
",",
"data",
",",
"series_type",
"=",
"\"map\"",
",",
"name",
"=",
"None",
",",
"is_coordinate",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"data_set_count",
"+=",
"1",
"if",
"not",
"name",
":",
"name",
"=",
"\"Series %d\"",
"%",
"self",
".",
"data_set_count",
"kwargs",
".",
"update",
"(",
"{",
"'name'",
":",
"name",
"}",
")",
"if",
"is_coordinate",
":",
"self",
".",
"data_is_coordinate",
"=",
"True",
"self",
".",
"add_JSsource",
"(",
"'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.3.6/proj4.js'",
")",
"if",
"self",
".",
"map",
"and",
"not",
"self",
".",
"data_temp",
":",
"series_data",
"=",
"Series",
"(",
"[",
"]",
",",
"series_type",
"=",
"'map'",
",",
"*",
"*",
"{",
"'mapData'",
":",
"self",
".",
"map",
"}",
")",
"series_data",
".",
"__options__",
"(",
")",
".",
"update",
"(",
"SeriesOptions",
"(",
"series_type",
"=",
"'map'",
",",
"*",
"*",
"{",
"'mapData'",
":",
"self",
".",
"map",
"}",
")",
".",
"__options__",
"(",
")",
")",
"self",
".",
"data_temp",
".",
"append",
"(",
"series_data",
")",
"if",
"self",
".",
"map",
"and",
"'mapData'",
"in",
"kwargs",
".",
"keys",
"(",
")",
":",
"kwargs",
".",
"update",
"(",
"{",
"'mapData'",
":",
"self",
".",
"map",
"}",
")",
"series_data",
"=",
"Series",
"(",
"data",
",",
"series_type",
"=",
"series_type",
",",
"*",
"*",
"kwargs",
")",
"series_data",
".",
"__options__",
"(",
")",
".",
"update",
"(",
"SeriesOptions",
"(",
"series_type",
"=",
"series_type",
",",
"*",
"*",
"kwargs",
")",
".",
"__options__",
"(",
")",
")",
"self",
".",
"data_temp",
".",
"append",
"(",
"series_data",
")"
] | [
200,
4
] | [
222,
42
] | python | en | ['en', 'en', 'en'] | True |
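A hedged usage sketch for add_data_set. The joinBy/dataLabels keywords and the 'hc-key' values follow common Highmaps conventions rather than this record, and the map script URL is only assumed to be reachable.

from highcharts import Highmap   # assumed import path

chart = Highmap(renderTo='us_map_container')
chart.set_map_source('https://code.highcharts.com/mapdata/countries/us/us-all.js')

population = [                    # illustrative values only
    {'hc-key': 'us-ca', 'value': 39},
    {'hc-key': 'us-tx', 'value': 29},
    {'hc-key': 'us-ny', 'value': 19},
]
# Extra keyword arguments are passed straight through to Series/SeriesOptions.
chart.add_data_set(population, series_type='map', name='Population (millions)',
                   joinBy='hc-key', dataLabels={'enabled': True, 'format': '{point.name}'})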
Highmap.add_drilldown_data_set | (self, data, series_type, id, **kwargs) | set data for drilldown option in highmaps
id must be provided and must correspond to the drilldown arguments in the data series
| set data for drilldown option in highmaps
id must be provided and must correspond to the drilldown arguments in the data series
| def add_drilldown_data_set(self, data, series_type, id, **kwargs):
"""set data for drilldown option in highmaps
id must be provided and must correspond to the drilldown arguments in the data series
"""
self.drilldown_data_set_count += 1
if self.drilldown_flag == False:
self.drilldown_flag = True
kwargs.update({'id':id})
series_data = Series(data, series_type=series_type, **kwargs)
series_data.__options__().update(SeriesOptions(series_type=series_type, **kwargs).__options__())
self.drilldown_data_temp.append(series_data) | [
"def",
"add_drilldown_data_set",
"(",
"self",
",",
"data",
",",
"series_type",
",",
"id",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"drilldown_data_set_count",
"+=",
"1",
"if",
"self",
".",
"drilldown_flag",
"==",
"False",
":",
"self",
".",
"drilldown_flag",
"=",
"True",
"kwargs",
".",
"update",
"(",
"{",
"'id'",
":",
"id",
"}",
")",
"series_data",
"=",
"Series",
"(",
"data",
",",
"series_type",
"=",
"series_type",
",",
"*",
"*",
"kwargs",
")",
"series_data",
".",
"__options__",
"(",
")",
".",
"update",
"(",
"SeriesOptions",
"(",
"series_type",
"=",
"series_type",
",",
"*",
"*",
"kwargs",
")",
".",
"__options__",
"(",
")",
")",
"self",
".",
"drilldown_data_temp",
".",
"append",
"(",
"series_data",
")"
] | [
225,
4
] | [
236,
52
] | python | en | ['en', 'en', 'en'] | True |
Highmap.add_data_from_jsonp | (self, data_src, data_name = 'json_data', series_type="map", name=None, **kwargs) | add data directly from a https source
the data_src is the https link for data using jsonp
| add data directly from a https source
the data_src is the https link for data using jsonp
| def add_data_from_jsonp(self, data_src, data_name = 'json_data', series_type="map", name=None, **kwargs):
"""add data directly from a https source
the data_src is the https link for data using jsonp
"""
self.jsonp_data_flag = True
self.jsonp_data_url = json.dumps(data_src)
if data_name == 'data':
data_name = 'json_'+ data_name
self.jsonp_data = data_name
self.add_data_set(RawJavaScriptText(data_name), series_type, name=name, **kwargs) | [
"def",
"add_data_from_jsonp",
"(",
"self",
",",
"data_src",
",",
"data_name",
"=",
"'json_data'",
",",
"series_type",
"=",
"\"map\"",
",",
"name",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"jsonp_data_flag",
"=",
"True",
"self",
".",
"jsonp_data_url",
"=",
"json",
".",
"dumps",
"(",
"data_src",
")",
"if",
"data_name",
"==",
"'data'",
":",
"data_name",
"=",
"'json_'",
"+",
"data_name",
"self",
".",
"jsonp_data",
"=",
"data_name",
"self",
".",
"add_data_set",
"(",
"RawJavaScriptText",
"(",
"data_name",
")",
",",
"series_type",
",",
"name",
"=",
"name",
",",
"*",
"*",
"kwargs",
")"
] | [
239,
4
] | [
248,
89
] | python | en | ['en', 'en', 'en'] | True |
Highmap.add_JSscript | (self, js_script, js_loc) | add (highcharts) javascript in the beginning or at the end of script
use only if necessary
| add (highcharts) javascript in the beginning or at the end of script
use only if necessary
| def add_JSscript(self, js_script, js_loc):
"""add (highcharts) javascript in the beginning or at the end of script
use only if necessary
"""
if js_loc == 'head':
self.jscript_head_flag = True
if self.jscript_head:
self.jscript_head = self.jscript_head + '\n' + js_script
else:
self.jscript_head = js_script
elif js_loc == 'end':
self.jscript_end_flag = True
if self.jscript_end:
self.jscript_end = self.jscript_end + '\n' + js_script
else:
self.jscript_end = js_script
else:
raise OptionTypeError("Not An Accepted script location: %s, either 'head' or 'end'"
% js_loc) | [
"def",
"add_JSscript",
"(",
"self",
",",
"js_script",
",",
"js_loc",
")",
":",
"if",
"js_loc",
"==",
"'head'",
":",
"self",
".",
"jscript_head_flag",
"=",
"True",
"if",
"self",
".",
"jscript_head",
":",
"self",
".",
"jscript_head",
"=",
"self",
".",
"jscript_head",
"+",
"'\\n'",
"+",
"js_script",
"else",
":",
"self",
".",
"jscript_head",
"=",
"js_script",
"elif",
"js_loc",
"==",
"'end'",
":",
"self",
".",
"jscript_end_flag",
"=",
"True",
"if",
"self",
".",
"jscript_end",
":",
"self",
".",
"jscript_end",
"=",
"self",
".",
"jscript_end",
"+",
"'\\n'",
"+",
"js_script",
"else",
":",
"self",
".",
"jscript_end",
"=",
"js_script",
"else",
":",
"raise",
"OptionTypeError",
"(",
"\"Not An Accepted script location: %s, either 'head' or 'end'\"",
"%",
"js_loc",
")"
] | [
251,
4
] | [
269,
41
] | python | en | ['en', 'en', 'en'] | True |
Highmap.set_map_source | (self, map_src, jsonp_map = False) | set map data
use if the mapData is loaded directly from a https source
the map_src is the https link for the mapData
geojson (from jsonp) or .js formats are acceptable
default is js script from highcharts' map collection: https://code.highcharts.com/mapdata/
| set map data
use if the mapData is loaded directly from a https source
the map_src is the https link for the mapData
geojson (from jsonp) or .js formats are acceptable
default is js script from highcharts' map collection: https://code.highcharts.com/mapdata/
| def set_map_source(self, map_src, jsonp_map = False):
"""set map data
use if the mapData is loaded directly from a https source
the map_src is the https link for the mapData
geojson (from jsonp) or .js formats are acceptable
default is js script from highcharts' map collection: https://code.highcharts.com/mapdata/
"""
if not map_src:
raise OptionTypeError("No map source input, please refer to: https://code.highcharts.com/mapdata/")
if jsonp_map:
self.jsonp_map_flag = True
self.map = 'geojson'
self.jsonp_map_url = json.dumps(map_src)
else:
self.add_JSsource(map_src)
map_name = self._get_jsmap_name(map_src)
self.map = 'geojson'
self.jsmap = self.map + ' = Highcharts.geojson(' + map_name + ');'
self.add_JSscript('var ' + self.jsmap, 'head')
if self.data_temp:
self.data_temp[0].__options__().update({'mapData': MapObject(self.map)}) | [
"def",
"set_map_source",
"(",
"self",
",",
"map_src",
",",
"jsonp_map",
"=",
"False",
")",
":",
"if",
"not",
"map_src",
":",
"raise",
"OptionTypeError",
"(",
"\"No map source input, please refer to: https://code.highcharts.com/mapdata/\"",
")",
"if",
"jsonp_map",
":",
"self",
".",
"jsonp_map_flag",
"=",
"True",
"self",
".",
"map",
"=",
"'geojson'",
"self",
".",
"jsonp_map_url",
"=",
"json",
".",
"dumps",
"(",
"map_src",
")",
"else",
":",
"self",
".",
"add_JSsource",
"(",
"map_src",
")",
"map_name",
"=",
"self",
".",
"_get_jsmap_name",
"(",
"map_src",
")",
"self",
".",
"map",
"=",
"'geojson'",
"self",
".",
"jsmap",
"=",
"self",
".",
"map",
"+",
"' = Highcharts.geojson('",
"+",
"map_name",
"+",
"');'",
"self",
".",
"add_JSscript",
"(",
"'var '",
"+",
"self",
".",
"jsmap",
",",
"'head'",
")",
"if",
"self",
".",
"data_temp",
":",
"self",
".",
"data_temp",
"[",
"0",
"]",
".",
"__options__",
"(",
")",
".",
"update",
"(",
"{",
"'mapData'",
":",
"MapObject",
"(",
"self",
".",
"map",
")",
"}",
")"
] | [
291,
4
] | [
314,
84
] | python | en | ['nl', 'jv', 'en'] | False |
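The two loading modes of set_map_source, sketched with an assumed import; the first URL follows the Highcharts map collection layout, the second is a placeholder for a GeoJSON endpoint.

from highcharts import Highmap   # assumed import path

# Mode 1: a .js map script; it is appended to JSsource and converted with
# Highcharts.geojson(...) by the small script injected at the head.
chart_a = Highmap()
chart_a.set_map_source('https://code.highcharts.com/mapdata/countries/us/us-all.js')

# Mode 2: GeoJSON fetched via JSONP at render time; only the URL is stored here.
chart_b = Highmap()
chart_b.set_map_source('https://example.com/us-counties.geo.json',   # placeholder URL
                       jsonp_map=True)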
Highmap.set_options | (self, option_type, option_dict, force_options=False) | set plot options | set plot options | def set_options(self, option_type, option_dict, force_options=False):
"""set plot options"""
if force_options: # not to use unless it is really needed
self.options[option_type].update(option_dict)
elif (option_type == 'yAxis' or option_type == 'xAxis') and isinstance(option_dict, list):
self.options[option_type] = MultiAxis(option_type)
for each_dict in option_dict:
self.options[option_type].update(**each_dict)
elif option_type == 'colors':
self.options["colors"].set_colors(option_dict) # option_dict should be a list
elif option_type == 'colorAxis':
self.options.update({'colorAxis': self.options.get('colorAxis', ColorAxisOptions())})
self.options[option_type].update_dict(**option_dict)
elif option_type in ["global" , "lang"]:
self.setOptions[option_type].update_dict(**option_dict)
else:
self.options[option_type].update_dict(**option_dict) | [
"def",
"set_options",
"(",
"self",
",",
"option_type",
",",
"option_dict",
",",
"force_options",
"=",
"False",
")",
":",
"if",
"force_options",
":",
"# not to use unless it is really needed",
"self",
".",
"options",
"[",
"option_type",
"]",
".",
"update",
"(",
"option_dict",
")",
"elif",
"(",
"option_type",
"==",
"'yAxis'",
"or",
"option_type",
"==",
"'xAxis'",
")",
"and",
"isinstance",
"(",
"option_dict",
",",
"list",
")",
":",
"self",
".",
"options",
"[",
"option_type",
"]",
"=",
"MultiAxis",
"(",
"option_type",
")",
"for",
"each_dict",
"in",
"option_dict",
":",
"self",
".",
"options",
"[",
"option_type",
"]",
".",
"update",
"(",
"*",
"*",
"each_dict",
")",
"elif",
"option_type",
"==",
"'colors'",
":",
"self",
".",
"options",
"[",
"\"colors\"",
"]",
".",
"set_colors",
"(",
"option_dict",
")",
"# option_dict should be a list",
"elif",
"option_type",
"==",
"'colorAxis'",
":",
"self",
".",
"options",
".",
"update",
"(",
"{",
"'colorAxis'",
":",
"self",
".",
"options",
".",
"get",
"(",
"'colorAxis'",
",",
"ColorAxisOptions",
"(",
")",
")",
"}",
")",
"self",
".",
"options",
"[",
"option_type",
"]",
".",
"update_dict",
"(",
"*",
"*",
"option_dict",
")",
"elif",
"option_type",
"in",
"[",
"\"global\"",
",",
"\"lang\"",
"]",
":",
"self",
".",
"setOptions",
"[",
"option_type",
"]",
".",
"update_dict",
"(",
"*",
"*",
"option_dict",
")",
"else",
":",
"self",
".",
"options",
"[",
"option_type",
"]",
".",
"update_dict",
"(",
"*",
"*",
"option_dict",
")"
] | [
316,
4
] | [
333,
64
] | python | en | ['en', 'bg', 'en'] | True |
Highmap.set_dict_options | (self, options) | for dictionary-like inputs (as object in Javascript)
options must be in python dictionary format
| for dictionary-like inputs (as object in Javascript)
options must be in python dictionary format
| def set_dict_options(self, options):
"""for dictionary-like inputs (as object in Javascript)
options must be in python dictionary format
"""
if isinstance(options, dict):
for key, option_data in options.items():
self.set_options(key, option_data)
else:
raise OptionTypeError("Not An Accepted Input Format: %s. Must be Dictionary" %type(options)) | [
"def",
"set_dict_options",
"(",
"self",
",",
"options",
")",
":",
"if",
"isinstance",
"(",
"options",
",",
"dict",
")",
":",
"for",
"key",
",",
"option_data",
"in",
"options",
".",
"items",
"(",
")",
":",
"self",
".",
"set_options",
"(",
"key",
",",
"option_data",
")",
"else",
":",
"raise",
"OptionTypeError",
"(",
"\"Not An Accepted Input Format: %s. Must be Dictionary\"",
"%",
"type",
"(",
"options",
")",
")"
] | [
335,
4
] | [
343,
104
] | python | en | ['en', 'en', 'en'] | True |
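A sketch of set_dict_options, which simply dispatches each top-level key to set_options; the keys mirror the options initialised in __init__, and the title text and colours are illustrative.

from highcharts import Highmap   # assumed import path

chart = Highmap()
chart.set_dict_options({
    'title': {'text': 'US population by state'},
    'colorAxis': {'min': 0, 'minColor': '#E6E7E8', 'maxColor': '#005645'},
    'mapNavigation': {'enabled': True},
})
# Equivalent to calling chart.set_options('title', {...}) once per key.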
Highmap._get_jsmap_name | (self, url) | return 'name' of the map in .js format | return 'name' of the map in .js format | def _get_jsmap_name(self, url):
"""return 'name' of the map in .js format"""
ret = urlopen(url)
return ret.read().decode('utf-8').split('=')[0].replace(" ", "") | [
"def",
"_get_jsmap_name",
"(",
"self",
",",
"url",
")",
":",
"ret",
"=",
"urlopen",
"(",
"url",
")",
"return",
"ret",
".",
"read",
"(",
")",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"split",
"(",
"'='",
")",
"[",
"0",
"]",
".",
"replace",
"(",
"\" \"",
",",
"\"\"",
")"
] | [
346,
4
] | [
350,
72
] | python | en | ['en', 'en', 'en'] | True |
Highmap.buildcontent | (self) | build HTML content only, no header or body tags | build HTML content only, no header or body tags | def buildcontent(self):
"""build HTML content only, no header or body tags"""
self.buildcontainer()
self.option = json.dumps(self.options, cls = HighchartsEncoder)
self.setoption = json.dumps(self.setOptions, cls = HighchartsEncoder)
self.data = json.dumps(self.data_temp, cls = HighchartsEncoder)
if self.drilldown_flag:
self.drilldown_data = json.dumps(self.drilldown_data_temp, cls = HighchartsEncoder)
self._htmlcontent = self.template_content_highcharts.render(chart=self).encode('utf-8') | [
"def",
"buildcontent",
"(",
"self",
")",
":",
"self",
".",
"buildcontainer",
"(",
")",
"self",
".",
"option",
"=",
"json",
".",
"dumps",
"(",
"self",
".",
"options",
",",
"cls",
"=",
"HighchartsEncoder",
")",
"self",
".",
"setoption",
"=",
"json",
".",
"dumps",
"(",
"self",
".",
"setOptions",
",",
"cls",
"=",
"HighchartsEncoder",
")",
"self",
".",
"data",
"=",
"json",
".",
"dumps",
"(",
"self",
".",
"data_temp",
",",
"cls",
"=",
"HighchartsEncoder",
")",
"if",
"self",
".",
"drilldown_flag",
":",
"self",
".",
"drilldown_data",
"=",
"json",
".",
"dumps",
"(",
"self",
".",
"drilldown_data_temp",
",",
"cls",
"=",
"HighchartsEncoder",
")",
"self",
".",
"_htmlcontent",
"=",
"self",
".",
"template_content_highcharts",
".",
"render",
"(",
"chart",
"=",
"self",
")",
".",
"encode",
"(",
"'utf-8'",
")"
] | [
353,
4
] | [
363,
95
] | python | en | ['en', 'en', 'en'] | True |
Highmap.buildhtml | (self) | Build the HTML page
Create the htmlheader with css / js
Create html page
| Build the HTML page
Create the htmlheader with css / js
Create html page
| def buildhtml(self):
"""Build the HTML page
Create the htmlheader with css / js
Create html page
"""
self.buildcontent()
self.buildhtmlheader()
self.content = self._htmlcontent.decode('utf-8') # need to ensure unicode
self._htmlcontent = self.template_page_highcharts.render(chart=self)
return self._htmlcontent | [
"def",
"buildhtml",
"(",
"self",
")",
":",
"self",
".",
"buildcontent",
"(",
")",
"self",
".",
"buildhtmlheader",
"(",
")",
"self",
".",
"content",
"=",
"self",
".",
"_htmlcontent",
".",
"decode",
"(",
"'utf-8'",
")",
"# need to ensure unicode",
"self",
".",
"_htmlcontent",
"=",
"self",
".",
"template_page_highcharts",
".",
"render",
"(",
"chart",
"=",
"self",
")",
"return",
"self",
".",
"_htmlcontent"
] | [
366,
4
] | [
375,
32
] | python | en | ['en', 'en', 'en'] | True |
Highmap.buildhtmlheader | (self) | generate HTML header content | generate HTML header content | def buildhtmlheader(self):
"""generate HTML header content"""
#Highcharts lib/ needs to make sure it's up to date
if self.drilldown_flag:
self.add_JSsource('https://code.highcharts.com/maps/modules/drilldown.js')
self.header_css = [
'<link href="%s" rel="stylesheet" />' % h for h in self.CSSsource
]
self.header_js = [
'<script type="text/javascript" src="%s"></script>' % h for h in self.JSsource
]
self.htmlheader = ''
for css in self.header_css:
self.htmlheader += css
for js in self.header_js:
self.htmlheader += js | [
"def",
"buildhtmlheader",
"(",
"self",
")",
":",
"#Highcharts lib/ needs to make sure it's up to date",
"if",
"self",
".",
"drilldown_flag",
":",
"self",
".",
"add_JSsource",
"(",
"'https://code.highcharts.com/maps/modules/drilldown.js'",
")",
"self",
".",
"header_css",
"=",
"[",
"'<link href=\"%s\" rel=\"stylesheet\" />'",
"%",
"h",
"for",
"h",
"in",
"self",
".",
"CSSsource",
"]",
"self",
".",
"header_js",
"=",
"[",
"'<script type=\"text/javascript\" src=\"%s\"></script>'",
"%",
"h",
"for",
"h",
"in",
"self",
".",
"JSsource",
"]",
"self",
".",
"htmlheader",
"=",
"''",
"for",
"css",
"in",
"self",
".",
"header_css",
":",
"self",
".",
"htmlheader",
"+=",
"css",
"for",
"js",
"in",
"self",
".",
"header_js",
":",
"self",
".",
"htmlheader",
"+=",
"js"
] | [
377,
4
] | [
396,
33
] | python | en | ['en', 'en', 'en'] | True |
Highmap.buildcontainer | (self) | generate HTML div | generate HTML div | def buildcontainer(self):
"""generate HTML div"""
if self.container:
return
# Create HTML div with style
if self.options['chart'].width:
if str(self.options['chart'].width)[-1] != '%':
self.div_style += 'width:%spx;' % self.options['chart'].width
else:
self.div_style += 'width:%s;' % self.options['chart'].width
if self.options['chart'].height:
if str(self.options['chart'].height)[-1] != '%':
self.div_style += 'height:%spx;' % self.options['chart'].height
else:
self.div_style += 'height:%s;' % self.options['chart'].height
self.div_name = self.options['chart'].__dict__['renderTo'] # recheck div name
self.container = self.containerheader + \
'<div id="%s" style="%s">%s</div>\n' % (self.div_name, self.div_style, self.loading) | [
"def",
"buildcontainer",
"(",
"self",
")",
":",
"if",
"self",
".",
"container",
":",
"return",
"# Create HTML div with style",
"if",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"width",
":",
"if",
"str",
"(",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"width",
")",
"[",
"-",
"1",
"]",
"!=",
"'%'",
":",
"self",
".",
"div_style",
"+=",
"'width:%spx;'",
"%",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"width",
"else",
":",
"self",
".",
"div_style",
"+=",
"'width:%s;'",
"%",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"width",
"if",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"height",
":",
"if",
"str",
"(",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"height",
")",
"[",
"-",
"1",
"]",
"!=",
"'%'",
":",
"self",
".",
"div_style",
"+=",
"'height:%spx;'",
"%",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"height",
"else",
":",
"self",
".",
"div_style",
"+=",
"'height:%s;'",
"%",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"height",
"self",
".",
"div_name",
"=",
"self",
".",
"options",
"[",
"'chart'",
"]",
".",
"__dict__",
"[",
"'renderTo'",
"]",
"# recheck div name",
"self",
".",
"container",
"=",
"self",
".",
"containerheader",
"+",
"'<div id=\"%s\" style=\"%s\">%s</div>\\n'",
"%",
"(",
"self",
".",
"div_name",
",",
"self",
".",
"div_style",
",",
"self",
".",
"loading",
")"
] | [
399,
4
] | [
417,
96
] | python | en | ['en', 'en', 'en'] | True |
Highmap.__str__ | (self) | return htmlcontent | return htmlcontent | def __str__(self):
"""return htmlcontent"""
#self.buildhtml()
return self.htmlcontent | [
"def",
"__str__",
"(",
"self",
")",
":",
"#self.buildhtml()",
"return",
"self",
".",
"htmlcontent"
] | [
440,
4
] | [
443,
31
] | python | en | ['en', 'no', 'en'] | False |
Highmap.save_file | (self, filename = 'Map') | save htmlcontent as .html file | save htmlcontent as .html file | def save_file(self, filename = 'Map'):
""" save htmlcontent as .html file """
filename = filename + '.html'
with open(filename, 'w') as f:
#self.buildhtml()
f.write(self.htmlcontent)
f.closed | [
"def",
"save_file",
"(",
"self",
",",
"filename",
"=",
"'Map'",
")",
":",
"filename",
"=",
"filename",
"+",
"'.html'",
"with",
"open",
"(",
"filename",
",",
"'w'",
")",
"as",
"f",
":",
"#self.buildhtml()",
"f",
".",
"write",
"(",
"self",
".",
"htmlcontent",
")",
"f",
".",
"closed"
] | [
445,
4
] | [
453,
16
] | python | en | ['en', 'en', 'en'] | True |
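Rendering and saving, assuming the chart has been assembled as in the earlier sketches; buildhtml returns a complete standalone page, while save_file writes it out and appends the .html suffix itself.

from highcharts import Highmap    # assumed import path

chart = Highmap()                 # populate with map source and data as shown earlier
page = chart.buildhtml()          # header (CSS/JS includes) + container <div> + chart script
with open('us_population.html', 'w') as f:
    f.write(page)

chart.save_file('us_population')  # alternative: writes us_population.html directly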
get_asgi_application | () |
The public interface to Django's ASGI support. Return an ASGI 3 callable.
Avoids making django.core.handlers.ASGIHandler a public API, in case the
internal implementation changes or moves in the future.
|
The public interface to Django's ASGI support. Return an ASGI 3 callable. | def get_asgi_application():
"""
The public interface to Django's ASGI support. Return an ASGI 3 callable.
Avoids making django.core.handlers.ASGIHandler a public API, in case the
internal implementation changes or moves in the future.
"""
django.setup(set_prefix=False)
return ASGIHandler() | [
"def",
"get_asgi_application",
"(",
")",
":",
"django",
".",
"setup",
"(",
"set_prefix",
"=",
"False",
")",
"return",
"ASGIHandler",
"(",
")"
] | [
4,
0
] | [
12,
24
] | python | en | ['en', 'error', 'th'] | False |
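The conventional asgi.py module built around the helper above; 'myproject.settings' is a placeholder for the real settings module.

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')  # placeholder name

# ASGI 3 callable handed to a server such as uvicorn, daphne or hypercorn.
application = get_asgi_application()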
AsyncToSync._run_event_loop | (self, loop, coro) |
Runs the given event loop (designed to be called in a thread).
|
Runs the given event loop (designed to be called in a thread).
| def _run_event_loop(self, loop, coro):
"""
Runs the given event loop (designed to be called in a thread).
"""
asyncio.set_event_loop(loop)
try:
loop.run_until_complete(coro)
finally:
try:
# mimic asyncio.run() behavior
# cancel unexhausted async generators
if sys.version_info >= (3, 7, 0):
tasks = asyncio.all_tasks(loop)
else:
tasks = asyncio.Task.all_tasks(loop)
for task in tasks:
task.cancel()
async def gather():
await asyncio.gather(*tasks, return_exceptions=True)
loop.run_until_complete(gather())
for task in tasks:
if task.cancelled():
continue
if task.exception() is not None:
loop.call_exception_handler(
{
"message": "unhandled exception during loop shutdown",
"exception": task.exception(),
"task": task,
}
)
if hasattr(loop, "shutdown_asyncgens"):
loop.run_until_complete(loop.shutdown_asyncgens())
finally:
loop.close()
asyncio.set_event_loop(self.main_event_loop) | [
"def",
"_run_event_loop",
"(",
"self",
",",
"loop",
",",
"coro",
")",
":",
"asyncio",
".",
"set_event_loop",
"(",
"loop",
")",
"try",
":",
"loop",
".",
"run_until_complete",
"(",
"coro",
")",
"finally",
":",
"try",
":",
"# mimic asyncio.run() behavior",
"# cancel unexhausted async generators",
"if",
"sys",
".",
"version_info",
">=",
"(",
"3",
",",
"7",
",",
"0",
")",
":",
"tasks",
"=",
"asyncio",
".",
"all_tasks",
"(",
"loop",
")",
"else",
":",
"tasks",
"=",
"asyncio",
".",
"Task",
".",
"all_tasks",
"(",
"loop",
")",
"for",
"task",
"in",
"tasks",
":",
"task",
".",
"cancel",
"(",
")",
"async",
"def",
"gather",
"(",
")",
":",
"await",
"asyncio",
".",
"gather",
"(",
"*",
"tasks",
",",
"return_exceptions",
"=",
"True",
")",
"loop",
".",
"run_until_complete",
"(",
"gather",
"(",
")",
")",
"for",
"task",
"in",
"tasks",
":",
"if",
"task",
".",
"cancelled",
"(",
")",
":",
"continue",
"if",
"task",
".",
"exception",
"(",
")",
"is",
"not",
"None",
":",
"loop",
".",
"call_exception_handler",
"(",
"{",
"\"message\"",
":",
"\"unhandled exception during loop shutdown\"",
",",
"\"exception\"",
":",
"task",
".",
"exception",
"(",
")",
",",
"\"task\"",
":",
"task",
",",
"}",
")",
"if",
"hasattr",
"(",
"loop",
",",
"\"shutdown_asyncgens\"",
")",
":",
"loop",
".",
"run_until_complete",
"(",
"loop",
".",
"shutdown_asyncgens",
"(",
")",
")",
"finally",
":",
"loop",
".",
"close",
"(",
")",
"asyncio",
".",
"set_event_loop",
"(",
"self",
".",
"main_event_loop",
")"
] | [
224,
4
] | [
261,
60
] | python | en | ['en', 'error', 'th'] | False |
AsyncToSync.__get__ | (self, parent, objtype) |
Include self for methods
|
Include self for methods
| def __get__(self, parent, objtype):
"""
Include self for methods
"""
func = functools.partial(self.__call__, parent)
return functools.update_wrapper(func, self.awaitable) | [
"def",
"__get__",
"(",
"self",
",",
"parent",
",",
"objtype",
")",
":",
"func",
"=",
"functools",
".",
"partial",
"(",
"self",
".",
"__call__",
",",
"parent",
")",
"return",
"functools",
".",
"update_wrapper",
"(",
"func",
",",
"self",
".",
"awaitable",
")"
] | [
263,
4
] | [
268,
61
] | python | en | ['en', 'error', 'th'] | False |
AsyncToSync.main_wrap | (
self, args, kwargs, call_result, source_thread, exc_info, context
) |
Wraps the awaitable with something that puts the result into the
result/exception future.
|
Wraps the awaitable with something that puts the result into the
result/exception future.
| async def main_wrap(
self, args, kwargs, call_result, source_thread, exc_info, context
):
"""
Wraps the awaitable with something that puts the result into the
result/exception future.
"""
if context is not None:
_restore_context(context[0])
current_task = SyncToAsync.get_current_task()
self.launch_map[current_task] = source_thread
try:
# If we have an exception, run the function inside the except block
# after raising it so exc_info is correctly populated.
if exc_info[1]:
try:
raise exc_info[1]
except BaseException:
result = await self.awaitable(*args, **kwargs)
else:
result = await self.awaitable(*args, **kwargs)
except BaseException as e:
call_result.set_exception(e)
else:
call_result.set_result(result)
finally:
del self.launch_map[current_task]
if context is not None:
context[0] = contextvars.copy_context() | [
"async",
"def",
"main_wrap",
"(",
"self",
",",
"args",
",",
"kwargs",
",",
"call_result",
",",
"source_thread",
",",
"exc_info",
",",
"context",
")",
":",
"if",
"context",
"is",
"not",
"None",
":",
"_restore_context",
"(",
"context",
"[",
"0",
"]",
")",
"current_task",
"=",
"SyncToAsync",
".",
"get_current_task",
"(",
")",
"self",
".",
"launch_map",
"[",
"current_task",
"]",
"=",
"source_thread",
"try",
":",
"# If we have an exception, run the function inside the except block",
"# after raising it so exc_info is correctly populated.",
"if",
"exc_info",
"[",
"1",
"]",
":",
"try",
":",
"raise",
"exc_info",
"[",
"1",
"]",
"except",
"BaseException",
":",
"result",
"=",
"await",
"self",
".",
"awaitable",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"result",
"=",
"await",
"self",
".",
"awaitable",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"BaseException",
"as",
"e",
":",
"call_result",
".",
"set_exception",
"(",
"e",
")",
"else",
":",
"call_result",
".",
"set_result",
"(",
"result",
")",
"finally",
":",
"del",
"self",
".",
"launch_map",
"[",
"current_task",
"]",
"if",
"context",
"is",
"not",
"None",
":",
"context",
"[",
"0",
"]",
"=",
"contextvars",
".",
"copy_context",
"(",
")"
] | [
270,
4
] | [
300,
55
] | python | en | ['en', 'error', 'th'] | False |
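A small sketch of the public wrapper over this class, asgiref.sync.async_to_sync, which lets ordinary synchronous code call a coroutine; the coroutine and its delay are illustrative.

import asyncio

from asgiref.sync import async_to_sync   # AsyncToSync is exported under this name

async def fetch_answer(delay):
    await asyncio.sleep(delay)            # stand-in for real awaitable work
    return 42

# Called from sync code: the machinery above runs the coroutine on an event loop
# (new or existing) and hands the result back synchronously.
answer = async_to_sync(fetch_answer)(0.1)
assert answer == 42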
SyncToAsync.__get__ | (self, parent, objtype) |
Include self for methods
|
Include self for methods
| def __get__(self, parent, objtype):
"""
Include self for methods
"""
return functools.partial(self.__call__, parent) | [
"def",
"__get__",
"(",
"self",
",",
"parent",
",",
"objtype",
")",
":",
"return",
"functools",
".",
"partial",
"(",
"self",
".",
"__call__",
",",
"parent",
")"
] | [
453,
4
] | [
457,
55
] | python | en | ['en', 'error', 'th'] | False |
SyncToAsync.thread_handler | (self, loop, source_task, exc_info, func, *args, **kwargs) |
Wraps the sync application with exception handling.
|
Wraps the sync application with exception handling.
| def thread_handler(self, loop, source_task, exc_info, func, *args, **kwargs):
"""
Wraps the sync application with exception handling.
"""
# Set the threadlocal for AsyncToSync
self.threadlocal.main_event_loop = loop
self.threadlocal.main_event_loop_pid = os.getpid()
# Set the task mapping (used for the locals module)
current_thread = threading.current_thread()
if AsyncToSync.launch_map.get(source_task) == current_thread:
# Our parent task was launched from this same thread, so don't make
# a launch map entry - let it shortcut over us! (and stop infinite loops)
parent_set = False
else:
self.launch_map[current_thread] = source_task
parent_set = True
# Run the function
try:
# If we have an exception, run the function inside the except block
# after raising it so exc_info is correctly populated.
if exc_info[1]:
try:
raise exc_info[1]
except BaseException:
return func(*args, **kwargs)
else:
return func(*args, **kwargs)
finally:
# Only delete the launch_map parent if we set it, otherwise it is
# from someone else.
if parent_set:
del self.launch_map[current_thread] | [
"def",
"thread_handler",
"(",
"self",
",",
"loop",
",",
"source_task",
",",
"exc_info",
",",
"func",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Set the threadlocal for AsyncToSync",
"self",
".",
"threadlocal",
".",
"main_event_loop",
"=",
"loop",
"self",
".",
"threadlocal",
".",
"main_event_loop_pid",
"=",
"os",
".",
"getpid",
"(",
")",
"# Set the task mapping (used for the locals module)",
"current_thread",
"=",
"threading",
".",
"current_thread",
"(",
")",
"if",
"AsyncToSync",
".",
"launch_map",
".",
"get",
"(",
"source_task",
")",
"==",
"current_thread",
":",
"# Our parent task was launched from this same thread, so don't make",
"# a launch map entry - let it shortcut over us! (and stop infinite loops)",
"parent_set",
"=",
"False",
"else",
":",
"self",
".",
"launch_map",
"[",
"current_thread",
"]",
"=",
"source_task",
"parent_set",
"=",
"True",
"# Run the function",
"try",
":",
"# If we have an exception, run the function inside the except block",
"# after raising it so exc_info is correctly populated.",
"if",
"exc_info",
"[",
"1",
"]",
":",
"try",
":",
"raise",
"exc_info",
"[",
"1",
"]",
"except",
"BaseException",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"return",
"func",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"finally",
":",
"# Only delete the launch_map parent if we set it, otherwise it is",
"# from someone else.",
"if",
"parent_set",
":",
"del",
"self",
".",
"launch_map",
"[",
"current_thread",
"]"
] | [
459,
4
] | [
490,
51
] | python | en | ['en', 'error', 'th'] | False |
SyncToAsync.get_current_task | () |
Cross-version implementation of asyncio.current_task()
Returns None if there is no task.
|
Cross-version implementation of asyncio.current_task() | def get_current_task():
"""
Cross-version implementation of asyncio.current_task()
Returns None if there is no task.
"""
try:
if hasattr(asyncio, "current_task"):
# Python 3.7 and up
return asyncio.current_task()
else:
# Python 3.6
return asyncio.Task.current_task()
except RuntimeError:
return None | [
"def",
"get_current_task",
"(",
")",
":",
"try",
":",
"if",
"hasattr",
"(",
"asyncio",
",",
"\"current_task\"",
")",
":",
"# Python 3.7 and up",
"return",
"asyncio",
".",
"current_task",
"(",
")",
"else",
":",
"# Python 3.6",
"return",
"asyncio",
".",
"Task",
".",
"current_task",
"(",
")",
"except",
"RuntimeError",
":",
"return",
"None"
] | [
493,
4
] | [
507,
23
] | python | en | ['en', 'error', 'th'] | False |
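The wrapper in the other direction, asgiref.sync.sync_to_async, which pushes a blocking function onto a worker thread so a coroutine can await it; the blocking function here is illustrative.

import asyncio
import time

from asgiref.sync import sync_to_async   # SyncToAsync is exported under this name

def blocking_lookup(key):
    time.sleep(0.1)                       # stand-in for file or database access
    return key.upper()

async def main():
    value = await sync_to_async(blocking_lookup)('report')
    print(value)                          # REPORT

asyncio.run(main())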
ugettext_noop | (message) |
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of gettext_noop() since Django 2.0.
|
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of gettext_noop() since Django 2.0.
| def ugettext_noop(message):
"""
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of gettext_noop() since Django 2.0.
"""
warnings.warn(
'django.utils.translation.ugettext_noop() is deprecated in favor of '
'django.utils.translation.gettext_noop().',
RemovedInDjango40Warning, stacklevel=2,
)
return gettext_noop(message) | [
"def",
"ugettext_noop",
"(",
"message",
")",
":",
"warnings",
".",
"warn",
"(",
"'django.utils.translation.ugettext_noop() is deprecated in favor of '",
"'django.utils.translation.gettext_noop().'",
",",
"RemovedInDjango40Warning",
",",
"stacklevel",
"=",
"2",
",",
")",
"return",
"gettext_noop",
"(",
"message",
")"
] | [
79,
0
] | [
89,
32
] | python | en | ['en', 'error', 'th'] | False |
ugettext | (message) |
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of gettext() since Django 2.0.
|
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of gettext() since Django 2.0.
| def ugettext(message):
"""
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of gettext() since Django 2.0.
"""
warnings.warn(
'django.utils.translation.ugettext() is deprecated in favor of '
'django.utils.translation.gettext().',
RemovedInDjango40Warning, stacklevel=2,
)
return gettext(message) | [
"def",
"ugettext",
"(",
"message",
")",
":",
"warnings",
".",
"warn",
"(",
"'django.utils.translation.ugettext() is deprecated in favor of '",
"'django.utils.translation.gettext().'",
",",
"RemovedInDjango40Warning",
",",
"stacklevel",
"=",
"2",
",",
")",
"return",
"gettext",
"(",
"message",
")"
] | [
96,
0
] | [
106,
27
] | python | en | ['en', 'error', 'th'] | False |
ungettext | (singular, plural, number) |
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of ngettext() since Django 2.0.
|
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of ngettext() since Django 2.0.
| def ungettext(singular, plural, number):
"""
A legacy compatibility wrapper for Unicode handling on Python 2.
Alias of ngettext() since Django 2.0.
"""
warnings.warn(
'django.utils.translation.ungettext() is deprecated in favor of '
'django.utils.translation.ngettext().',
RemovedInDjango40Warning, stacklevel=2,
)
return ngettext(singular, plural, number) | [
"def",
"ungettext",
"(",
"singular",
",",
"plural",
",",
"number",
")",
":",
"warnings",
".",
"warn",
"(",
"'django.utils.translation.ungettext() is deprecated in favor of '",
"'django.utils.translation.ngettext().'",
",",
"RemovedInDjango40Warning",
",",
"stacklevel",
"=",
"2",
",",
")",
"return",
"ngettext",
"(",
"singular",
",",
"plural",
",",
"number",
")"
] | [
113,
0
] | [
123,
45
] | python | en | ['en', 'error', 'th'] | False |
ugettext_lazy | (message) |
A legacy compatibility wrapper for Unicode handling on Python 2. Has been
an alias of gettext_lazy() since Django 2.0.
|
A legacy compatibility wrapper for Unicode handling on Python 2. Has been
an alias of gettext_lazy() since Django 2.0.
| def ugettext_lazy(message):
"""
A legacy compatibility wrapper for Unicode handling on Python 2. Has been
an alias of gettext_lazy() since Django 2.0.
"""
warnings.warn(
'django.utils.translation.ugettext_lazy() is deprecated in favor of '
'django.utils.translation.gettext_lazy().',
RemovedInDjango40Warning, stacklevel=2,
)
return gettext_lazy(message) | [
"def",
"ugettext_lazy",
"(",
"message",
")",
":",
"warnings",
".",
"warn",
"(",
"'django.utils.translation.ugettext_lazy() is deprecated in favor of '",
"'django.utils.translation.gettext_lazy().'",
",",
"RemovedInDjango40Warning",
",",
"stacklevel",
"=",
"2",
",",
")",
"return",
"gettext_lazy",
"(",
"message",
")"
] | [
138,
0
] | [
148,
32
] | python | en | ['en', 'error', 'th'] | False |
ungettext_lazy | (singular, plural, number=None) |
A legacy compatibility wrapper for Unicode handling on Python 2.
An alias of ngettext_lazy() since Django 2.0.
|
A legacy compatibility wrapper for Unicode handling on Python 2.
An alias of ngettext_lazy() since Django 2.0.
| def ungettext_lazy(singular, plural, number=None):
"""
A legacy compatibility wrapper for Unicode handling on Python 2.
An alias of ngettext_lazy() since Django 2.0.
"""
warnings.warn(
'django.utils.translation.ungettext_lazy() is deprecated in favor of '
'django.utils.translation.ngettext_lazy().',
RemovedInDjango40Warning, stacklevel=2,
)
return ngettext_lazy(singular, plural, number) | [
"def",
"ungettext_lazy",
"(",
"singular",
",",
"plural",
",",
"number",
"=",
"None",
")",
":",
"warnings",
".",
"warn",
"(",
"'django.utils.translation.ungettext_lazy() is deprecated in favor of '",
"'django.utils.translation.ngettext_lazy().'",
",",
"RemovedInDjango40Warning",
",",
"stacklevel",
"=",
"2",
",",
")",
"return",
"ngettext_lazy",
"(",
"singular",
",",
"plural",
",",
"number",
")"
] | [
206,
0
] | [
216,
50
] | python | en | ['en', 'error', 'th'] | False |
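The practical reading of the five wrappers above: each emits RemovedInDjango40Warning and forwards to its gettext-named replacement, so migrating is a rename. A sketch, assuming it runs inside a configured Django project:

from django.utils.translation import ugettext_lazy, ungettext   # deprecated spellings
from django.utils.translation import gettext_lazy, ngettext     # replacements since Django 2.0

count = 3
old_title = ugettext_lazy('Dashboard')                 # works, but warns
new_title = gettext_lazy('Dashboard')                  # same behaviour, no warning
old_message = ungettext('%d item', '%d items', count)  # works, but warns
new_message = ngettext('%d item', '%d items', count)   # same behaviour, no warning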
to_language | (locale) | Turn a locale name (en_US) into a language name (en-us). | Turn a locale name (en_US) into a language name (en-us). | def to_language(locale):
"""Turn a locale name (en_US) into a language name (en-us)."""
p = locale.find('_')
if p >= 0:
return locale[:p].lower() + '-' + locale[p + 1:].lower()
else:
return locale.lower() | [
"def",
"to_language",
"(",
"locale",
")",
":",
"p",
"=",
"locale",
".",
"find",
"(",
"'_'",
")",
"if",
"p",
">=",
"0",
":",
"return",
"locale",
"[",
":",
"p",
"]",
".",
"lower",
"(",
")",
"+",
"'-'",
"+",
"locale",
"[",
"p",
"+",
"1",
":",
"]",
".",
"lower",
"(",
")",
"else",
":",
"return",
"locale",
".",
"lower",
"(",
")"
] | [
264,
0
] | [
270,
29
] | python | en | ['es', 'en', 'en'] | True |
to_locale | (language) | Turn a language name (en-us) into a locale name (en_US). | Turn a language name (en-us) into a locale name (en_US). | def to_locale(language):
"""Turn a language name (en-us) into a locale name (en_US)."""
language, _, country = language.lower().partition('-')
if not country:
return language
# A language with > 2 characters after the dash only has its first
# character after the dash capitalized; e.g. sr-latn becomes sr_Latn.
# A language with 2 characters after the dash has both characters
# capitalized; e.g. en-us becomes en_US.
country, _, tail = country.partition('-')
country = country.title() if len(country) > 2 else country.upper()
if tail:
country += '-' + tail
return language + '_' + country | [
"def",
"to_locale",
"(",
"language",
")",
":",
"language",
",",
"_",
",",
"country",
"=",
"language",
".",
"lower",
"(",
")",
".",
"partition",
"(",
"'-'",
")",
"if",
"not",
"country",
":",
"return",
"language",
"# A language with > 2 characters after the dash only has its first",
"# character after the dash capitalized; e.g. sr-latn becomes sr_Latn.",
"# A language with 2 characters after the dash has both characters",
"# capitalized; e.g. en-us becomes en_US.",
"country",
",",
"_",
",",
"tail",
"=",
"country",
".",
"partition",
"(",
"'-'",
")",
"country",
"=",
"country",
".",
"title",
"(",
")",
"if",
"len",
"(",
"country",
")",
">",
"2",
"else",
"country",
".",
"upper",
"(",
")",
"if",
"tail",
":",
"country",
"+=",
"'-'",
"+",
"tail",
"return",
"language",
"+",
"'_'",
"+",
"country"
] | [
273,
0
] | [
286,
35
] | python | en | ['es', 'en', 'en'] | True |
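A few round trips through the two helpers above; the expected values follow directly from the casing rules in the code.

from django.utils.translation import to_language, to_locale

assert to_locale('en-us') == 'en_US'        # 2-letter country code is upper-cased
assert to_locale('sr-latn') == 'sr_Latn'    # longer subtag is title-cased
assert to_language('en_US') == 'en-us'
assert to_language('sr_Latn') == 'sr-latn'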
recommendation | () | Given a user id, return a list of recommended item ids. | Given a user id, return a list of recommended item ids. | def recommendation():
"""Given a user id, return a list of recommended item ids."""
user_id = request.args.get('userId')
num_recs = request.args.get('numRecs')
# validate args
if user_id is None:
return 'No User Id provided.', 400
if num_recs is None:
num_recs = DEFAULT_RECS
try:
uid_int = int(user_id)
nrecs_int = int(num_recs)
except:
return 'User id and number of recs arguments must be integers.', 400
# get recommended articles
rec_list = rec_util.get_recommendations(uid_int, nrecs_int)
if rec_list is None:
return 'User Id not found : %s' % user_id, 400
json_response = jsonify({'articles': [str(i) for i in rec_list]})
return json_response, 200 | [
"def",
"recommendation",
"(",
")",
":",
"user_id",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'userId'",
")",
"num_recs",
"=",
"request",
".",
"args",
".",
"get",
"(",
"'numRecs'",
")",
"# validate args",
"if",
"user_id",
"is",
"None",
":",
"return",
"'No User Id provided.'",
",",
"400",
"if",
"num_recs",
"is",
"None",
":",
"num_recs",
"=",
"DEFAULT_RECS",
"try",
":",
"uid_int",
"=",
"int",
"(",
"user_id",
")",
"nrecs_int",
"=",
"int",
"(",
"num_recs",
")",
"except",
":",
"return",
"'User id and number of recs arguments must be integers.'",
",",
"400",
"# get recommended articles",
"rec_list",
"=",
"rec_util",
".",
"get_recommendations",
"(",
"uid_int",
",",
"nrecs_int",
")",
"if",
"rec_list",
"is",
"None",
":",
"return",
"'User Id not found : %s'",
"%",
"user_id",
",",
"400",
"json_response",
"=",
"jsonify",
"(",
"{",
"'articles'",
":",
"[",
"str",
"(",
"i",
")",
"for",
"i",
"in",
"rec_list",
"]",
"}",
")",
"return",
"json_response",
",",
"200"
] | [
28,
0
] | [
51,
27
] | python | en | ['en', 'en', 'en'] | True |
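A client-side sketch for the endpoint above; the host, port and route path are assumptions, since only the view function body is shown here.

import requests   # third-party HTTP client, used purely for illustration

resp = requests.get('http://localhost:8080/recommendation',     # hypothetical host/port/path
                    params={'userId': 42, 'numRecs': 5})
if resp.status_code == 200:
    article_ids = resp.json()['articles']   # list of recommended item ids as strings
else:
    print('Request rejected:', resp.text)   # e.g. missing or non-integer userId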
CSSFeatures.fit | (self, blocks, y=None) |
This method returns the current instance unchanged, since no fitting is
required for this ``Feature``. It's here only for API consistency.
|
This method returns the current instance unchanged, since no fitting is
required for this ``Feature``. It's here only for API consistency.
| def fit(self, blocks, y=None):
"""
This method returns the current instance unchanged, since no fitting is
required for this ``Feature``. It's here only for API consistency.
"""
return self | [
"def",
"fit",
"(",
"self",
",",
"blocks",
",",
"y",
"=",
"None",
")",
":",
"return",
"self"
] | [
30,
4
] | [
35,
19
] | python | en | ['en', 'error', 'th'] | False |
CSSFeatures.transform | (self, blocks, y=None) |
Transform an ordered sequence of blocks into a 2D features matrix with
shape (num blocks, num features).
Args:
blocks (List[Block]): as output by :class:`Blockifier.blockify`
y (None): This isn't used, it's only here for API consistency.
Returns:
`np.ndarray`: 2D array of shape (num blocks, num CSS attributes),
where values are either 0 or 1, indicating the absence or
presence of a given token in a CSS attribute on a given block.
|
Transform an ordered sequence of blocks into a 2D features matrix with
shape (num blocks, num features). | def transform(self, blocks, y=None):
"""
Transform an ordered sequence of blocks into a 2D features matrix with
shape (num blocks, num features).
Args:
blocks (List[Block]): as output by :class:`Blockifier.blockify`
y (None): This isn't used, it's only here for API consistency.
Returns:
`np.ndarray`: 2D array of shape (num blocks, num CSS attributes),
where values are either 0 or 1, indicating the absence or
presence of a given token in a CSS attribute on a given block.
"""
feature_vecs = (
tuple(re.search(token, block.css[attrib]) is not None
for block in blocks)
for attrib, tokens in self.attribute_tokens
for token in tokens
)
return np.column_stack(tuple(feature_vecs)).astype(int) | [
"def",
"transform",
"(",
"self",
",",
"blocks",
",",
"y",
"=",
"None",
")",
":",
"feature_vecs",
"=",
"(",
"tuple",
"(",
"re",
".",
"search",
"(",
"token",
",",
"block",
".",
"css",
"[",
"attrib",
"]",
")",
"is",
"not",
"None",
"for",
"block",
"in",
"blocks",
")",
"for",
"attrib",
",",
"tokens",
"in",
"self",
".",
"attribute_tokens",
"for",
"token",
"in",
"tokens",
")",
"return",
"np",
".",
"column_stack",
"(",
"tuple",
"(",
"feature_vecs",
")",
")",
".",
"astype",
"(",
"int",
")"
] | [
37,
4
] | [
57,
63
] | python | en | ['en', 'error', 'th'] | False |
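A hedged end-to-end sketch for the transformer above, assuming the dragnet package layout (Blockifier in dragnet.blocks, CSSFeatures in dragnet.features); the HTML string is illustrative.

from dragnet.blocks import Blockifier       # assumed module path
from dragnet.features import CSSFeatures    # assumed module path

html = '''
<html><body>
  <div id="content"><p>Main article text, long enough to form its own block.</p></div>
  <div class="comments sidebar"><p>Reader comments would live over here.</p></div>
</body></html>
'''
blocks = Blockifier.blockify(html)                 # List[Block], as the docstring requires
X = CSSFeatures().fit(blocks).transform(blocks)    # shape: (num blocks, num CSS tokens)
# Each entry is 0 or 1: does this block's id/class contain that token (e.g. "comment")?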
Dataset.scheduler | (self, epoch: int) |
Epoch lr rate scheduler for your dataset
|
Epoch lr rate scheduler for your dataset
| def scheduler(self, epoch: int) -> float:
"""
Epoch lr rate scheduler for your dataset
"""
raise NotImplementedError | [
"def",
"scheduler",
"(",
"self",
",",
"epoch",
":",
"int",
")",
"->",
"float",
":",
"raise",
"NotImplementedError"
] | [
14,
4
] | [
18,
33
] | python | en | ['en', 'error', 'th'] | False |
Dataset.classes | (self) |
:return: number of classes in this dataset
|
:return: number of classes in this dataset
| def classes(self) -> int:
"""
:return: number of classes in this dataset
"""
return self._classes | [
"def",
"classes",
"(",
"self",
")",
"->",
"int",
":",
"return",
"self",
".",
"_classes"
] | [
21,
4
] | [
25,
28
] | python | en | ['en', 'error', 'th'] | False |
Dataset.load_train_datasets | (self) |
:return: training dataset(tf.data.Dataset), size
|
:return: training dataset(tf.data.Dataset), size
| def load_train_datasets(self) -> (tf.data.Dataset, float):
"""
:return: training dataset(tf.data.Dataset), size
"""
pass | [
"def",
"load_train_datasets",
"(",
"self",
")",
"->",
"(",
"tf",
".",
"data",
".",
"Dataset",
",",
"float",
")",
":",
"pass"
] | [
28,
4
] | [
32,
12
] | python | en | ['en', 'error', 'th'] | False |
Dataset.load_validation_datasets | (self) |
:return: validation dataset (tf.data.Dataset), size
|
:return: validation dataset (tf.data.Dataset), size
| def load_validation_datasets(self) -> (tf.data.Dataset, float):
"""
:return: validation dataset (tf.data.Dataset), size
"""
pass | [
"def",
"load_validation_datasets",
"(",
"self",
")",
"->",
"(",
"tf",
".",
"data",
".",
"Dataset",
",",
"float",
")",
":",
"pass"
] | [
35,
4
] | [
39,
12
] | python | en | ['en', 'error', 'th'] | False |
Dataset.decode | (self, *args, **kwargs) |
:return: (image, labels, bboxes) where
|
:return: (image, labels, bboxes) where
| def decode(self, *args, **kwargs):
"""
:return: (image, labels, bboxes) where
"""
pass | [
"def",
"decode",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"pass"
] | [
42,
4
] | [
46,
12
] | python | en | ['en', 'error', 'th'] | False |
test_content_and_content_comments_extractor_blocks | (html) |
The content and content/comments extractor should return proper blocks
|
The content and content/comments extractor should return proper blocks
| def test_content_and_content_comments_extractor_blocks(html):
"""
The content and content/comments extractor should return proper blocks
"""
content = extract_content(html, as_blocks=True)
content_comments = extract_comments(html, as_blocks=True)
passed_content = False
passed_content_comments = False
for i in range(5):
# actual_content, actual_content_comments = \
# content_and_content_comments_extractor.analyze(
# html, blocks=True)
actual_content = extract_content(html, as_blocks=True)
actual_content_comments = extract_comments(html, as_blocks=True)
passed_content = (
[blk.text for blk in actual_content] ==
[blk.text for blk in content]
)
passed_content_comments = (
[blk.text for blk in actual_content_comments] ==
[blk.text for blk in content_comments]
)
if passed_content and passed_content_comments:
break
assert passed_content
assert passed_content_comments | [
"def",
"test_content_and_content_comments_extractor_blocks",
"(",
"html",
")",
":",
"content",
"=",
"extract_content",
"(",
"html",
",",
"as_blocks",
"=",
"True",
")",
"content_comments",
"=",
"extract_comments",
"(",
"html",
",",
"as_blocks",
"=",
"True",
")",
"passed_content",
"=",
"False",
"passed_content_comments",
"=",
"False",
"for",
"i",
"in",
"range",
"(",
"5",
")",
":",
"# actual_content, actual_content_comments = \\",
"# content_and_content_comments_extractor.analyze(",
"# html, blocks=True)",
"actual_content",
"=",
"extract_content",
"(",
"html",
",",
"as_blocks",
"=",
"True",
")",
"actual_content_comments",
"=",
"extract_comments",
"(",
"html",
",",
"as_blocks",
"=",
"True",
")",
"passed_content",
"=",
"(",
"[",
"blk",
".",
"text",
"for",
"blk",
"in",
"actual_content",
"]",
"==",
"[",
"blk",
".",
"text",
"for",
"blk",
"in",
"content",
"]",
")",
"passed_content_comments",
"=",
"(",
"[",
"blk",
".",
"text",
"for",
"blk",
"in",
"actual_content_comments",
"]",
"==",
"[",
"blk",
".",
"text",
"for",
"blk",
"in",
"content_comments",
"]",
")",
"if",
"passed_content",
"and",
"passed_content_comments",
":",
"break",
"assert",
"passed_content",
"assert",
"passed_content_comments"
] | [
59,
0
] | [
86,
34
] | python | en | ['en', 'error', 'th'] | False |
SSLTransport._validate_ssl_context_for_tls_in_tls | (ssl_context) |
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS.
The only requirement is that the ssl_context provides the 'wrap_bio'
methods.
|
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS. | def _validate_ssl_context_for_tls_in_tls(ssl_context):
"""
Raises a ProxySchemeUnsupported if the provided ssl_context can't be used
for TLS in TLS.
The only requirement is that the ssl_context provides the 'wrap_bio'
methods.
"""
if not hasattr(ssl_context, "wrap_bio"):
if six.PY2:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"supported on Python 2"
)
else:
raise ProxySchemeUnsupported(
"TLS in TLS requires SSLContext.wrap_bio() which isn't "
"available on non-native SSLContext"
) | [
"def",
"_validate_ssl_context_for_tls_in_tls",
"(",
"ssl_context",
")",
":",
"if",
"not",
"hasattr",
"(",
"ssl_context",
",",
"\"wrap_bio\"",
")",
":",
"if",
"six",
".",
"PY2",
":",
"raise",
"ProxySchemeUnsupported",
"(",
"\"TLS in TLS requires SSLContext.wrap_bio() which isn't \"",
"\"supported on Python 2\"",
")",
"else",
":",
"raise",
"ProxySchemeUnsupported",
"(",
"\"TLS in TLS requires SSLContext.wrap_bio() which isn't \"",
"\"available on non-native SSLContext\"",
")"
] | [
22,
4
] | [
41,
17
] | python | en | ['en', 'error', 'th'] | False |
SSLTransport.__init__ | (
self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
) |
Create an SSLTransport around socket using the provided ssl_context.
|
Create an SSLTransport around socket using the provided ssl_context.
| def __init__(
self, socket, ssl_context, server_hostname=None, suppress_ragged_eofs=True
):
"""
Create an SSLTransport around socket using the provided ssl_context.
"""
self.incoming = ssl.MemoryBIO()
self.outgoing = ssl.MemoryBIO()
self.suppress_ragged_eofs = suppress_ragged_eofs
self.socket = socket
self.sslobj = ssl_context.wrap_bio(
self.incoming, self.outgoing, server_hostname=server_hostname
)
# Perform initial handshake.
self._ssl_io_loop(self.sslobj.do_handshake) | [
"def",
"__init__",
"(",
"self",
",",
"socket",
",",
"ssl_context",
",",
"server_hostname",
"=",
"None",
",",
"suppress_ragged_eofs",
"=",
"True",
")",
":",
"self",
".",
"incoming",
"=",
"ssl",
".",
"MemoryBIO",
"(",
")",
"self",
".",
"outgoing",
"=",
"ssl",
".",
"MemoryBIO",
"(",
")",
"self",
".",
"suppress_ragged_eofs",
"=",
"suppress_ragged_eofs",
"self",
".",
"socket",
"=",
"socket",
"self",
".",
"sslobj",
"=",
"ssl_context",
".",
"wrap_bio",
"(",
"self",
".",
"incoming",
",",
"self",
".",
"outgoing",
",",
"server_hostname",
"=",
"server_hostname",
")",
"# Perform initial handshake.",
"self",
".",
"_ssl_io_loop",
"(",
"self",
".",
"sslobj",
".",
"do_handshake",
")"
] | [
43,
4
] | [
60,
51
] | python | en | ['en', 'error', 'th'] | False |
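The constructor entry above shows SSLTransport pairing two ssl.MemoryBIO objects with wrap_bio() and handshaking immediately. The sketch below illustrates the intended TLS-in-TLS layering: an ordinary TLS socket to a proxy, then a second TLS session stacked on top of it. The host names, the proxy, and the import path are assumptions for illustration; only the constructor signature comes from the entry above.

# Hedged sketch of TLS in TLS with SSLTransport (hosts are placeholders).
import socket
import ssl
from urllib3.util.ssltransport import SSLTransport

proxy_ctx = ssl.create_default_context()
dest_ctx = ssl.create_default_context()

# Outer TLS session: plain TCP to the proxy, wrapped the usual way.
raw_sock = socket.create_connection(("proxy.example.com", 443))
proxy_sock = proxy_ctx.wrap_socket(raw_sock, server_hostname="proxy.example.com")

# (A real client would send an HTTP CONNECT request here and read the
# proxy's "200 Connection established" reply before tunnelling.)

# Inner TLS session: wrap_bio() drives the handshake through MemoryBIOs,
# so the inner encrypted records travel inside the outer TLS stream.
# The handshake runs inside __init__ via _ssl_io_loop(do_handshake).
tls_in_tls = SSLTransport(
    proxy_sock, dest_ctx, server_hostname="origin.example.com"
)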
SSLTransport.makefile | (
self, mode="r", buffering=None, encoding=None, errors=None, newline=None
) |
Python's httpclient uses makefile and buffered io when reading HTTP
messages and we need to support it.
This is unfortunately a copy and paste of socket.py makefile with small
changes to point to the socket directly.
|
Python's httpclient uses makefile and buffered io when reading HTTP
messages and we need to support it. | def makefile(
self, mode="r", buffering=None, encoding=None, errors=None, newline=None
):
"""
Python's httpclient uses makefile and buffered io when reading HTTP
messages and we need to support it.
This is unfortunately a copy and paste of socket.py makefile with small
changes to point to the socket directly.
"""
if not set(mode) <= {"r", "w", "b"}:
raise ValueError("invalid mode %r (only r, w, b allowed)" % (mode,))
writing = "w" in mode
reading = "r" in mode or not writing
assert reading or writing
binary = "b" in mode
rawmode = ""
if reading:
rawmode += "r"
if writing:
rawmode += "w"
raw = socket.SocketIO(self, rawmode)
self.socket._io_refs += 1
if buffering is None:
buffering = -1
if buffering < 0:
buffering = io.DEFAULT_BUFFER_SIZE
if buffering == 0:
if not binary:
raise ValueError("unbuffered streams must be binary")
return raw
if reading and writing:
buffer = io.BufferedRWPair(raw, raw, buffering)
elif reading:
buffer = io.BufferedReader(raw, buffering)
else:
assert writing
buffer = io.BufferedWriter(raw, buffering)
if binary:
return buffer
text = io.TextIOWrapper(buffer, encoding, errors, newline)
text.mode = mode
return text | [
"def",
"makefile",
"(",
"self",
",",
"mode",
"=",
"\"r\"",
",",
"buffering",
"=",
"None",
",",
"encoding",
"=",
"None",
",",
"errors",
"=",
"None",
",",
"newline",
"=",
"None",
")",
":",
"if",
"not",
"set",
"(",
"mode",
")",
"<=",
"{",
"\"r\"",
",",
"\"w\"",
",",
"\"b\"",
"}",
":",
"raise",
"ValueError",
"(",
"\"invalid mode %r (only r, w, b allowed)\"",
"%",
"(",
"mode",
",",
")",
")",
"writing",
"=",
"\"w\"",
"in",
"mode",
"reading",
"=",
"\"r\"",
"in",
"mode",
"or",
"not",
"writing",
"assert",
"reading",
"or",
"writing",
"binary",
"=",
"\"b\"",
"in",
"mode",
"rawmode",
"=",
"\"\"",
"if",
"reading",
":",
"rawmode",
"+=",
"\"r\"",
"if",
"writing",
":",
"rawmode",
"+=",
"\"w\"",
"raw",
"=",
"socket",
".",
"SocketIO",
"(",
"self",
",",
"rawmode",
")",
"self",
".",
"socket",
".",
"_io_refs",
"+=",
"1",
"if",
"buffering",
"is",
"None",
":",
"buffering",
"=",
"-",
"1",
"if",
"buffering",
"<",
"0",
":",
"buffering",
"=",
"io",
".",
"DEFAULT_BUFFER_SIZE",
"if",
"buffering",
"==",
"0",
":",
"if",
"not",
"binary",
":",
"raise",
"ValueError",
"(",
"\"unbuffered streams must be binary\"",
")",
"return",
"raw",
"if",
"reading",
"and",
"writing",
":",
"buffer",
"=",
"io",
".",
"BufferedRWPair",
"(",
"raw",
",",
"raw",
",",
"buffering",
")",
"elif",
"reading",
":",
"buffer",
"=",
"io",
".",
"BufferedReader",
"(",
"raw",
",",
"buffering",
")",
"else",
":",
"assert",
"writing",
"buffer",
"=",
"io",
".",
"BufferedWriter",
"(",
"raw",
",",
"buffering",
")",
"if",
"binary",
":",
"return",
"buffer",
"text",
"=",
"io",
".",
"TextIOWrapper",
"(",
"buffer",
",",
"encoding",
",",
"errors",
",",
"newline",
")",
"text",
".",
"mode",
"=",
"mode",
"return",
"text"
] | [
104,
4
] | [
147,
19
] | python | en | ['en', 'error', 'th'] | False |
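The makefile entry above mirrors socket.makefile so that http.client can read responses through buffered file objects. A short usage sketch follows, assuming tls_in_tls is the already-handshaken SSLTransport from the previous sketch; the byte values shown in comments are illustrative, not captured output.

# Binary buffered read path: SocketIO over the transport, then BufferedReader.
fp = tls_in_tls.makefile("rb", buffering=8192)
status_line = fp.readline()  # e.g. b"HTTP/1.1 200 OK\r\n" from the origin

# Binary + buffering=0 skips the buffer and returns the raw SocketIO object.
raw = tls_in_tls.makefile("rb", buffering=0)

# Text mode wraps the buffer in a TextIOWrapper and records the mode string.
text_fp = tls_in_tls.makefile("r", encoding="utf-8")
assert text_fp.mode == "r"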