nwo (string, 5-106 chars) | sha (string, 40 chars) | path (string, 4-174 chars) | language (1 class) | identifier (string, 1-140 chars) | parameters (string, 0-87.7k chars) | argument_list (1 class) | return_statement (string, 0-426k chars) | docstring (string, 0-64.3k chars) | docstring_summary (string, 0-26.3k chars) | docstring_tokens (list) | function (string, 18-4.83M chars) | function_tokens (list) | url (string, 83-304 chars)
---|---|---|---|---|---|---|---|---|---|---|---|---|---
Anaconda-Platform/anaconda-project
|
df5ec33c12591e6512436d38d36c6132fa2e9618
|
anaconda_project/project.py
|
python
|
_ConfigCache._parse_string_list_with_special
|
(self, problems, yaml_file, parent_dict, key, what, special_filter)
|
return (cleaned, special)
|
[] |
def _parse_string_list_with_special(self, problems, yaml_file, parent_dict, key, what, special_filter):
items = parent_dict.get(key, [])
if not is_list(items):
_file_problem(problems, yaml_file, "%s: value should be a list of %ss, not '%r'" % (key, what, items))
return ([], [])
cleaned = []
special = []
for item in items:
if is_string(item):
cleaned.append(item.strip())
elif special_filter(item):
special.append(item)
else:
_file_problem(problems, yaml_file,
("%s: value should be a %s (as a string) not '%r'" % (key, what, item)))
return (cleaned, special)
|
[
"def",
"_parse_string_list_with_special",
"(",
"self",
",",
"problems",
",",
"yaml_file",
",",
"parent_dict",
",",
"key",
",",
"what",
",",
"special_filter",
")",
":",
"items",
"=",
"parent_dict",
".",
"get",
"(",
"key",
",",
"[",
"]",
")",
"if",
"not",
"is_list",
"(",
"items",
")",
":",
"_file_problem",
"(",
"problems",
",",
"yaml_file",
",",
"\"%s: value should be a list of %ss, not '%r'\"",
"%",
"(",
"key",
",",
"what",
",",
"items",
")",
")",
"return",
"(",
"[",
"]",
",",
"[",
"]",
")",
"cleaned",
"=",
"[",
"]",
"special",
"=",
"[",
"]",
"for",
"item",
"in",
"items",
":",
"if",
"is_string",
"(",
"item",
")",
":",
"cleaned",
".",
"append",
"(",
"item",
".",
"strip",
"(",
")",
")",
"elif",
"special_filter",
"(",
"item",
")",
":",
"special",
".",
"append",
"(",
"item",
")",
"else",
":",
"_file_problem",
"(",
"problems",
",",
"yaml_file",
",",
"(",
"\"%s: value should be a %s (as a string) not '%r'\"",
"%",
"(",
"key",
",",
"what",
",",
"item",
")",
")",
")",
"return",
"(",
"cleaned",
",",
"special",
")"
] |
https://github.com/Anaconda-Platform/anaconda-project/blob/df5ec33c12591e6512436d38d36c6132fa2e9618/anaconda_project/project.py#L426-L441
|
|||
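The helper above splits a YAML list into plain strings and "special" entries that pass a caller-supplied predicate, recording anything else as a problem. A minimal standalone sketch of that pattern; `is_list`, `is_string`, and `_file_problem` are anaconda-project internals, stubbed here for illustration:

```python
def is_list(x):                # stand-in for anaconda_project's helper
    return isinstance(x, list)

def is_string(x):              # stand-in
    return isinstance(x, str)

def _file_problem(problems, yaml_file, text):   # stand-in: the real one records file context
    problems.append(text)

problems = []
parent_dict = {'packages': ['numpy ', {'pip': ['requests']}, 42]}
cleaned, special = [], []
for item in parent_dict['packages']:
    if is_string(item):
        cleaned.append(item.strip())
    elif isinstance(item, dict):               # plays the role of special_filter
        special.append(item)
    else:
        _file_problem(problems, None,
                      "packages: value should be a package spec, not '%r'" % item)

print(cleaned)    # ['numpy']
print(special)    # [{'pip': ['requests']}]
print(problems)   # ["packages: value should be a package spec, not '42'"]
```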
khalim19/gimp-plugin-export-layers
|
b37255f2957ad322f4d332689052351cdea6e563
|
export_layers/pygimplib/_lib/future/libpasteurize/fixes/fix_metaclass.py
|
python
|
FixMetaclass.transform
|
(self, node, results)
|
[] |
def transform(self, node, results):
meta_results = has_metaclass(node)
if not meta_results: return
for meta in meta_results:
meta.remove()
target = Leaf(token.NAME, u"__metaclass__")
equal = Leaf(token.EQUAL, u"=", prefix=u" ")
# meta is the last item in what was returned by has_metaclass(): name
name = meta
name.prefix = u" "
stmt_node = Node(syms.atom, [target, equal, name])
suitify(node)
for item in node.children:
if item.type == syms.suite:
for stmt in item.children:
if stmt.type == token.INDENT:
# Insert, in reverse order, the statement, a newline,
# and an indent right after the first indented line
loc = item.children.index(stmt) + 1
# Keep consistent indentation form
ident = Leaf(token.INDENT, stmt.value)
item.insert_child(loc, ident)
item.insert_child(loc, Newline())
item.insert_child(loc, stmt_node)
break
|
[
"def",
"transform",
"(",
"self",
",",
"node",
",",
"results",
")",
":",
"meta_results",
"=",
"has_metaclass",
"(",
"node",
")",
"if",
"not",
"meta_results",
":",
"return",
"for",
"meta",
"in",
"meta_results",
":",
"meta",
".",
"remove",
"(",
")",
"target",
"=",
"Leaf",
"(",
"token",
".",
"NAME",
",",
"u\"__metaclass__\"",
")",
"equal",
"=",
"Leaf",
"(",
"token",
".",
"EQUAL",
",",
"u\"=\"",
",",
"prefix",
"=",
"u\" \"",
")",
"# meta is the last item in what was returned by has_metaclass(): name",
"name",
"=",
"meta",
"name",
".",
"prefix",
"=",
"u\" \"",
"stmt_node",
"=",
"Node",
"(",
"syms",
".",
"atom",
",",
"[",
"target",
",",
"equal",
",",
"name",
"]",
")",
"suitify",
"(",
"node",
")",
"for",
"item",
"in",
"node",
".",
"children",
":",
"if",
"item",
".",
"type",
"==",
"syms",
".",
"suite",
":",
"for",
"stmt",
"in",
"item",
".",
"children",
":",
"if",
"stmt",
".",
"type",
"==",
"token",
".",
"INDENT",
":",
"# Insert, in reverse order, the statement, a newline,",
"# and an indent right after the first indented line",
"loc",
"=",
"item",
".",
"children",
".",
"index",
"(",
"stmt",
")",
"+",
"1",
"# Keep consistent indentation form",
"ident",
"=",
"Leaf",
"(",
"token",
".",
"INDENT",
",",
"stmt",
".",
"value",
")",
"item",
".",
"insert_child",
"(",
"loc",
",",
"ident",
")",
"item",
".",
"insert_child",
"(",
"loc",
",",
"Newline",
"(",
")",
")",
"item",
".",
"insert_child",
"(",
"loc",
",",
"stmt_node",
")",
"break"
] |
https://github.com/khalim19/gimp-plugin-export-layers/blob/b37255f2957ad322f4d332689052351cdea6e563/export_layers/pygimplib/_lib/future/libpasteurize/fixes/fix_metaclass.py#L53-L78
|
||||
dnsviz/dnsviz
|
9427a5c7d287664199315a2438b45521854a0c7d
|
dnsviz/analysis/online.py
|
python
|
OnlineDomainNameAnalysis.get_servers_in_child
|
(self)
|
return self._servers_in_child
|
Return the authoritative IP addresses of servers corresponding to
names in the authoritative NS records.
|
Return the authoritative IP addresses of servers corresponding to
names in the authoritative NS records.
|
[
"Return",
"the",
"authoritative",
"IP",
"addresses",
"of",
"servers",
"corresponding",
"to",
"names",
"in",
"the",
"authoritative",
"NS",
"records",
"."
] |
def get_servers_in_child(self):
'''Return the authoritative IP addresses of servers corresponding to
names in the authoritative NS records.'''
if not hasattr(self, '_servers_in_child') or self._servers_in_child is None:
servers = set()
auth_ips = self.get_auth_ns_ip_mapping()
for name in self.get_ns_names_in_child():
if name in auth_ips:
servers.update(auth_ips[name])
self._servers_in_child = servers
return self._servers_in_child
|
[
"def",
"get_servers_in_child",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_servers_in_child'",
")",
"or",
"self",
".",
"_servers_in_child",
"is",
"None",
":",
"servers",
"=",
"set",
"(",
")",
"auth_ips",
"=",
"self",
".",
"get_auth_ns_ip_mapping",
"(",
")",
"for",
"name",
"in",
"self",
".",
"get_ns_names_in_child",
"(",
")",
":",
"if",
"name",
"in",
"auth_ips",
":",
"servers",
".",
"update",
"(",
"auth_ips",
"[",
"name",
"]",
")",
"self",
".",
"_servers_in_child",
"=",
"servers",
"return",
"self",
".",
"_servers_in_child"
] |
https://github.com/dnsviz/dnsviz/blob/9427a5c7d287664199315a2438b45521854a0c7d/dnsviz/analysis/online.py#L607-L618
|
|
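`get_servers_in_child` computes its result once and caches it on the instance; the `hasattr`-or-`None` guard is a common lazy-attribute idiom. The same pattern in isolation (names here are illustrative):

```python
class Example:
    def servers(self):
        # compute on first call, reuse the cached attribute afterwards
        if not hasattr(self, '_servers') or self._servers is None:
            print('computing once')
            self._servers = {'192.0.2.1', '192.0.2.2'}
        return self._servers

e = Example()
e.servers()   # prints 'computing once'
e.servers()   # served from the cached attribute, no recomputation
```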
larryhastings/gilectomy
|
4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a
|
Lib/tkinter/__init__.py
|
python
|
Misc.winfo_screencells
|
(self)
|
return self.tk.getint(
self.tk.call('winfo', 'screencells', self._w))
|
Return the number of the cells in the colormap of the screen
of this widget.
|
Return the number of the cells in the colormap of the screen
of this widget.
|
[
"Return",
"the",
"number",
"of",
"the",
"cells",
"in",
"the",
"colormap",
"of",
"the",
"screen",
"of",
"this",
"widget",
"."
] |
def winfo_screencells(self):
"""Return the number of the cells in the colormap of the screen
of this widget."""
return self.tk.getint(
self.tk.call('winfo', 'screencells', self._w))
|
[
"def",
"winfo_screencells",
"(",
"self",
")",
":",
"return",
"self",
".",
"tk",
".",
"getint",
"(",
"self",
".",
"tk",
".",
"call",
"(",
"'winfo'",
",",
"'screencells'",
",",
"self",
".",
"_w",
")",
")"
] |
https://github.com/larryhastings/gilectomy/blob/4315ec3f1d6d4f813cc82ce27a24e7f784dbfc1a/Lib/tkinter/__init__.py#L907-L911
|
|
andresriancho/w3af
|
cd22e5252243a87aaa6d0ddea47cf58dacfe00a9
|
w3af/plugins/grep/meta_generator.py
|
python
|
meta_generator._get_generators
|
(self, response)
|
return generators
|
:param response: The HTTP response
:return: A set with all generators
|
:param response: The HTTP response
:return: A set with all generators
|
[
":",
"param",
"response",
":",
"The",
"HTTP",
"response",
":",
"return",
":",
"A",
"set",
"with",
"all",
"generators"
] |
def _get_generators(self, response):
"""
:param response: The HTTP response
:return: A set with all generators
"""
generators = set()
for tag in parser_cache.dpc.get_tags_by_filter(response, ('meta',)):
# pylint: disable=E1101
name_attr_val = tag.attrib.get('name', None)
# pylint: enable=E1101
if name_attr_val is None:
continue
if 'generator' != name_attr_val.lower():
continue
# pylint: disable=E1101
content_attr_val = tag.attrib.get('content', None)
# pylint: enable=E1101
if not content_attr_val:
continue
generators.add(content_attr_val)
return generators
|
[
"def",
"_get_generators",
"(",
"self",
",",
"response",
")",
":",
"generators",
"=",
"set",
"(",
")",
"for",
"tag",
"in",
"parser_cache",
".",
"dpc",
".",
"get_tags_by_filter",
"(",
"response",
",",
"(",
"'meta'",
",",
")",
")",
":",
"# pylint: disable=E1101",
"name_attr_val",
"=",
"tag",
".",
"attrib",
".",
"get",
"(",
"'name'",
",",
"None",
")",
"# pylint: enable=E1101",
"if",
"name_attr_val",
"is",
"None",
":",
"continue",
"if",
"'generator'",
"!=",
"name_attr_val",
".",
"lower",
"(",
")",
":",
"continue",
"# pylint: disable=E1101",
"content_attr_val",
"=",
"tag",
".",
"attrib",
".",
"get",
"(",
"'content'",
",",
"None",
")",
"# pylint: enable=E1101",
"if",
"not",
"content_attr_val",
":",
"continue",
"generators",
".",
"add",
"(",
"content_attr_val",
")",
"return",
"generators"
] |
https://github.com/andresriancho/w3af/blob/cd22e5252243a87aaa6d0ddea47cf58dacfe00a9/w3af/plugins/grep/meta_generator.py#L68-L95
|
|
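The plugin walks `<meta>` tags and keeps non-empty `content` values whose `name` is `generator` (case-insensitive). The same filter, sketched with BeautifulSoup instead of w3af's `parser_cache`:

```python
from bs4 import BeautifulSoup

html = '<html><head><meta name="Generator" content="WordPress 5.0"></head></html>'
soup = BeautifulSoup(html, 'html.parser')
generators = {tag.get('content') for tag in soup.find_all('meta')
              if (tag.get('name') or '').lower() == 'generator' and tag.get('content')}
print(generators)   # {'WordPress 5.0'}
```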
fsspec/filesystem_spec
|
76da18cf5a9697f480e5a0f6d1013d71676af131
|
fsspec/callbacks.py
|
python
|
Callback.call
|
(self, hook_name=None, **kwargs)
|
Execute hook(s) with current state
Each function is passed the internal size and current value
Parameters
----------
hook_name: str or None
If given, execute on this hook
kwargs: passed on to (all) hook(s)
|
Execute hook(s) with current state
|
[
"Execute",
"hook",
"(",
"s",
")",
"with",
"current",
"state"
] |
def call(self, hook_name=None, **kwargs):
"""
Execute hook(s) with current state
Each function is passed the internal size and current value
Parameters
----------
hook_name: str or None
If given, execute on this hook
kwargs: passed on to (all) hook(s)
"""
if not self.hooks:
return
kw = self.kw.copy()
kw.update(kwargs)
if hook_name:
if hook_name not in self.hooks:
return
return self.hooks[hook_name](self.size, self.value, **kw)
for hook in self.hooks.values() or []:
hook(self.size, self.value, **kw)
|
[
"def",
"call",
"(",
"self",
",",
"hook_name",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"hooks",
":",
"return",
"kw",
"=",
"self",
".",
"kw",
".",
"copy",
"(",
")",
"kw",
".",
"update",
"(",
"kwargs",
")",
"if",
"hook_name",
":",
"if",
"hook_name",
"not",
"in",
"self",
".",
"hooks",
":",
"return",
"return",
"self",
".",
"hooks",
"[",
"hook_name",
"]",
"(",
"self",
".",
"size",
",",
"self",
".",
"value",
",",
"*",
"*",
"kw",
")",
"for",
"hook",
"in",
"self",
".",
"hooks",
".",
"values",
"(",
")",
"or",
"[",
"]",
":",
"hook",
"(",
"self",
".",
"size",
",",
"self",
".",
"value",
",",
"*",
"*",
"kw",
")"
] |
https://github.com/fsspec/filesystem_spec/blob/76da18cf5a9697f480e5a0f6d1013d71676af131/fsspec/callbacks.py#L68-L89
|
||
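`Callback.call` dispatches either one named hook or all of them, merging stored keyword arguments with per-call ones. A trimmed-down sketch of that dispatch logic (fsspec's real `Callback` carries more state):

```python
class MiniCallback:
    def __init__(self, hooks=None, size=None, value=0, **kw):
        self.hooks, self.size, self.value, self.kw = hooks or {}, size, value, kw

    def call(self, hook_name=None, **kwargs):
        if not self.hooks:
            return
        kw = self.kw.copy()
        kw.update(kwargs)
        if hook_name:
            if hook_name not in self.hooks:
                return
            return self.hooks[hook_name](self.size, self.value, **kw)
        for hook in self.hooks.values():
            hook(self.size, self.value, **kw)

cb = MiniCallback(hooks={'progress': lambda size, value: print(value, '/', size)},
                  size=100, value=10)
cb.call()            # runs every hook: prints "10 / 100"
cb.call('missing')   # unknown hook name: returns None silently
```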
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_hxb2/lib/python3.5/site-packages/wagtail_bak/contrib/modeladmin/views.py
|
python
|
ChooseParentView.dispatch
|
(self, request, *args, **kwargs)
|
return super(ChooseParentView, self).dispatch(request, *args, **kwargs)
|
[] |
def dispatch(self, request, *args, **kwargs):
if not self.permission_helper.user_can_create(request.user):
raise PermissionDenied
return super(ChooseParentView, self).dispatch(request, *args, **kwargs)
|
[
"def",
"dispatch",
"(",
"self",
",",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"self",
".",
"permission_helper",
".",
"user_can_create",
"(",
"request",
".",
"user",
")",
":",
"raise",
"PermissionDenied",
"return",
"super",
"(",
"ChooseParentView",
",",
"self",
")",
".",
"dispatch",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_hxb2/lib/python3.5/site-packages/wagtail_bak/contrib/modeladmin/views.py#L730-L733
|
|||
josw123/dart-fss
|
816d0fc6002aefb61912d5871af0438a6e1e7c99
|
dart_fss/filings/reports.py
|
python
|
Report.extract_attached_files
|
(self)
|
return self._attached_files
|
Extract and return the list of attached files
Returns
-------
list of AttachedFile
List of attached files
|
Extract and return the list of attached files
|
[
"첨부된",
"파일",
"리스트",
"추출",
"및",
"반환"
] |
def extract_attached_files(self):
""" 첨부된 파일 리스트 추출 및 반환
Returns
-------
list of AttachedFile
첨부된 파일리스트
"""
if self.html is None:
self._get_report()
results = []
# tag and class changed
a_href = self.html.find('button', class_='btnDown')
a_onclick = a_href.attrs.get('onclick', '')
raw_data = re.search(r'openPdfDownload\(.*?(\d+).*?(\d+).*?\)', a_onclick)
if raw_data is None:
return results
rcp_no = raw_data.group(1)
dcm_no = raw_data.group(2)
payload = dict(rcp_no=rcp_no, dcm_no=dcm_no)
resp = request.get(url=self._DOWNLOAD_URL_, payload=payload, referer=self._REPORT_URL_)
referer = resp.url
soup = BeautifulSoup(resp.text, 'html.parser')
tr_list = soup.find_all('tr')
attached_files = []
for tr in tr_list:
if tr.find('a'):
td_list = tr.find_all('td')
filename = td_list[0].text.strip()
file_url = td_list[1].a.get('href')
if not file_url:
continue
info = dict()
info['rcp_no'] = self.rcp_no
info['url'] = file_url
info['filename'] = filename
info['referer'] = referer
attached_files.append(AttachedFile(**info))
self._attached_files = attached_files
return self._attached_files
|
[
"def",
"extract_attached_files",
"(",
"self",
")",
":",
"if",
"self",
".",
"html",
"is",
"None",
":",
"self",
".",
"_get_report",
"(",
")",
"results",
"=",
"[",
"]",
"# tag 및 class 변경",
"a_href",
"=",
"self",
".",
"html",
".",
"find",
"(",
"'button'",
",",
"class_",
"=",
"'btnDown'",
")",
"a_onclick",
"=",
"a_href",
".",
"attrs",
".",
"get",
"(",
"'onclick'",
",",
"''",
")",
"raw_data",
"=",
"re",
".",
"search",
"(",
"r'openPdfDownload\\(.*?(\\d+).*?(\\d+).*?\\)'",
",",
"a_onclick",
")",
"if",
"raw_data",
"is",
"None",
":",
"return",
"results",
"rcp_no",
"=",
"raw_data",
".",
"group",
"(",
"1",
")",
"dcm_no",
"=",
"raw_data",
".",
"group",
"(",
"2",
")",
"payload",
"=",
"dict",
"(",
"rcp_no",
"=",
"rcp_no",
",",
"dcm_no",
"=",
"dcm_no",
")",
"resp",
"=",
"request",
".",
"get",
"(",
"url",
"=",
"self",
".",
"_DOWNLOAD_URL_",
",",
"payload",
"=",
"payload",
",",
"referer",
"=",
"self",
".",
"_REPORT_URL_",
")",
"referer",
"=",
"resp",
".",
"url",
"soup",
"=",
"BeautifulSoup",
"(",
"resp",
".",
"text",
",",
"'html.parser'",
")",
"tr_list",
"=",
"soup",
".",
"find_all",
"(",
"'tr'",
")",
"attached_files",
"=",
"[",
"]",
"for",
"tr",
"in",
"tr_list",
":",
"if",
"tr",
".",
"find",
"(",
"'a'",
")",
":",
"td_list",
"=",
"tr",
".",
"find_all",
"(",
"'td'",
")",
"filename",
"=",
"td_list",
"[",
"0",
"]",
".",
"text",
".",
"strip",
"(",
")",
"file_url",
"=",
"td_list",
"[",
"1",
"]",
".",
"a",
".",
"get",
"(",
"'href'",
")",
"if",
"not",
"file_url",
":",
"continue",
"info",
"=",
"dict",
"(",
")",
"info",
"[",
"'rcp_no'",
"]",
"=",
"self",
".",
"rcp_no",
"info",
"[",
"'url'",
"]",
"=",
"file_url",
"info",
"[",
"'filename'",
"]",
"=",
"filename",
"info",
"[",
"'referer'",
"]",
"=",
"referer",
"attached_files",
".",
"append",
"(",
"AttachedFile",
"(",
"*",
"*",
"info",
")",
")",
"self",
".",
"_attached_files",
"=",
"attached_files",
"return",
"self",
".",
"_attached_files"
] |
https://github.com/josw123/dart-fss/blob/816d0fc6002aefb61912d5871af0438a6e1e7c99/dart_fss/filings/reports.py#L217-L260
|
|
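The regex pulls the two numeric arguments (`rcp_no` and `dcm_no`) out of the download button's onclick handler. Against a hypothetical onclick value:

```python
import re

onclick = "openPdfDownload('20190401004781', '6616741')"   # hypothetical example value
m = re.search(r'openPdfDownload\(.*?(\d+).*?(\d+).*?\)', onclick)
print(m.group(1), m.group(2))   # 20190401004781 6616741
```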
dmlc/dgl
|
8d14a739bc9e446d6c92ef83eafe5782398118de
|
python/dgl/_deprecate/graph.py
|
python
|
DGLGraph.nodes
|
(self)
|
return NodeView(self)
|
Return a node view that can be used to set/get feature data.
Examples
--------
>>> G = dgl.DGLGraph()
>>> G.add_nodes(3)
Get nodes in graph G:
>>> G.nodes()
tensor([0, 1, 2])
Get feature dictionary of all nodes:
>>> G.nodes[:].data
{}
The above can be abbreviated as
>>> G.ndata
{}
Init all 3 nodes with zero vector(len=5)
.. note:: Here we use pytorch syntax for demo. The general idea applies
to other frameworks with minor syntax change (e.g. replace
``torch.tensor`` with ``mxnet.ndarray``).
>>> import torch as th
>>> G.ndata['x'] = th.zeros((3, 5))
>>> G.ndata['x']
{'x' : tensor([[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.]])}
Use G.nodes to get/set features for some nodes.
>>> G.nodes[[0, 2]].data['x'] = th.ones((2, 5))
>>> G.ndata
{'x' : tensor([[1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1.]])}
See Also
--------
dgl.DGLGraph.ndata
|
Return a node view that can be used to set/get feature data.
|
[
"Return",
"a",
"node",
"view",
"that",
"can",
"used",
"to",
"set",
"/",
"get",
"feature",
"data",
"."
] |
def nodes(self):
"""Return a node view that can used to set/get feature data.
Examples
--------
>>> G = dgl.DGLGraph()
>>> G.add_nodes(3)
Get nodes in graph G:
>>> G.nodes()
tensor([0, 1, 2])
Get feature dictionary of all nodes:
>>> G.nodes[:].data
{}
The above can be abbreviated as
>>> G.ndata
{}
Init all 3 nodes with zero vector(len=5)
.. note:: Here we use pytorch syntax for demo. The general idea applies
to other frameworks with minor syntax change (e.g. replace
``torch.tensor`` with ``mxnet.ndarray``).
>>> import torch as th
>>> G.ndata['x'] = th.zeros((3, 5))
>>> G.ndata['x']
{'x' : tensor([[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.],
[0., 0., 0., 0., 0.]])}
Use G.nodes to get/set features for some nodes.
>>> G.nodes[[0, 2]].data['x'] = th.ones((2, 5))
>>> G.ndata
{'x' : tensor([[1., 1., 1., 1., 1.],
[0., 0., 0., 0., 0.],
[1., 1., 1., 1., 1.]])}
See Also
--------
dgl.DGLGraph.ndata
"""
return NodeView(self)
|
[
"def",
"nodes",
"(",
"self",
")",
":",
"return",
"NodeView",
"(",
"self",
")"
] |
https://github.com/dmlc/dgl/blob/8d14a739bc9e446d6c92ef83eafe5782398118de/python/dgl/_deprecate/graph.py#L2069-L2119
|
|
neptune-ai/open-solution-salt-identification
|
394f16b23b6e30543aee54701f81a06b5dd92a98
|
common_blocks/callbacks.py
|
python
|
NeptuneMonitor.on_epoch_end
|
(self, *args, **kwargs)
|
[] |
def on_epoch_end(self, *args, **kwargs):
self._send_numeric_channels()
if self.image_every is not None and self.epoch_id % self.image_every == 0:
self._send_image_channels()
self.epoch_id += 1
|
[
"def",
"on_epoch_end",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_send_numeric_channels",
"(",
")",
"if",
"self",
".",
"image_every",
"is",
"not",
"None",
"and",
"self",
".",
"epoch_id",
"%",
"self",
".",
"image_every",
"==",
"0",
":",
"self",
".",
"_send_image_channels",
"(",
")",
"self",
".",
"epoch_id",
"+=",
"1"
] |
https://github.com/neptune-ai/open-solution-salt-identification/blob/394f16b23b6e30543aee54701f81a06b5dd92a98/common_blocks/callbacks.py#L356-L360
|
||||
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_objectvalidator.py
|
python
|
OpenShiftCLIConfig.config_options
|
(self)
|
return self._options
|
return config options
|
return config options
|
[
"return",
"config",
"options"
] |
def config_options(self):
''' return config options '''
return self._options
|
[
"def",
"config_options",
"(",
"self",
")",
":",
"return",
"self",
".",
"_options"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.10.0-0.29.0/roles/lib_openshift/library/oc_objectvalidator.py#L1386-L1388
|
|
Tautulli/Tautulli
|
2410eb33805aaac4bd1c5dad0f71e4f15afaf742
|
lib/musicbrainzngs/musicbrainz.py
|
python
|
_DigestAuthHandler._encode_utf8
|
(self, msg)
|
return msg.encode("utf-8")
|
The MusicBrainz server also accepts UTF-8 encoded passwords.
|
The MusicBrainz server also accepts UTF-8 encoded passwords.
|
[
"The",
"MusicBrainz",
"server",
"also",
"accepts",
"UTF",
"-",
"8",
"encoded",
"passwords",
"."
] |
def _encode_utf8(self, msg):
"""The MusicBrainz server also accepts UTF-8 encoded passwords."""
encoding = sys.stdin.encoding or locale.getpreferredencoding()
try:
# This works on Python 2 (msg in bytes)
msg = msg.decode(encoding)
except AttributeError:
# on Python 3 (msg is already in unicode)
pass
return msg.encode("utf-8")
|
[
"def",
"_encode_utf8",
"(",
"self",
",",
"msg",
")",
":",
"encoding",
"=",
"sys",
".",
"stdin",
".",
"encoding",
"or",
"locale",
".",
"getpreferredencoding",
"(",
")",
"try",
":",
"# This works on Python 2 (msg in bytes)",
"msg",
"=",
"msg",
".",
"decode",
"(",
"encoding",
")",
"except",
"AttributeError",
":",
"# on Python 3 (msg is already in unicode)",
"pass",
"return",
"msg",
".",
"encode",
"(",
"\"utf-8\"",
")"
] |
https://github.com/Tautulli/Tautulli/blob/2410eb33805aaac4bd1c5dad0f71e4f15afaf742/lib/musicbrainzngs/musicbrainz.py#L443-L452
|
|
Yukinoshita47/Yuki-Chan-The-Auto-Pentest
|
bea1af4e1d544eadc166f728be2f543ea10af191
|
Module/dnsrecon/dnsrecon.py
|
python
|
process_range
|
(arg)
|
return ip_list
|
Function will take a string representation of a range for IPv4 or IPv6 in
CIDR or Range format and return a list of IPs.
|
Function will take a string representation of a range for IPv4 or IPv6 in
CIDR or Range format and return a list of IPs.
|
[
"Function",
"will",
"take",
"a",
"string",
"representation",
"of",
"a",
"range",
"for",
"IPv4",
"or",
"IPv6",
"in",
"CIDR",
"or",
"Range",
"format",
"and",
"return",
"a",
"list",
"of",
"IPs",
"."
] |
def process_range(arg):
"""
Function will take a string representation of a range for IPv4 or IPv6 in
CIDR or Range format and return a list of IPs.
"""
try:
ip_list = None
range_vals = []
if re.match(r'\S*\/\S*', arg):
ip_list = IPNetwork(arg)
elif (re.match(r'\S*\-\S*', arg)):
range_vals.extend(arg.split("-"))
if len(range_vals) == 2:
ip_list = IPRange(range_vals[0], range_vals[1])
else:
print_error("Range provided is not valid")
return []
except:
print_error("Range provided is not valid")
return []
return ip_list
|
[
"def",
"process_range",
"(",
"arg",
")",
":",
"try",
":",
"ip_list",
"=",
"None",
"range_vals",
"=",
"[",
"]",
"if",
"re",
".",
"match",
"(",
"r'\\S*\\/\\S*'",
",",
"arg",
")",
":",
"ip_list",
"=",
"IPNetwork",
"(",
"arg",
")",
"elif",
"(",
"re",
".",
"match",
"(",
"r'\\S*\\-\\S*'",
",",
"arg",
")",
")",
":",
"range_vals",
".",
"extend",
"(",
"arg",
".",
"split",
"(",
"\"-\"",
")",
")",
"if",
"len",
"(",
"range_vals",
")",
"==",
"2",
":",
"ip_list",
"=",
"IPRange",
"(",
"range_vals",
"[",
"0",
"]",
",",
"range_vals",
"[",
"1",
"]",
")",
"else",
":",
"print_error",
"(",
"\"Range provided is not valid\"",
")",
"return",
"[",
"]",
"except",
":",
"print_error",
"(",
"\"Range provided is not valid\"",
")",
"return",
"[",
"]",
"return",
"ip_list"
] |
https://github.com/Yukinoshita47/Yuki-Chan-The-Auto-Pentest/blob/bea1af4e1d544eadc166f728be2f543ea10af191/Module/dnsrecon/dnsrecon.py#L152-L173
|
|
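`IPNetwork` and `IPRange` come from the netaddr package, so the returned "list" is actually an iterable netaddr object. For example:

```python
from netaddr import IPNetwork, IPRange

print(list(IPNetwork('192.0.2.0/30')))
# [IPAddress('192.0.2.0'), IPAddress('192.0.2.1'),
#  IPAddress('192.0.2.2'), IPAddress('192.0.2.3')]
print(len(IPRange('192.0.2.1', '192.0.2.3')))   # 3
```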
leo-editor/leo-editor
|
383d6776d135ef17d73d935a2f0ecb3ac0e99494
|
leo/plugins/obsolete/tkGui.py
|
python
|
leoTkinterDialog.center
|
(self)
|
Center any leoTkinterDialog.
|
Center any leoTkinterDialog.
|
[
"Center",
"any",
"leoTkinterDialog",
"."
] |
def center(self):
"""Center any leoTkinterDialog."""
g.app.gui.center_dialog(self.top)
|
[
"def",
"center",
"(",
"self",
")",
":",
"g",
".",
"app",
".",
"gui",
".",
"center_dialog",
"(",
"self",
".",
"top",
")"
] |
https://github.com/leo-editor/leo-editor/blob/383d6776d135ef17d73d935a2f0ecb3ac0e99494/leo/plugins/obsolete/tkGui.py#L1068-L1072
|
||
hardmaru/resnet-cppn-gan-tensorflow
|
9206e06512c118e932fbc789c91a5cf4f9e5d2b9
|
ops.py
|
python
|
conv2d
|
(input_, output_dim,
k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,
name="conv2d")
|
[] |
def conv2d(input_, output_dim,
k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02,
name="conv2d"):
with tf.variable_scope(name):
w = tf.get_variable('w', [k_h, k_w, input_.get_shape()[-1], output_dim],
initializer=tf.truncated_normal_initializer(stddev=stddev))
conv = tf.nn.conv2d(input_, w, strides=[1, d_h, d_w, 1], padding='SAME')
biases = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.0))
conv = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
return conv
|
[
"def",
"conv2d",
"(",
"input_",
",",
"output_dim",
",",
"k_h",
"=",
"5",
",",
"k_w",
"=",
"5",
",",
"d_h",
"=",
"2",
",",
"d_w",
"=",
"2",
",",
"stddev",
"=",
"0.02",
",",
"name",
"=",
"\"conv2d\"",
")",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"name",
")",
":",
"w",
"=",
"tf",
".",
"get_variable",
"(",
"'w'",
",",
"[",
"k_h",
",",
"k_w",
",",
"input_",
".",
"get_shape",
"(",
")",
"[",
"-",
"1",
"]",
",",
"output_dim",
"]",
",",
"initializer",
"=",
"tf",
".",
"truncated_normal_initializer",
"(",
"stddev",
"=",
"stddev",
")",
")",
"conv",
"=",
"tf",
".",
"nn",
".",
"conv2d",
"(",
"input_",
",",
"w",
",",
"strides",
"=",
"[",
"1",
",",
"d_h",
",",
"d_w",
",",
"1",
"]",
",",
"padding",
"=",
"'SAME'",
")",
"biases",
"=",
"tf",
".",
"get_variable",
"(",
"'biases'",
",",
"[",
"output_dim",
"]",
",",
"initializer",
"=",
"tf",
".",
"constant_initializer",
"(",
"0.0",
")",
")",
"conv",
"=",
"tf",
".",
"reshape",
"(",
"tf",
".",
"nn",
".",
"bias_add",
"(",
"conv",
",",
"biases",
")",
",",
"conv",
".",
"get_shape",
"(",
")",
")",
"return",
"conv"
] |
https://github.com/hardmaru/resnet-cppn-gan-tensorflow/blob/9206e06512c118e932fbc789c91a5cf4f9e5d2b9/ops.py#L59-L70
|
||||
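This is TensorFlow 1.x style code (`variable_scope`, `get_variable`). A usage sketch, assuming the function above is in scope and TF1 behavior via the compat layer; with stride 2 and SAME padding, each spatial dimension halves:

```python
import tensorflow.compat.v1 as tf   # assumes TF2 with the v1 compat layer available
tf.disable_v2_behavior()

x = tf.placeholder(tf.float32, [None, 64, 64, 3])
y = conv2d(x, output_dim=32)
print(y.get_shape())   # (?, 32, 32, 32)
```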
psd-tools/psd-tools
|
00241f3aed2ca52a8012e198a0f390ff7d8edca9
|
src/psd_tools/composer/blend.py
|
python
|
rgb_to_hls
|
(rgb)
|
return h, l, s
|
RGB to HSL conversion.
See colorsys module.
|
RGB to HSL conversion.
|
[
"RGB",
"to",
"HSL",
"conversion",
"."
] |
def rgb_to_hls(rgb):
"""RGB to HSL conversion.
See colorsys module.
"""
import numpy as np
maxc = np.max(rgb, axis=2)
minc = np.min(rgb, axis=2)
nonzero_index = (minc < maxc)
c_diff = maxc - minc
l = (minc + maxc) / 2.0
s = np.zeros_like(l)
h = np.zeros_like(l)
index = nonzero_index
s[index] = c_diff[index] / (2.0 - maxc[index] - minc[index])
index = (l <= 0.5) & nonzero_index
s[index] = c_diff[index] / (maxc[index] + minc[index])
rc, gc, bc = (
maxc[nonzero_index] -
rgb[:, :, i][nonzero_index] / c_diff[nonzero_index] for i in range(3)
)
hc = 4.0 + gc - rc # 4 + gc - rc
index = (rgb[:, :, 1][nonzero_index] == maxc[nonzero_index])
hc[index] = 2.0 + rc[index] - bc[index] # 2 + rc - bc
index = (rgb[:, :, 0][nonzero_index] == maxc[nonzero_index])
hc[index] = bc[index] - gc[index] # bc - gc
h[nonzero_index] = (hc / 6.0) % 1.0
return h, l, s
|
[
"def",
"rgb_to_hls",
"(",
"rgb",
")",
":",
"import",
"numpy",
"as",
"np",
"maxc",
"=",
"np",
".",
"max",
"(",
"rgb",
",",
"axis",
"=",
"2",
")",
"minc",
"=",
"np",
".",
"min",
"(",
"rgb",
",",
"axis",
"=",
"2",
")",
"nonzero_index",
"=",
"(",
"minc",
"<",
"maxc",
")",
"c_diff",
"=",
"maxc",
"-",
"minc",
"l",
"=",
"(",
"minc",
"+",
"maxc",
")",
"/",
"2.0",
"s",
"=",
"np",
".",
"zeros_like",
"(",
"l",
")",
"h",
"=",
"np",
".",
"zeros_like",
"(",
"l",
")",
"index",
"=",
"nonzero_index",
"s",
"[",
"index",
"]",
"=",
"c_diff",
"[",
"index",
"]",
"/",
"(",
"2.0",
"-",
"maxc",
"[",
"index",
"]",
"-",
"minc",
"[",
"index",
"]",
")",
"index",
"=",
"(",
"l",
"<=",
"0.5",
")",
"&",
"nonzero_index",
"s",
"[",
"index",
"]",
"=",
"c_diff",
"[",
"index",
"]",
"/",
"(",
"maxc",
"[",
"index",
"]",
"+",
"minc",
"[",
"index",
"]",
")",
"rc",
",",
"gc",
",",
"bc",
"=",
"(",
"maxc",
"[",
"nonzero_index",
"]",
"-",
"rgb",
"[",
":",
",",
":",
",",
"i",
"]",
"[",
"nonzero_index",
"]",
"/",
"c_diff",
"[",
"nonzero_index",
"]",
"for",
"i",
"in",
"range",
"(",
"3",
")",
")",
"hc",
"=",
"4.0",
"+",
"gc",
"-",
"rc",
"# 4 + gc - rc",
"index",
"=",
"(",
"rgb",
"[",
":",
",",
":",
",",
"1",
"]",
"[",
"nonzero_index",
"]",
"==",
"maxc",
"[",
"nonzero_index",
"]",
")",
"hc",
"[",
"index",
"]",
"=",
"2.0",
"+",
"rc",
"[",
"index",
"]",
"-",
"bc",
"[",
"index",
"]",
"# 2 + rc - bc",
"index",
"=",
"(",
"rgb",
"[",
":",
",",
":",
",",
"0",
"]",
"[",
"nonzero_index",
"]",
"==",
"maxc",
"[",
"nonzero_index",
"]",
")",
"hc",
"[",
"index",
"]",
"=",
"bc",
"[",
"index",
"]",
"-",
"gc",
"[",
"index",
"]",
"# bc - gc",
"h",
"[",
"nonzero_index",
"]",
"=",
"(",
"hc",
"/",
"6.0",
")",
"%",
"1.0",
"return",
"h",
",",
"l",
",",
"s"
] |
https://github.com/psd-tools/psd-tools/blob/00241f3aed2ca52a8012e198a0f390ff7d8edca9/src/psd_tools/composer/blend.py#L279-L310
|
|
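The function vectorizes the colorsys-style conversion over an (H, W, 3) array of values in [0, 1]. A quick check of lightness and saturation against the stdlib's scalar `colorsys` for one pixel, assuming the function above is in scope:

```python
import colorsys
import numpy as np

rgb = np.array([[[0.2, 0.4, 0.6]]])            # one pixel, shape (1, 1, 3)
h, l, s = rgb_to_hls(rgb)
print(l[0, 0], s[0, 0])                        # ~0.4 ~0.5 (up to float rounding)
print(colorsys.rgb_to_hls(0.2, 0.4, 0.6)[1:])  # stdlib agrees on l and s
```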
GoogleCloudPlatform/PerfKitBenchmarker
|
6e3412d7d5e414b8ca30ed5eaf970cef1d919a67
|
perfkitbenchmarker/vm_util.py
|
python
|
ShouldRunOnInternalIpAddress
|
(sending_vm, receiving_vm, ip_type=None)
|
return (ip_type_to_check in (IpAddressSubset.BOTH, IpAddressSubset.INTERNAL)
or (ip_type_to_check == IpAddressSubset.REACHABLE and
sending_vm.IsReachable(receiving_vm)))
|
Returns whether a test should be run on an instance's internal IP.
Based on the command line flag --ip_addresses. Internal IP addresses are used
when:
* --ip_addresses=BOTH or --ip-addresses=INTERNAL
* --ip_addresses=REACHABLE and 'sending_vm' can ping 'receiving_vm' on its
internal IP.
Args:
sending_vm: VirtualMachine. The client.
receiving_vm: VirtualMachine. The server.
ip_type: optional ip_type to use instead of what is set in the FLAGS
Returns:
Whether a test should be run on an instance's internal IP.
|
Returns whether a test should be run on an instance's internal IP.
|
[
"Returns",
"whether",
"a",
"test",
"should",
"be",
"run",
"on",
"an",
"instance",
"s",
"internal",
"IP",
"."
] |
def ShouldRunOnInternalIpAddress(sending_vm, receiving_vm, ip_type=None):
"""Returns whether a test should be run on an instance's internal IP.
Based on the command line flag --ip_addresses. Internal IP addresses are used
when:
* --ip_addresses=BOTH or --ip-addresses=INTERNAL
* --ip_addresses=REACHABLE and 'sending_vm' can ping 'receiving_vm' on its
internal IP.
Args:
sending_vm: VirtualMachine. The client.
receiving_vm: VirtualMachine. The server.
ip_type: optional ip_type to use instead of what is set in the FLAGS
Returns:
Whether a test should be run on an instance's internal IP.
"""
ip_type_to_check = ip_type or FLAGS.ip_addresses
return (ip_type_to_check in (IpAddressSubset.BOTH, IpAddressSubset.INTERNAL)
or (ip_type_to_check == IpAddressSubset.REACHABLE and
sending_vm.IsReachable(receiving_vm)))
|
[
"def",
"ShouldRunOnInternalIpAddress",
"(",
"sending_vm",
",",
"receiving_vm",
",",
"ip_type",
"=",
"None",
")",
":",
"ip_type_to_check",
"=",
"ip_type",
"or",
"FLAGS",
".",
"ip_addresses",
"return",
"(",
"ip_type_to_check",
"in",
"(",
"IpAddressSubset",
".",
"BOTH",
",",
"IpAddressSubset",
".",
"INTERNAL",
")",
"or",
"(",
"ip_type_to_check",
"==",
"IpAddressSubset",
".",
"REACHABLE",
"and",
"sending_vm",
".",
"IsReachable",
"(",
"receiving_vm",
")",
")",
")"
] |
https://github.com/GoogleCloudPlatform/PerfKitBenchmarker/blob/6e3412d7d5e414b8ca30ed5eaf970cef1d919a67/perfkitbenchmarker/vm_util.py#L513-L534
|
|
daler/gffutils
|
4b5b28e610a435af359ab1c31271deea1bae4c47
|
gffutils/interface.py
|
python
|
FeatureDB.merge_all
|
(self,
merge_order=('seqid', 'featuretype', 'strand', 'start'),
merge_criteria=(mc.seqid, mc.overlap_end_inclusive, mc.strand, mc.feature_type),
featuretypes_groups=(None,),
exclude_components=False)
|
return result_features
|
Merge all features in database according to criteria.
Merged features will be assigned as children of the merged record.
The resulting records are added to the database.
Parameters
----------
merge_order : list
Ordered list of columns with which to group features before evaluating criteria
merge_criteria : list
List of merge criteria callbacks. See merge().
featuretypes_groups : list
iterable of sets of featuretypes to merge together
exclude_components : bool
True: child features will be discarded. False to keep them.
Returns
-------
list of merged features
|
Merge all features in database according to criteria.
Merged features will be assigned as children of the merged record.
The resulting records are added to the database.
|
[
"Merge",
"all",
"features",
"in",
"database",
"according",
"to",
"criteria",
".",
"Merged",
"features",
"will",
"be",
"assigned",
"as",
"children",
"of",
"the",
"merged",
"record",
".",
"The",
"resulting",
"records",
"are",
"added",
"to",
"the",
"database",
"."
] |
def merge_all(self,
merge_order=('seqid', 'featuretype', 'strand', 'start'),
merge_criteria=(mc.seqid, mc.overlap_end_inclusive, mc.strand, mc.feature_type),
featuretypes_groups=(None,),
exclude_components=False):
"""
Merge all features in database according to criteria.
Merged features will be assigned as children of the merged record.
The resulting records are added to the database.
Parameters
----------
merge_order : list
Ordered list of columns with which to group features before evaluating criteria
merge_criteria : list
List of merge criteria callbacks. See merge().
featuretypes_groups : list
iterable of sets of featuretypes to merge together
exclude_components : bool
True: child features will be discarded. False to keep them.
Returns
-------
list of merged features
"""
if not len(featuretypes_groups):
# Can't be empty
featuretypes_groups = (None,)
result_features = []
# Merge features per featuregroup
for featuregroup in featuretypes_groups:
for merged in self.merge(self.all_features(featuretype=featuregroup, order_by=merge_order),
merge_criteria=merge_criteria):
# If feature is result of merge
if merged.children:
self._insert(merged, self.conn.cursor())
if exclude_components:
# Remove child features from DB
self.delete(merged.children)
else:
# Add child relations to DB
for child in merged.children:
self.add_relation(merged, child, 1, child_func=assign_child)
result_features.append(merged)
else:
pass # Do nothing, feature is already in DB
return result_features
|
[
"def",
"merge_all",
"(",
"self",
",",
"merge_order",
"=",
"(",
"'seqid'",
",",
"'featuretype'",
",",
"'strand'",
",",
"'start'",
")",
",",
"merge_criteria",
"=",
"(",
"mc",
".",
"seqid",
",",
"mc",
".",
"overlap_end_inclusive",
",",
"mc",
".",
"strand",
",",
"mc",
".",
"feature_type",
")",
",",
"featuretypes_groups",
"=",
"(",
"None",
",",
")",
",",
"exclude_components",
"=",
"False",
")",
":",
"if",
"not",
"len",
"(",
"featuretypes_groups",
")",
":",
"# Can't be empty",
"featuretypes_groups",
"=",
"(",
"None",
",",
")",
"result_features",
"=",
"[",
"]",
"# Merge features per featuregroup",
"for",
"featuregroup",
"in",
"featuretypes_groups",
":",
"for",
"merged",
"in",
"self",
".",
"merge",
"(",
"self",
".",
"all_features",
"(",
"featuretype",
"=",
"featuregroup",
",",
"order_by",
"=",
"merge_order",
")",
",",
"merge_criteria",
"=",
"merge_criteria",
")",
":",
"# If feature is result of merge",
"if",
"merged",
".",
"children",
":",
"self",
".",
"_insert",
"(",
"merged",
",",
"self",
".",
"conn",
".",
"cursor",
"(",
")",
")",
"if",
"exclude_components",
":",
"# Remove child features from DB",
"self",
".",
"delete",
"(",
"merged",
".",
"children",
")",
"else",
":",
"# Add child relations to DB",
"for",
"child",
"in",
"merged",
".",
"children",
":",
"self",
".",
"add_relation",
"(",
"merged",
",",
"child",
",",
"1",
",",
"child_func",
"=",
"assign_child",
")",
"result_features",
".",
"append",
"(",
"merged",
")",
"else",
":",
"pass",
"# Do nothing, feature is already in DB",
"return",
"result_features"
] |
https://github.com/daler/gffutils/blob/4b5b28e610a435af359ab1c31271deea1bae4c47/gffutils/interface.py#L1331-L1381
|
|
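A hedged usage sketch, assuming an existing gffutils database file (the filename here is hypothetical):

```python
import gffutils

db = gffutils.FeatureDB('annotations.db')        # hypothetical pre-built database
merged = db.merge_all(exclude_components=True)   # merged records are written back to the DB
print(len(merged))
```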
cbfinn/gps
|
82fa6cc930c4392d55d2525f6b792089f1d2ccfe
|
python/gps/agent/ros/agent_ros.py
|
python
|
AgentROS._get_next_seq_id
|
(self)
|
return self._seq_id
|
[] |
def _get_next_seq_id(self):
self._seq_id = (self._seq_id + 1) % (2 ** 32)
return self._seq_id
|
[
"def",
"_get_next_seq_id",
"(",
"self",
")",
":",
"self",
".",
"_seq_id",
"=",
"(",
"self",
".",
"_seq_id",
"+",
"1",
")",
"%",
"(",
"2",
"**",
"32",
")",
"return",
"self",
".",
"_seq_id"
] |
https://github.com/cbfinn/gps/blob/82fa6cc930c4392d55d2525f6b792089f1d2ccfe/python/gps/agent/ros/agent_ros.py#L75-L77
|
|||
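The modulo keeps the sequence id inside an unsigned 32-bit range (ROS message headers use uint32 sequence numbers). The wraparound in isolation:

```python
seq_id = 2 ** 32 - 2
for _ in range(3):
    seq_id = (seq_id + 1) % (2 ** 32)
    print(seq_id)   # 4294967295, then 0, then 1 -- wraps instead of overflowing uint32
```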
twilio/twilio-python
|
6e1e811ea57a1edfadd5161ace87397c563f6915
|
twilio/rest/taskrouter/v1/workspace/task_queue/task_queue_statistics.py
|
python
|
TaskQueueStatisticsInstance.cumulative
|
(self)
|
return self._properties['cumulative']
|
:returns: An object that contains the cumulative statistics for the TaskQueue
:rtype: dict
|
:returns: An object that contains the cumulative statistics for the TaskQueue
:rtype: dict
|
[
":",
"returns",
":",
"An",
"object",
"that",
"contains",
"the",
"cumulative",
"statistics",
"for",
"the",
"TaskQueue",
":",
"rtype",
":",
"dict"
] |
def cumulative(self):
"""
:returns: An object that contains the cumulative statistics for the TaskQueue
:rtype: dict
"""
return self._properties['cumulative']
|
[
"def",
"cumulative",
"(",
"self",
")",
":",
"return",
"self",
".",
"_properties",
"[",
"'cumulative'",
"]"
] |
https://github.com/twilio/twilio-python/blob/6e1e811ea57a1edfadd5161ace87397c563f6915/twilio/rest/taskrouter/v1/workspace/task_queue/task_queue_statistics.py#L229-L234
|
|
mozilla-services/GitHub-Audit
|
3f80e4a00bf556af8c1be31532be976d770f85c8
|
get_branch_protections.py
|
python
|
DeferredRetryQueue.call_with_retry
|
(self, method, *args, **kwargs)
|
Make the call - add to retry queue if rc code matches
|
Make the call - add to retry queue if rc code matches
|
[
"Make",
"the",
"call",
"-",
"add",
"to",
"retry",
"queue",
"if",
"rc",
"code",
"matches"
] |
def call_with_retry(self, method, *args, **kwargs):
"""
Make the call - add to retry queue if rc code matches
"""
rc, _ = ag_call_with_rc(method, *args, **kwargs)
if rc in self.retry_codes:
logger.debug(
f"Data not ready - deferring call for {method.keywords['url']}"
)
self.add_retry(method)
|
[
"def",
"call_with_retry",
"(",
"self",
",",
"method",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"rc",
",",
"_",
"=",
"ag_call_with_rc",
"(",
"method",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"if",
"rc",
"in",
"self",
".",
"retry_codes",
":",
"logger",
".",
"debug",
"(",
"f\"Data not ready - deferring call for {method.keywords['url']}\"",
")",
"self",
".",
"add_retry",
"(",
"method",
")"
] |
https://github.com/mozilla-services/GitHub-Audit/blob/3f80e4a00bf556af8c1be31532be976d770f85c8/get_branch_protections.py#L281-L290
|
||
oracle/graalpython
|
577e02da9755d916056184ec441c26e00b70145c
|
graalpython/lib-python/3/codecs.py
|
python
|
BufferedIncrementalEncoder._buffer_encode
|
(self, input, errors, final)
|
[] |
def _buffer_encode(self, input, errors, final):
# Overwrite this method in subclasses: It must encode input
# and return an (output, length consumed) tuple
raise NotImplementedError
|
[
"def",
"_buffer_encode",
"(",
"self",
",",
"input",
",",
"errors",
",",
"final",
")",
":",
"# Overwrite this method in subclasses: It must encode input",
"# and return an (output, length consumed) tuple",
"raise",
"NotImplementedError"
] |
https://github.com/oracle/graalpython/blob/577e02da9755d916056184ec441c26e00b70145c/graalpython/lib-python/3/codecs.py#L231-L234
|
||||
AppScale/gts
|
46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9
|
AppServer/google/net/proto2/python/internal/enum_type_wrapper.py
|
python
|
EnumTypeWrapper.Value
|
(self, name)
|
Returns the value corresponding to the given enum name.
|
Returns the value corresponding to the given enum name.
|
[
"Returns",
"the",
"value",
"coresponding",
"to",
"the",
"given",
"enum",
"name",
"."
] |
def Value(self, name):
"""Returns the value coresponding to the given enum name."""
if name in self._enum_type.values_by_name:
return self._enum_type.values_by_name[name].number
raise ValueError('Enum %s has no value defined for name %s' % (
self._enum_type.name, name))
|
[
"def",
"Value",
"(",
"self",
",",
"name",
")",
":",
"if",
"name",
"in",
"self",
".",
"_enum_type",
".",
"values_by_name",
":",
"return",
"self",
".",
"_enum_type",
".",
"values_by_name",
"[",
"name",
"]",
".",
"number",
"raise",
"ValueError",
"(",
"'Enum %s has no value defined for name %s'",
"%",
"(",
"self",
".",
"_enum_type",
".",
"name",
",",
"name",
")",
")"
] |
https://github.com/AppScale/gts/blob/46f909cf5dc5ba81faf9d81dc9af598dcf8a82a9/AppServer/google/net/proto2/python/internal/enum_type_wrapper.py#L45-L50
|
||
opsmop/opsmop
|
376ca587f8c5f9ca8ed1829909d075c339066034
|
opsmop/providers/shell.py
|
python
|
Shell.plan
|
(self)
|
[] |
def plan(self):
self.needs('execute')
|
[
"def",
"plan",
"(",
"self",
")",
":",
"self",
".",
"needs",
"(",
"'execute'",
")"
] |
https://github.com/opsmop/opsmop/blob/376ca587f8c5f9ca8ed1829909d075c339066034/opsmop/providers/shell.py#L20-L21
|
||||
JaniceWuo/MovieRecommend
|
4c86db64ca45598917d304f535413df3bc9fea65
|
movierecommend/venv1/Lib/site-packages/django/core/cache/backends/base.py
|
python
|
BaseCache.get_backend_timeout
|
(self, timeout=DEFAULT_TIMEOUT)
|
return None if timeout is None else time.time() + timeout
|
Returns the timeout value usable by this backend based upon the provided
timeout.
|
Returns the timeout value usable by this backend based upon the provided
timeout.
|
[
"Returns",
"the",
"timeout",
"value",
"usable",
"by",
"this",
"backend",
"based",
"upon",
"the",
"provided",
"timeout",
"."
] |
def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
"""
Returns the timeout value usable by this backend based upon the provided
timeout.
"""
if timeout == DEFAULT_TIMEOUT:
timeout = self.default_timeout
elif timeout == 0:
# ticket 21147 - avoid time.time() related precision issues
timeout = -1
return None if timeout is None else time.time() + timeout
|
[
"def",
"get_backend_timeout",
"(",
"self",
",",
"timeout",
"=",
"DEFAULT_TIMEOUT",
")",
":",
"if",
"timeout",
"==",
"DEFAULT_TIMEOUT",
":",
"timeout",
"=",
"self",
".",
"default_timeout",
"elif",
"timeout",
"==",
"0",
":",
"# ticket 21147 - avoid time.time() related precision issues",
"timeout",
"=",
"-",
"1",
"return",
"None",
"if",
"timeout",
"is",
"None",
"else",
"time",
".",
"time",
"(",
")",
"+",
"timeout"
] |
https://github.com/JaniceWuo/MovieRecommend/blob/4c86db64ca45598917d304f535413df3bc9fea65/movierecommend/venv1/Lib/site-packages/django/core/cache/backends/base.py#L79-L89
|
|
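The three cases in isolation, with a minimal stand-in class (`DEFAULT_TIMEOUT` is an object sentinel in Django, recreated here):

```python
import time

DEFAULT_TIMEOUT = object()   # sentinel, as in django.core.cache.backends.base

class MiniCache:
    default_timeout = 300

    def get_backend_timeout(self, timeout=DEFAULT_TIMEOUT):
        if timeout == DEFAULT_TIMEOUT:
            timeout = self.default_timeout
        elif timeout == 0:
            timeout = -1   # already expired; sidesteps time.time() precision issues
        return None if timeout is None else time.time() + timeout

c = MiniCache()
print(round(c.get_backend_timeout() - time.time()))   # ~300
print(c.get_backend_timeout(None))                    # None: cache forever
print(c.get_backend_timeout(0) < time.time())         # True: expires immediately
```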
epfl-lts2/pygsp
|
a3412ce7696c02c8a55439e89d0c9ab8ae863269
|
pygsp/utils.py
|
python
|
compute_log_scales
|
(lmin, lmax, Nscales, t1=1, t2=2)
|
return np.exp(np.linspace(np.log(scale_max), np.log(scale_min), Nscales))
|
r"""
Compute logarithm scales for wavelets.
Parameters
----------
lmin : float
Smallest non-zero eigenvalue.
lmax : float
Largest eigenvalue, i.e. :py:attr:`pygsp.graphs.Graph.lmax`.
Nscales : int
Number of scales.
Returns
-------
scales : ndarray
List of scales of length Nscales.
Examples
--------
>>> from pygsp import utils
>>> utils.compute_log_scales(1, 10, 3)
array([2. , 0.4472136, 0.1 ])
|
r"""
Compute logarithm scales for wavelets.
|
[
"r",
"Compute",
"logarithm",
"scales",
"for",
"wavelets",
"."
] |
def compute_log_scales(lmin, lmax, Nscales, t1=1, t2=2):
r"""
Compute logarithm scales for wavelets.
Parameters
----------
lmin : float
Smallest non-zero eigenvalue.
lmax : float
Largest eigenvalue, i.e. :py:attr:`pygsp.graphs.Graph.lmax`.
Nscales : int
Number of scales.
Returns
-------
scales : ndarray
List of scales of length Nscales.
Examples
--------
>>> from pygsp import utils
>>> utils.compute_log_scales(1, 10, 3)
array([2. , 0.4472136, 0.1 ])
"""
scale_min = t1 / lmax
scale_max = t2 / lmin
return np.exp(np.linspace(np.log(scale_max), np.log(scale_min), Nscales))
|
[
"def",
"compute_log_scales",
"(",
"lmin",
",",
"lmax",
",",
"Nscales",
",",
"t1",
"=",
"1",
",",
"t2",
"=",
"2",
")",
":",
"scale_min",
"=",
"t1",
"/",
"lmax",
"scale_max",
"=",
"t2",
"/",
"lmin",
"return",
"np",
".",
"exp",
"(",
"np",
".",
"linspace",
"(",
"np",
".",
"log",
"(",
"scale_max",
")",
",",
"np",
".",
"log",
"(",
"scale_min",
")",
",",
"Nscales",
")",
")"
] |
https://github.com/epfl-lts2/pygsp/blob/a3412ce7696c02c8a55439e89d0c9ab8ae863269/pygsp/utils.py#L317-L344
|
|
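The endpoints are `t2/lmin` and `t1/lmax`, with the intermediate scales spaced geometrically between them; for the doctest above, the middle scale is the geometric mean of 2.0 and 0.1:

```python
import numpy as np

print(np.sqrt(2.0 * 0.1))   # 0.4472135... -- matches the doctest's middle scale
```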
CLUEbenchmark/CLUE
|
5bd39732734afecb490cf18a5212e692dbf2c007
|
baselines/models/ernie/modeling.py
|
python
|
BertModel.get_embedding_output
|
(self)
|
return self.embedding_output
|
Gets output of the embedding lookup (i.e., input to the transformer).
Returns:
float Tensor of shape [batch_size, seq_length, hidden_size] corresponding
to the output of the embedding layer, after summing the word
embeddings with the positional embeddings and the token type embeddings,
then performing layer normalization. This is the input to the transformer.
|
Gets output of the embedding lookup (i.e., input to the transformer).
|
[
"Gets",
"output",
"of",
"the",
"embedding",
"lookup",
"(",
"i",
".",
"e",
".",
"input",
"to",
"the",
"transformer",
")",
"."
] |
def get_embedding_output(self):
"""Gets output of the embedding lookup (i.e., input to the transformer).
Returns:
float Tensor of shape [batch_size, seq_length, hidden_size] corresponding
to the output of the embedding layer, after summing the word
embeddings with the positional embeddings and the token type embeddings,
then performing layer normalization. This is the input to the transformer.
"""
return self.embedding_output
|
[
"def",
"get_embedding_output",
"(",
"self",
")",
":",
"return",
"self",
".",
"embedding_output"
] |
https://github.com/CLUEbenchmark/CLUE/blob/5bd39732734afecb490cf18a5212e692dbf2c007/baselines/models/ernie/modeling.py#L249-L258
|
|
CharlesBlonde/libpurecoollink
|
a91362c57a0bc4126279c8c51c407dd713b08e10
|
libpurecoollink/utils.py
|
python
|
unpad
|
(string)
|
return string[:-ord(string[len(string) - 1:])]
|
Unpad string.
|
Unpad string.
|
[
"Un",
"pad",
"string",
"."
] |
def unpad(string):
"""Un pad string."""
return string[:-ord(string[len(string) - 1:])]
|
[
"def",
"unpad",
"(",
"string",
")",
":",
"return",
"string",
"[",
":",
"-",
"ord",
"(",
"string",
"[",
"len",
"(",
"string",
")",
"-",
"1",
":",
"]",
")",
"]"
] |
https://github.com/CharlesBlonde/libpurecoollink/blob/a91362c57a0bc4126279c8c51c407dd713b08e10/libpurecoollink/utils.py#L34-L36
|
|
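This is PKCS#7-style unpadding: the last character encodes how many padding characters to strip. A roundtrip with a hypothetical `pad` counterpart (libpurecoollink pads before AES encryption):

```python
def pad(s, block=16):             # hypothetical counterpart for illustration
    n = block - len(s) % block
    return s + chr(n) * n

padded = pad('hello')             # 'hello' + 11 copies of chr(11)
print(len(padded))                # 16
print(unpad(padded))              # hello (assuming unpad above is in scope)
```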
maximecb/gym-minigrid
|
6116191b15aec9e09e4b48edd16f144e31b412fa
|
gym_minigrid/roomgrid.py
|
python
|
RoomGrid.add_door
|
(self, i, j, door_idx=None, color=None, locked=None)
|
return door, pos
|
Add a door to a room, connecting it to a neighbor
|
Add a door to a room, connecting it to a neighbor
|
[
"Add",
"a",
"door",
"to",
"a",
"room",
"connecting",
"it",
"to",
"a",
"neighbor"
] |
def add_door(self, i, j, door_idx=None, color=None, locked=None):
"""
Add a door to a room, connecting it to a neighbor
"""
room = self.get_room(i, j)
if door_idx == None:
# Need to make sure that there is a neighbor along this wall
# and that there is not already a door
while True:
door_idx = self._rand_int(0, 4)
if room.neighbors[door_idx] and room.doors[door_idx] is None:
break
if color == None:
color = self._rand_color()
if locked is None:
locked = self._rand_bool()
assert room.doors[door_idx] is None, "door already exists"
room.locked = locked
door = Door(color, is_locked=locked)
pos = room.door_pos[door_idx]
self.grid.set(*pos, door)
door.cur_pos = pos
neighbor = room.neighbors[door_idx]
room.doors[door_idx] = door
neighbor.doors[(door_idx+2) % 4] = door
return door, pos
|
[
"def",
"add_door",
"(",
"self",
",",
"i",
",",
"j",
",",
"door_idx",
"=",
"None",
",",
"color",
"=",
"None",
",",
"locked",
"=",
"None",
")",
":",
"room",
"=",
"self",
".",
"get_room",
"(",
"i",
",",
"j",
")",
"if",
"door_idx",
"==",
"None",
":",
"# Need to make sure that there is a neighbor along this wall",
"# and that there is not already a door",
"while",
"True",
":",
"door_idx",
"=",
"self",
".",
"_rand_int",
"(",
"0",
",",
"4",
")",
"if",
"room",
".",
"neighbors",
"[",
"door_idx",
"]",
"and",
"room",
".",
"doors",
"[",
"door_idx",
"]",
"is",
"None",
":",
"break",
"if",
"color",
"==",
"None",
":",
"color",
"=",
"self",
".",
"_rand_color",
"(",
")",
"if",
"locked",
"is",
"None",
":",
"locked",
"=",
"self",
".",
"_rand_bool",
"(",
")",
"assert",
"room",
".",
"doors",
"[",
"door_idx",
"]",
"is",
"None",
",",
"\"door already exists\"",
"room",
".",
"locked",
"=",
"locked",
"door",
"=",
"Door",
"(",
"color",
",",
"is_locked",
"=",
"locked",
")",
"pos",
"=",
"room",
".",
"door_pos",
"[",
"door_idx",
"]",
"self",
".",
"grid",
".",
"set",
"(",
"*",
"pos",
",",
"door",
")",
"door",
".",
"cur_pos",
"=",
"pos",
"neighbor",
"=",
"room",
".",
"neighbors",
"[",
"door_idx",
"]",
"room",
".",
"doors",
"[",
"door_idx",
"]",
"=",
"door",
"neighbor",
".",
"doors",
"[",
"(",
"door_idx",
"+",
"2",
")",
"%",
"4",
"]",
"=",
"door",
"return",
"door",
",",
"pos"
] |
https://github.com/maximecb/gym-minigrid/blob/6116191b15aec9e09e4b48edd16f144e31b412fa/gym_minigrid/roomgrid.py#L214-L248
|
|
mediacloud/backend
|
d36b489e4fbe6e44950916a04d9543a1d6cd5df0
|
apps/common/src/python/mediawords/util/config/common.py
|
python
|
CommonConfig.amazon_s3_downloads
|
()
|
return AmazonS3DownloadsConfig()
|
Amazon S3 raw download storage configuration.
|
Amazon S3 raw download storage configuration.
|
[
"Amazon",
"S3",
"raw",
"download",
"storage",
"configuration",
"."
] |
def amazon_s3_downloads() -> AmazonS3DownloadsConfig:
"""Amazon S3 raw download storage configuration."""
return AmazonS3DownloadsConfig()
|
[
"def",
"amazon_s3_downloads",
"(",
")",
"->",
"AmazonS3DownloadsConfig",
":",
"return",
"AmazonS3DownloadsConfig",
"(",
")"
] |
https://github.com/mediacloud/backend/blob/d36b489e4fbe6e44950916a04d9543a1d6cd5df0/apps/common/src/python/mediawords/util/config/common.py#L484-L486
|
|
pantsbuild/pex
|
473c6ac732ed4bc338b4b20a9ec930d1d722c9b4
|
pex/vendor/_vendored/pip/pip/_vendor/pkg_resources/__init__.py
|
python
|
Requirement.parse
|
(s)
|
return req
|
[] |
def parse(s):
req, = parse_requirements(s)
return req
|
[
"def",
"parse",
"(",
"s",
")",
":",
"req",
",",
"=",
"parse_requirements",
"(",
"s",
")",
"return",
"req"
] |
https://github.com/pantsbuild/pex/blob/473c6ac732ed4bc338b4b20a9ec930d1d722c9b4/pex/vendor/_vendored/pip/pip/_vendor/pkg_resources/__init__.py#L3147-L3149
|
|||
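`parse_requirements` yields `Requirement` objects, and the one-element tuple unpacking enforces exactly one requirement per string. Typical usage of the public pkg_resources API:

```python
from pkg_resources import Requirement

req = Requirement.parse('requests>=2.0,<3')
print(req.project_name)   # requests
print(req.specs)          # [('>=', '2.0'), ('<', '3')] (ordering may vary)
```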
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/zha/api.py
|
python
|
websocket_unbind_devices
|
(hass, connection, msg)
|
Remove a direct binding between devices.
|
Remove a direct binding between devices.
|
[
"Remove",
"a",
"direct",
"binding",
"between",
"devices",
"."
] |
async def websocket_unbind_devices(hass, connection, msg):
"""Remove a direct binding between devices."""
zha_gateway = hass.data[DATA_ZHA][DATA_ZHA_GATEWAY]
source_ieee = msg[ATTR_SOURCE_IEEE]
target_ieee = msg[ATTR_TARGET_IEEE]
await async_binding_operation(
zha_gateway, source_ieee, target_ieee, zdo_types.ZDOCmd.Unbind_req
)
_LOGGER.info(
"Devices un-bound: %s: [%s] %s: [%s]",
ATTR_SOURCE_IEEE,
source_ieee,
ATTR_TARGET_IEEE,
target_ieee,
)
|
[
"async",
"def",
"websocket_unbind_devices",
"(",
"hass",
",",
"connection",
",",
"msg",
")",
":",
"zha_gateway",
"=",
"hass",
".",
"data",
"[",
"DATA_ZHA",
"]",
"[",
"DATA_ZHA_GATEWAY",
"]",
"source_ieee",
"=",
"msg",
"[",
"ATTR_SOURCE_IEEE",
"]",
"target_ieee",
"=",
"msg",
"[",
"ATTR_TARGET_IEEE",
"]",
"await",
"async_binding_operation",
"(",
"zha_gateway",
",",
"source_ieee",
",",
"target_ieee",
",",
"zdo_types",
".",
"ZDOCmd",
".",
"Unbind_req",
")",
"_LOGGER",
".",
"info",
"(",
"\"Devices un-bound: %s: [%s] %s: [%s]\"",
",",
"ATTR_SOURCE_IEEE",
",",
"source_ieee",
",",
"ATTR_TARGET_IEEE",
",",
"target_ieee",
",",
")"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/zha/api.py#L765-L779
|
||
natewong1313/bird-bot
|
0a76dca2157c021c6cd5734928b1ffcf46a2b3b2
|
webhook.py
|
python
|
DiscordEmbed.set_color
|
(self, color)
|
[] |
def set_color(self, color):
self.color = color
|
[
"def",
"set_color",
"(",
"self",
",",
"color",
")",
":",
"self",
".",
"color",
"=",
"color"
] |
https://github.com/natewong1313/bird-bot/blob/0a76dca2157c021c6cd5734928b1ffcf46a2b3b2/webhook.py#L79-L80
|
||||
obspy/obspy
|
0ee5a0d2db293c8d5d4c3b1f148a6c5a85fea55f
|
obspy/signal/invsim.py
|
python
|
paz_to_freq_resp
|
(poles, zeros, scale_fac, t_samp, nfft, freq=False)
|
return h
|
Convert Poles and Zeros (PAZ) to frequency response.
The output contains the frequency zero which is the offset of the trace.
:type poles: list of complex
:param poles: The poles of the transfer function
:type zeros: list of complex
:param zeros: The zeros of the transfer function
:type scale_fac: float
:param scale_fac: Gain factor
:type t_samp: float
:param t_samp: Sampling interval in seconds
:type nfft: int
:param nfft: Number of FFT points of signal which needs correction
:rtype: :class:`numpy.ndarray` complex128
:return: Frequency response of PAZ of length nfft
|
Convert Poles and Zeros (PAZ) to frequency response.
|
[
"Convert",
"Poles",
"and",
"Zeros",
"(",
"PAZ",
")",
"to",
"frequency",
"response",
"."
] |
def paz_to_freq_resp(poles, zeros, scale_fac, t_samp, nfft, freq=False):
"""
Convert Poles and Zeros (PAZ) to frequency response.
The output contains the frequency zero which is the offset of the trace.
:type poles: list of complex
:param poles: The poles of the transfer function
:type zeros: list of complex
:param zeros: The zeros of the transfer function
:type scale_fac: float
:param scale_fac: Gain factor
:type t_samp: float
:param t_samp: Sampling interval in seconds
:type nfft: int
:param nfft: Number of FFT points of signal which needs correction
:rtype: :class:`numpy.ndarray` complex128
:return: Frequency response of PAZ of length nfft
"""
n = nfft // 2
b, a = scipy.signal.ltisys.zpk2tf(zeros, poles, scale_fac)
# a has to be a list for the scipy.signal.freqs() call later but zpk2tf()
# strangely returns it as an integer.
if not isinstance(a, np.ndarray) and a == 1.0:
a = [1.0]
fy = 1 / (t_samp * 2.0)
# start at zero to get zero for offset / DC of fft
f = np.linspace(0, fy, n + 1)
_w, h = scipy.signal.freqs(b, a, f * 2 * np.pi)
if freq:
return h, f
return h
|
[
"def",
"paz_to_freq_resp",
"(",
"poles",
",",
"zeros",
",",
"scale_fac",
",",
"t_samp",
",",
"nfft",
",",
"freq",
"=",
"False",
")",
":",
"n",
"=",
"nfft",
"//",
"2",
"b",
",",
"a",
"=",
"scipy",
".",
"signal",
".",
"ltisys",
".",
"zpk2tf",
"(",
"zeros",
",",
"poles",
",",
"scale_fac",
")",
"# a has to be a list for the scipy.signal.freqs() call later but zpk2tf()",
"# strangely returns it as an integer.",
"if",
"not",
"isinstance",
"(",
"a",
",",
"np",
".",
"ndarray",
")",
"and",
"a",
"==",
"1.0",
":",
"a",
"=",
"[",
"1.0",
"]",
"fy",
"=",
"1",
"/",
"(",
"t_samp",
"*",
"2.0",
")",
"# start at zero to get zero for offset / DC of fft",
"f",
"=",
"np",
".",
"linspace",
"(",
"0",
",",
"fy",
",",
"n",
"+",
"1",
")",
"_w",
",",
"h",
"=",
"scipy",
".",
"signal",
".",
"freqs",
"(",
"b",
",",
"a",
",",
"f",
"*",
"2",
"*",
"np",
".",
"pi",
")",
"if",
"freq",
":",
"return",
"h",
",",
"f",
"return",
"h"
] |
https://github.com/obspy/obspy/blob/0ee5a0d2db293c8d5d4c3b1f148a6c5a85fea55f/obspy/signal/invsim.py#L354-L385
|
|
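A usage sketch with a single real pole, assuming the function above is in scope (it relies on the older `scipy.signal.ltisys.zpk2tf` path). With `nfft=8` the response covers DC through Nyquist in `nfft // 2 + 1` points:

```python
h = paz_to_freq_resp(poles=[-1 + 0j], zeros=[], scale_fac=1.0,
                     t_samp=0.01, nfft=8)
print(h.shape)       # (5,) -- frequencies 0 .. 50 Hz (Nyquist for 100 Hz sampling)
print(abs(h[0]))     # 1.0 at DC for this normalized one-pole system
```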
zhaoolee/StarsAndClown
|
b2d4039cad2f9232b691e5976f787b49a0a2c113
|
node_modules/npmi/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py
|
python
|
MacTool._LoadProvisioningProfile
|
(self, profile_path)
|
Extracts the plist embedded in a provisioning profile.
Args:
profile_path: string, path to the .mobileprovision file
Returns:
Content of the plist embedded in the provisioning profile as a dictionary.
|
Extracts the plist embedded in a provisioning profile.
|
[
"Extracts",
"the",
"plist",
"embedded",
"in",
"a",
"provisioning",
"profile",
"."
] |
def _LoadProvisioningProfile(self, profile_path):
"""Extracts the plist embedded in a provisioning profile.
Args:
profile_path: string, path to the .mobileprovision file
Returns:
Content of the plist embedded in the provisioning profile as a dictionary.
"""
with tempfile.NamedTemporaryFile() as temp:
subprocess.check_call([
'security', 'cms', '-D', '-i', profile_path, '-o', temp.name])
return self._LoadPlistMaybeBinary(temp.name)
|
[
"def",
"_LoadProvisioningProfile",
"(",
"self",
",",
"profile_path",
")",
":",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
")",
"as",
"temp",
":",
"subprocess",
".",
"check_call",
"(",
"[",
"'security'",
",",
"'cms'",
",",
"'-D'",
",",
"'-i'",
",",
"profile_path",
",",
"'-o'",
",",
"temp",
".",
"name",
"]",
")",
"return",
"self",
".",
"_LoadPlistMaybeBinary",
"(",
"temp",
".",
"name",
")"
] |
https://github.com/zhaoolee/StarsAndClown/blob/b2d4039cad2f9232b691e5976f787b49a0a2c113/node_modules/npmi/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/mac_tool.py#L474-L486
|
||
openedx/edx-platform
|
68dd185a0ab45862a2a61e0f803d7e03d2be71b5
|
lms/djangoapps/teams/api.py
|
python
|
discussion_visible_by_user
|
(discussion_id, user)
|
return not is_team_discussion_private(team) or user_is_a_team_member(user, team)
|
This function checks whether the discussion should be visible to the user.
The discussion should not be visible to the user if
* The discussion is part of the Team AND
* The team is configured to hide the discussions from non-teammembers AND
* The user is not part of the team
|
This function checks whether the discussion should be visible to the user.
The discussion should not be visible to the user if
* The discussion is part of the Team AND
* The team is configured to hide the discussions from non-teammembers AND
* The user is not part of the team
|
[
"This",
"function",
"checks",
"whether",
"the",
"discussion",
"should",
"be",
"visible",
"to",
"the",
"user",
".",
"The",
"discussion",
"should",
"not",
"be",
"visible",
"to",
"the",
"user",
"if",
"*",
"The",
"discussion",
"is",
"part",
"of",
"the",
"Team",
"AND",
"*",
"The",
"team",
"is",
"configured",
"to",
"hide",
"the",
"discussions",
"from",
"non",
"-",
"teammembers",
"AND",
"*",
"The",
"user",
"is",
"not",
"part",
"of",
"the",
"team"
] |
def discussion_visible_by_user(discussion_id, user):
"""
This function checks whether the discussion should be visible to the user.
The discussion should not be visible to the user if
* The discussion is part of the Team AND
* The team is configured to hide the discussions from non-teammembers AND
* The user is not part of the team
"""
team = get_team_by_discussion(discussion_id)
return not is_team_discussion_private(team) or user_is_a_team_member(user, team)
|
[
"def",
"discussion_visible_by_user",
"(",
"discussion_id",
",",
"user",
")",
":",
"team",
"=",
"get_team_by_discussion",
"(",
"discussion_id",
")",
"return",
"not",
"is_team_discussion_private",
"(",
"team",
")",
"or",
"user_is_a_team_member",
"(",
"user",
",",
"team",
")"
] |
https://github.com/openedx/edx-platform/blob/68dd185a0ab45862a2a61e0f803d7e03d2be71b5/lms/djangoapps/teams/api.py#L125-L134
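The return expression encodes "visible unless the discussion is private and the user is not a team member"; a tiny truth-table sketch with stubbed boolean inputs (the real checks live in the helper functions above) makes the three cases explicit.

def visible(is_private, is_member):
    # Stubbed restatement of the return expression above.
    return (not is_private) or is_member

assert visible(False, False)      # public team discussion: visible to anyone
assert visible(True, True)        # private, but the user is on the team
assert not visible(True, False)   # private and not a member: hidden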
|
|
dexy/dexy
|
323c1806e51f75435e11d2265703e68f46c8aef3
|
dexy/filters/templating_plugins.py
|
python
|
Assertions.do_assert_startswith
|
(self, doc, startswith)
|
return self.decorate_response(doc)
|
Assert that the input starts with the specified value.
|
Assert that the input starts with the specified value.
|
[
"Assert",
"that",
"the",
"input",
"starts",
"with",
"the",
"specified",
"value",
"."
] |
def do_assert_startswith(self, doc, startswith):
"""
Assert that the input starts with the specified value.
"""
assert str(doc).startswith(startswith), "input text did not start with '%s'" % startswith
return self.decorate_response(doc)
|
[
"def",
"do_assert_startswith",
"(",
"self",
",",
"doc",
",",
"startswith",
")",
":",
"assert",
"str",
"(",
"doc",
")",
".",
"startswith",
"(",
"startswith",
")",
",",
"\"input text did not start with '%s'\"",
"%",
"startswith",
"return",
"self",
".",
"decorate_response",
"(",
"doc",
")"
] |
https://github.com/dexy/dexy/blob/323c1806e51f75435e11d2265703e68f46c8aef3/dexy/filters/templating_plugins.py#L301-L306
|
|
zhl2008/awd-platform
|
0416b31abea29743387b10b3914581fbe8e7da5e
|
web_flaskbb/lib/python2.7/site-packages/wtforms/i18n.py
|
python
|
DefaultTranslations.gettext
|
(self, string)
|
return self.translations.ugettext(string)
|
[] |
def gettext(self, string):
return self.translations.ugettext(string)
|
[
"def",
"gettext",
"(",
"self",
",",
"string",
")",
":",
"return",
"self",
".",
"translations",
".",
"ugettext",
"(",
"string",
")"
] |
https://github.com/zhl2008/awd-platform/blob/0416b31abea29743387b10b3914581fbe8e7da5e/web_flaskbb/lib/python2.7/site-packages/wtforms/i18n.py#L54-L55
|
|||
cloudera/hue
|
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
|
apps/impala/gen-py/TCLIService/TCLIService.py
|
python
|
Iface.CloseSession
|
(self, req)
|
Parameters:
- req
|
Parameters:
- req
|
[
"Parameters",
":",
"-",
"req"
] |
def CloseSession(self, req):
"""
Parameters:
- req
"""
pass
|
[
"def",
"CloseSession",
"(",
"self",
",",
"req",
")",
":",
"pass"
] |
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/apps/impala/gen-py/TCLIService/TCLIService.py#L30-L36
|
||
onnx/keras-onnx
|
3b6da290c21bbbbf418577f3e2c528986a2965c5
|
keras2onnx/parser.py
|
python
|
parse_graph
|
(topo, graph, target_opset, output_names, keras_node_dict)
|
return _parse_graph_core_v2(
graph, keras_node_dict, topo, top_level, output_names
) if is_tf2 and is_tf_keras else _parse_graph_core(
graph, keras_node_dict, topo, top_level, output_names)
|
Build the node-layer mapper and parse the whole TF graph of Keras Model.
|
Build the node-layer mapper and parse the whole TF graph of Keras Model.
|
[
"Build",
"the",
"node",
"-",
"layer",
"mapper",
"and",
"parse",
"the",
"whole",
"TF",
"graph",
"of",
"Keras",
"Model",
"."
] |
def parse_graph(topo, graph, target_opset, output_names, keras_node_dict):
# type: (Topology, tf.Graph, int, [], []) -> Topology
"""
Build the node-layer mapper and parse the whole TF graph of Keras Model.
"""
top_level = topo.declare_scope('__root')
dim_variable_counter = 0
# Create the onnx model input name before parsing to keep ...
# ... the model input names are identical to the original Keras model.
for idx_ in range(len(topo.raw_model.model.inputs)):
op = top_level.declare_local_operator(TYPES.Identity)
idx_key = idx_
if isinstance(topo.raw_model.model.inputs, dict):
idx_key = list(topo.raw_model.model.inputs.keys())[idx_]
input_ts = topo.raw_model.model.inputs[idx_key]
var_type = _adjust_input_batch_size(infer_variable_type(input_ts, target_opset))
dim_variable_counter = _adjust_input_output_size(var_type, dim_variable_counter)
str_value = input_ts.name
var0 = None
if hasattr(topo.raw_model.model, 'input_names'):
str_value = topo.raw_model.model.input_names[idx_]
elif input_ts.name.endswith(':0'):
str_value = input_ts.name[:-2]
else:
# if there is no difference between input tensor name and model input name,
# skip it.
var0 = top_level.get_local_variable_or_declare_one(str_value, var_type)
if not var0:
var0 = top_level.get_local_variable_or_declare_one(str_value, var_type)
var1 = top_level.get_local_variable_or_declare_one(input_ts.name, var_type)
op.add_input(var0)
op.add_output(var1)
topo.raw_model.add_input_name(str_value)
output_name_dict = {}
output_tensors = topo.raw_model.model.outputs
if output_names:
output_tensors = [graph.get_tensor_by_name(n_) for n_ in output_names]
for idx_, ts_ in enumerate(output_tensors):
op = top_level.declare_local_operator(TYPES.Identity)
var_type = _adjust_input_batch_size(infer_variable_type(ts_, target_opset))
dim_variable_counter = _adjust_input_output_size(var_type, dim_variable_counter)
str_value = ts_.name
use_ts_name = False
if hasattr(topo.raw_model.model, 'output_names'):
str_value = topo.raw_model.model.output_names[idx_]
elif ts_.name.endswith(':0'):
str_value = tsname_to_node(ts_.name)
else:
# if there is no difference between output tensor name and model output name
# skip it.
use_ts_name = True
if str_value in output_name_dict:
cur_count = output_name_dict[str_value]
output_name_dict[str_value] = cur_count + 1
str_value = str_value + ':' + str(cur_count)
else:
output_name_dict[str_value] = 1
if not use_ts_name:
var0 = top_level.get_local_variable_or_declare_one(str_value, var_type)
var1 = top_level.get_local_variable_or_declare_one(ts_.name, var_type)
op.add_input(var1)
op.add_output(var0)
topo.raw_model.add_output_name(str_value)
return _parse_graph_core_v2(
graph, keras_node_dict, topo, top_level, output_names
) if is_tf2 and is_tf_keras else _parse_graph_core(
graph, keras_node_dict, topo, top_level, output_names)
|
[
"def",
"parse_graph",
"(",
"topo",
",",
"graph",
",",
"target_opset",
",",
"output_names",
",",
"keras_node_dict",
")",
":",
"# type: (Topology, tf.Graph, int, [], []) -> Topology",
"top_level",
"=",
"topo",
".",
"declare_scope",
"(",
"'__root'",
")",
"dim_variable_counter",
"=",
"0",
"# Create the onnx model input name before parsing to keep ...",
"# ... the model input names are identical to the original Keras model.",
"for",
"idx_",
"in",
"range",
"(",
"len",
"(",
"topo",
".",
"raw_model",
".",
"model",
".",
"inputs",
")",
")",
":",
"op",
"=",
"top_level",
".",
"declare_local_operator",
"(",
"TYPES",
".",
"Identity",
")",
"idx_key",
"=",
"idx_",
"if",
"isinstance",
"(",
"topo",
".",
"raw_model",
".",
"model",
".",
"inputs",
",",
"dict",
")",
":",
"idx_key",
"=",
"list",
"(",
"topo",
".",
"raw_model",
".",
"model",
".",
"inputs",
".",
"keys",
"(",
")",
")",
"[",
"idx_",
"]",
"input_ts",
"=",
"topo",
".",
"raw_model",
".",
"model",
".",
"inputs",
"[",
"idx_key",
"]",
"var_type",
"=",
"_adjust_input_batch_size",
"(",
"infer_variable_type",
"(",
"input_ts",
",",
"target_opset",
")",
")",
"dim_variable_counter",
"=",
"_adjust_input_output_size",
"(",
"var_type",
",",
"dim_variable_counter",
")",
"str_value",
"=",
"input_ts",
".",
"name",
"var0",
"=",
"None",
"if",
"hasattr",
"(",
"topo",
".",
"raw_model",
".",
"model",
",",
"'input_names'",
")",
":",
"str_value",
"=",
"topo",
".",
"raw_model",
".",
"model",
".",
"input_names",
"[",
"idx_",
"]",
"elif",
"input_ts",
".",
"name",
".",
"endswith",
"(",
"':0'",
")",
":",
"str_value",
"=",
"input_ts",
".",
"name",
"[",
":",
"-",
"2",
"]",
"else",
":",
"# if there is no difference between input tensor name and model input name,",
"# skip it.",
"var0",
"=",
"top_level",
".",
"get_local_variable_or_declare_one",
"(",
"str_value",
",",
"var_type",
")",
"if",
"not",
"var0",
":",
"var0",
"=",
"top_level",
".",
"get_local_variable_or_declare_one",
"(",
"str_value",
",",
"var_type",
")",
"var1",
"=",
"top_level",
".",
"get_local_variable_or_declare_one",
"(",
"input_ts",
".",
"name",
",",
"var_type",
")",
"op",
".",
"add_input",
"(",
"var0",
")",
"op",
".",
"add_output",
"(",
"var1",
")",
"topo",
".",
"raw_model",
".",
"add_input_name",
"(",
"str_value",
")",
"output_name_dict",
"=",
"{",
"}",
"output_tensors",
"=",
"topo",
".",
"raw_model",
".",
"model",
".",
"outputs",
"if",
"output_names",
":",
"output_tensors",
"=",
"[",
"graph",
".",
"get_tensor_by_name",
"(",
"n_",
")",
"for",
"n_",
"in",
"output_names",
"]",
"for",
"idx_",
",",
"ts_",
"in",
"enumerate",
"(",
"output_tensors",
")",
":",
"op",
"=",
"top_level",
".",
"declare_local_operator",
"(",
"TYPES",
".",
"Identity",
")",
"var_type",
"=",
"_adjust_input_batch_size",
"(",
"infer_variable_type",
"(",
"ts_",
",",
"target_opset",
")",
")",
"dim_variable_counter",
"=",
"_adjust_input_output_size",
"(",
"var_type",
",",
"dim_variable_counter",
")",
"str_value",
"=",
"ts_",
".",
"name",
"use_ts_name",
"=",
"False",
"if",
"hasattr",
"(",
"topo",
".",
"raw_model",
".",
"model",
",",
"'output_names'",
")",
":",
"str_value",
"=",
"topo",
".",
"raw_model",
".",
"model",
".",
"output_names",
"[",
"idx_",
"]",
"elif",
"ts_",
".",
"name",
".",
"endswith",
"(",
"':0'",
")",
":",
"str_value",
"=",
"tsname_to_node",
"(",
"ts_",
".",
"name",
")",
"else",
":",
"# if there is no difference between output tensor name and model output name",
"# skip it.",
"use_ts_name",
"=",
"True",
"if",
"str_value",
"in",
"output_name_dict",
":",
"cur_count",
"=",
"output_name_dict",
"[",
"str_value",
"]",
"output_name_dict",
"[",
"str_value",
"]",
"=",
"cur_count",
"+",
"1",
"str_value",
"=",
"str_value",
"+",
"':'",
"+",
"str",
"(",
"cur_count",
")",
"else",
":",
"output_name_dict",
"[",
"str_value",
"]",
"=",
"1",
"if",
"not",
"use_ts_name",
":",
"var0",
"=",
"top_level",
".",
"get_local_variable_or_declare_one",
"(",
"str_value",
",",
"var_type",
")",
"var1",
"=",
"top_level",
".",
"get_local_variable_or_declare_one",
"(",
"ts_",
".",
"name",
",",
"var_type",
")",
"op",
".",
"add_input",
"(",
"var1",
")",
"op",
".",
"add_output",
"(",
"var0",
")",
"topo",
".",
"raw_model",
".",
"add_output_name",
"(",
"str_value",
")",
"return",
"_parse_graph_core_v2",
"(",
"graph",
",",
"keras_node_dict",
",",
"topo",
",",
"top_level",
",",
"output_names",
")",
"if",
"is_tf2",
"and",
"is_tf_keras",
"else",
"_parse_graph_core",
"(",
"graph",
",",
"keras_node_dict",
",",
"topo",
",",
"top_level",
",",
"output_names",
")"
] |
https://github.com/onnx/keras-onnx/blob/3b6da290c21bbbbf418577f3e2c528986a2965c5/keras2onnx/parser.py#L836-L908
|
|
fabioz/PyDev.Debugger
|
0f8c02a010fe5690405da1dd30ed72326191ce63
|
pydevd.py
|
python
|
send_json_message
|
(msg)
|
return True
|
API to send some custom json message.
:param dict|pydevd_schema.BaseSchema msg:
The custom message to be sent.
:return bool:
True if the message was added to the queue to be sent and False otherwise.
|
API to send some custom json message.
|
[
"API",
"to",
"send",
"some",
"custom",
"json",
"message",
"."
] |
def send_json_message(msg):
'''
API to send some custom json message.
:param dict|pydevd_schema.BaseSchema msg:
The custom message to be sent.
:return bool:
True if the message was added to the queue to be sent and False otherwise.
'''
py_db = get_global_debugger()
if py_db is None:
return False
writer = py_db.writer
if writer is None:
return False
cmd = NetCommand(-1, 0, msg, is_json=True)
writer.add_command(cmd)
return True
|
[
"def",
"send_json_message",
"(",
"msg",
")",
":",
"py_db",
"=",
"get_global_debugger",
"(",
")",
"if",
"py_db",
"is",
"None",
":",
"return",
"False",
"writer",
"=",
"py_db",
".",
"writer",
"if",
"writer",
"is",
"None",
":",
"return",
"False",
"cmd",
"=",
"NetCommand",
"(",
"-",
"1",
",",
"0",
",",
"msg",
",",
"is_json",
"=",
"True",
")",
"writer",
".",
"add_command",
"(",
"cmd",
")",
"return",
"True"
] |
https://github.com/fabioz/PyDev.Debugger/blob/0f8c02a010fe5690405da1dd30ed72326191ce63/pydevd.py#L2539-L2559
|
|
WerWolv/EdiZon_CheatsConfigsAndScripts
|
d16d36c7509c01dca770f402babd83ff2e9ae6e7
|
Scripts/lib/python3.5/_pydecimal.py
|
python
|
Context.canonical
|
(self, a)
|
return a.canonical()
|
Returns the same Decimal object.
As we do not have different encodings for the same number, the
received object already is in its canonical form.
>>> ExtendedContext.canonical(Decimal('2.50'))
Decimal('2.50')
|
Returns the same Decimal object.
|
[
"Returns",
"the",
"same",
"Decimal",
"object",
"."
] |
def canonical(self, a):
"""Returns the same Decimal object.
As we do not have different encodings for the same number, the
received object already is in its canonical form.
>>> ExtendedContext.canonical(Decimal('2.50'))
Decimal('2.50')
"""
if not isinstance(a, Decimal):
raise TypeError("canonical requires a Decimal as an argument.")
return a.canonical()
|
[
"def",
"canonical",
"(",
"self",
",",
"a",
")",
":",
"if",
"not",
"isinstance",
"(",
"a",
",",
"Decimal",
")",
":",
"raise",
"TypeError",
"(",
"\"canonical requires a Decimal as an argument.\"",
")",
"return",
"a",
".",
"canonical",
"(",
")"
] |
https://github.com/WerWolv/EdiZon_CheatsConfigsAndScripts/blob/d16d36c7509c01dca770f402babd83ff2e9ae6e7/Scripts/lib/python3.5/_pydecimal.py#L4161-L4172
|
|
evhub/coconut
|
27a4af9dc06667870f736f20c862930001b8cbb2
|
coconut/compiler/header.py
|
python
|
one_num_ver
|
(target)
|
return target[:1]
|
Return the first number of the target version, if it has one.
|
Return the first number of the target version, if it has one.
|
[
"Return",
"the",
"first",
"number",
"of",
"the",
"target",
"version",
"if",
"it",
"has",
"one",
"."
] |
def one_num_ver(target):
"""Return the first number of the target version, if it has one."""
return target[:1]
|
[
"def",
"one_num_ver",
"(",
"target",
")",
":",
"return",
"target",
"[",
":",
"1",
"]"
] |
https://github.com/evhub/coconut/blob/27a4af9dc06667870f736f20c862930001b8cbb2/coconut/compiler/header.py#L93-L95
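The slice works uniformly on strings and tuples, and an empty target yields an empty slice rather than raising; a quick illustrative check:

def one_num_ver(target):
    """Return the first number of the target version, if it has one."""
    return target[:1]

assert one_num_ver("37") == "3"      # string targets
assert one_num_ver((3, 7)) == (3,)   # tuple targets
assert one_num_ver("") == ""         # no number: empty result, no IndexError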
|
|
RenYurui/StructureFlow
|
1ac8f559475452e6b674699671c6b34f000d9ebd
|
src/structure_flow.py
|
python
|
StructureFlow.write_loss
|
(self, logs, train_writer)
|
[] |
def write_loss(self, logs, train_writer):
iteration = [x[1] for x in logs if x[0]=='iter']
for x in logs:
if x[0].startswith('l_'):
train_writer.add_scalar(x[0], x[1], iteration[-1])
|
[
"def",
"write_loss",
"(",
"self",
",",
"logs",
",",
"train_writer",
")",
":",
"iteration",
"=",
"[",
"x",
"[",
"1",
"]",
"for",
"x",
"in",
"logs",
"if",
"x",
"[",
"0",
"]",
"==",
"'iter'",
"]",
"for",
"x",
"in",
"logs",
":",
"if",
"x",
"[",
"0",
"]",
".",
"startswith",
"(",
"'l_'",
")",
":",
"train_writer",
".",
"add_scalar",
"(",
"x",
"[",
"0",
"]",
",",
"x",
"[",
"1",
"]",
",",
"iteration",
"[",
"-",
"1",
"]",
")"
] |
https://github.com/RenYurui/StructureFlow/blob/1ac8f559475452e6b674699671c6b34f000d9ebd/src/structure_flow.py#L255-L259
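The comprehension implies that logs is a list of (name, value) pairs containing an ('iter', step) entry plus loss entries prefixed l_; a hedged usage sketch with torch's TensorBoard writer (the log names and values below are made up):

from torch.utils.tensorboard import SummaryWriter

writer = SummaryWriter(log_dir='runs/demo')
logs = [('iter', 100), ('l_rec', 0.42), ('l_adv', 0.10)]
iteration = [x[1] for x in logs if x[0] == 'iter']
for x in logs:
    if x[0].startswith('l_'):
        writer.add_scalar(x[0], x[1], iteration[-1])
writer.close()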
|
||||
zigpy/zigpy
|
db10b078874d93ad1c546ec810706c2e5dc33d7f
|
zigpy/util.py
|
python
|
ListenableMixin.add_listener
|
(self, listener)
|
return self._add_listener(listener, include_context=False)
|
[] |
def add_listener(self, listener):
return self._add_listener(listener, include_context=False)
|
[
"def",
"add_listener",
"(",
"self",
",",
"listener",
")",
":",
"return",
"self",
".",
"_add_listener",
"(",
"listener",
",",
"include_context",
"=",
"False",
")"
] |
https://github.com/zigpy/zigpy/blob/db10b078874d93ad1c546ec810706c2e5dc33d7f/zigpy/util.py#L29-L30
|
|||
aws-samples/ecs-blue-green-deployment
|
f319ca8a1e5c90ad48beaa67c4f6ea6fa51f2efb
|
scripts/deployer.py
|
python
|
handler
|
()
|
Main handler as an entry point of code. Handler controls the sequence of methods to call. No inputs required.
As this runs in AWS CodeBuild, the script gets all the values from the environment variables in codebuild.
1. Retrieve artifact (build.json) from the previous stage (CodeBuild phase, which builds application container images)
2. Check if the load balancer exists. Name of the ELB is fed through environment variable by the pipeline.
3. Get tag key value of the target group, running on port 8080 and 80 with KeyName as "Identifier"
4. Get Sha of the image id running on target group at port 8080 and 80
5. Edit the build.json retrieved from step-1 and append the values retrieved in step3 and step4
6. Save the modified build.json. This file is the output from codebuild project and fed as an input to the CloudFormation
execution stage.
Args: None
Raises:
Exception: Any exception thrown by handler
|
Main handler as an entry point of code. Handler controls the sequence of methods to call. No inputs required.
As this runs in AWS CodeBuild, the script gets all the values from the environment variables in codebuild.
1. Retrieve artifact (build.json) from the previous stage (CodeBuild phase, which builds application container images)
2. Check if the load balancer exists. Name of the ELB is fed through environment variable by the pipeline.
3. Get tag key value of the target group, running on port 8080 and 80 with KeyName as "Identifier"
4. Get Sha of the image id running on target group at port 8080 and 80
5. Edit the build.json retrieved from step-1 and append the values retrieved in step3 and step4
6. Save the modified build.json. This file is the output from codebuild project and fed as an input to the CloudFormation
execution stage.
|
[
"Main",
"handler",
"as",
"an",
"entry",
"point",
"of",
"code",
".",
"Handler",
"controls",
"the",
"sequence",
"of",
"methods",
"to",
"call",
".",
"No",
"inputs",
"required",
".",
"As",
"this",
"runs",
"in",
"AWS",
"CodeBuild",
"the",
"script",
"gets",
"all",
"the",
"values",
"from",
"the",
"environment",
"variables",
"in",
"codebuild",
".",
"1",
".",
"Retrieve",
"artifact",
"(",
"build",
".",
"json",
")",
"from",
"the",
"previous",
"stage",
"(",
"CodeBuild",
"phase",
"which",
"builds",
"application",
"container",
"images",
")",
"2",
".",
"Check",
"if",
"the",
"load",
"balancer",
"exists",
".",
"Name",
"of",
"the",
"ELB",
"is",
"fed",
"through",
"environment",
"variable",
"by",
"the",
"pipeline",
".",
"3",
".",
"Get",
"tag",
"key",
"value",
"of",
"the",
"target",
"group",
"running",
"on",
"port",
"8080",
"and",
"80",
"with",
"KeyName",
"as",
"Identifier",
"4",
".",
"Get",
"Sha",
"of",
"the",
"image",
"id",
"running",
"on",
"target",
"group",
"at",
"port",
"8080",
"and",
"80",
"5",
".",
"Edit",
"the",
"build",
".",
"json",
"retrieved",
"from",
"step",
"-",
"1",
"and",
"append",
"the",
"values",
"retrieved",
"in",
"step3",
"and",
"step4",
"6",
".",
"Save",
"the",
"modified",
"build",
".",
"json",
".",
"This",
"file",
"is",
"the",
"output",
"from",
"codebuild",
"project",
"and",
"fed",
"as",
"an",
"input",
"to",
"the",
"CloudFormation",
"execution",
"stage",
"."
] |
def handler():
""" Main handler as an entry point of code. Handler controls the sequence of methods to call.No inputs required.
As this runs in AWS CodeBuild, the script gets all the values from the environment variables in codebuild.
1. Retrieve artifact (build.json) from the previous stage (CodeBuild phase, which builds application container images)
2. Check if the load balancer exists. Name of the ELB is fed through environment variable by the pipeline.
3. Get tag key value of the target group, running on port 8080 and 80 with KeyName as "Identifier"
4. Get Sha of the image id running on target group at port 8080 and 80
5. Edit the build.json retrieved from step-1 and append the values retrieved in step3 and step4
6. Save the modified build.json. This file is the output from codebuild project and fed as an input to the CloudFormation
execution stage.
Args: None
Raises:
Exception: Any exception thrown by handler
"""
print(elb_name)
build_id = get_build_artifact_id(get_build_execution_id())
if check_elb_exists():
beta_identifier, beta_sha, live_identifier, live_sha = find_beta_targetgroup()
cf_inputs = { beta_identifier:str(build_id),live_identifier:live_sha }
else:
cf_inputs = {"Code1": str(build_id), "Code2": str(build_id)}
with open('cf_inputs.json', 'w+') as outfile:
json.dump(cf_inputs, outfile)
|
[
"def",
"handler",
"(",
")",
":",
"print",
"(",
"elb_name",
")",
"build_id",
"=",
"get_build_artifact_id",
"(",
"get_build_execution_id",
"(",
")",
")",
"if",
"check_elb_exists",
"(",
")",
":",
"beta_identifier",
",",
"beta_sha",
",",
"live_identifier",
",",
"live_sha",
"=",
"find_beta_targetgroup",
"(",
")",
"cf_inputs",
"=",
"{",
"beta_identifier",
":",
"str",
"(",
"build_id",
")",
",",
"live_identifier",
":",
"live_sha",
"}",
"else",
":",
"cf_inputs",
"=",
"{",
"\"Code1\"",
":",
"str",
"(",
"build_id",
")",
",",
"\"Code2\"",
":",
"str",
"(",
"build_id",
")",
"}",
"with",
"open",
"(",
"'cf_inputs.json'",
",",
"'w+'",
")",
"as",
"outfile",
":",
"json",
".",
"dump",
"(",
"cf_inputs",
",",
"outfile",
")"
] |
https://github.com/aws-samples/ecs-blue-green-deployment/blob/f319ca8a1e5c90ad48beaa67c4f6ea6fa51f2efb/scripts/deployer.py#L19-L44
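The only artifact the handler leaves behind is cf_inputs.json; a sketch of its two possible shapes, with placeholder image SHAs (on the blue/green path the real keys come from the target groups' "Identifier" tags, so the names below are assumptions):

import json

# First deployment (no ELB yet): both sides point at the new build.
cf_inputs = {"Code1": "sha256:0123abcd", "Code2": "sha256:0123abcd"}
# Blue/green path: the beta identifier gets the new build while the live
# identifier keeps the SHA currently serving traffic, e.g.
# cf_inputs = {"IdentifierBeta": "sha256:0123abcd", "IdentifierLive": "sha256:89ef4567"}
with open('cf_inputs.json', 'w+') as outfile:
    json.dump(cf_inputs, outfile)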
|
||
grow/grow
|
97fc21730b6a674d5d33948d94968e79447ce433
|
grow/conversion/content_locale_split.py
|
python
|
PlainTextYamlLoader.construct_plaintext
|
(self, node)
|
return PlainText(node.tag, node.value)
|
[] |
def construct_plaintext(self, node):
return PlainText(node.tag, node.value)
|
[
"def",
"construct_plaintext",
"(",
"self",
",",
"node",
")",
":",
"return",
"PlainText",
"(",
"node",
".",
"tag",
",",
"node",
".",
"value",
")"
] |
https://github.com/grow/grow/blob/97fc21730b6a674d5d33948d94968e79447ce433/grow/conversion/content_locale_split.py#L72-L73
|
|||
rowliny/DiffHelper
|
ab3a96f58f9579d0023aed9ebd785f4edf26f8af
|
Tool/SitePackages/nltk/classify/api.py
|
python
|
ClassifierI.classify
|
(self, featureset)
|
:return: the most appropriate label for the given featureset.
:rtype: label
|
:return: the most appropriate label for the given featureset.
:rtype: label
|
[
":",
"return",
":",
"the",
"most",
"appropriate",
"label",
"for",
"the",
"given",
"featureset",
".",
":",
"rtype",
":",
"label"
] |
def classify(self, featureset):
"""
:return: the most appropriate label for the given featureset.
:rtype: label
"""
if overridden(self.classify_many):
return self.classify_many([featureset])[0]
else:
raise NotImplementedError()
|
[
"def",
"classify",
"(",
"self",
",",
"featureset",
")",
":",
"if",
"overridden",
"(",
"self",
".",
"classify_many",
")",
":",
"return",
"self",
".",
"classify_many",
"(",
"[",
"featureset",
"]",
")",
"[",
"0",
"]",
"else",
":",
"raise",
"NotImplementedError",
"(",
")"
] |
https://github.com/rowliny/DiffHelper/blob/ab3a96f58f9579d0023aed9ebd785f4edf26f8af/Tool/SitePackages/nltk/classify/api.py#L50-L58
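The overridden() check lets a subclass implement only the batch method and still get single-item classify() for free; a minimal sketch assuming nltk is installed (MajorityClassifier is a made-up name):

from nltk.classify.api import ClassifierI

class MajorityClassifier(ClassifierI):
    # Only the batch method is overridden; the base classify() above
    # detects this and routes single items through classify_many().
    def labels(self):
        return ['yes', 'no']

    def classify_many(self, featuresets):
        return ['yes' for _ in featuresets]

clf = MajorityClassifier()
assert clf.classify({'word': True}) == 'yes'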
|
||
demisto/content
|
5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07
|
Packs/MicrosoftGraphGroups/Integrations/MicrosoftGraphGroups/MicrosoftGraphGroups.py
|
python
|
MsGraphClient.test_function
|
(self)
|
Performs basic GET request to check if the API is reachable and authentication is successful.
Returns:
ok if successful.
|
Performs basic GET request to check if the API is reachable and authentication is successful.
|
[
"Performs",
"basic",
"GET",
"request",
"to",
"check",
"if",
"the",
"API",
"is",
"reachable",
"and",
"authentication",
"is",
"successful",
"."
] |
def test_function(self):
"""Performs basic GET request to check if the API is reachable and authentication is successful.
Returns:
ok if successful.
"""
self.ms_client.http_request(method='GET', url_suffix='groups', params={'$orderby': 'displayName'})
demisto.results('ok')
|
[
"def",
"test_function",
"(",
"self",
")",
":",
"self",
".",
"ms_client",
".",
"http_request",
"(",
"method",
"=",
"'GET'",
",",
"url_suffix",
"=",
"'groups'",
",",
"params",
"=",
"{",
"'$orderby'",
":",
"'displayName'",
"}",
")",
"demisto",
".",
"results",
"(",
"'ok'",
")"
] |
https://github.com/demisto/content/blob/5c664a65b992ac8ca90ac3f11b1b2cdf11ee9b07/Packs/MicrosoftGraphGroups/Integrations/MicrosoftGraphGroups/MicrosoftGraphGroups.py#L73-L80
|
||
ray-project/ray
|
703c1610348615dcb8c2d141a0c46675084660f5
|
rllib/examples/documentation/rllib_on_ray_readme.py
|
python
|
SimpleCorridor.reset
|
(self)
|
return [self.cur_pos]
|
Resets the episode and returns the initial observation of the new one.
|
Resets the episode and returns the initial observation of the new one.
|
[
"Resets",
"the",
"episode",
"and",
"returns",
"the",
"initial",
"observation",
"of",
"the",
"new",
"one",
"."
] |
def reset(self):
"""Resets the episode and returns the initial observation of the new one.
"""
self.cur_pos = 0
# Return initial observation.
return [self.cur_pos]
|
[
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"cur_pos",
"=",
"0",
"# Return initial observation.",
"return",
"[",
"self",
".",
"cur_pos",
"]"
] |
https://github.com/ray-project/ray/blob/703c1610348615dcb8c2d141a0c46675084660f5/rllib/examples/documentation/rllib_on_ray_readme.py#L26-L31
|
|
jrzaurin/pytorch-widedeep
|
8b4c3a8acbf06b385c821d7111b1139a16b4f480
|
pytorch_widedeep/utils/fastai_transforms.py
|
python
|
Vocab.load
|
(cls, path)
|
return cls(itos)
|
Load an instance of :obj:`Vocab` contained in ``path``
|
Load an instance of :obj:`Vocab` contained in ``path``
|
[
"Load",
"an",
"intance",
"of",
":",
"obj",
":",
"Vocab",
"contained",
"in",
"path"
] |
def load(cls, path):
"""Load an intance of :obj:`Vocab` contained in ``path``"""
itos = pickle.load(open(path, "rb"))
return cls(itos)
|
[
"def",
"load",
"(",
"cls",
",",
"path",
")",
":",
"itos",
"=",
"pickle",
".",
"load",
"(",
"open",
"(",
"path",
",",
"\"rb\"",
")",
")",
"return",
"cls",
"(",
"itos",
")"
] |
https://github.com/jrzaurin/pytorch-widedeep/blob/8b4c3a8acbf06b385c821d7111b1139a16b4f480/pytorch_widedeep/utils/fastai_transforms.py#L400-L403
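Note that pickle.load(open(path, "rb")) leaves the file handle to be closed by garbage collection; a sketch of an equivalent loader using a context manager (load_vocab is a hypothetical standalone stand-in for the classmethod):

import pickle

def load_vocab(cls, path):
    # Same behavior as Vocab.load, but the file handle is closed
    # deterministically instead of waiting for garbage collection.
    with open(path, "rb") as f:
        itos = pickle.load(f)
    return cls(itos)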
|
|
criteo/biggraphite
|
1f647ada6b3f2b2f3fb4e59d326f73a2c891fc30
|
biggraphite/metadata_cache.py
|
python
|
MemoryCache.clean
|
(self)
|
Automatically cleaned by cachetools.
|
Automatically cleaned by cachetools.
|
[
"Automatically",
"cleaned",
"by",
"cachetools",
"."
] |
def clean(self):
"""Automatically cleaned by cachetools."""
with self._lock:
self.__cache.expire()
|
[
"def",
"clean",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"self",
".",
"__cache",
".",
"expire",
"(",
")"
] |
https://github.com/criteo/biggraphite/blob/1f647ada6b3f2b2f3fb4e59d326f73a2c891fc30/biggraphite/metadata_cache.py#L303-L306
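A minimal sketch of the underlying cachetools pattern: TTLCache.expire() evicts entries older than the TTL, and the lock mirrors the _lock guard above (sizes and TTL are illustrative):

import threading
from cachetools import TTLCache

cache = TTLCache(maxsize=1024, ttl=60)
lock = threading.Lock()

def clean():
    # expire() evicts entries older than the TTL; cachetools also expires
    # lazily on access, so the explicit sweep is an optimization.
    with lock:
        cache.expire()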
|
||
robinhood/faust
|
01b4c0ad8390221db71751d80001b0fd879291e2
|
faust/topics.py
|
python
|
Topic.declare
|
(self)
|
Declare/create this topic on the server.
|
Declare/create this topic on the server.
|
[
"Declare",
"/",
"create",
"this",
"topic",
"on",
"the",
"server",
"."
] |
async def declare(self) -> None:
"""Declare/create this topic on the server."""
partitions = self.partitions
if partitions is None:
partitions = self.app.conf.topic_partitions
replicas: int
if self.replicas is None:
replicas = self.app.conf.topic_replication_factor
else:
replicas = self.replicas
if self.app.conf.topic_allow_declare:
producer = await self._get_producer()
for topic in self.topics:
await producer.create_topic(
topic=topic,
partitions=partitions,
replication=replicas or 0,
config=self.config,
compacting=self.compacting,
deleting=self.deleting,
retention=self.retention,
)
|
[
"async",
"def",
"declare",
"(",
"self",
")",
"->",
"None",
":",
"partitions",
"=",
"self",
".",
"partitions",
"if",
"partitions",
"is",
"None",
":",
"partitions",
"=",
"self",
".",
"app",
".",
"conf",
".",
"topic_partitions",
"replicas",
":",
"int",
"if",
"self",
".",
"replicas",
"is",
"None",
":",
"replicas",
"=",
"self",
".",
"app",
".",
"conf",
".",
"topic_replication_factor",
"else",
":",
"replicas",
"=",
"self",
".",
"replicas",
"if",
"self",
".",
"app",
".",
"conf",
".",
"topic_allow_declare",
":",
"producer",
"=",
"await",
"self",
".",
"_get_producer",
"(",
")",
"for",
"topic",
"in",
"self",
".",
"topics",
":",
"await",
"producer",
".",
"create_topic",
"(",
"topic",
"=",
"topic",
",",
"partitions",
"=",
"partitions",
",",
"replication",
"=",
"replicas",
"or",
"0",
",",
"config",
"=",
"self",
".",
"config",
",",
"compacting",
"=",
"self",
".",
"compacting",
",",
"deleting",
"=",
"self",
".",
"deleting",
",",
"retention",
"=",
"self",
".",
"retention",
",",
")"
] |
https://github.com/robinhood/faust/blob/01b4c0ad8390221db71751d80001b0fd879291e2/faust/topics.py#L457-L478
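A hedged end-to-end sketch of declaring a topic with faust; the broker URL and topic name are placeholders, and topic_allow_declare is assumed left at its default (enabled):

import faust

app = faust.App('demo-app', broker='kafka://localhost:9092')
orders = app.topic('orders', partitions=8)

@app.task
async def setup():
    # With topic_allow_declare enabled, this creates the topic on the
    # broker if it does not already exist.
    await orders.declare()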
|
||
aws-samples/aws-kube-codesuite
|
ab4e5ce45416b83bffb947ab8d234df5437f4fca
|
src/kubernetes/client/models/v1_scale.py
|
python
|
V1Scale.spec
|
(self, spec)
|
Sets the spec of this V1Scale.
defines the behavior of the scale. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status.
:param spec: The spec of this V1Scale.
:type: V1ScaleSpec
|
Sets the spec of this V1Scale.
defines the behavior of the scale. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status.
|
[
"Sets",
"the",
"spec",
"of",
"this",
"V1Scale",
".",
"defines",
"the",
"behavior",
"of",
"the",
"scale",
".",
"More",
"info",
":",
"https",
":",
"//",
"git",
".",
"k8s",
".",
"io",
"/",
"community",
"/",
"contributors",
"/",
"devel",
"/",
"api",
"-",
"conventions",
".",
"md#spec",
"-",
"and",
"-",
"status",
"."
] |
def spec(self, spec):
"""
Sets the spec of this V1Scale.
defines the behavior of the scale. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#spec-and-status.
:param spec: The spec of this V1Scale.
:type: V1ScaleSpec
"""
self._spec = spec
|
[
"def",
"spec",
"(",
"self",
",",
"spec",
")",
":",
"self",
".",
"_spec",
"=",
"spec"
] |
https://github.com/aws-samples/aws-kube-codesuite/blob/ab4e5ce45416b83bffb947ab8d234df5437f4fca/src/kubernetes/client/models/v1_scale.py#L136-L145
|
||
makerbot/ReplicatorG
|
d6f2b07785a5a5f1e172fb87cb4303b17c575d5d
|
skein_engines/skeinforge-50/fabmetheus_utilities/geometry/solids/group.py
|
python
|
Group.addXMLInnerSection
|
(self, depth, output)
|
Add xml inner section for this object.
|
Add xml inner section for this object.
|
[
"Add",
"xml",
"inner",
"section",
"for",
"this",
"object",
"."
] |
def addXMLInnerSection(self, depth, output):
"Add xml inner section for this object."
if self.matrix4X4 != None:
self.matrix4X4.addXML(depth, output)
self.addXMLSection(depth, output)
|
[
"def",
"addXMLInnerSection",
"(",
"self",
",",
"depth",
",",
"output",
")",
":",
"if",
"self",
".",
"matrix4X4",
"!=",
"None",
":",
"self",
".",
"matrix4X4",
".",
"addXML",
"(",
"depth",
",",
"output",
")",
"self",
".",
"addXMLSection",
"(",
"depth",
",",
"output",
")"
] |
https://github.com/makerbot/ReplicatorG/blob/d6f2b07785a5a5f1e172fb87cb4303b17c575d5d/skein_engines/skeinforge-50/fabmetheus_utilities/geometry/solids/group.py#L48-L52
|
||
CodeReclaimers/neat-python
|
c2b79c88667a1798bfe33c00dd8e251ef8be41fa
|
neat/statistics.py
|
python
|
StatisticsReporter.best_genome
|
(self)
|
return self.best_genomes(1)[0]
|
Returns the most fit genome ever seen.
|
Returns the most fit genome ever seen.
|
[
"Returns",
"the",
"most",
"fit",
"genome",
"ever",
"seen",
"."
] |
def best_genome(self):
"""Returns the most fit genome ever seen."""
return self.best_genomes(1)[0]
|
[
"def",
"best_genome",
"(",
"self",
")",
":",
"return",
"self",
".",
"best_genomes",
"(",
"1",
")",
"[",
"0",
"]"
] |
https://github.com/CodeReclaimers/neat-python/blob/c2b79c88667a1798bfe33c00dd8e251ef8be41fa/neat/statistics.py#L75-L77
|
|
PIQuIL/QuCumber
|
25a8cbfaf6b8d009a6f9877770760b525c3f91a8
|
qucumber/rbm/binary_rbm.py
|
python
|
BinaryRBM.sample_v_given_h
|
(self, h, out=None)
|
return v
|
Sample/generate a visible state given a hidden state.
:param h: The hidden state.
:type h: torch.Tensor
:param out: The output tensor to write to.
:type out: torch.Tensor
:returns: The sampled visible state.
:rtype: torch.Tensor
|
Sample/generate a visible state given a hidden state.
|
[
"Sample",
"/",
"generate",
"a",
"visible",
"state",
"given",
"a",
"hidden",
"state",
"."
] |
def sample_v_given_h(self, h, out=None):
"""Sample/generate a visible state given a hidden state.
:param h: The hidden state.
:type h: torch.Tensor
:param out: The output tensor to write to.
:type out: torch.Tensor
:returns: The sampled visible state.
:rtype: torch.Tensor
"""
v = self.prob_v_given_h(h, out=out)
v = torch.bernoulli(v, out=out) # overwrite v with its sample
return v
|
[
"def",
"sample_v_given_h",
"(",
"self",
",",
"h",
",",
"out",
"=",
"None",
")",
":",
"v",
"=",
"self",
".",
"prob_v_given_h",
"(",
"h",
",",
"out",
"=",
"out",
")",
"v",
"=",
"torch",
".",
"bernoulli",
"(",
"v",
",",
"out",
"=",
"out",
")",
"# overwrite v with its sample",
"return",
"v"
] |
https://github.com/PIQuIL/QuCumber/blob/25a8cbfaf6b8d009a6f9877770760b525c3f91a8/qucumber/rbm/binary_rbm.py#L170-L183
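The sampling step is an element-wise Bernoulli draw over the conditional probabilities; a tiny torch sketch with a stand-in probability tensor:

import torch

probs = torch.sigmoid(torch.randn(4, 6))  # stand-in for prob_v_given_h(h)
v = torch.bernoulli(probs)                # one independent 0/1 draw per unit
assert v.shape == probs.shape
assert set(v.unique().tolist()) <= {0.0, 1.0}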
|
|
huggingface/transformers
|
623b4f7c63f60cce917677ee704d6c93ee960b4b
|
src/transformers/utils/dummy_pt_objects.py
|
python
|
GlueDataTrainingArguments.__init__
|
(self, *args, **kwargs)
|
[] |
def __init__(self, *args, **kwargs):
requires_backends(self, ["torch"])
|
[
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"requires_backends",
"(",
"self",
",",
"[",
"\"torch\"",
"]",
")"
] |
https://github.com/huggingface/transformers/blob/623b4f7c63f60cce917677ee704d6c93ee960b4b/src/transformers/utils/dummy_pt_objects.py#L21-L22
|
||||
deanishe/alfred-workflow
|
70d04df5bded8e501ce3bb82fa55ecc1f947f240
|
workflow/util.py
|
python
|
uninterruptible.__init__
|
(self, func, class_name='')
|
Decorate `func`.
|
Decorate `func`.
|
[
"Decorate",
"func",
"."
] |
def __init__(self, func, class_name=''):
"""Decorate `func`."""
self.func = func
functools.update_wrapper(self, func)
self._caught_signal = None
|
[
"def",
"__init__",
"(",
"self",
",",
"func",
",",
"class_name",
"=",
"''",
")",
":",
"self",
".",
"func",
"=",
"func",
"functools",
".",
"update_wrapper",
"(",
"self",
",",
"func",
")",
"self",
".",
"_caught_signal",
"=",
"None"
] |
https://github.com/deanishe/alfred-workflow/blob/70d04df5bded8e501ce3bb82fa55ecc1f947f240/workflow/util.py#L611-L615
|
||
fengju514/Face-Pose-Net
|
088bba25a17005f8944bc6292cba1857a57f1ac1
|
pose_model.py
|
python
|
ThreeD_Pose_Estimation._bottleneck_residual
|
(self, x, in_filter, out_filter, stride,
activate_before_residual=False)
|
return x
|
Bottleneck residual unit with 3 sub layers.
|
Bottleneck residual unit with 3 sub layers.
|
[
"Bottleneck",
"resisual",
"unit",
"with",
"3",
"sub",
"layers",
"."
] |
def _bottleneck_residual(self, x, in_filter, out_filter, stride,
activate_before_residual=False):
"""Bottleneck resisual unit with 3 sub layers."""
if activate_before_residual:
with tf.variable_scope('common_bn_relu'):
x = self._batch_norm('init_bn', x)
x = self._relu(x, self.hps.relu_leakiness)
orig_x = x
else:
with tf.variable_scope('residual_bn_relu'):
orig_x = x
x = self._batch_norm('init_bn', x)
x = self._relu(x, self.hps.relu_leakiness)
with tf.variable_scope('sub1'):
x = self._conv('conv1', x, 1, in_filter, out_filter/4, stride)
with tf.variable_scope('sub2'):
x = self._batch_norm('bn2', x)
x = self._relu(x, self.hps.relu_leakiness)
x = self._conv('conv2', x, 3, out_filter/4, out_filter/4, [1, 1, 1, 1])
with tf.variable_scope('sub3'):
x = self._batch_norm('bn3', x)
x = self._relu(x, self.hps.relu_leakiness)
x = self._conv('conv3', x, 1, out_filter/4, out_filter, [1, 1, 1, 1])
with tf.variable_scope('sub_add'):
if in_filter != out_filter:
orig_x = self._conv('project', orig_x, 1, in_filter, out_filter, stride)
x += orig_x
tf.logging.info('image after unit %s', x.get_shape())
return x
|
[
"def",
"_bottleneck_residual",
"(",
"self",
",",
"x",
",",
"in_filter",
",",
"out_filter",
",",
"stride",
",",
"activate_before_residual",
"=",
"False",
")",
":",
"if",
"activate_before_residual",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"'common_bn_relu'",
")",
":",
"x",
"=",
"self",
".",
"_batch_norm",
"(",
"'init_bn'",
",",
"x",
")",
"x",
"=",
"self",
".",
"_relu",
"(",
"x",
",",
"self",
".",
"hps",
".",
"relu_leakiness",
")",
"orig_x",
"=",
"x",
"else",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"'residual_bn_relu'",
")",
":",
"orig_x",
"=",
"x",
"x",
"=",
"self",
".",
"_batch_norm",
"(",
"'init_bn'",
",",
"x",
")",
"x",
"=",
"self",
".",
"_relu",
"(",
"x",
",",
"self",
".",
"hps",
".",
"relu_leakiness",
")",
"with",
"tf",
".",
"variable_scope",
"(",
"'sub1'",
")",
":",
"x",
"=",
"self",
".",
"_conv",
"(",
"'conv1'",
",",
"x",
",",
"1",
",",
"in_filter",
",",
"out_filter",
"/",
"4",
",",
"stride",
")",
"with",
"tf",
".",
"variable_scope",
"(",
"'sub2'",
")",
":",
"x",
"=",
"self",
".",
"_batch_norm",
"(",
"'bn2'",
",",
"x",
")",
"x",
"=",
"self",
".",
"_relu",
"(",
"x",
",",
"self",
".",
"hps",
".",
"relu_leakiness",
")",
"x",
"=",
"self",
".",
"_conv",
"(",
"'conv2'",
",",
"x",
",",
"3",
",",
"out_filter",
"/",
"4",
",",
"out_filter",
"/",
"4",
",",
"[",
"1",
",",
"1",
",",
"1",
",",
"1",
"]",
")",
"with",
"tf",
".",
"variable_scope",
"(",
"'sub3'",
")",
":",
"x",
"=",
"self",
".",
"_batch_norm",
"(",
"'bn3'",
",",
"x",
")",
"x",
"=",
"self",
".",
"_relu",
"(",
"x",
",",
"self",
".",
"hps",
".",
"relu_leakiness",
")",
"x",
"=",
"self",
".",
"_conv",
"(",
"'conv3'",
",",
"x",
",",
"1",
",",
"out_filter",
"/",
"4",
",",
"out_filter",
",",
"[",
"1",
",",
"1",
",",
"1",
",",
"1",
"]",
")",
"with",
"tf",
".",
"variable_scope",
"(",
"'sub_add'",
")",
":",
"if",
"in_filter",
"!=",
"out_filter",
":",
"orig_x",
"=",
"self",
".",
"_conv",
"(",
"'project'",
",",
"orig_x",
",",
"1",
",",
"in_filter",
",",
"out_filter",
",",
"stride",
")",
"x",
"+=",
"orig_x",
"tf",
".",
"logging",
".",
"info",
"(",
"'image after unit %s'",
",",
"x",
".",
"get_shape",
"(",
")",
")",
"return",
"x"
] |
https://github.com/fengju514/Face-Pose-Net/blob/088bba25a17005f8944bc6292cba1857a57f1ac1/pose_model.py#L490-L523
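A compact re-expression of the same pre-activation bottleneck in PyTorch rather than the repo's TF1 graph code; the leaky-ReLU activation and the out/4 channel split are carried over, while padding, module names, and the default slope are assumptions.

import torch
import torch.nn as nn
import torch.nn.functional as F

class Bottleneck(nn.Module):
    def __init__(self, in_ch, out_ch, stride=1, leak=0.1):
        super().__init__()
        mid = out_ch // 4
        self.leak = leak
        self.bn1 = nn.BatchNorm2d(in_ch)
        self.conv1 = nn.Conv2d(in_ch, mid, 1, stride=stride, bias=False)
        self.bn2 = nn.BatchNorm2d(mid)
        self.conv2 = nn.Conv2d(mid, mid, 3, padding=1, bias=False)
        self.bn3 = nn.BatchNorm2d(mid)
        self.conv3 = nn.Conv2d(mid, out_ch, 1, bias=False)
        self.project = (nn.Conv2d(in_ch, out_ch, 1, stride=stride, bias=False)
                        if in_ch != out_ch or stride != 1 else None)

    def forward(self, x):
        out = F.leaky_relu(self.bn1(x), self.leak)
        # Project the skip path when the shape changes, as in 'sub_add' above.
        skip = self.project(out) if self.project is not None else x
        out = self.conv1(out)
        out = self.conv2(F.leaky_relu(self.bn2(out), self.leak))
        out = self.conv3(F.leaky_relu(self.bn3(out), self.leak))
        return out + skip

y = Bottleneck(16, 64, stride=2)(torch.randn(1, 16, 32, 32))  # -> (1, 64, 16, 16)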
|
|
omz/PythonistaAppTemplate
|
f560f93f8876d82a21d108977f90583df08d55af
|
PythonistaAppTemplate/PythonistaKit.framework/pylib/plistlib.py
|
python
|
Plist.fromFile
|
(cls, pathOrFile)
|
return plist
|
Deprecated. Use the readPlist() function instead.
|
Deprecated. Use the readPlist() function instead.
|
[
"Deprecated",
".",
"Use",
"the",
"readPlist",
"()",
"function",
"instead",
"."
] |
def fromFile(cls, pathOrFile):
"""Deprecated. Use the readPlist() function instead."""
rootObject = readPlist(pathOrFile)
plist = cls()
plist.update(rootObject)
return plist
|
[
"def",
"fromFile",
"(",
"cls",
",",
"pathOrFile",
")",
":",
"rootObject",
"=",
"readPlist",
"(",
"pathOrFile",
")",
"plist",
"=",
"cls",
"(",
")",
"plist",
".",
"update",
"(",
"rootObject",
")",
"return",
"plist"
] |
https://github.com/omz/PythonistaAppTemplate/blob/f560f93f8876d82a21d108977f90583df08d55af/PythonistaAppTemplate/PythonistaKit.framework/pylib/plistlib.py#L343-L348
|
|
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/mpmath/ctx_mp_python.py
|
python
|
_mpf.__pos__
|
(s)
|
return v
|
[] |
def __pos__(s):
cls, new, (prec, rounding) = s._ctxdata
v = new(cls)
v._mpf_ = mpf_pos(s._mpf_, prec, rounding)
return v
|
[
"def",
"__pos__",
"(",
"s",
")",
":",
"cls",
",",
"new",
",",
"(",
"prec",
",",
"rounding",
")",
"=",
"s",
".",
"_ctxdata",
"v",
"=",
"new",
"(",
"cls",
")",
"v",
".",
"_mpf_",
"=",
"mpf_pos",
"(",
"s",
".",
"_mpf_",
",",
"prec",
",",
"rounding",
")",
"return",
"v"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/mpmath/ctx_mp_python.py#L155-L159
|
|||
eandersson/amqpstorm
|
7f57cf1291c8b3817527c10aae317aa1702654bc
|
amqpstorm/heartbeat.py
|
python
|
Heartbeat._check_for_life_signs
|
(self)
|
return self._start_new_timer()
|
Check Connection for life signs.
First check if any data has been sent; if not, send a heartbeat
to the remote server.
If we have not received any data whatsoever within two
intervals, we need to raise an exception so that we can
close the connection.
:rtype: bool
|
Check Connection for life signs.
|
[
"Check",
"Connection",
"for",
"life",
"signs",
"."
] |
def _check_for_life_signs(self):
"""Check Connection for life signs.
First check if any data has been sent; if not, send a heartbeat
to the remote server.
If we have not received any data whatsoever within two
intervals, we need to raise an exception so that we can
close the connection.
:rtype: bool
"""
if not self._running.is_set():
return False
if self._writes_since_check == 0:
self.send_heartbeat_impl()
self._lock.acquire()
try:
if self._reads_since_check == 0:
self._threshold += 1
if self._threshold >= 2:
self._running.clear()
self._raise_or_append_exception()
return False
else:
self._threshold = 0
finally:
self._reads_since_check = 0
self._writes_since_check = 0
self._lock.release()
return self._start_new_timer()
|
[
"def",
"_check_for_life_signs",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_running",
".",
"is_set",
"(",
")",
":",
"return",
"False",
"if",
"self",
".",
"_writes_since_check",
"==",
"0",
":",
"self",
".",
"send_heartbeat_impl",
"(",
")",
"self",
".",
"_lock",
".",
"acquire",
"(",
")",
"try",
":",
"if",
"self",
".",
"_reads_since_check",
"==",
"0",
":",
"self",
".",
"_threshold",
"+=",
"1",
"if",
"self",
".",
"_threshold",
">=",
"2",
":",
"self",
".",
"_running",
".",
"clear",
"(",
")",
"self",
".",
"_raise_or_append_exception",
"(",
")",
"return",
"False",
"else",
":",
"self",
".",
"_threshold",
"=",
"0",
"finally",
":",
"self",
".",
"_reads_since_check",
"=",
"0",
"self",
".",
"_writes_since_check",
"=",
"0",
"self",
".",
"_lock",
".",
"release",
"(",
")",
"return",
"self",
".",
"_start_new_timer",
"(",
")"
] |
https://github.com/eandersson/amqpstorm/blob/7f57cf1291c8b3817527c10aae317aa1702654bc/amqpstorm/heartbeat.py#L68-L99
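Stripped of locks and timers, the liveness rule is: a silent interval bumps a counter, two consecutive silent intervals kill the connection, and any read resets the counter; a minimal model of just that rule:

class LifeSigns:
    def __init__(self):
        self.threshold = 0

    def check(self, reads_since_check):
        if reads_since_check == 0:
            self.threshold += 1
            return self.threshold < 2   # False == connection declared dead
        self.threshold = 0
        return True

probe = LifeSigns()
assert probe.check(reads_since_check=0)       # one silent interval: still alive
assert not probe.check(reads_since_check=0)   # two in a row: give up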
|
|
jcartledge/sublime-worksheet
|
44b2ba96d02759b485adbf85c1a2c9d45cc39599
|
repl/pexpect.py
|
python
|
ExceptionPexpect.get_trace
|
(self)
|
return ''.join(tblist)
|
This returns an abbreviated stack trace with lines that only concern
the caller. In other words, the stack trace inside the Pexpect module
is not included.
|
This returns an abbreviated stack trace with lines that only concern
the caller. In other words, the stack trace inside the Pexpect module
is not included.
|
[
"This",
"returns",
"an",
"abbreviated",
"stack",
"trace",
"with",
"lines",
"that",
"only",
"concern",
"the",
"caller",
".",
"In",
"other",
"words",
"the",
"stack",
"trace",
"inside",
"the",
"Pexpect",
"module",
"is",
"not",
"included",
"."
] |
def get_trace(self):
"""This returns an abbreviated stack trace with lines that only concern
the caller. In other words, the stack trace inside the Pexpect module
is not included. """
tblist = traceback.extract_tb(sys.exc_info()[2])
#tblist = filter(self.__filter_not_pexpect, tblist)
tblist = [item for item in tblist if self.__filter_not_pexpect(item)]
tblist = traceback.format_list(tblist)
return ''.join(tblist)
|
[
"def",
"get_trace",
"(",
"self",
")",
":",
"tblist",
"=",
"traceback",
".",
"extract_tb",
"(",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
")",
"#tblist = filter(self.__filter_not_pexpect, tblist)",
"tblist",
"=",
"[",
"item",
"for",
"item",
"in",
"tblist",
"if",
"self",
".",
"__filter_not_pexpect",
"(",
"item",
")",
"]",
"tblist",
"=",
"traceback",
".",
"format_list",
"(",
"tblist",
")",
"return",
"''",
".",
"join",
"(",
"tblist",
")"
] |
https://github.com/jcartledge/sublime-worksheet/blob/44b2ba96d02759b485adbf85c1a2c9d45cc39599/repl/pexpect.py#L145-L155
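The same abbreviation technique works for any module you want to hide from user-facing tracebacks; a standalone sketch that filters frames by filename (the substring to drop is a parameter here, where the original hard-codes a private predicate):

import sys
import traceback

def abbreviated_trace(drop_substring):
    # Keep only frames whose filename does not contain drop_substring,
    # mirroring the __filter_not_pexpect predicate above.
    tblist = traceback.extract_tb(sys.exc_info()[2])
    tblist = [item for item in tblist if drop_substring not in item.filename]
    return ''.join(traceback.format_list(tblist))

try:
    raise ValueError('boom')
except ValueError:
    print(abbreviated_trace('pexpect'))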
|
|
pymedusa/Medusa
|
1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38
|
ext/github/AuthenticatedUser.py
|
python
|
AuthenticatedUser.get_gists
|
(self, since=github.GithubObject.NotSet)
|
return github.PaginatedList.PaginatedList(
github.Gist.Gist, self._requester, "/gists", url_parameters
)
|
:calls: `GET /gists <http://developer.github.com/v3/gists>`_
:param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist`
|
:calls: `GET /gists <http://developer.github.com/v3/gists>`_
:param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist`
|
[
":",
"calls",
":",
"GET",
"/",
"gists",
"<http",
":",
"//",
"developer",
".",
"github",
".",
"com",
"/",
"v3",
"/",
"gists",
">",
"_",
":",
"param",
"since",
":",
"datetime",
".",
"datetime",
"format",
"YYYY",
"-",
"MM",
"-",
"DDTHH",
":",
"MM",
":",
"SSZ",
":",
"rtype",
":",
":",
"class",
":",
"github",
".",
"PaginatedList",
".",
"PaginatedList",
"of",
":",
"class",
":",
"github",
".",
"Gist",
".",
"Gist"
] |
def get_gists(self, since=github.GithubObject.NotSet):
"""
:calls: `GET /gists <http://developer.github.com/v3/gists>`_
:param since: datetime.datetime format YYYY-MM-DDTHH:MM:SSZ
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist`
"""
assert since is github.GithubObject.NotSet or isinstance(
since, datetime.datetime
), since
url_parameters = dict()
if since is not github.GithubObject.NotSet:
url_parameters["since"] = since.strftime("%Y-%m-%dT%H:%M:%SZ")
return github.PaginatedList.PaginatedList(
github.Gist.Gist, self._requester, "/gists", url_parameters
)
|
[
"def",
"get_gists",
"(",
"self",
",",
"since",
"=",
"github",
".",
"GithubObject",
".",
"NotSet",
")",
":",
"assert",
"since",
"is",
"github",
".",
"GithubObject",
".",
"NotSet",
"or",
"isinstance",
"(",
"since",
",",
"datetime",
".",
"datetime",
")",
",",
"since",
"url_parameters",
"=",
"dict",
"(",
")",
"if",
"since",
"is",
"not",
"github",
".",
"GithubObject",
".",
"NotSet",
":",
"url_parameters",
"[",
"\"since\"",
"]",
"=",
"since",
".",
"strftime",
"(",
"\"%Y-%m-%dT%H:%M:%SZ\"",
")",
"return",
"github",
".",
"PaginatedList",
".",
"PaginatedList",
"(",
"github",
".",
"Gist",
".",
"Gist",
",",
"self",
".",
"_requester",
",",
"\"/gists\"",
",",
"url_parameters",
")"
] |
https://github.com/pymedusa/Medusa/blob/1405fbb6eb8ef4d20fcca24c32ddca52b11f0f38/ext/github/AuthenticatedUser.py#L785-L799
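A hedged usage sketch with PyGithub; 'GITHUB_TOKEN' is a placeholder credential, and since must be a datetime.datetime exactly as the assertion enforces:

import datetime
from github import Github

gh = Github('GITHUB_TOKEN')
since = datetime.datetime.now() - datetime.timedelta(days=7)
for gist in gh.get_user().get_gists(since=since):
    print(gist.id, gist.description)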
|
|
FederatedAI/FATE
|
32540492623568ecd1afcb367360133616e02fa3
|
python/fate_arch/metastore/base_model.py
|
python
|
auto_date_timestamp_db_field
|
()
|
return {f"f_{f}_time" for f in AUTO_DATE_TIMESTAMP_FIELD_PREFIX}
|
[] |
def auto_date_timestamp_db_field():
return {f"f_{f}_time" for f in AUTO_DATE_TIMESTAMP_FIELD_PREFIX}
|
[
"def",
"auto_date_timestamp_db_field",
"(",
")",
":",
"return",
"{",
"f\"f_{f}_time\"",
"for",
"f",
"in",
"AUTO_DATE_TIMESTAMP_FIELD_PREFIX",
"}"
] |
https://github.com/FederatedAI/FATE/blob/32540492623568ecd1afcb367360133616e02fa3/python/fate_arch/metastore/base_model.py#L124-L125
|
|||
gkrizek/bash-lambda-layer
|
703b0ade8174022d44779d823172ab7ac33a5505
|
bin/urllib3/util/connection.py
|
python
|
_has_ipv6
|
(host)
|
return has_ipv6
|
Returns True if the system can bind an IPv6 address.
|
Returns True if the system can bind an IPv6 address.
|
[
"Returns",
"True",
"if",
"the",
"system",
"can",
"bind",
"an",
"IPv6",
"address",
"."
] |
def _has_ipv6(host):
""" Returns True if the system can bind an IPv6 address. """
sock = None
has_ipv6 = False
# App Engine doesn't support IPV6 sockets and actually has a quota on the
# number of sockets that can be used, so just early out here instead of
# creating a socket needlessly.
# See https://github.com/urllib3/urllib3/issues/1446
if _appengine_environ.is_appengine_sandbox():
return False
if socket.has_ipv6:
# has_ipv6 returns true if cPython was compiled with IPv6 support.
# It does not tell us if the system has IPv6 support enabled. To
# determine that we must bind to an IPv6 address.
# https://github.com/shazow/urllib3/pull/611
# https://bugs.python.org/issue658327
try:
sock = socket.socket(socket.AF_INET6)
sock.bind((host, 0))
has_ipv6 = True
except Exception:
pass
if sock:
sock.close()
return has_ipv6
|
[
"def",
"_has_ipv6",
"(",
"host",
")",
":",
"sock",
"=",
"None",
"has_ipv6",
"=",
"False",
"# App Engine doesn't support IPV6 sockets and actually has a quota on the",
"# number of sockets that can be used, so just early out here instead of",
"# creating a socket needlessly.",
"# See https://github.com/urllib3/urllib3/issues/1446",
"if",
"_appengine_environ",
".",
"is_appengine_sandbox",
"(",
")",
":",
"return",
"False",
"if",
"socket",
".",
"has_ipv6",
":",
"# has_ipv6 returns true if cPython was compiled with IPv6 support.",
"# It does not tell us if the system has IPv6 support enabled. To",
"# determine that we must bind to an IPv6 address.",
"# https://github.com/shazow/urllib3/pull/611",
"# https://bugs.python.org/issue658327",
"try",
":",
"sock",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET6",
")",
"sock",
".",
"bind",
"(",
"(",
"host",
",",
"0",
")",
")",
"has_ipv6",
"=",
"True",
"except",
"Exception",
":",
"pass",
"if",
"sock",
":",
"sock",
".",
"close",
"(",
")",
"return",
"has_ipv6"
] |
https://github.com/gkrizek/bash-lambda-layer/blob/703b0ade8174022d44779d823172ab7ac33a5505/bin/urllib3/util/connection.py#L104-L131
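The essence is a bind probe: socket.has_ipv6 only says CPython was compiled with IPv6 support, so the function tries to bind an actual AF_INET6 socket; a condensed sketch using a context manager instead of the manual close (the App Engine early-out is omitted):

import socket

def has_ipv6(host='::1'):
    # Bind an IPv6 socket on an ephemeral port to prove the system,
    # not just the interpreter, supports IPv6.
    if not socket.has_ipv6:
        return False
    try:
        with socket.socket(socket.AF_INET6) as sock:
            sock.bind((host, 0))
        return True
    except OSError:
        return False

print(has_ipv6())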
|
|
jonathf/matlab2cpp
|
af7ab502eb6d07b60f19ebdb836138a69d6b27e7
|
src/matlab2cpp/rules/_reserved.py
|
python
|
Get_axis
|
(node)
|
return "_plot.axis(", ", ", ")"
|
>>> print(matlab2cpp.qscript("axis(0, 3, -2, 4)"))
_plot.axis(0, 3, -2, 4) ;
_plot.show() ;
>>> print(matlab2cpp.qscript("axis([0, 3, -2, 4])"))
_plot.axis(0, 3, -2, 4) ;
_plot.show() ;
|
>>> print(matlab2cpp.qscript("axis(0, 3, -2, 4)"))
_plot.axis(0, 3, -2, 4) ;
_plot.show() ;
>>> print(matlab2cpp.qscript("axis([0, 3, -2, 4])"))
_plot.axis(0, 3, -2, 4) ;
_plot.show() ;
|
[
">>>",
"print",
"(",
"matlab2cpp",
".",
"qscript",
"(",
"axis",
"(",
"0",
"3",
"-",
"2",
"4",
")",
"))",
"_plot",
".",
"axis",
"(",
"0",
"3",
"-",
"2",
"4",
")",
";",
"_plot",
".",
"show",
"()",
";",
">>>",
"print",
"(",
"matlab2cpp",
".",
"qscript",
"(",
"axis",
"(",
"[",
"0",
"3",
"-",
"2",
"4",
"]",
")",
"))",
"_plot",
".",
"axis",
"(",
"0",
"3",
"-",
"2",
"4",
")",
";",
"_plot",
".",
"show",
"()",
";"
] |
def Get_axis(node):
"""
>>> print(matlab2cpp.qscript("axis(0, 3, -2, 4)"))
_plot.axis(0, 3, -2, 4) ;
_plot.show() ;
>>> print(matlab2cpp.qscript("axis([0, 3, -2, 4])"))
_plot.axis(0, 3, -2, 4) ;
_plot.show() ;
"""
node.plotting()
if len(node) == 1:
arg = node[0]
if arg.cls == "Matrix" and len(arg[0]) == 4:
a,b,c,d = arg[0]
return "_plot.axis(" + str(a) + ", " + str(b) + ", " + str(c) + ", " + str(d) + ")"
elif arg.cls != "Matrix" and arg.num and arg.dim>0:
name1 = arg.name + "(0)";
name2 = arg.name + "(1)"
name3 = arg.name + "(2)";
name4 = arg.name + "(3)"
if arg.mem not in (2,3):
name1 = "static_cast<double>(" + name1 + ")"
name2 = "static_cast<double>(" + name2 + ")"
name3 = "static_cast<double>(" + name3 + ")"
name4 = "static_cast<double>(" + name4 + ")"
return "_plot.axis(" + name1 + ", " + name2 + ", " + name3 + ", " + name4 + ")"
node.error("argument array type")
return "_plot.axis(", ", ", ")"
|
[
"def",
"Get_axis",
"(",
"node",
")",
":",
"node",
".",
"plotting",
"(",
")",
"if",
"len",
"(",
"node",
")",
"==",
"1",
":",
"arg",
"=",
"node",
"[",
"0",
"]",
"if",
"arg",
".",
"cls",
"==",
"\"Matrix\"",
"and",
"len",
"(",
"arg",
"[",
"0",
"]",
")",
"==",
"4",
":",
"a",
",",
"b",
",",
"c",
",",
"d",
"=",
"arg",
"[",
"0",
"]",
"return",
"\"_plot.axis(\"",
"+",
"str",
"(",
"a",
")",
"+",
"\", \"",
"+",
"str",
"(",
"b",
")",
"+",
"\", \"",
"+",
"str",
"(",
"c",
")",
"+",
"\", \"",
"+",
"str",
"(",
"d",
")",
"+",
"\")\"",
"elif",
"arg",
".",
"cls",
"!=",
"\"Matrix\"",
"and",
"arg",
".",
"num",
"and",
"arg",
".",
"dim",
">",
"0",
":",
"name1",
"=",
"arg",
".",
"name",
"+",
"\"(0)\"",
"name2",
"=",
"arg",
".",
"name",
"+",
"\"(1)\"",
"name3",
"=",
"arg",
".",
"name",
"+",
"\"(2)\"",
"name4",
"=",
"arg",
".",
"name",
"+",
"\"(3)\"",
"if",
"arg",
".",
"mem",
"not",
"in",
"(",
"2",
",",
"3",
")",
":",
"name1",
"=",
"\"static_cast<double>(\"",
"+",
"name1",
"+",
"\")\"",
"name2",
"=",
"\"static_cast<double>(\"",
"+",
"name2",
"+",
"\")\"",
"name3",
"=",
"\"static_cast<double>(\"",
"+",
"name3",
"+",
"\")\"",
"name4",
"=",
"\"static_cast<double>(\"",
"+",
"name4",
"+",
"\")\"",
"return",
"\"_plot.axis(\"",
"+",
"name1",
"+",
"\", \"",
"+",
"name2",
"+",
"\", \"",
"+",
"name3",
"+",
"\", \"",
"+",
"name4",
"+",
"\")\"",
"node",
".",
"error",
"(",
"\"argument array type\"",
")",
"return",
"\"_plot.axis(\"",
",",
"\", \"",
",",
"\")\""
] |
https://github.com/jonathf/matlab2cpp/blob/af7ab502eb6d07b60f19ebdb836138a69d6b27e7/src/matlab2cpp/rules/_reserved.py#L1381-L1415
|
|
aiogram/aiogram
|
4d2d81138681d730270819579f22b3a0001c43a5
|
aiogram/types/chat.py
|
python
|
ChatType.is_channel
|
(cls, obj)
|
return cls._check(obj, [cls.CHANNEL])
|
Check chat is channel
:param obj:
:return:
|
Check chat is channel
|
[
"Check",
"chat",
"is",
"channel"
] |
def is_channel(cls, obj) -> bool:
"""
Check chat is channel
:param obj:
:return:
"""
return cls._check(obj, [cls.CHANNEL])
|
[
"def",
"is_channel",
"(",
"cls",
",",
"obj",
")",
"->",
"bool",
":",
"return",
"cls",
".",
"_check",
"(",
"obj",
",",
"[",
"cls",
".",
"CHANNEL",
"]",
")"
] |
https://github.com/aiogram/aiogram/blob/4d2d81138681d730270819579f22b3a0001c43a5/aiogram/types/chat.py#L728-L735
|
|
sahana/eden
|
1696fa50e90ce967df69f66b571af45356cc18da
|
modules/s3cfg.py
|
python
|
S3Config.get_dvr_household_size
|
(self)
|
return self.dvr.get("household_size", False)
|
Register number of persons per household (family)
False = off
True = manual
"auto" = count family members automatically
|
Register number of persons per household (family)
|
[
"Register",
"number",
"of",
"persons",
"per",
"household",
"(",
"family",
")"
] |
def get_dvr_household_size(self):
"""
Register number of persons per household (family)
False = off
True = manual
"auto" = count family members automatically
"""
return self.dvr.get("household_size", False)
|
[
"def",
"get_dvr_household_size",
"(",
"self",
")",
":",
"return",
"self",
".",
"dvr",
".",
"get",
"(",
"\"household_size\"",
",",
"False",
")"
] |
https://github.com/sahana/eden/blob/1696fa50e90ce967df69f66b571af45356cc18da/modules/s3cfg.py#L3877-L3885
|
|
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/coinbase/sensor.py
|
python
|
ExchangeRateSensor.__init__
|
(self, coinbase_data, exchange_currency, exchange_base)
|
Initialize the sensor.
|
Initialize the sensor.
|
[
"Initialize",
"the",
"sensor",
"."
] |
def __init__(self, coinbase_data, exchange_currency, exchange_base):
"""Initialize the sensor."""
self._coinbase_data = coinbase_data
self.currency = exchange_currency
self._name = f"{exchange_currency} Exchange Rate"
self._id = f"coinbase-{coinbase_data.user_id}-xe-{exchange_currency}"
self._state = round(
1 / float(self._coinbase_data.exchange_rates[API_RATES][self.currency]), 2
)
self._unit_of_measurement = exchange_base
self._attr_state_class = SensorStateClass.MEASUREMENT
self._attr_device_info = DeviceInfo(
configuration_url="https://www.coinbase.com/settings/api",
entry_type=DeviceEntryType.SERVICE,
identifiers={(DOMAIN, self._coinbase_data.user_id)},
manufacturer="Coinbase.com",
name=f"Coinbase {self._coinbase_data.user_id[-4:]}",
)
|
[
"def",
"__init__",
"(",
"self",
",",
"coinbase_data",
",",
"exchange_currency",
",",
"exchange_base",
")",
":",
"self",
".",
"_coinbase_data",
"=",
"coinbase_data",
"self",
".",
"currency",
"=",
"exchange_currency",
"self",
".",
"_name",
"=",
"f\"{exchange_currency} Exchange Rate\"",
"self",
".",
"_id",
"=",
"f\"coinbase-{coinbase_data.user_id}-xe-{exchange_currency}\"",
"self",
".",
"_state",
"=",
"round",
"(",
"1",
"/",
"float",
"(",
"self",
".",
"_coinbase_data",
".",
"exchange_rates",
"[",
"API_RATES",
"]",
"[",
"self",
".",
"currency",
"]",
")",
",",
"2",
")",
"self",
".",
"_unit_of_measurement",
"=",
"exchange_base",
"self",
".",
"_attr_state_class",
"=",
"SensorStateClass",
".",
"MEASUREMENT",
"self",
".",
"_attr_device_info",
"=",
"DeviceInfo",
"(",
"configuration_url",
"=",
"\"https://www.coinbase.com/settings/api\"",
",",
"entry_type",
"=",
"DeviceEntryType",
".",
"SERVICE",
",",
"identifiers",
"=",
"{",
"(",
"DOMAIN",
",",
"self",
".",
"_coinbase_data",
".",
"user_id",
")",
"}",
",",
"manufacturer",
"=",
"\"Coinbase.com\"",
",",
"name",
"=",
"f\"Coinbase {self._coinbase_data.user_id[-4:]}\"",
",",
")"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/coinbase/sensor.py#L181-L198
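The only non-trivial step above is the rate inversion stored in `_state`; as plain arithmetic (the rate string below is a made-up example, not real Coinbase output):

```python
# The sensor stores base-currency-per-unit by inverting the API rate;
# the value below is a made-up example, not real API output.
rate = "0.000025"                 # units of `currency` per one base unit
print(round(1 / float(rate), 2))  # 40000.0
```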
|
||
gvnn3/conductor
|
c2aab433e75feffd0a4702e34b9e3b265fa6f30b
|
conductor/client.py
|
python
|
Client.collect
|
(self)
|
Push the collection phase to the player
|
Push the collection phase to the player
|
[
"Push",
"the",
"collection",
"phase",
"to",
"the",
"player"
] |
def collect(self):
"""Push the collection phase to the player"""
self.download(self.collect_phase)
|
[
"def",
"collect",
"(",
"self",
")",
":",
"self",
".",
"download",
"(",
"self",
".",
"collect_phase",
")"
] |
https://github.com/gvnn3/conductor/blob/c2aab433e75feffd0a4702e34b9e3b265fa6f30b/conductor/client.py#L139-L141
|
||
openshift/openshift-tools
|
1188778e728a6e4781acf728123e5b356380fe6f
|
openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/lib_openshift/library/oc_clusterrole.py
|
python
|
OCClusterRole.get
|
(self)
|
return result
|
return a clusterrole
|
return a clusterrole
|
[
"return",
"a",
"clusterrole"
] |
def get(self):
'''return a clusterrole '''
result = self._get(self.kind, self.name)
if result['returncode'] == 0:
self.clusterrole = ClusterRole(content=result['results'][0])
result['results'] = self.clusterrole.yaml_dict
elif '"{}" not found'.format(self.name) in result['stderr']:
result['returncode'] = 0
self.clusterrole = None
return result
|
[
"def",
"get",
"(",
"self",
")",
":",
"result",
"=",
"self",
".",
"_get",
"(",
"self",
".",
"kind",
",",
"self",
".",
"name",
")",
"if",
"result",
"[",
"'returncode'",
"]",
"==",
"0",
":",
"self",
".",
"clusterrole",
"=",
"ClusterRole",
"(",
"content",
"=",
"result",
"[",
"'results'",
"]",
"[",
"0",
"]",
")",
"result",
"[",
"'results'",
"]",
"=",
"self",
".",
"clusterrole",
".",
"yaml_dict",
"elif",
"'\"{}\" not found'",
".",
"format",
"(",
"self",
".",
"name",
")",
"in",
"result",
"[",
"'stderr'",
"]",
":",
"result",
"[",
"'returncode'",
"]",
"=",
"0",
"self",
".",
"clusterrole",
"=",
"None",
"return",
"result"
] |
https://github.com/openshift/openshift-tools/blob/1188778e728a6e4781acf728123e5b356380fe6f/openshift/installer/vendored/openshift-ansible-3.11.28-1/roles/lib_openshift/library/oc_clusterrole.py#L1722-L1734
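A self-contained sketch of the "absence is success" pattern above, with a faked `_get()` so it runs without the OpenShift tooling:

```python
# Self-contained sketch of OCClusterRole.get()'s error handling, with a
# faked _get() standing in for the real `oc get` call.
def fake_get(name):
    return {'returncode': 1, 'stderr': '"%s" not found' % name, 'results': []}

def get(name):
    result = fake_get(name)
    clusterrole = None
    if result['returncode'] == 0:
        clusterrole = result['results'][0]
    elif '"{}" not found'.format(name) in result['stderr']:
        # a missing clusterrole is not an error for idempotent modules
        result['returncode'] = 0
    return result, clusterrole

print(get('missing-role'))  # ({'returncode': 0, ...}, None)
```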
|
|
tandasat/scripts_for_RE
|
b2c8f5738fb5a668617a0b170bd3109fadeaac4f
|
visualize_binary.py
|
python
|
main
|
(arg_values, arg_length)
|
return
|
Main routine
|
Main routine
|
[
"Main",
"routine"
] |
def main(arg_values, arg_length):
"""Main routine"""
if arg_length != 2:
help(os.path.splitext(os.path.basename(sys.argv[0]))[0])
return
input_file_name = arg_values[1]
input_file = open(input_file_name, "rb")
input_data = bytearray(input_file.read())
if len(input_data) == 0:
print "Empty file."
return
IMAGE_WIDTH = 128
image_size = (IMAGE_WIDTH,
int(math.ceil(len(input_data) / (IMAGE_WIDTH * 1.0))))
image = Image.new("RGB", image_size, "white")
def convert_color(byte):
"""Decides a pixel color according to the rule of Stirling."""
if byte >= 0x80:
return 0x000000
elif byte >= 0x20:
return 0x0000ff
elif byte >= 0x01:
return 0xffff00
else:
return 0xffffff
def fill_image(input_data, image, image_size):
"""Puts color pixels on an image with color conversion"""
y_range = range(image_size[1])
x_range = range(IMAGE_WIDTH)
d_range = len(input_data)
pix = image.load()
index = 0
for y in y_range:
for x in x_range:
pix[x, y] = convert_color(input_data[index])
index += 1
if index >= d_range:
return
return
fill_image(input_data, image, image_size)
image.convert("P").save(input_file_name + ".png", "PNG")
return
|
[
"def",
"main",
"(",
"arg_values",
",",
"arg_length",
")",
":",
"if",
"arg_length",
"!=",
"2",
":",
"help",
"(",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"sys",
".",
"argv",
"[",
"0",
"]",
")",
")",
"[",
"0",
"]",
")",
"return",
"input_file_name",
"=",
"arg_values",
"[",
"1",
"]",
"input_file",
"=",
"open",
"(",
"input_file_name",
",",
"\"rb\"",
")",
"input_data",
"=",
"bytearray",
"(",
"input_file",
".",
"read",
"(",
")",
")",
"if",
"len",
"(",
"input_data",
")",
"==",
"0",
":",
"print",
"\"Empty file.\"",
"return",
"IMAGE_WIDTH",
"=",
"128",
"image_size",
"=",
"(",
"IMAGE_WIDTH",
",",
"int",
"(",
"math",
".",
"ceil",
"(",
"len",
"(",
"input_data",
")",
"/",
"(",
"IMAGE_WIDTH",
"*",
"1.0",
")",
")",
")",
")",
"image",
"=",
"Image",
".",
"new",
"(",
"\"RGB\"",
",",
"image_size",
",",
"\"white\"",
")",
"def",
"convert_color",
"(",
"byte",
")",
":",
"\"\"\"Decides a pixel color according to the rule of Stirling.\"\"\"",
"if",
"byte",
">=",
"0x80",
":",
"return",
"0x000000",
"elif",
"byte",
">=",
"0x20",
":",
"return",
"0x0000ff",
"elif",
"byte",
">=",
"0x01",
":",
"return",
"0xffff00",
"else",
":",
"return",
"0xffffff",
"def",
"fill_image",
"(",
"input_data",
",",
"image",
",",
"image_size",
")",
":",
"\"\"\"Puts color pixels on an image with color conversion\"\"\"",
"y_range",
"=",
"range",
"(",
"image_size",
"[",
"1",
"]",
")",
"x_range",
"=",
"range",
"(",
"IMAGE_WIDTH",
")",
"d_range",
"=",
"len",
"(",
"input_data",
")",
"pix",
"=",
"image",
".",
"load",
"(",
")",
"index",
"=",
"0",
"for",
"y",
"in",
"y_range",
":",
"for",
"x",
"in",
"x_range",
":",
"pix",
"[",
"x",
",",
"y",
"]",
"=",
"convert_color",
"(",
"input_data",
"[",
"index",
"]",
")",
"index",
"+=",
"1",
"if",
"index",
">=",
"d_range",
":",
"return",
"return",
"fill_image",
"(",
"input_data",
",",
"image",
",",
"image_size",
")",
"image",
".",
"convert",
"(",
"\"P\"",
")",
".",
"save",
"(",
"input_file_name",
"+",
"\".png\"",
",",
"\"PNG\"",
")",
"return"
] |
https://github.com/tandasat/scripts_for_RE/blob/b2c8f5738fb5a668617a0b170bd3109fadeaac4f/visualize_binary.py#L53-L105
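For reference, the byte-to-color thresholds (the "rule of Stirling") from the nested `convert_color`, restated as a self-contained Python 3 snippet (the original script is Python 2):

```python
# Byte-to-color thresholds from the script above, runnable under Python 3.
def convert_color(byte: int) -> int:
    if byte >= 0x80:        # high bytes -> black
        return 0x000000
    elif byte >= 0x20:      # printable ASCII range -> blue
        return 0x0000ff
    elif byte >= 0x01:      # low control bytes -> yellow
        return 0xffff00
    return 0xffffff         # 0x00 -> white

assert convert_color(0x41) == 0x0000ff  # 'A' is printable
assert convert_color(0x00) == 0xffffff
```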
|
|
cloudera/hue
|
23f02102d4547c17c32bd5ea0eb24e9eadd657a4
|
desktop/core/ext-py/oauth2client-4.1.3/oauth2client/client.py
|
python
|
OAuth2Credentials.__init__
|
(self, access_token, client_id, client_secret, refresh_token,
token_expiry, token_uri, user_agent, revoke_uri=None,
id_token=None, token_response=None, scopes=None,
token_info_uri=None, id_token_jwt=None)
|
Create an instance of OAuth2Credentials.
This constructor is not usually called by the user, instead
OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.
Args:
access_token: string, access token.
client_id: string, client identifier.
client_secret: string, client secret.
refresh_token: string, refresh token.
token_expiry: datetime, when the access_token expires.
token_uri: string, URI of token endpoint.
user_agent: string, The HTTP User-Agent to provide for this
application.
revoke_uri: string, URI for revoke endpoint. Defaults to None; a
token can't be revoked if this is None.
id_token: object, The identity of the resource owner.
token_response: dict, the decoded response to the token request.
None if a token hasn't been requested yet. Stored
because some providers (e.g. wordpress.com) include
extra fields that clients may want.
scopes: list, authorized scopes for these credentials.
token_info_uri: string, the URI for the token info endpoint.
Defaults to None; scopes can not be refreshed if
this is None.
id_token_jwt: string, the encoded and signed identity JWT. The
decoded version of this is stored in id_token.
Notes:
store: callable, A callable that when passed a Credential
will store the credential back to where it came from.
This is needed to store the latest access_token if it
has expired and been refreshed.
|
Create an instance of OAuth2Credentials.
|
[
"Create",
"an",
"instance",
"of",
"OAuth2Credentials",
"."
] |
def __init__(self, access_token, client_id, client_secret, refresh_token,
token_expiry, token_uri, user_agent, revoke_uri=None,
id_token=None, token_response=None, scopes=None,
token_info_uri=None, id_token_jwt=None):
"""Create an instance of OAuth2Credentials.
This constructor is not usually called by the user, instead
OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.
Args:
access_token: string, access token.
client_id: string, client identifier.
client_secret: string, client secret.
refresh_token: string, refresh token.
token_expiry: datetime, when the access_token expires.
token_uri: string, URI of token endpoint.
user_agent: string, The HTTP User-Agent to provide for this
application.
revoke_uri: string, URI for revoke endpoint. Defaults to None; a
token can't be revoked if this is None.
id_token: object, The identity of the resource owner.
token_response: dict, the decoded response to the token request.
None if a token hasn't been requested yet. Stored
because some providers (e.g. wordpress.com) include
extra fields that clients may want.
scopes: list, authorized scopes for these credentials.
token_info_uri: string, the URI for the token info endpoint.
Defaults to None; scopes can not be refreshed if
this is None.
id_token_jwt: string, the encoded and signed identity JWT. The
decoded version of this is stored in id_token.
Notes:
store: callable, A callable that when passed a Credential
will store the credential back to where it came from.
This is needed to store the latest access_token if it
has expired and been refreshed.
"""
self.access_token = access_token
self.client_id = client_id
self.client_secret = client_secret
self.refresh_token = refresh_token
self.store = None
self.token_expiry = token_expiry
self.token_uri = token_uri
self.user_agent = user_agent
self.revoke_uri = revoke_uri
self.id_token = id_token
self.id_token_jwt = id_token_jwt
self.token_response = token_response
self.scopes = set(_helpers.string_to_scopes(scopes or []))
self.token_info_uri = token_info_uri
# True if the credentials have been revoked or expired and can't be
# refreshed.
self.invalid = False
|
[
"def",
"__init__",
"(",
"self",
",",
"access_token",
",",
"client_id",
",",
"client_secret",
",",
"refresh_token",
",",
"token_expiry",
",",
"token_uri",
",",
"user_agent",
",",
"revoke_uri",
"=",
"None",
",",
"id_token",
"=",
"None",
",",
"token_response",
"=",
"None",
",",
"scopes",
"=",
"None",
",",
"token_info_uri",
"=",
"None",
",",
"id_token_jwt",
"=",
"None",
")",
":",
"self",
".",
"access_token",
"=",
"access_token",
"self",
".",
"client_id",
"=",
"client_id",
"self",
".",
"client_secret",
"=",
"client_secret",
"self",
".",
"refresh_token",
"=",
"refresh_token",
"self",
".",
"store",
"=",
"None",
"self",
".",
"token_expiry",
"=",
"token_expiry",
"self",
".",
"token_uri",
"=",
"token_uri",
"self",
".",
"user_agent",
"=",
"user_agent",
"self",
".",
"revoke_uri",
"=",
"revoke_uri",
"self",
".",
"id_token",
"=",
"id_token",
"self",
".",
"id_token_jwt",
"=",
"id_token_jwt",
"self",
".",
"token_response",
"=",
"token_response",
"self",
".",
"scopes",
"=",
"set",
"(",
"_helpers",
".",
"string_to_scopes",
"(",
"scopes",
"or",
"[",
"]",
")",
")",
"self",
".",
"token_info_uri",
"=",
"token_info_uri",
"# True if the credentials have been revoked or expired and can't be",
"# refreshed.",
"self",
".",
"invalid",
"=",
"False"
] |
https://github.com/cloudera/hue/blob/23f02102d4547c17c32bd5ea0eb24e9eadd657a4/desktop/core/ext-py/oauth2client-4.1.3/oauth2client/client.py#L451-L506
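A hedged instantiation sketch based on the signature documented above; it assumes `oauth2client` is installed, and every value is a placeholder:

```python
# Hedged instantiation sketch; all values are placeholders, not real
# credentials, and oauth2client must be installed for the import to work.
import datetime
from oauth2client.client import OAuth2Credentials

creds = OAuth2Credentials(
    access_token='ya29.placeholder',
    client_id='my-client-id.apps.googleusercontent.com',
    client_secret='placeholder-secret',
    refresh_token='1/placeholder-refresh',
    token_expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
    token_uri='https://oauth2.googleapis.com/token',
    user_agent='my-app/1.0',
)
print(creds.invalid)  # False until revoked or expired without a refresh path
```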
|
||
lpty/nlp_base
|
e82f5a317a335b382e106307c9f047850c6da6f4
|
segment/src/corpus.py
|
python
|
Corpus.read_corpus_from_file
|
(cls, file_path)
|
Read corpus
|
Read corpus
|
[
"读取语料"
] |
def read_corpus_from_file(cls, file_path):
"""
Read corpus
"""
f = open(file_path, 'r')
lines = f.readlines()
for line in lines:
cls._words.extend([word for word in line.decode('gbk').strip().split(' ') if word and not cls.is_puns(word)])
f.close()
|
[
"def",
"read_corpus_from_file",
"(",
"cls",
",",
"file_path",
")",
":",
"f",
"=",
"open",
"(",
"file_path",
",",
"'r'",
")",
"lines",
"=",
"f",
".",
"readlines",
"(",
")",
"for",
"line",
"in",
"lines",
":",
"cls",
".",
"_words",
".",
"extend",
"(",
"[",
"word",
"for",
"word",
"in",
"line",
".",
"decode",
"(",
"'gbk'",
")",
".",
"strip",
"(",
")",
".",
"split",
"(",
"' '",
")",
"if",
"word",
"and",
"not",
"cls",
".",
"is_puns",
"(",
"word",
")",
"]",
")",
"f",
".",
"close",
"(",
")"
] |
https://github.com/lpty/nlp_base/blob/e82f5a317a335b382e106307c9f047850c6da6f4/segment/src/corpus.py#L47-L55
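The body relies on Python 2 semantics (`str.decode('gbk')` only exists on py2 byte strings); a minimal Python 3 equivalent sketch, with `is_puns` as a stand-in for the class's punctuation check:

```python
# Minimal Python 3 equivalent sketch of read_corpus_from_file; is_puns
# stands in for the class's punctuation check (assumption).
def read_corpus_from_file(file_path, is_puns=lambda w: False):
    words = []
    with open(file_path, encoding='gbk') as f:
        for line in f:
            words.extend(w for w in line.strip().split(' ')
                         if w and not is_puns(w))
    return words
```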
|
||
TKkk-iOSer/wechat-alfred-workflow
|
449995275dd700bcb3686abcfe2ed9c63ea826a3
|
src/workflow/workflow.py
|
python
|
Workflow.clear_settings
|
(self)
|
Delete workflow's :attr:`settings_path`.
|
Delete workflow's :attr:`settings_path`.
|
[
"Delete",
"workflow",
"s",
":",
"attr",
":",
"settings_path",
"."
] |
def clear_settings(self):
"""Delete workflow's :attr:`settings_path`."""
if os.path.exists(self.settings_path):
os.unlink(self.settings_path)
self.logger.debug('deleted : %r', self.settings_path)
|
[
"def",
"clear_settings",
"(",
"self",
")",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"settings_path",
")",
":",
"os",
".",
"unlink",
"(",
"self",
".",
"settings_path",
")",
"self",
".",
"logger",
".",
"debug",
"(",
"'deleted : %r'",
",",
"self",
".",
"settings_path",
")"
] |
https://github.com/TKkk-iOSer/wechat-alfred-workflow/blob/449995275dd700bcb3686abcfe2ed9c63ea826a3/src/workflow/workflow.py#L2620-L2624
|
||
albertz/music-player
|
d23586f5bf657cbaea8147223be7814d117ae73d
|
src/Player.py
|
python
|
songsPeekQueue
|
()
|
return lambda n: filter(openSong, Queue.peekNextSongs(n))
|
[] |
def songsPeekQueue():
def openSong(song):
song.openFile()
return song
import Queue
return lambda n: filter(openSong, Queue.peekNextSongs(n))
|
[
"def",
"songsPeekQueue",
"(",
")",
":",
"def",
"openSong",
"(",
"song",
")",
":",
"song",
".",
"openFile",
"(",
")",
"return",
"song",
"import",
"Queue",
"return",
"lambda",
"n",
":",
"filter",
"(",
"openSong",
",",
"Queue",
".",
"peekNextSongs",
"(",
"n",
")",
")"
] |
https://github.com/albertz/music-player/blob/d23586f5bf657cbaea8147223be7814d117ae73d/src/Player.py#L40-L45
|
|||
spectacles/CodeComplice
|
8ca8ee4236f72b58caa4209d2fbd5fa56bd31d62
|
libs/codeintel2/tree.py
|
python
|
TreeEvaluator._check_infinite_recursion
|
(self, expr)
|
return TreeEvaluator._infinite_recursion_checker(self)
|
[] |
def _check_infinite_recursion(self, expr):
if self._eval_count_from_expr is None:
# Move this init into eval() when on TreeEvaluator.
self._eval_count_from_expr = {}
eval_count = self._eval_count_from_expr.get(expr, 0)
eval_count += 1
if eval_count >= self._SENTINEL_MAX_EXPR_COUNT:
raise EvalError("hit eval sentinel: expr '%s' eval count "
"is %d (abort)" % (expr, eval_count))
self._eval_count_from_expr[expr] = eval_count
return TreeEvaluator._infinite_recursion_checker(self)
|
[
"def",
"_check_infinite_recursion",
"(",
"self",
",",
"expr",
")",
":",
"if",
"self",
".",
"_eval_count_from_expr",
"is",
"None",
":",
"# Move this init into eval() when on TreeEvalutor.",
"self",
".",
"_eval_count_from_expr",
"=",
"{",
"}",
"eval_count",
"=",
"self",
".",
"_eval_count_from_expr",
".",
"get",
"(",
"expr",
",",
"0",
")",
"eval_count",
"+=",
"1",
"if",
"eval_count",
">=",
"self",
".",
"_SENTINEL_MAX_EXPR_COUNT",
":",
"raise",
"EvalError",
"(",
"\"hit eval sentinel: expr '%s' eval count \"",
"\"is %d (abort)\"",
"%",
"(",
"expr",
",",
"eval_count",
")",
")",
"self",
".",
"_eval_count_from_expr",
"[",
"expr",
"]",
"=",
"eval_count",
"return",
"TreeEvaluator",
".",
"_infinite_recursion_checker",
"(",
"self",
")"
] |
https://github.com/spectacles/CodeComplice/blob/8ca8ee4236f72b58caa4209d2fbd5fa56bd31d62/libs/codeintel2/tree.py#L695-L705
|
|||
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/benchmarks/src/benchmarks/sympy/sympy/logic/algorithms/dpll2.py
|
python
|
SATSolver._vsids_clause_added
|
(self, cls)
|
Handle the addition of a new clause for the VSIDS heuristic.
Examples
========
>>> from sympy.logic.algorithms.dpll2 import SATSolver
>>> l = SATSolver([set([2, -3]), set([1]), set([3, -3]), set([2, -2]),
... set([3, -2])], set([1, 2, 3]), set([]))
>>> l.num_learned_clauses
0
>>> l.lit_scores
{-3: -2.0, -2: -2.0, -1: 0.0, 1: 0.0, 2: -2.0, 3: -2.0}
>>> l._vsids_clause_added(set([2, -3]))
>>> l.num_learned_clauses
1
>>> l.lit_scores
{-3: -1.0, -2: -2.0, -1: 0.0, 1: 0.0, 2: -1.0, 3: -2.0}
|
Handle the addition of a new clause for the VSIDS heuristic.
|
[
"Handle",
"the",
"addition",
"of",
"a",
"new",
"clause",
"for",
"the",
"VSIDS",
"heuristic",
"."
] |
def _vsids_clause_added(self, cls):
"""Handle the addition of a new clause for the VSIDS heuristic.
Examples
========
>>> from sympy.logic.algorithms.dpll2 import SATSolver
>>> l = SATSolver([set([2, -3]), set([1]), set([3, -3]), set([2, -2]),
... set([3, -2])], set([1, 2, 3]), set([]))
>>> l.num_learned_clauses
0
>>> l.lit_scores
{-3: -2.0, -2: -2.0, -1: 0.0, 1: 0.0, 2: -2.0, 3: -2.0}
>>> l._vsids_clause_added(set([2, -3]))
>>> l.num_learned_clauses
1
>>> l.lit_scores
{-3: -1.0, -2: -2.0, -1: 0.0, 1: 0.0, 2: -1.0, 3: -2.0}
"""
self.num_learned_clauses += 1
for lit in cls:
self.lit_scores[lit] += 1
|
[
"def",
"_vsids_clause_added",
"(",
"self",
",",
"cls",
")",
":",
"self",
".",
"num_learned_clauses",
"+=",
"1",
"for",
"lit",
"in",
"cls",
":",
"self",
".",
"lit_scores",
"[",
"lit",
"]",
"+=",
"1"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/benchmarks/src/benchmarks/sympy/sympy/logic/algorithms/dpll2.py#L510-L534
|
||
llSourcell/AI_Artist
|
3038c06c2e389b9c919c881c9a169efe2fd7810e
|
lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/universaldetector.py
|
python
|
UniversalDetector.reset
|
(self)
|
[] |
def reset(self):
self.result = {'encoding': None, 'confidence': 0.0}
self.done = False
self._mStart = True
self._mGotData = False
self._mInputState = ePureAscii
self._mLastChar = b''
if self._mEscCharSetProber:
self._mEscCharSetProber.reset()
for prober in self._mCharSetProbers:
prober.reset()
|
[
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"result",
"=",
"{",
"'encoding'",
":",
"None",
",",
"'confidence'",
":",
"0.0",
"}",
"self",
".",
"done",
"=",
"False",
"self",
".",
"_mStart",
"=",
"True",
"self",
".",
"_mGotData",
"=",
"False",
"self",
".",
"_mInputState",
"=",
"ePureAscii",
"self",
".",
"_mLastChar",
"=",
"b''",
"if",
"self",
".",
"_mEscCharSetProber",
":",
"self",
".",
"_mEscCharSetProber",
".",
"reset",
"(",
")",
"for",
"prober",
"in",
"self",
".",
"_mCharSetProbers",
":",
"prober",
".",
"reset",
"(",
")"
] |
https://github.com/llSourcell/AI_Artist/blob/3038c06c2e389b9c919c881c9a169efe2fd7810e/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/universaldetector.py#L52-L62
|
||||
openstack/nova
|
b49b7663e1c3073917d5844b81d38db8e86d05c4
|
nova/virt/vmwareapi/driver.py
|
python
|
VMwareVCDriver.get_vnc_console
|
(self, context, instance)
|
return self._vmops.get_vnc_console(instance)
|
Return link to instance's VNC console using vCenter logic.
|
Return link to instance's VNC console using vCenter logic.
|
[
"Return",
"link",
"to",
"instance",
"s",
"VNC",
"console",
"using",
"vCenter",
"logic",
"."
] |
def get_vnc_console(self, context, instance):
"""Return link to instance's VNC console using vCenter logic."""
# vCenter does not actually run the VNC service
# itself. You must talk to the VNC host underneath vCenter.
return self._vmops.get_vnc_console(instance)
|
[
"def",
"get_vnc_console",
"(",
"self",
",",
"context",
",",
"instance",
")",
":",
"# vCenter does not actually run the VNC service",
"# itself. You must talk to the VNC host underneath vCenter.",
"return",
"self",
".",
"_vmops",
".",
"get_vnc_console",
"(",
"instance",
")"
] |
https://github.com/openstack/nova/blob/b49b7663e1c3073917d5844b81d38db8e86d05c4/nova/virt/vmwareapi/driver.py#L344-L348
|
|
sfu-db/dataprep
|
6dfb9c659e8bf73f07978ae195d0372495c6f118
|
dataprep/clean/clean_at_uid.py
|
python
|
validate_at_uid
|
(
df: Union[str, pd.Series, dd.Series, pd.DataFrame, dd.DataFrame],
column: str = "",
)
|
return uid.is_valid(df)
|
Validate if a data cell is Austrian UID in a DataFrame column.
For each cell, return True or False.
Parameters
----------
df
A pandas or Dask DataFrame containing the data to be validated.
column
The name of the column to be validated.
|
Validate if a data cell is Austrian UID in a DataFrame column.
For each cell, return True or False.
|
[
"Validate",
"if",
"a",
"data",
"cell",
"is",
"Austrian",
"UID",
"in",
"a",
"DataFrame",
"column",
".",
"For",
"each",
"cell",
"return",
"True",
"or",
"False",
"."
] |
def validate_at_uid(
df: Union[str, pd.Series, dd.Series, pd.DataFrame, dd.DataFrame],
column: str = "",
) -> Union[bool, pd.Series, pd.DataFrame]:
"""
Validate if a data cell is Austrian UID in a DataFrame column.
For each cell, return True or False.
Parameters
----------
df
A pandas or Dask DataFrame containing the data to be validated.
column
The name of the column to be validated.
"""
if isinstance(df, (pd.Series, dd.Series)):
return df.apply(uid.is_valid)
elif isinstance(df, (pd.DataFrame, dd.DataFrame)):
if column != "":
return df[column].apply(uid.is_valid)
else:
return df.applymap(uid.is_valid)
return uid.is_valid(df)
|
[
"def",
"validate_at_uid",
"(",
"df",
":",
"Union",
"[",
"str",
",",
"pd",
".",
"Series",
",",
"dd",
".",
"Series",
",",
"pd",
".",
"DataFrame",
",",
"dd",
".",
"DataFrame",
"]",
",",
"column",
":",
"str",
"=",
"\"\"",
",",
")",
"->",
"Union",
"[",
"bool",
",",
"pd",
".",
"Series",
",",
"pd",
".",
"DataFrame",
"]",
":",
"if",
"isinstance",
"(",
"df",
",",
"(",
"pd",
".",
"Series",
",",
"dd",
".",
"Series",
")",
")",
":",
"return",
"df",
".",
"apply",
"(",
"uid",
".",
"is_valid",
")",
"elif",
"isinstance",
"(",
"df",
",",
"(",
"pd",
".",
"DataFrame",
",",
"dd",
".",
"DataFrame",
")",
")",
":",
"if",
"column",
"!=",
"\"\"",
":",
"return",
"df",
"[",
"column",
"]",
".",
"apply",
"(",
"uid",
".",
"is_valid",
")",
"else",
":",
"return",
"df",
".",
"applymap",
"(",
"uid",
".",
"is_valid",
")",
"return",
"uid",
".",
"is_valid",
"(",
"df",
")"
] |
https://github.com/sfu-db/dataprep/blob/6dfb9c659e8bf73f07978ae195d0372495c6f118/dataprep/clean/clean_at_uid.py#L116-L138
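A hedged usage sketch; it assumes `dataprep` is installed and re-exports `validate_at_uid` from `dataprep.clean` as it does for its other validators, and the sample strings are illustrative, not guaranteed-valid UIDs:

```python
# Hedged usage sketch; the re-export path and sample values are assumptions.
import pandas as pd
from dataprep.clean import validate_at_uid  # assumed re-export

s = pd.Series(["U13585627", "U12345678"])   # illustrative values
print(validate_at_uid(s))                   # element-wise True/False Series
```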
|
|
PINTO0309/PINTO_model_zoo
|
2924acda7a7d541d8712efd7cc4fd1c61ef5bddd
|
090_Ghost-free_Shadow_Removal/networks.py
|
python
|
conv2d
|
(input_, output_dim, ks=4, s=2, stddev=0.02, padding='SAME', name="conv2d")
|
[] |
def conv2d(input_, output_dim, ks=4, s=2, stddev=0.02, padding='SAME', name="conv2d"):
with tf.variable_scope(name):
# return slim.conv2d(input_, output_dim, ks, s, padding=padding, activation_fn=None,
# weights_initializer=tf.truncated_normal_initializer(stddev=stddev),
# biases_initializer=None)
return slim.conv2d(input_, output_dim, ks, s, padding=padding, activation_fn=None,
weights_initializer=tf.truncated_normal_initializer(stddev=stddev),
biases_initializer=None, trainable=False)
|
[
"def",
"conv2d",
"(",
"input_",
",",
"output_dim",
",",
"ks",
"=",
"4",
",",
"s",
"=",
"2",
",",
"stddev",
"=",
"0.02",
",",
"padding",
"=",
"'SAME'",
",",
"name",
"=",
"\"conv2d\"",
")",
":",
"with",
"tf",
".",
"variable_scope",
"(",
"name",
")",
":",
"# return slim.conv2d(input_, output_dim, ks, s, padding=padding, activation_fn=None,",
"# weights_initializer=tf.truncated_normal_initializer(stddev=stddev),",
"# biases_initializer=None)",
"return",
"slim",
".",
"conv2d",
"(",
"input_",
",",
"output_dim",
",",
"ks",
",",
"s",
",",
"padding",
"=",
"padding",
",",
"activation_fn",
"=",
"None",
",",
"weights_initializer",
"=",
"tf",
".",
"truncated_normal_initializer",
"(",
"stddev",
"=",
"stddev",
")",
",",
"biases_initializer",
"=",
"None",
",",
"trainable",
"=",
"False",
")"
] |
https://github.com/PINTO0309/PINTO_model_zoo/blob/2924acda7a7d541d8712efd7cc4fd1c61ef5bddd/090_Ghost-free_Shadow_Removal/networks.py#L270-L277
|
||||
home-assistant/core
|
265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1
|
homeassistant/components/cast/media_player.py
|
python
|
DynamicCastGroup.async_setup
|
(self)
|
Create chromecast object.
|
Create chromecast object.
|
[
"Create",
"chromecast",
"object",
"."
] |
def async_setup(self):
"""Create chromecast object."""
self._add_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_CAST_DISCOVERED, self._async_cast_discovered
)
self._del_remove_handler = async_dispatcher_connect(
self.hass, SIGNAL_CAST_REMOVED, self._async_cast_removed
)
self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._async_stop)
self.async_set_cast_info(self._cast_info)
self.hass.async_create_task(
async_create_catching_coro(self.async_connect_to_chromecast())
)
|
[
"def",
"async_setup",
"(",
"self",
")",
":",
"self",
".",
"_add_remove_handler",
"=",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"SIGNAL_CAST_DISCOVERED",
",",
"self",
".",
"_async_cast_discovered",
")",
"self",
".",
"_del_remove_handler",
"=",
"async_dispatcher_connect",
"(",
"self",
".",
"hass",
",",
"SIGNAL_CAST_REMOVED",
",",
"self",
".",
"_async_cast_removed",
")",
"self",
".",
"hass",
".",
"bus",
".",
"async_listen_once",
"(",
"EVENT_HOMEASSISTANT_STOP",
",",
"self",
".",
"_async_stop",
")",
"self",
".",
"async_set_cast_info",
"(",
"self",
".",
"_cast_info",
")",
"self",
".",
"hass",
".",
"async_create_task",
"(",
"async_create_catching_coro",
"(",
"self",
".",
"async_connect_to_chromecast",
"(",
")",
")",
")"
] |
https://github.com/home-assistant/core/blob/265ebd17a3f17ed8dc1e9bdede03ac8e323f1ab1/homeassistant/components/cast/media_player.py#L786-L798
|
||
simoncadman/CUPS-Cloud-Print
|
5d96eaa5ba1d3ffe40845498917879b0e907f6bd
|
printermanager.py
|
python
|
PrinterManager.sanitizePrinterName
|
(self, name)
|
return re.sub('[^a-zA-Z0-9\-_]', '', name.encode('ascii', 'replace').replace(' ', '_'))
|
Sanitizes printer name for CUPS
Args:
name: string, name of printer from Google Cloud Print
Returns:
string: CUPS-friendly name for the printer
|
Sanitizes printer name for CUPS
|
[
"Sanitizes",
"printer",
"name",
"for",
"CUPS"
] |
def sanitizePrinterName(self, name):
"""Sanitizes printer name for CUPS
Args:
name: string, name of printer from Google Cloud Print
Returns:
string: CUPS-friendly name for the printer
"""
return re.sub('[^a-zA-Z0-9\-_]', '', name.encode('ascii', 'replace').replace(' ', '_'))
|
[
"def",
"sanitizePrinterName",
"(",
"self",
",",
"name",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'[^a-zA-Z0-9\\-_]'",
",",
"''",
",",
"name",
".",
"encode",
"(",
"'ascii'",
",",
"'replace'",
")",
".",
"replace",
"(",
"' '",
",",
"'_'",
")",
")"
] |
https://github.com/simoncadman/CUPS-Cloud-Print/blob/5d96eaa5ba1d3ffe40845498917879b0e907f6bd/printermanager.py#L114-L123
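A Python 3 re-creation of the sanitizer (in Python 2, `str.encode('ascii', 'replace')` returns a `str`, so the original skips the decode step):

```python
# Python 3 re-creation of sanitizePrinterName; behavior matches the
# original: ASCII-fold, spaces to underscores, strip everything else.
import re

def sanitize_printer_name(name: str) -> str:
    ascii_name = name.encode('ascii', 'replace').decode('ascii')
    return re.sub(r'[^a-zA-Z0-9\-_]', '', ascii_name.replace(' ', '_'))

print(sanitize_printer_name('Café Printer #2'))  # Caf_Printer_2
```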
|
|
securesystemslab/zippy
|
ff0e84ac99442c2c55fe1d285332cfd4e185e089
|
zippy/lib-python/3/xml/etree/ElementTree.py
|
python
|
Element.iter
|
(self, tag=None)
|
[] |
def iter(self, tag=None):
if tag == "*":
tag = None
if tag is None or self.tag == tag:
yield self
for e in self._children:
for e in e.iter(tag):
yield e
|
[
"def",
"iter",
"(",
"self",
",",
"tag",
"=",
"None",
")",
":",
"if",
"tag",
"==",
"\"*\"",
":",
"tag",
"=",
"None",
"if",
"tag",
"is",
"None",
"or",
"self",
".",
"tag",
"==",
"tag",
":",
"yield",
"self",
"for",
"e",
"in",
"self",
".",
"_children",
":",
"for",
"e",
"in",
"e",
".",
"iter",
"(",
"tag",
")",
":",
"yield",
"e"
] |
https://github.com/securesystemslab/zippy/blob/ff0e84ac99442c2c55fe1d285332cfd4e185e089/zippy/lib-python/3/xml/etree/ElementTree.py#L471-L478
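Since this is the stdlib `xml.etree.ElementTree` implementation, the traversal order is easy to demonstrate with the installed module:

```python
# Element.iter() yields the element itself first, then descendants
# depth-first; tag=None (or "*") matches every element.
import xml.etree.ElementTree as ET

root = ET.fromstring('<a><b><c/></b><c/></a>')
print([e.tag for e in root.iter()])     # ['a', 'b', 'c', 'c']
print([e.tag for e in root.iter('c')])  # ['c', 'c']
```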
|
||||
1040003585/WebScrapingWithPython
|
a770fa5b03894076c8c9539b1ffff34424ffc016
|
8.Scrapy爬虫框架/portia_examle/lib/python2.7/codecs.py
|
python
|
StreamRecoder.__getattr__
|
(self, name,
getattr=getattr)
|
return getattr(self.stream, name)
|
Inherit all other methods from the underlying stream.
|
Inherit all other methods from the underlying stream.
|
[
"Inherit",
"all",
"other",
"methods",
"from",
"the",
"underlying",
"stream",
"."
] |
def __getattr__(self, name,
getattr=getattr):
""" Inherit all other methods from the underlying stream.
"""
return getattr(self.stream, name)
|
[
"def",
"__getattr__",
"(",
"self",
",",
"name",
",",
"getattr",
"=",
"getattr",
")",
":",
"return",
"getattr",
"(",
"self",
".",
"stream",
",",
"name",
")"
] |
https://github.com/1040003585/WebScrapingWithPython/blob/a770fa5b03894076c8c9539b1ffff34424ffc016/8.Scrapy爬虫框架/portia_examle/lib/python2.7/codecs.py#L843-L848
|
|
mesonbuild/meson
|
a22d0f9a0a787df70ce79b05d0c45de90a970048
|
mesonbuild/linkers/linkers.py
|
python
|
NAGDynamicLinker.build_rpath_args
|
(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: T.Tuple[str, ...], build_rpath: str,
install_rpath: str)
|
return (args, set())
|
[] |
def build_rpath_args(self, env: 'Environment', build_dir: str, from_dir: str,
rpath_paths: T.Tuple[str, ...], build_rpath: str,
install_rpath: str) -> T.Tuple[T.List[str], T.Set[bytes]]:
if not rpath_paths and not install_rpath and not build_rpath:
return ([], set())
args = []
origin_placeholder = '$ORIGIN'
processed_rpaths = prepare_rpaths(rpath_paths, build_dir, from_dir)
all_paths = mesonlib.OrderedSet([os.path.join(origin_placeholder, p) for p in processed_rpaths])
if build_rpath != '':
all_paths.add(build_rpath)
for rp in all_paths:
args.extend(self._apply_prefix('-Wl,-Wl,,-rpath,,' + rp))
return (args, set())
|
[
"def",
"build_rpath_args",
"(",
"self",
",",
"env",
":",
"'Environment'",
",",
"build_dir",
":",
"str",
",",
"from_dir",
":",
"str",
",",
"rpath_paths",
":",
"T",
".",
"Tuple",
"[",
"str",
",",
"...",
"]",
",",
"build_rpath",
":",
"str",
",",
"install_rpath",
":",
"str",
")",
"->",
"T",
".",
"Tuple",
"[",
"T",
".",
"List",
"[",
"str",
"]",
",",
"T",
".",
"Set",
"[",
"bytes",
"]",
"]",
":",
"if",
"not",
"rpath_paths",
"and",
"not",
"install_rpath",
"and",
"not",
"build_rpath",
":",
"return",
"(",
"[",
"]",
",",
"set",
"(",
")",
")",
"args",
"=",
"[",
"]",
"origin_placeholder",
"=",
"'$ORIGIN'",
"processed_rpaths",
"=",
"prepare_rpaths",
"(",
"rpath_paths",
",",
"build_dir",
",",
"from_dir",
")",
"all_paths",
"=",
"mesonlib",
".",
"OrderedSet",
"(",
"[",
"os",
".",
"path",
".",
"join",
"(",
"origin_placeholder",
",",
"p",
")",
"for",
"p",
"in",
"processed_rpaths",
"]",
")",
"if",
"build_rpath",
"!=",
"''",
":",
"all_paths",
".",
"add",
"(",
"build_rpath",
")",
"for",
"rp",
"in",
"all_paths",
":",
"args",
".",
"extend",
"(",
"self",
".",
"_apply_prefix",
"(",
"'-Wl,-Wl,,-rpath,,'",
"+",
"rp",
")",
")",
"return",
"(",
"args",
",",
"set",
"(",
")",
")"
] |
https://github.com/mesonbuild/meson/blob/a22d0f9a0a787df70ce79b05d0c45de90a970048/mesonbuild/linkers/linkers.py#L1067-L1081
|
|||
MDudek-ICS/TRISIS-TRITON-HATMAN
|
15a00af7fd1040f0430729d024427601f84886a1
|
decompiled_code/library/cmd.py
|
python
|
Cmd.completedefault
|
(self, *ignored)
|
return []
|
Method called to complete an input line when no command-specific
complete_*() method is available.
By default, it returns an empty list.
|
Method called to complete an input line when no command-specific
complete_*() method is available.
By default, it returns an empty list.
|
[
"Method",
"called",
"to",
"complete",
"an",
"input",
"line",
"when",
"no",
"command",
"-",
"specific",
"complete_",
"*",
"()",
"method",
"is",
"available",
".",
"By",
"default",
"it",
"returns",
"an",
"empty",
"list",
"."
] |
def completedefault(self, *ignored):
"""Method called to complete an input line when no command-specific
complete_*() method is available.
By default, it returns an empty list.
"""
return []
|
[
"def",
"completedefault",
"(",
"self",
",",
"*",
"ignored",
")",
":",
"return",
"[",
"]"
] |
https://github.com/MDudek-ICS/TRISIS-TRITON-HATMAN/blob/15a00af7fd1040f0430729d024427601f84886a1/decompiled_code/library/cmd.py#L256-L263
|
|
rubys/venus
|
9de21094a8cf565bdfcf75688e121a5ad1f5397b
|
filters/excerpt.py
|
python
|
copy.copyElement
|
(self, source, target)
|
copy source element to the target
|
copy source element to the target
|
[
"copy",
"source",
"element",
"to",
"the",
"target"
] |
def copyElement(self, source, target):
""" copy source element to the target """
# check the omit list
if source.nodeName in omit:
if source.nodeName == 'img':
return self.elideImage(source, target)
return self.copyChildren(source, target)
# copy element, attributes, and children
child = self.dom.createElementNS(source.namespaceURI, source.nodeName)
target.appendChild(child)
for i in range(0, source.attributes.length):
attr = source.attributes.item(i)
child.setAttributeNS(attr.namespaceURI, attr.name, attr.value)
self.copyChildren(source, child)
|
[
"def",
"copyElement",
"(",
"self",
",",
"source",
",",
"target",
")",
":",
"# check the omit list",
"if",
"source",
".",
"nodeName",
"in",
"omit",
":",
"if",
"source",
".",
"nodeName",
"==",
"'img'",
":",
"return",
"self",
".",
"elideImage",
"(",
"source",
",",
"target",
")",
"return",
"self",
".",
"copyChildren",
"(",
"source",
",",
"target",
")",
"# copy element, attributes, and children",
"child",
"=",
"self",
".",
"dom",
".",
"createElementNS",
"(",
"source",
".",
"namespaceURI",
",",
"source",
".",
"nodeName",
")",
"target",
".",
"appendChild",
"(",
"child",
")",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"source",
".",
"attributes",
".",
"length",
")",
":",
"attr",
"=",
"source",
".",
"attributes",
".",
"item",
"(",
"i",
")",
"child",
".",
"setAttributeNS",
"(",
"attr",
".",
"namespaceURI",
",",
"attr",
".",
"name",
",",
"attr",
".",
"value",
")",
"self",
".",
"copyChildren",
"(",
"source",
",",
"child",
")"
] |
https://github.com/rubys/venus/blob/9de21094a8cf565bdfcf75688e121a5ad1f5397b/filters/excerpt.py#L48-L63
|
||
Scalsol/mega.pytorch
|
a6aa6e0537b82d70da94228100a51e6a53d98f82
|
mega_core/modeling/backbone/fbnet.py
|
python
|
add_conv_body
|
(cfg, dim_in=3)
|
return model
|
[] |
def add_conv_body(cfg, dim_in=3):
builder, arch_def = create_builder(cfg)
body = FBNetTrunk(builder, arch_def, dim_in)
model = nn.Sequential(OrderedDict([("body", body)]))
model.out_channels = builder.last_depth
return model
|
[
"def",
"add_conv_body",
"(",
"cfg",
",",
"dim_in",
"=",
"3",
")",
":",
"builder",
",",
"arch_def",
"=",
"create_builder",
"(",
"cfg",
")",
"body",
"=",
"FBNetTrunk",
"(",
"builder",
",",
"arch_def",
",",
"dim_in",
")",
"model",
"=",
"nn",
".",
"Sequential",
"(",
"OrderedDict",
"(",
"[",
"(",
"\"body\"",
",",
"body",
")",
"]",
")",
")",
"model",
".",
"out_channels",
"=",
"builder",
".",
"last_depth",
"return",
"model"
] |
https://github.com/Scalsol/mega.pytorch/blob/a6aa6e0537b82d70da94228100a51e6a53d98f82/mega_core/modeling/backbone/fbnet.py#L97-L104
|
|||
microsoft/nni
|
31f11f51249660930824e888af0d4e022823285c
|
nni/algorithms/compression/v2/pytorch/pruning/tools/base.py
|
python
|
SparsityAllocator._compress_mask
|
(self, mask: Tensor)
|
return (mask != 0).type_as(mask)
|
This function will reduce the mask with `self.dim` and `self.block_sparse_size`.
e.g., a mask tensor with size [50, 60, 70], self.dim is (0, 1), self.block_sparse_size is [10, 10].
Then, the reduced mask size is [50 / 10, 60 / 10] => [5, 6].
Parameters
----------
name
The masked module name.
mask
The entire mask has the same size with weight.
Returns
-------
Tensor
Reduced mask.
|
This function will reduce the mask with `self.dim` and `self.block_sparse_size`.
e.g., a mask tensor with size [50, 60, 70], self.dim is (0, 1), self.block_sparse_size is [10, 10].
Then, the reduced mask size is [50 / 10, 60 / 10] => [5, 6].
|
[
"This",
"function",
"will",
"reduce",
"the",
"mask",
"with",
"self",
".",
"dim",
"and",
"self",
".",
"block_sparse_size",
".",
"e",
".",
"g",
".",
"a",
"mask",
"tensor",
"with",
"size",
"[",
"50",
"60",
"70",
"]",
"self",
".",
"dim",
"is",
"(",
"0",
"1",
")",
"self",
".",
"block_sparse_size",
"is",
"[",
"10",
"10",
"]",
".",
"Then",
"the",
"reduced",
"mask",
"size",
"is",
"[",
"50",
"/",
"10",
"60",
"/",
"10",
"]",
"=",
">",
"[",
"5",
"6",
"]",
"."
] |
def _compress_mask(self, mask: Tensor) -> Tensor:
"""
This function will reduce the mask with `self.dim` and `self.block_sparse_size`.
e.g., a mask tensor with size [50, 60, 70], self.dim is (0, 1), self.block_sparse_size is [10, 10].
Then, the reduced mask size is [50 / 10, 60 / 10] => [5, 6].
Parameters
----------
name
The masked module name.
mask
The entire mask has the same size with weight.
Returns
-------
Tensor
Reduced mask.
"""
if self.dim is None or len(mask.size()) == 1:
mask = mask.clone()
else:
mask_dim = list(range(len(mask.size())))
for dim in self.dim:
mask_dim.remove(dim)
mask = torch.sum(mask, dim=mask_dim)
if self.block_sparse_size is not None:
# operation like pooling
lower_case_letters = 'abcdefghijklmnopqrstuvwxyz'
ein_expression = ''
for i, step in enumerate(self.block_sparse_size):
mask = mask.unfold(i, step, step)
ein_expression += lower_case_letters[i]
ein_expression = '...{},{}'.format(ein_expression, ein_expression)
mask = torch.einsum(ein_expression, mask, torch.ones(self.block_sparse_size).to(mask.device))
return (mask != 0).type_as(mask)
|
[
"def",
"_compress_mask",
"(",
"self",
",",
"mask",
":",
"Tensor",
")",
"->",
"Tensor",
":",
"if",
"self",
".",
"dim",
"is",
"None",
"or",
"len",
"(",
"mask",
".",
"size",
"(",
")",
")",
"==",
"1",
":",
"mask",
"=",
"mask",
".",
"clone",
"(",
")",
"else",
":",
"mask_dim",
"=",
"list",
"(",
"range",
"(",
"len",
"(",
"mask",
".",
"size",
"(",
")",
")",
")",
")",
"for",
"dim",
"in",
"self",
".",
"dim",
":",
"mask_dim",
".",
"remove",
"(",
"dim",
")",
"mask",
"=",
"torch",
".",
"sum",
"(",
"mask",
",",
"dim",
"=",
"mask_dim",
")",
"if",
"self",
".",
"block_sparse_size",
"is",
"not",
"None",
":",
"# operation like pooling",
"lower_case_letters",
"=",
"'abcdefghijklmnopqrstuvwxyz'",
"ein_expression",
"=",
"''",
"for",
"i",
",",
"step",
"in",
"enumerate",
"(",
"self",
".",
"block_sparse_size",
")",
":",
"mask",
"=",
"mask",
".",
"unfold",
"(",
"i",
",",
"step",
",",
"step",
")",
"ein_expression",
"+=",
"lower_case_letters",
"[",
"i",
"]",
"ein_expression",
"=",
"'...{},{}'",
".",
"format",
"(",
"ein_expression",
",",
"ein_expression",
")",
"mask",
"=",
"torch",
".",
"einsum",
"(",
"ein_expression",
",",
"mask",
",",
"torch",
".",
"ones",
"(",
"self",
".",
"block_sparse_size",
")",
".",
"to",
"(",
"mask",
".",
"device",
")",
")",
"return",
"(",
"mask",
"!=",
"0",
")",
".",
"type_as",
"(",
"mask",
")"
] |
https://github.com/microsoft/nni/blob/31f11f51249660930824e888af0d4e022823285c/nni/algorithms/compression/v2/pytorch/pruning/tools/base.py#L409-L445
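A numeric check of the docstring example, assuming PyTorch is available; the unfold-plus-einsum pooling mirrors the block-sparse branch above:

```python
# Numeric check of the docstring example: keep dims (0, 1) of a
# [50, 60, 70] mask, then pool 10x10 blocks -> [5, 6]. Assumes PyTorch.
import torch

mask = torch.ones(50, 60, 70)
keep_dims, block = (0, 1), [10, 10]

sum_dims = [d for d in range(mask.dim()) if d not in keep_dims]
reduced = torch.sum(mask, dim=sum_dims)        # -> [50, 60]
for i, step in enumerate(block):
    reduced = reduced.unfold(i, step, step)    # -> [5, 6, 10, 10]
pooled = torch.einsum('...ab,ab', reduced, torch.ones(block))
print((pooled != 0).shape)                     # torch.Size([5, 6])
```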
|
|
seppius-xbmc-repo/ru
|
d0879d56ec8243b2c7af44fda5cf3d1ff77fd2e2
|
plugin.video.stepashka.com/resources/lib/BeautifulSoup.py
|
python
|
Tag.renderContents
|
(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0)
|
return ''.join(s)
|
Renders the contents of this tag as a string in the given
encoding. If encoding is None, returns a Unicode string.
|
Renders the contents of this tag as a string in the given
encoding. If encoding is None, returns a Unicode string.
|
[
"Renders",
"the",
"contents",
"of",
"this",
"tag",
"as",
"a",
"string",
"in",
"the",
"given",
"encoding",
".",
"If",
"encoding",
"is",
"None",
"returns",
"a",
"Unicode",
"string",
".."
] |
def renderContents(self, encoding=DEFAULT_OUTPUT_ENCODING,
prettyPrint=False, indentLevel=0):
"""Renders the contents of this tag as a string in the given
encoding. If encoding is None, returns a Unicode string."""
s=[]
for c in self:
text = None
if isinstance(c, NavigableString):
text = c.__str__(encoding)
elif isinstance(c, Tag):
s.append(c.__str__(encoding, prettyPrint, indentLevel))
if text and prettyPrint:
text = text.strip()
if text:
if prettyPrint:
s.append(" " * (indentLevel-1))
s.append(text)
if prettyPrint:
s.append("\n")
return ''.join(s)
|
[
"def",
"renderContents",
"(",
"self",
",",
"encoding",
"=",
"DEFAULT_OUTPUT_ENCODING",
",",
"prettyPrint",
"=",
"False",
",",
"indentLevel",
"=",
"0",
")",
":",
"s",
"=",
"[",
"]",
"for",
"c",
"in",
"self",
":",
"text",
"=",
"None",
"if",
"isinstance",
"(",
"c",
",",
"NavigableString",
")",
":",
"text",
"=",
"c",
".",
"__str__",
"(",
"encoding",
")",
"elif",
"isinstance",
"(",
"c",
",",
"Tag",
")",
":",
"s",
".",
"append",
"(",
"c",
".",
"__str__",
"(",
"encoding",
",",
"prettyPrint",
",",
"indentLevel",
")",
")",
"if",
"text",
"and",
"prettyPrint",
":",
"text",
"=",
"text",
".",
"strip",
"(",
")",
"if",
"text",
":",
"if",
"prettyPrint",
":",
"s",
".",
"append",
"(",
"\" \"",
"*",
"(",
"indentLevel",
"-",
"1",
")",
")",
"s",
".",
"append",
"(",
"text",
")",
"if",
"prettyPrint",
":",
"s",
".",
"append",
"(",
"\"\\n\"",
")",
"return",
"''",
".",
"join",
"(",
"s",
")"
] |
https://github.com/seppius-xbmc-repo/ru/blob/d0879d56ec8243b2c7af44fda5cf3d1ff77fd2e2/plugin.video.stepashka.com/resources/lib/BeautifulSoup.py#L801-L820
|
|
neo4j/neo4j-python-driver
|
97fd0e1da8223373018fa4755ac431b90a144f02
|
neo4j/_sync/io/_bolt.py
|
python
|
Bolt.protocol_handlers
|
(cls, protocol_version=None)
|
return {}
|
Return a dictionary of available Bolt protocol handlers,
keyed by version tuple. If an explicit protocol version is
provided, the dictionary will contain either zero or one items,
depending on whether that version is supported. If no protocol
version is provided, all available versions will be returned.
:param protocol_version: tuple identifying a specific protocol
version (e.g. (3, 5)) or None
:return: dictionary of version tuple to handler class for all
relevant and supported protocol versions
:raise TypeError: if protocol version is not passed in a tuple
|
Return a dictionary of available Bolt protocol handlers,
keyed by version tuple. If an explicit protocol version is
provided, the dictionary will contain either zero or one items,
depending on whether that version is supported. If no protocol
version is provided, all available versions will be returned.
|
[
"Return",
"a",
"dictionary",
"of",
"available",
"Bolt",
"protocol",
"handlers",
"keyed",
"by",
"version",
"tuple",
".",
"If",
"an",
"explicit",
"protocol",
"version",
"is",
"provided",
"the",
"dictionary",
"will",
"contain",
"either",
"zero",
"or",
"one",
"items",
"depending",
"on",
"whether",
"that",
"version",
"is",
"supported",
".",
"If",
"no",
"protocol",
"version",
"is",
"provided",
"all",
"available",
"versions",
"will",
"be",
"returned",
"."
] |
def protocol_handlers(cls, protocol_version=None):
""" Return a dictionary of available Bolt protocol handlers,
keyed by version tuple. If an explicit protocol version is
provided, the dictionary will contain either zero or one items,
depending on whether that version is supported. If no protocol
version is provided, all available versions will be returned.
:param protocol_version: tuple identifying a specific protocol
version (e.g. (3, 5)) or None
:return: dictionary of version tuple to handler class for all
relevant and supported protocol versions
:raise TypeError: if protocol version is not passed in a tuple
"""
# Carry out Bolt subclass imports locally to avoid circular dependency issues.
from ._bolt3 import Bolt3
from ._bolt4 import (
Bolt4x0,
Bolt4x1,
Bolt4x2,
Bolt4x3,
Bolt4x4,
)
handlers = {
Bolt3.PROTOCOL_VERSION: Bolt3,
Bolt4x0.PROTOCOL_VERSION: Bolt4x0,
Bolt4x1.PROTOCOL_VERSION: Bolt4x1,
Bolt4x2.PROTOCOL_VERSION: Bolt4x2,
Bolt4x3.PROTOCOL_VERSION: Bolt4x3,
Bolt4x4.PROTOCOL_VERSION: Bolt4x4,
}
if protocol_version is None:
return handlers
if not isinstance(protocol_version, tuple):
raise TypeError("Protocol version must be specified as a tuple")
if protocol_version in handlers:
return {protocol_version: handlers[protocol_version]}
return {}
|
[
"def",
"protocol_handlers",
"(",
"cls",
",",
"protocol_version",
"=",
"None",
")",
":",
"# Carry out Bolt subclass imports locally to avoid circular dependency issues.",
"from",
".",
"_bolt3",
"import",
"Bolt3",
"from",
".",
"_bolt4",
"import",
"(",
"Bolt4x0",
",",
"Bolt4x1",
",",
"Bolt4x2",
",",
"Bolt4x3",
",",
"Bolt4x4",
",",
")",
"handlers",
"=",
"{",
"Bolt3",
".",
"PROTOCOL_VERSION",
":",
"Bolt3",
",",
"Bolt4x0",
".",
"PROTOCOL_VERSION",
":",
"Bolt4x0",
",",
"Bolt4x1",
".",
"PROTOCOL_VERSION",
":",
"Bolt4x1",
",",
"Bolt4x2",
".",
"PROTOCOL_VERSION",
":",
"Bolt4x2",
",",
"Bolt4x3",
".",
"PROTOCOL_VERSION",
":",
"Bolt4x3",
",",
"Bolt4x4",
".",
"PROTOCOL_VERSION",
":",
"Bolt4x4",
",",
"}",
"if",
"protocol_version",
"is",
"None",
":",
"return",
"handlers",
"if",
"not",
"isinstance",
"(",
"protocol_version",
",",
"tuple",
")",
":",
"raise",
"TypeError",
"(",
"\"Protocol version must be specified as a tuple\"",
")",
"if",
"protocol_version",
"in",
"handlers",
":",
"return",
"{",
"protocol_version",
":",
"handlers",
"[",
"protocol_version",
"]",
"}",
"return",
"{",
"}"
] |
https://github.com/neo4j/neo4j-python-driver/blob/97fd0e1da8223373018fa4755ac431b90a144f02/neo4j/_sync/io/_bolt.py#L157-L199
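A standalone sketch of the version-keyed dispatch pattern; `Handler3` and `Handler4x4` are stand-ins for the Bolt subclasses:

```python
# Standalone sketch of version-keyed handler dispatch; the classes are
# stand-ins, not the real Bolt subclasses.
class Handler3:
    PROTOCOL_VERSION = (3, 0)

class Handler4x4:
    PROTOCOL_VERSION = (4, 4)

HANDLERS = {h.PROTOCOL_VERSION: h for h in (Handler3, Handler4x4)}

def protocol_handlers(protocol_version=None):
    if protocol_version is None:
        return HANDLERS
    if not isinstance(protocol_version, tuple):
        raise TypeError("Protocol version must be specified as a tuple")
    if protocol_version in HANDLERS:
        return {protocol_version: HANDLERS[protocol_version]}
    return {}

print(protocol_handlers((4, 4)))  # one matching handler
print(protocol_handlers((9, 9)))  # {}
```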
|
|
HCIILAB/DeRPN
|
21e6738ee1f7d3f159ee48d435c543e773f8ce99
|
tools/train_faster_rcnn_alt_opt.py
|
python
|
parse_args
|
()
|
return args
|
Parse input arguments
|
Parse input arguments
|
[
"Parse",
"input",
"arguments"
] |
def parse_args():
"""
Parse input arguments
"""
parser = argparse.ArgumentParser(description='Train a Faster R-CNN network')
parser.add_argument('--gpu', dest='gpu_id',
help='GPU device id to use [0]',
default=0, type=int)
parser.add_argument('--net_name', dest='net_name',
help='network name (e.g., "ZF")',
default=None, type=str)
parser.add_argument('--weights', dest='pretrained_model',
help='initialize with pretrained model weights',
default=None, type=str)
parser.add_argument('--cfg', dest='cfg_file',
help='optional config file',
default=None, type=str)
parser.add_argument('--imdb', dest='imdb_name',
help='dataset to train on',
default='voc_2007_trainval', type=str)
parser.add_argument('--set', dest='set_cfgs',
help='set config keys', default=None,
nargs=argparse.REMAINDER)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
return args
|
[
"def",
"parse_args",
"(",
")",
":",
"parser",
"=",
"argparse",
".",
"ArgumentParser",
"(",
"description",
"=",
"'Train a Faster R-CNN network'",
")",
"parser",
".",
"add_argument",
"(",
"'--gpu'",
",",
"dest",
"=",
"'gpu_id'",
",",
"help",
"=",
"'GPU device id to use [0]'",
",",
"default",
"=",
"0",
",",
"type",
"=",
"int",
")",
"parser",
".",
"add_argument",
"(",
"'--net_name'",
",",
"dest",
"=",
"'net_name'",
",",
"help",
"=",
"'network name (e.g., \"ZF\")'",
",",
"default",
"=",
"None",
",",
"type",
"=",
"str",
")",
"parser",
".",
"add_argument",
"(",
"'--weights'",
",",
"dest",
"=",
"'pretrained_model'",
",",
"help",
"=",
"'initialize with pretrained model weights'",
",",
"default",
"=",
"None",
",",
"type",
"=",
"str",
")",
"parser",
".",
"add_argument",
"(",
"'--cfg'",
",",
"dest",
"=",
"'cfg_file'",
",",
"help",
"=",
"'optional config file'",
",",
"default",
"=",
"None",
",",
"type",
"=",
"str",
")",
"parser",
".",
"add_argument",
"(",
"'--imdb'",
",",
"dest",
"=",
"'imdb_name'",
",",
"help",
"=",
"'dataset to train on'",
",",
"default",
"=",
"'voc_2007_trainval'",
",",
"type",
"=",
"str",
")",
"parser",
".",
"add_argument",
"(",
"'--set'",
",",
"dest",
"=",
"'set_cfgs'",
",",
"help",
"=",
"'set config keys'",
",",
"default",
"=",
"None",
",",
"nargs",
"=",
"argparse",
".",
"REMAINDER",
")",
"if",
"len",
"(",
"sys",
".",
"argv",
")",
"==",
"1",
":",
"parser",
".",
"print_help",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"return",
"args"
] |
https://github.com/HCIILAB/DeRPN/blob/21e6738ee1f7d3f159ee48d435c543e773f8ce99/tools/train_faster_rcnn_alt_opt.py#L29-L58
|
|
graalvm/mx
|
29c0debab406352df3af246be2f8973be5db69ae
|
mx.py
|
python
|
GitConfig.can_push
|
(self, vcdir, strict=True, abortOnError=True)
|
Check if `vcdir` can be pushed.
:param str vcdir: a valid repository path
:param bool strict: if set, no uncommitted changes or unadded files are allowed
:return: True if we can push, False otherwise
:rtype: bool
|
Check if `vcdir` can be pushed.
|
[
"Check",
"if",
"vcdir",
"can",
"be",
"pushed",
"."
] |
def can_push(self, vcdir, strict=True, abortOnError=True):
"""
Check if `vcdir` can be pushed.
:param str vcdir: a valid repository path
:param bool strict: if set, no uncommitted changes or unadded files are allowed
:return: True if we can push, False otherwise
:rtype: bool
"""
out = OutputCapture()
rc = self.run(['git', 'status', '--porcelain'], cwd=vcdir, nonZeroIsFatal=abortOnError, out=out)
if rc == 0:
output = out.data
if strict:
return output == ''
else:
if len(output) > 0:
for line in output.split('\n'):
if len(line) > 0 and not line.startswith('??'):
return False
return True
else:
return False
|
[
"def",
"can_push",
"(",
"self",
",",
"vcdir",
",",
"strict",
"=",
"True",
",",
"abortOnError",
"=",
"True",
")",
":",
"out",
"=",
"OutputCapture",
"(",
")",
"rc",
"=",
"self",
".",
"run",
"(",
"[",
"'git'",
",",
"'status'",
",",
"'--porcelain'",
"]",
",",
"cwd",
"=",
"vcdir",
",",
"nonZeroIsFatal",
"=",
"abortOnError",
",",
"out",
"=",
"out",
")",
"if",
"rc",
"==",
"0",
":",
"output",
"=",
"out",
".",
"data",
"if",
"strict",
":",
"return",
"output",
"==",
"''",
"else",
":",
"if",
"len",
"(",
"output",
")",
">",
"0",
":",
"for",
"line",
"in",
"output",
".",
"split",
"(",
"'\\n'",
")",
":",
"if",
"len",
"(",
"line",
")",
">",
"0",
"and",
"not",
"line",
".",
"startswith",
"(",
"'??'",
")",
":",
"return",
"False",
"return",
"True",
"else",
":",
"return",
"False"
] |
https://github.com/graalvm/mx/blob/29c0debab406352df3af246be2f8973be5db69ae/mx.py#L10257-L10279
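The porcelain check restated as a self-contained predicate over captured `git status --porcelain` output; in non-strict mode only untracked (`??`) entries are tolerated:

```python
# Self-contained restatement of the porcelain check above.
def can_push(porcelain_output: str, strict: bool = True) -> bool:
    if strict:
        return porcelain_output == ''
    # non-strict: every non-empty line must be an untracked-file entry
    return all(line.startswith('??')
               for line in porcelain_output.split('\n') if line)

print(can_push('?? scratch.txt', strict=False))  # True
print(can_push(' M mx.py', strict=False))        # False
```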
|
||
504ensicsLabs/DAMM
|
60e7ec7dacd6087cd6320b3615becca9b4cf9b24
|
volatility/plugins/linux/cpuinfo.py
|
python
|
linux_cpuinfo.get_per_cpu_symbol
|
(self, sym_name, module = "kernel")
|
return ret
|
In 2.6.3x, Linux changed how the symbols for per_cpu variables were named
This handles both formats so plugins needing per-cpu vars are cleaner
|
In 2.6.3x, Linux changed how the symbols for per_cpu variables were named
This handles both formats so plugins needing per-cpu vars are cleaner
|
[
"In",
"2",
".",
"6",
".",
"3x",
"Linux",
"changed",
"how",
"the",
"symbols",
"for",
"per_cpu",
"variables",
"were",
"named",
"This",
"handles",
"both",
"formats",
"so",
"plugins",
"needing",
"per",
"-",
"cpu",
"vars",
"are",
"cleaner"
] |
def get_per_cpu_symbol(self, sym_name, module = "kernel"):
"""
In 2.6.3x, Linux changed how the symbols for per_cpu variables were named
This handles both formats so plugins needing per-cpu vars are cleaner
"""
ret = self.addr_space.profile.get_symbol(sym_name, module = module)
if not ret:
ret = self.addr_space.profile.get_symbol("per_cpu__" + sym_name, module = module)
return ret
|
[
"def",
"get_per_cpu_symbol",
"(",
"self",
",",
"sym_name",
",",
"module",
"=",
"\"kernel\"",
")",
":",
"ret",
"=",
"self",
".",
"addr_space",
".",
"profile",
".",
"get_symbol",
"(",
"sym_name",
",",
"module",
"=",
"module",
")",
"if",
"not",
"ret",
":",
"ret",
"=",
"self",
".",
"addr_space",
".",
"profile",
".",
"get_symbol",
"(",
"\"per_cpu__\"",
"+",
"sym_name",
",",
"module",
"=",
"module",
")",
"return",
"ret"
] |
https://github.com/504ensicsLabs/DAMM/blob/60e7ec7dacd6087cd6320b3615becca9b4cf9b24/volatility/plugins/linux/cpuinfo.py#L65-L76
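A standalone sketch of the two-name fallback, with a fake symbol table in place of the profile lookup:

```python
# Two-name fallback: try the modern symbol name first, then the
# pre-2.6.3x 'per_cpu__' prefixed form. SYMBOLS is a fake symbol table.
SYMBOLS = {'per_cpu__runqueues': 0xffff8800}

def get_per_cpu_symbol(sym_name):
    ret = SYMBOLS.get(sym_name)
    if not ret:
        ret = SYMBOLS.get('per_cpu__' + sym_name)
    return ret

print(hex(get_per_cpu_symbol('runqueues')))  # 0xffff8800
```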
|
|
IronLanguages/main
|
a949455434b1fda8c783289e897e78a9a0caabb5
|
External.LCA_RESTRICTED/Languages/IronPython/27/Doc/sphinx/environment.py
|
python
|
BuildEnvironment.find_desc
|
(self, modname, classname, name, type, searchorder=0)
|
return newname, self.descrefs[newname]
|
Find a description node matching "name", perhaps using
the given module and/or classname.
|
Find a description node matching "name", perhaps using
the given module and/or classname.
|
[
"Find",
"a",
"description",
"node",
"matching",
"name",
"perhaps",
"using",
"the",
"given",
"module",
"and",
"/",
"or",
"classname",
"."
] |
def find_desc(self, modname, classname, name, type, searchorder=0):
"""Find a description node matching "name", perhaps using
the given module and/or classname."""
# skip parens
if name[-2:] == '()':
name = name[:-2]
if not name:
return None, None
# don't add module and class names for C things
if type[0] == 'c' and type not in ('class', 'const'):
# skip trailing star and whitespace
name = name.rstrip(' *')
if name in self.descrefs and self.descrefs[name][1][0] == 'c':
return name, self.descrefs[name]
return None, None
newname = None
if searchorder == 1:
if modname and classname and \
modname + '.' + classname + '.' + name in self.descrefs:
newname = modname + '.' + classname + '.' + name
elif modname and modname + '.' + name in self.descrefs:
newname = modname + '.' + name
elif name in self.descrefs:
newname = name
else:
if name in self.descrefs:
newname = name
elif modname and modname + '.' + name in self.descrefs:
newname = modname + '.' + name
elif modname and classname and \
modname + '.' + classname + '.' + name in self.descrefs:
newname = modname + '.' + classname + '.' + name
# special case: builtin exceptions have module "exceptions" set
elif type == 'exc' and '.' not in name and \
'exceptions.' + name in self.descrefs:
newname = 'exceptions.' + name
# special case: object methods
elif type in ('func', 'meth') and '.' not in name and \
'object.' + name in self.descrefs:
newname = 'object.' + name
if newname is None:
return None, None
return newname, self.descrefs[newname]
|
[
"def",
"find_desc",
"(",
"self",
",",
"modname",
",",
"classname",
",",
"name",
",",
"type",
",",
"searchorder",
"=",
"0",
")",
":",
"# skip parens",
"if",
"name",
"[",
"-",
"2",
":",
"]",
"==",
"'()'",
":",
"name",
"=",
"name",
"[",
":",
"-",
"2",
"]",
"if",
"not",
"name",
":",
"return",
"None",
",",
"None",
"# don't add module and class names for C things",
"if",
"type",
"[",
"0",
"]",
"==",
"'c'",
"and",
"type",
"not",
"in",
"(",
"'class'",
",",
"'const'",
")",
":",
"# skip trailing star and whitespace",
"name",
"=",
"name",
".",
"rstrip",
"(",
"' *'",
")",
"if",
"name",
"in",
"self",
".",
"descrefs",
"and",
"self",
".",
"descrefs",
"[",
"name",
"]",
"[",
"1",
"]",
"[",
"0",
"]",
"==",
"'c'",
":",
"return",
"name",
",",
"self",
".",
"descrefs",
"[",
"name",
"]",
"return",
"None",
",",
"None",
"newname",
"=",
"None",
"if",
"searchorder",
"==",
"1",
":",
"if",
"modname",
"and",
"classname",
"and",
"modname",
"+",
"'.'",
"+",
"classname",
"+",
"'.'",
"+",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"modname",
"+",
"'.'",
"+",
"classname",
"+",
"'.'",
"+",
"name",
"elif",
"modname",
"and",
"modname",
"+",
"'.'",
"+",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"modname",
"+",
"'.'",
"+",
"name",
"elif",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"name",
"else",
":",
"if",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"name",
"elif",
"modname",
"and",
"modname",
"+",
"'.'",
"+",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"modname",
"+",
"'.'",
"+",
"name",
"elif",
"modname",
"and",
"classname",
"and",
"modname",
"+",
"'.'",
"+",
"classname",
"+",
"'.'",
"+",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"modname",
"+",
"'.'",
"+",
"classname",
"+",
"'.'",
"+",
"name",
"# special case: builtin exceptions have module \"exceptions\" set",
"elif",
"type",
"==",
"'exc'",
"and",
"'.'",
"not",
"in",
"name",
"and",
"'exceptions.'",
"+",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"'exceptions.'",
"+",
"name",
"# special case: object methods",
"elif",
"type",
"in",
"(",
"'func'",
",",
"'meth'",
")",
"and",
"'.'",
"not",
"in",
"name",
"and",
"'object.'",
"+",
"name",
"in",
"self",
".",
"descrefs",
":",
"newname",
"=",
"'object.'",
"+",
"name",
"if",
"newname",
"is",
"None",
":",
"return",
"None",
",",
"None",
"return",
"newname",
",",
"self",
".",
"descrefs",
"[",
"newname",
"]"
] |
https://github.com/IronLanguages/main/blob/a949455434b1fda8c783289e897e78a9a0caabb5/External.LCA_RESTRICTED/Languages/IronPython/27/Doc/sphinx/environment.py#L1562-L1607
|
|
PanJinquan/tensorflow_models_learning
|
e7a2773d526e01c76fc8366868099ca3d7a819b4
|
slim/nets/inception_v3.py
|
python
|
inception_v3
|
(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.8,
min_depth=16,
depth_multiplier=1.0,
prediction_fn=slim.softmax,
spatial_squeeze=True,
reuse=None,
create_aux_logits=True,
scope='InceptionV3',
global_pool=False)
|
return logits, end_points
|
Inception model from http://arxiv.org/abs/1512.00567.
"Rethinking the Inception Architecture for Computer Vision"
Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens,
Zbigniew Wojna.
With the default arguments this method constructs the exact model defined in
the paper. However, one can experiment with variations of the inception_v3
network by changing arguments dropout_keep_prob, min_depth and
depth_multiplier.
The default image size used to train this network is 299x299.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes. If 0 or None, the logits layer
is omitted and the input features to the logits layer (before dropout)
are returned instead.
  is_training: whether the model is being trained (this toggles batch norm and dropout).
dropout_keep_prob: the percentage of activation values that are retained.
min_depth: Minimum depth value (number of channels) for all convolution ops.
Enforced when depth_multiplier < 1, and not an active constraint when
depth_multiplier >= 1.
depth_multiplier: Float multiplier for the depth (number of channels)
for all convolution ops. The value must be greater than zero. Typical
usage will be to set this value in (0, 1) to reduce the number of
parameters or computation cost of the model.
prediction_fn: a function to get predictions out of logits.
  spatial_squeeze: if True, logits is of shape [B, C]; if False, logits is of
    shape [B, 1, 1, C], where B is batch_size and C is the number of classes.
  reuse: whether or not the network and its variables should be reused. To be
    able to reuse them, 'scope' must be given.
create_aux_logits: Whether to create the auxiliary logits.
scope: Optional variable_scope.
global_pool: Optional boolean flag to control the avgpooling before the
    logits layer. If False or unset, pooling is done with a fixed window
    that reduces default-sized inputs to 1x1, while larger inputs lead to
    larger outputs. If True, any input size is pooled down to 1x1.
Returns:
net: a Tensor with the logits (pre-softmax activations) if num_classes
is a non-zero integer, or the non-dropped-out input to the logits layer
if num_classes is 0 or None.
end_points: a dictionary from components of the network to the corresponding
activation.
Raises:
ValueError: if 'depth_multiplier' is less than or equal to zero.
|
Inception model from http://arxiv.org/abs/1512.00567.
|
[
"Inception",
"model",
"from",
"http",
":",
"//",
"arxiv",
".",
"org",
"/",
"abs",
"/",
"1512",
".",
"00567",
"."
] |
def inception_v3(inputs,
num_classes=1000,
is_training=True,
dropout_keep_prob=0.8,
min_depth=16,
depth_multiplier=1.0,
prediction_fn=slim.softmax,
spatial_squeeze=True,
reuse=None,
create_aux_logits=True,
scope='InceptionV3',
global_pool=False):
"""Inception model from http://arxiv.org/abs/1512.00567.
"Rethinking the Inception Architecture for Computer Vision"
Christian Szegedy, Vincent Vanhoucke, Sergey Ioffe, Jonathon Shlens,
Zbigniew Wojna.
With the default arguments this method constructs the exact model defined in
the paper. However, one can experiment with variations of the inception_v3
network by changing arguments dropout_keep_prob, min_depth and
depth_multiplier.
The default image size used to train this network is 299x299.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
num_classes: number of predicted classes. If 0 or None, the logits layer
is omitted and the input features to the logits layer (before dropout)
are returned instead.
  is_training: whether the model is being trained (this toggles batch norm and dropout).
dropout_keep_prob: the percentage of activation values that are retained.
min_depth: Minimum depth value (number of channels) for all convolution ops.
Enforced when depth_multiplier < 1, and not an active constraint when
depth_multiplier >= 1.
depth_multiplier: Float multiplier for the depth (number of channels)
for all convolution ops. The value must be greater than zero. Typical
usage will be to set this value in (0, 1) to reduce the number of
parameters or computation cost of the model.
prediction_fn: a function to get predictions out of logits.
  spatial_squeeze: if True, logits is of shape [B, C]; if False, logits is of
    shape [B, 1, 1, C], where B is batch_size and C is the number of classes.
  reuse: whether or not the network and its variables should be reused. To be
    able to reuse them, 'scope' must be given.
create_aux_logits: Whether to create the auxiliary logits.
scope: Optional variable_scope.
global_pool: Optional boolean flag to control the avgpooling before the
    logits layer. If False or unset, pooling is done with a fixed window
    that reduces default-sized inputs to 1x1, while larger inputs lead to
    larger outputs. If True, any input size is pooled down to 1x1.
Returns:
net: a Tensor with the logits (pre-softmax activations) if num_classes
is a non-zero integer, or the non-dropped-out input to the logits layer
if num_classes is 0 or None.
end_points: a dictionary from components of the network to the corresponding
activation.
Raises:
ValueError: if 'depth_multiplier' is less than or equal to zero.
"""
if depth_multiplier <= 0:
raise ValueError('depth_multiplier is not greater than zero.')
depth = lambda d: max(int(d * depth_multiplier), min_depth)
with tf.variable_scope(scope, 'InceptionV3', [inputs], reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_v3_base(
inputs, scope=scope, min_depth=min_depth,
depth_multiplier=depth_multiplier)
# Auxiliary Head logits
if create_aux_logits and num_classes:
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
aux_logits = end_points['Mixed_6e']
with tf.variable_scope('AuxLogits'):
aux_logits = slim.avg_pool2d(
aux_logits, [5, 5], stride=3, padding='VALID',
scope='AvgPool_1a_5x5')
aux_logits = slim.conv2d(aux_logits, depth(128), [1, 1],
scope='Conv2d_1b_1x1')
# Shape of feature map before the final layer.
kernel_size = _reduced_kernel_size_for_small_input(
aux_logits, [5, 5])
aux_logits = slim.conv2d(
aux_logits, depth(768), kernel_size,
weights_initializer=trunc_normal(0.01),
padding='VALID', scope='Conv2d_2a_{}x{}'.format(*kernel_size))
aux_logits = slim.conv2d(
aux_logits, num_classes, [1, 1], activation_fn=None,
normalizer_fn=None, weights_initializer=trunc_normal(0.001),
scope='Conv2d_2b_1x1')
if spatial_squeeze:
aux_logits = tf.squeeze(aux_logits, [1, 2], name='SpatialSqueeze')
end_points['AuxLogits'] = aux_logits
# Final pooling and prediction
with tf.variable_scope('Logits'):
if global_pool:
# Global average pooling.
net = tf.reduce_mean(net, [1, 2], keep_dims=True, name='GlobalPool')
end_points['global_pool'] = net
else:
# Pooling with a fixed kernel size.
kernel_size = _reduced_kernel_size_for_small_input(net, [8, 8])
net = slim.avg_pool2d(net, kernel_size, padding='VALID',
scope='AvgPool_1a_{}x{}'.format(*kernel_size))
end_points['AvgPool_1a'] = net
if not num_classes:
return net, end_points
# 1 x 1 x 2048
net = slim.dropout(net, keep_prob=dropout_keep_prob, scope='Dropout_1b')
end_points['PreLogits'] = net
# 2048
logits = slim.conv2d(net, num_classes, [1, 1], activation_fn=None,
normalizer_fn=None, scope='Conv2d_1c_1x1')
if spatial_squeeze:
logits = tf.squeeze(logits, [1, 2], name='SpatialSqueeze')
# 1000
end_points['Logits'] = logits
end_points['Predictions'] = prediction_fn(logits, scope='Predictions')
return logits, end_points
|
[
"def",
"inception_v3",
"(",
"inputs",
",",
"num_classes",
"=",
"1000",
",",
"is_training",
"=",
"True",
",",
"dropout_keep_prob",
"=",
"0.8",
",",
"min_depth",
"=",
"16",
",",
"depth_multiplier",
"=",
"1.0",
",",
"prediction_fn",
"=",
"slim",
".",
"softmax",
",",
"spatial_squeeze",
"=",
"True",
",",
"reuse",
"=",
"None",
",",
"create_aux_logits",
"=",
"True",
",",
"scope",
"=",
"'InceptionV3'",
",",
"global_pool",
"=",
"False",
")",
":",
"if",
"depth_multiplier",
"<=",
"0",
":",
"raise",
"ValueError",
"(",
"'depth_multiplier is not greater than zero.'",
")",
"depth",
"=",
"lambda",
"d",
":",
"max",
"(",
"int",
"(",
"d",
"*",
"depth_multiplier",
")",
",",
"min_depth",
")",
"with",
"tf",
".",
"variable_scope",
"(",
"scope",
",",
"'InceptionV3'",
",",
"[",
"inputs",
"]",
",",
"reuse",
"=",
"reuse",
")",
"as",
"scope",
":",
"with",
"slim",
".",
"arg_scope",
"(",
"[",
"slim",
".",
"batch_norm",
",",
"slim",
".",
"dropout",
"]",
",",
"is_training",
"=",
"is_training",
")",
":",
"net",
",",
"end_points",
"=",
"inception_v3_base",
"(",
"inputs",
",",
"scope",
"=",
"scope",
",",
"min_depth",
"=",
"min_depth",
",",
"depth_multiplier",
"=",
"depth_multiplier",
")",
"# Auxiliary Head logits",
"if",
"create_aux_logits",
"and",
"num_classes",
":",
"with",
"slim",
".",
"arg_scope",
"(",
"[",
"slim",
".",
"conv2d",
",",
"slim",
".",
"max_pool2d",
",",
"slim",
".",
"avg_pool2d",
"]",
",",
"stride",
"=",
"1",
",",
"padding",
"=",
"'SAME'",
")",
":",
"aux_logits",
"=",
"end_points",
"[",
"'Mixed_6e'",
"]",
"with",
"tf",
".",
"variable_scope",
"(",
"'AuxLogits'",
")",
":",
"aux_logits",
"=",
"slim",
".",
"avg_pool2d",
"(",
"aux_logits",
",",
"[",
"5",
",",
"5",
"]",
",",
"stride",
"=",
"3",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'AvgPool_1a_5x5'",
")",
"aux_logits",
"=",
"slim",
".",
"conv2d",
"(",
"aux_logits",
",",
"depth",
"(",
"128",
")",
",",
"[",
"1",
",",
"1",
"]",
",",
"scope",
"=",
"'Conv2d_1b_1x1'",
")",
"# Shape of feature map before the final layer.",
"kernel_size",
"=",
"_reduced_kernel_size_for_small_input",
"(",
"aux_logits",
",",
"[",
"5",
",",
"5",
"]",
")",
"aux_logits",
"=",
"slim",
".",
"conv2d",
"(",
"aux_logits",
",",
"depth",
"(",
"768",
")",
",",
"kernel_size",
",",
"weights_initializer",
"=",
"trunc_normal",
"(",
"0.01",
")",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'Conv2d_2a_{}x{}'",
".",
"format",
"(",
"*",
"kernel_size",
")",
")",
"aux_logits",
"=",
"slim",
".",
"conv2d",
"(",
"aux_logits",
",",
"num_classes",
",",
"[",
"1",
",",
"1",
"]",
",",
"activation_fn",
"=",
"None",
",",
"normalizer_fn",
"=",
"None",
",",
"weights_initializer",
"=",
"trunc_normal",
"(",
"0.001",
")",
",",
"scope",
"=",
"'Conv2d_2b_1x1'",
")",
"if",
"spatial_squeeze",
":",
"aux_logits",
"=",
"tf",
".",
"squeeze",
"(",
"aux_logits",
",",
"[",
"1",
",",
"2",
"]",
",",
"name",
"=",
"'SpatialSqueeze'",
")",
"end_points",
"[",
"'AuxLogits'",
"]",
"=",
"aux_logits",
"# Final pooling and prediction",
"with",
"tf",
".",
"variable_scope",
"(",
"'Logits'",
")",
":",
"if",
"global_pool",
":",
"# Global average pooling.",
"net",
"=",
"tf",
".",
"reduce_mean",
"(",
"net",
",",
"[",
"1",
",",
"2",
"]",
",",
"keep_dims",
"=",
"True",
",",
"name",
"=",
"'GlobalPool'",
")",
"end_points",
"[",
"'global_pool'",
"]",
"=",
"net",
"else",
":",
"# Pooling with a fixed kernel size.",
"kernel_size",
"=",
"_reduced_kernel_size_for_small_input",
"(",
"net",
",",
"[",
"8",
",",
"8",
"]",
")",
"net",
"=",
"slim",
".",
"avg_pool2d",
"(",
"net",
",",
"kernel_size",
",",
"padding",
"=",
"'VALID'",
",",
"scope",
"=",
"'AvgPool_1a_{}x{}'",
".",
"format",
"(",
"*",
"kernel_size",
")",
")",
"end_points",
"[",
"'AvgPool_1a'",
"]",
"=",
"net",
"if",
"not",
"num_classes",
":",
"return",
"net",
",",
"end_points",
"# 1 x 1 x 2048",
"net",
"=",
"slim",
".",
"dropout",
"(",
"net",
",",
"keep_prob",
"=",
"dropout_keep_prob",
",",
"scope",
"=",
"'Dropout_1b'",
")",
"end_points",
"[",
"'PreLogits'",
"]",
"=",
"net",
"# 2048",
"logits",
"=",
"slim",
".",
"conv2d",
"(",
"net",
",",
"num_classes",
",",
"[",
"1",
",",
"1",
"]",
",",
"activation_fn",
"=",
"None",
",",
"normalizer_fn",
"=",
"None",
",",
"scope",
"=",
"'Conv2d_1c_1x1'",
")",
"if",
"spatial_squeeze",
":",
"logits",
"=",
"tf",
".",
"squeeze",
"(",
"logits",
",",
"[",
"1",
",",
"2",
"]",
",",
"name",
"=",
"'SpatialSqueeze'",
")",
"# 1000",
"end_points",
"[",
"'Logits'",
"]",
"=",
"logits",
"end_points",
"[",
"'Predictions'",
"]",
"=",
"prediction_fn",
"(",
"logits",
",",
"scope",
"=",
"'Predictions'",
")",
"return",
"logits",
",",
"end_points"
] |
https://github.com/PanJinquan/tensorflow_models_learning/blob/e7a2773d526e01c76fc8366868099ca3d7a819b4/slim/nets/inception_v3.py#L419-L544
|
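A minimal usage sketch for the record above (not part of the dataset): it builds an inference graph for a batch of hypothetical 299x299 RGB images, assuming TF 1.x with tf.contrib.slim and the companion inception_v3_arg_scope from the same slim/nets module; the import path is inferred from the record's slim/nets layout and may differ in your checkout:

import tensorflow as tf
from nets import inception_v3 as inception  # path assumed from the record's slim/nets layout

slim = tf.contrib.slim

# Placeholder batch of 299x299 RGB images (the network's default training size).
images = tf.placeholder(tf.float32, shape=[None, 299, 299, 3])

# inception_v3_arg_scope supplies the batch-norm and weight-decay defaults
# conventionally used with the slim inception nets.
with slim.arg_scope(inception.inception_v3_arg_scope()):
    logits, end_points = inception.inception_v3(
        images, num_classes=1000, is_training=False)

probs = end_points['Predictions']   # softmax over the 1000 classes
features = end_points['PreLogits']  # 1x1x2048 features before the final 1x1 conv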