nwo (string, 5-86 chars) | sha (string, 40 chars) | path (string, 4-189 chars) | language (1 class) | identifier (string, 1-94 chars) | parameters (string, 2-4.03k chars) | argument_list (1 class) | return_statement (string, 0-11.5k chars) | docstring (string, 1-33.2k chars) | docstring_summary (string, 0-5.15k chars) | docstring_tokens (list) | function (string, 34-151k chars) | function_tokens (list) | url (string, 90-278 chars) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
oracle/graaljs
|
36a56e8e993d45fc40939a3a4d9c0c24990720f1
|
graal-nodejs/tools/inspector_protocol/jinja2/environment.py
|
python
|
_environment_sanity_check
|
(environment)
|
return environment
|
Perform a sanity check on the environment.
|
Perform a sanity check on the environment.
|
[
"Perform",
"a",
"sanity",
"check",
"on",
"the",
"environment",
"."
] |
def _environment_sanity_check(environment):
"""Perform a sanity check on the environment."""
assert issubclass(environment.undefined, Undefined), 'undefined must ' \
'be a subclass of undefined because filters depend on it.'
assert environment.block_start_string != \
environment.variable_start_string != \
environment.comment_start_string, 'block, variable and comment ' \
'start strings must be different'
assert environment.newline_sequence in ('\r', '\r\n', '\n'), \
'newline_sequence set to unknown line ending string.'
return environment
|
[
"def",
"_environment_sanity_check",
"(",
"environment",
")",
":",
"assert",
"issubclass",
"(",
"environment",
".",
"undefined",
",",
"Undefined",
")",
",",
"'undefined must '",
"'be a subclass of undefined because filters depend on it.'",
"assert",
"environment",
".",
"block_start_string",
"!=",
"environment",
".",
"variable_start_string",
"!=",
"environment",
".",
"comment_start_string",
",",
"'block, variable and comment '",
"'start strings must be different'",
"assert",
"environment",
".",
"newline_sequence",
"in",
"(",
"'\\r'",
",",
"'\\r\\n'",
",",
"'\\n'",
")",
",",
"'newline_sequence set to unknown line ending string.'",
"return",
"environment"
] |
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/tools/inspector_protocol/jinja2/environment.py#L100-L110
|
|
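A minimal usage sketch of the sanity check above, assuming the helper is importable from an installed jinja2 (it is module-level in jinja2.environment); the delimiter and newline values shown are just Jinja's defaults, which satisfy every assertion:
from jinja2 import Environment
from jinja2.environment import _environment_sanity_check  # the helper shown above

env = Environment(
    block_start_string="{%",
    variable_start_string="{{",
    comment_start_string="{#",
    newline_sequence="\n",
)
# All three delimiter families differ and the newline sequence is recognised,
# so every assertion passes and the environment is returned unchanged.
assert _environment_sanity_check(env) is env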
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/numpy/py2/numpy/linalg/lapack_lite/fortran.py
|
python
|
fortranSourceLines
|
(fo)
|
Return an iterator over statement lines of a Fortran source file.
Comment and blank lines are stripped out, and continuation lines are
merged.
|
Return an iterator over statement lines of a Fortran source file.
|
[
"Return",
"an",
"iterator",
"over",
"statement",
"lines",
"of",
"a",
"Fortran",
"source",
"file",
"."
] |
def fortranSourceLines(fo):
"""Return an iterator over statement lines of a Fortran source file.
Comment and blank lines are stripped out, and continuation lines are
merged.
"""
numberingiter = LineIterator(fo)
# add an extra '' at the end
with_extra = itertools.chain(numberingiter, [''])
pushbackiter = PushbackIterator(with_extra)
for line in pushbackiter:
t = lineType(line)
if t == COMMENT:
continue
elif t == STATEMENT:
lines = [line]
# this is where we need the extra '', so we don't finish reading
# the iterator when we don't want to handle that
for next_line in pushbackiter:
t = lineType(next_line)
if t == CONTINUATION:
lines.append(next_line[6:])
else:
pushbackiter.pushback(next_line)
break
yield numberingiter.lineno, ''.join(lines)
else:
raise ValueError("jammed: continuation line not expected: %s:%d" %
(fo.name, numberingiter.lineno))
|
[
"def",
"fortranSourceLines",
"(",
"fo",
")",
":",
"numberingiter",
"=",
"LineIterator",
"(",
"fo",
")",
"# add an extra '' at the end",
"with_extra",
"=",
"itertools",
".",
"chain",
"(",
"numberingiter",
",",
"[",
"''",
"]",
")",
"pushbackiter",
"=",
"PushbackIterator",
"(",
"with_extra",
")",
"for",
"line",
"in",
"pushbackiter",
":",
"t",
"=",
"lineType",
"(",
"line",
")",
"if",
"t",
"==",
"COMMENT",
":",
"continue",
"elif",
"t",
"==",
"STATEMENT",
":",
"lines",
"=",
"[",
"line",
"]",
"# this is where we need the extra '', so we don't finish reading",
"# the iterator when we don't want to handle that",
"for",
"next_line",
"in",
"pushbackiter",
":",
"t",
"=",
"lineType",
"(",
"next_line",
")",
"if",
"t",
"==",
"CONTINUATION",
":",
"lines",
".",
"append",
"(",
"next_line",
"[",
"6",
":",
"]",
")",
"else",
":",
"pushbackiter",
".",
"pushback",
"(",
"next_line",
")",
"break",
"yield",
"numberingiter",
".",
"lineno",
",",
"''",
".",
"join",
"(",
"lines",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"jammed: continuation line not expected: %s:%d\"",
"%",
"(",
"fo",
".",
"name",
",",
"numberingiter",
".",
"lineno",
")",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/linalg/lapack_lite/fortran.py#L79-L107
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_windows.py
|
python
|
PageSetupDialogData.EnableHelp
|
(*args, **kwargs)
|
return _windows_.PageSetupDialogData_EnableHelp(*args, **kwargs)
|
EnableHelp(self, bool flag)
|
EnableHelp(self, bool flag)
|
[
"EnableHelp",
"(",
"self",
"bool",
"flag",
")"
] |
def EnableHelp(*args, **kwargs):
"""EnableHelp(self, bool flag)"""
return _windows_.PageSetupDialogData_EnableHelp(*args, **kwargs)
|
[
"def",
"EnableHelp",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"PageSetupDialogData_EnableHelp",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_windows.py#L4866-L4868
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/tkinter/ttk.py
|
python
|
Treeview.detach
|
(self, *items)
|
Unlinks all of the specified items from the tree.
The items and all of their descendants are still present, and may
be reinserted at another point in the tree, but will not be
displayed. The root item may not be detached.
|
Unlinks all of the specified items from the tree.
|
[
"Unlinks",
"all",
"of",
"the",
"specified",
"items",
"from",
"the",
"tree",
"."
] |
def detach(self, *items):
"""Unlinks all of the specified items from the tree.
The items and all of their descendants are still present, and may
be reinserted at another point in the tree, but will not be
displayed. The root item may not be detached."""
self.tk.call(self._w, "detach", items)
|
[
"def",
"detach",
"(",
"self",
",",
"*",
"items",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"self",
".",
"_w",
",",
"\"detach\"",
",",
"items",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/tkinter/ttk.py#L1256-L1262
|
||
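Because detached items stay registered with the widget, they can be re-linked later with move(). A short sketch using the standard tkinter.ttk.Treeview (requires a display; the item texts are illustrative):
import tkinter as tk
from tkinter import ttk

root = tk.Tk()
tree = ttk.Treeview(root)
tree.pack()

parent = tree.insert("", "end", text="parent")
child = tree.insert(parent, "end", text="child")

# Hide the parent and its descendant without deleting either item.
tree.detach(parent)

# The items still exist, so they can be reattached at a new position.
tree.move(parent, "", 0)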
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/prompt-toolkit/py3/prompt_toolkit/filters/app.py
|
python
|
is_read_only
|
()
|
return get_app().current_buffer.read_only()
|
True when the current buffer is read only.
|
True when the current buffer is read only.
|
[
"True",
"when",
"the",
"current",
"buffer",
"is",
"read",
"only",
"."
] |
def is_read_only() -> bool:
"""
True when the current buffer is read only.
"""
return get_app().current_buffer.read_only()
|
[
"def",
"is_read_only",
"(",
")",
"->",
"bool",
":",
"return",
"get_app",
"(",
")",
".",
"current_buffer",
".",
"read_only",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/prompt-toolkit/py3/prompt_toolkit/filters/app.py#L136-L140
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/nntplib.py
|
python
|
_NNTPBase.__init__
|
(self, file, host,
readermode=None, timeout=_GLOBAL_DEFAULT_TIMEOUT)
|
Initialize an instance. Arguments:
- file: file-like object (open for read/write in binary mode)
- host: hostname of the server
- readermode: if true, send 'mode reader' command after
connecting.
- timeout: timeout (in seconds) used for socket connections
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
|
Initialize an instance. Arguments:
- file: file-like object (open for read/write in binary mode)
- host: hostname of the server
- readermode: if true, send 'mode reader' command after
connecting.
- timeout: timeout (in seconds) used for socket connections
|
[
"Initialize",
"an",
"instance",
".",
"Arguments",
":",
"-",
"file",
":",
"file",
"-",
"like",
"object",
"(",
"open",
"for",
"read",
"/",
"write",
"in",
"binary",
"mode",
")",
"-",
"host",
":",
"hostname",
"of",
"the",
"server",
"-",
"readermode",
":",
"if",
"true",
"send",
"mode",
"reader",
"command",
"after",
"connecting",
".",
"-",
"timeout",
":",
"timeout",
"(",
"in",
"seconds",
")",
"used",
"for",
"socket",
"connections"
] |
def __init__(self, file, host,
readermode=None, timeout=_GLOBAL_DEFAULT_TIMEOUT):
"""Initialize an instance. Arguments:
- file: file-like object (open for read/write in binary mode)
- host: hostname of the server
- readermode: if true, send 'mode reader' command after
connecting.
- timeout: timeout (in seconds) used for socket connections
readermode is sometimes necessary if you are connecting to an
NNTP server on the local machine and intend to call
reader-specific commands, such as `group'. If you get
unexpected NNTPPermanentErrors, you might need to set
readermode.
"""
self.host = host
self.file = file
self.debugging = 0
self.welcome = self._getresp()
# Inquire about capabilities (RFC 3977).
self._caps = None
self.getcapabilities()
# 'MODE READER' is sometimes necessary to enable 'reader' mode.
# However, the order in which 'MODE READER' and 'AUTHINFO' need to
# arrive differs between some NNTP servers. If _setreadermode() fails
# with an authorization failed error, it will set this to True;
# the login() routine will interpret that as a request to try again
# after performing its normal function.
# Enable only if we're not already in READER mode anyway.
self.readermode_afterauth = False
if readermode and 'READER' not in self._caps:
self._setreadermode()
if not self.readermode_afterauth:
# Capabilities might have changed after MODE READER
self._caps = None
self.getcapabilities()
# RFC 4642 2.2.2: Both the client and the server MUST know if there is
# a TLS session active. A client MUST NOT attempt to start a TLS
# session if a TLS session is already active.
self.tls_on = False
# Log in and encryption setup order is left to subclasses.
self.authenticated = False
|
[
"def",
"__init__",
"(",
"self",
",",
"file",
",",
"host",
",",
"readermode",
"=",
"None",
",",
"timeout",
"=",
"_GLOBAL_DEFAULT_TIMEOUT",
")",
":",
"self",
".",
"host",
"=",
"host",
"self",
".",
"file",
"=",
"file",
"self",
".",
"debugging",
"=",
"0",
"self",
".",
"welcome",
"=",
"self",
".",
"_getresp",
"(",
")",
"# Inquire about capabilities (RFC 3977).",
"self",
".",
"_caps",
"=",
"None",
"self",
".",
"getcapabilities",
"(",
")",
"# 'MODE READER' is sometimes necessary to enable 'reader' mode.",
"# However, the order in which 'MODE READER' and 'AUTHINFO' need to",
"# arrive differs between some NNTP servers. If _setreadermode() fails",
"# with an authorization failed error, it will set this to True;",
"# the login() routine will interpret that as a request to try again",
"# after performing its normal function.",
"# Enable only if we're not already in READER mode anyway.",
"self",
".",
"readermode_afterauth",
"=",
"False",
"if",
"readermode",
"and",
"'READER'",
"not",
"in",
"self",
".",
"_caps",
":",
"self",
".",
"_setreadermode",
"(",
")",
"if",
"not",
"self",
".",
"readermode_afterauth",
":",
"# Capabilities might have changed after MODE READER",
"self",
".",
"_caps",
"=",
"None",
"self",
".",
"getcapabilities",
"(",
")",
"# RFC 4642 2.2.2: Both the client and the server MUST know if there is",
"# a TLS session active. A client MUST NOT attempt to start a TLS",
"# session if a TLS session is already active.",
"self",
".",
"tls_on",
"=",
"False",
"# Log in and encryption setup order is left to subclasses.",
"self",
".",
"authenticated",
"=",
"False"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/nntplib.py#L312-L357
|
||
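_NNTPBase is not instantiated directly; the public NNTP subclass opens the socket and then runs this initializer. A small hedged sketch of the reader-mode path (the hostname is a placeholder and the group name is illustrative):
from nntplib import NNTP

# readermode=True sends 'MODE READER' after connecting, which some servers
# require before reader-specific commands such as group() will work.
with NNTP("news.example.com", readermode=True, timeout=30) as server:
    print(server.getwelcome())
    resp, count, first, last, name = server.group("comp.lang.python")
    print(count, "articles in", name)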
trilinos/Trilinos
|
6168be6dd51e35e1cd681e9c4b24433e709df140
|
packages/seacas/scripts/exomerge2.py
|
python
|
ExodusModel._invert_element_blocks
|
(self, element_block_ids)
|
Invert all elements within one or more element blocks.
|
Invert all elements within one or more element blocks.
|
[
"Invert",
"all",
"elements",
"within",
"one",
"or",
"more",
"element",
"blocks",
"."
] |
def _invert_element_blocks(self, element_block_ids):
"""Invert all elements within one or more element blocks."""
element_block_ids = self._format_element_block_id_list(
element_block_ids)
for id_ in element_block_ids:
element_count = self.get_element_count(id_)
nodes_per_element = self.get_nodes_per_element(id_)
# invert the connectivity
inverted_mapping = self._get_inverted_connectivity(
self._get_element_type(id_))
connectivity = self.get_connectivity(id_)
new_connectivity = [
connectivity[element_index * nodes_per_element + x]
for element_index in xrange(element_count)
for x in inverted_mapping]
connectivity[:] = new_connectivity
# adjust side set members
new_face_indices = self._get_inverted_face_mapping(
self._get_element_type(id_))
for side_set_id in self.get_side_set_ids():
members = self.get_side_set_members(side_set_id)
for index, member in enumerate(members):
if member[0] == id_:
members[index] = (id_,
member[1],
new_face_indices[member[2]])
|
[
"def",
"_invert_element_blocks",
"(",
"self",
",",
"element_block_ids",
")",
":",
"element_block_ids",
"=",
"self",
".",
"_format_element_block_id_list",
"(",
"element_block_ids",
")",
"for",
"id_",
"in",
"element_block_ids",
":",
"element_count",
"=",
"self",
".",
"get_element_count",
"(",
"id_",
")",
"nodes_per_element",
"=",
"self",
".",
"get_nodes_per_element",
"(",
"id_",
")",
"# invert the connectivity",
"inverted_mapping",
"=",
"self",
".",
"_get_inverted_connectivity",
"(",
"self",
".",
"_get_element_type",
"(",
"id_",
")",
")",
"connectivity",
"=",
"self",
".",
"get_connectivity",
"(",
"id_",
")",
"new_connectivity",
"=",
"[",
"connectivity",
"[",
"element_index",
"*",
"nodes_per_element",
"+",
"x",
"]",
"for",
"element_index",
"in",
"xrange",
"(",
"element_count",
")",
"for",
"x",
"in",
"inverted_mapping",
"]",
"connectivity",
"[",
":",
"]",
"=",
"new_connectivity",
"# adjust side set members",
"new_face_indices",
"=",
"self",
".",
"_get_inverted_face_mapping",
"(",
"self",
".",
"_get_element_type",
"(",
"id_",
")",
")",
"for",
"side_set_id",
"in",
"self",
".",
"get_side_set_ids",
"(",
")",
":",
"members",
"=",
"self",
".",
"get_side_set_members",
"(",
"side_set_id",
")",
"for",
"index",
",",
"member",
"in",
"enumerate",
"(",
"members",
")",
":",
"if",
"member",
"[",
"0",
"]",
"==",
"id_",
":",
"members",
"[",
"index",
"]",
"=",
"(",
"id_",
",",
"member",
"[",
"1",
"]",
",",
"new_face_indices",
"[",
"member",
"[",
"2",
"]",
"]",
")"
] |
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exomerge2.py#L6074-L6099
|
||
introlab/rtabmap_ros
|
096d8731b4e13bcd24cf5dc386efd6988fdeb080
|
launch/jfr2018/evaluate_ate.py
|
python
|
plot_traj
|
(ax,stamps,traj,style,color,label)
|
Plot a trajectory using matplotlib.
Input:
ax -- the plot
stamps -- time stamps (1xn)
traj -- trajectory (3xn)
style -- line style
color -- line color
label -- plot legend
|
Plot a trajectory using matplotlib.
Input:
ax -- the plot
stamps -- time stamps (1xn)
traj -- trajectory (3xn)
style -- line style
color -- line color
label -- plot legend
|
[
"Plot",
"a",
"trajectory",
"using",
"matplotlib",
".",
"Input",
":",
"ax",
"--",
"the",
"plot",
"stamps",
"--",
"time",
"stamps",
"(",
"1xn",
")",
"traj",
"--",
"trajectory",
"(",
"3xn",
")",
"style",
"--",
"line",
"style",
"color",
"--",
"line",
"color",
"label",
"--",
"plot",
"legend"
] |
def plot_traj(ax,stamps,traj,style,color,label):
"""
Plot a trajectory using matplotlib.
Input:
ax -- the plot
stamps -- time stamps (1xn)
traj -- trajectory (3xn)
style -- line style
color -- line color
label -- plot legend
"""
stamps.sort()
interval = numpy.median([s-t for s,t in zip(stamps[1:],stamps[:-1])])
x = []
y = []
last = stamps[0]
for i in range(len(stamps)):
if stamps[i]-last < 2*interval:
x.append(traj[i][0])
y.append(traj[i][1])
elif len(x)>0:
ax.plot(x,y,style,color=color,label=label)
label=""
x=[]
y=[]
last= stamps[i]
if len(x)>0:
ax.plot(x,y,style,color=color,label=label)
|
[
"def",
"plot_traj",
"(",
"ax",
",",
"stamps",
",",
"traj",
",",
"style",
",",
"color",
",",
"label",
")",
":",
"stamps",
".",
"sort",
"(",
")",
"interval",
"=",
"numpy",
".",
"median",
"(",
"[",
"s",
"-",
"t",
"for",
"s",
",",
"t",
"in",
"zip",
"(",
"stamps",
"[",
"1",
":",
"]",
",",
"stamps",
"[",
":",
"-",
"1",
"]",
")",
"]",
")",
"x",
"=",
"[",
"]",
"y",
"=",
"[",
"]",
"last",
"=",
"stamps",
"[",
"0",
"]",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"stamps",
")",
")",
":",
"if",
"stamps",
"[",
"i",
"]",
"-",
"last",
"<",
"2",
"*",
"interval",
":",
"x",
".",
"append",
"(",
"traj",
"[",
"i",
"]",
"[",
"0",
"]",
")",
"y",
".",
"append",
"(",
"traj",
"[",
"i",
"]",
"[",
"1",
"]",
")",
"elif",
"len",
"(",
"x",
")",
">",
"0",
":",
"ax",
".",
"plot",
"(",
"x",
",",
"y",
",",
"style",
",",
"color",
"=",
"color",
",",
"label",
"=",
"label",
")",
"label",
"=",
"\"\"",
"x",
"=",
"[",
"]",
"y",
"=",
"[",
"]",
"last",
"=",
"stamps",
"[",
"i",
"]",
"if",
"len",
"(",
"x",
")",
">",
"0",
":",
"ax",
".",
"plot",
"(",
"x",
",",
"y",
",",
"style",
",",
"color",
"=",
"color",
",",
"label",
"=",
"label",
")"
] |
https://github.com/introlab/rtabmap_ros/blob/096d8731b4e13bcd24cf5dc386efd6988fdeb080/launch/jfr2018/evaluate_ate.py#L81-L110
|
||
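A usage sketch for the helper above with synthetic data, assuming plot_traj is in scope and that numpy is imported under the module-level name the function uses; the single large gap in the stamps shows where the helper splits the line:
import numpy
import matplotlib.pyplot as plt

# Densely sampled stamps with one large gap; the helper starts a new segment there.
stamps = [0.0, 0.1, 0.2, 0.3, 5.0, 5.1, 5.2]
traj = [[t, numpy.sin(t), 0.0] for t in stamps]  # one (x, y, z) triple per stamp

fig, ax = plt.subplots()
plot_traj(ax, stamps, traj, "-", "blue", "ground truth")
ax.legend()
plt.show()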
gklz1982/caffe-yolov2
|
ebb27029db4ddc0d40e520634633b0fa9cdcc10d
|
scripts/cpp_lint.py
|
python
|
_FunctionState.Check
|
(self, error, filename, linenum)
|
Report if too many lines in function body.
Args:
error: The function to call with any errors found.
filename: The name of the current file.
linenum: The number of the line to check.
|
Report if too many lines in function body.
|
[
"Report",
"if",
"too",
"many",
"lines",
"in",
"function",
"body",
"."
] |
def Check(self, error, filename, linenum):
"""Report if too many lines in function body.
Args:
error: The function to call with any errors found.
filename: The name of the current file.
linenum: The number of the line to check.
"""
if Match(r'T(EST|est)', self.current_function):
base_trigger = self._TEST_TRIGGER
else:
base_trigger = self._NORMAL_TRIGGER
trigger = base_trigger * 2**_VerboseLevel()
if self.lines_in_function > trigger:
error_level = int(math.log(self.lines_in_function / base_trigger, 2))
# 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ...
if error_level > 5:
error_level = 5
error(filename, linenum, 'readability/fn_size', error_level,
'Small and focused functions are preferred:'
' %s has %d non-comment lines'
' (error triggered by exceeding %d lines).' % (
self.current_function, self.lines_in_function, trigger))
|
[
"def",
"Check",
"(",
"self",
",",
"error",
",",
"filename",
",",
"linenum",
")",
":",
"if",
"Match",
"(",
"r'T(EST|est)'",
",",
"self",
".",
"current_function",
")",
":",
"base_trigger",
"=",
"self",
".",
"_TEST_TRIGGER",
"else",
":",
"base_trigger",
"=",
"self",
".",
"_NORMAL_TRIGGER",
"trigger",
"=",
"base_trigger",
"*",
"2",
"**",
"_VerboseLevel",
"(",
")",
"if",
"self",
".",
"lines_in_function",
">",
"trigger",
":",
"error_level",
"=",
"int",
"(",
"math",
".",
"log",
"(",
"self",
".",
"lines_in_function",
"/",
"base_trigger",
",",
"2",
")",
")",
"# 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ...",
"if",
"error_level",
">",
"5",
":",
"error_level",
"=",
"5",
"error",
"(",
"filename",
",",
"linenum",
",",
"'readability/fn_size'",
",",
"error_level",
",",
"'Small and focused functions are preferred:'",
"' %s has %d non-comment lines'",
"' (error triggered by exceeding %d lines).'",
"%",
"(",
"self",
".",
"current_function",
",",
"self",
".",
"lines_in_function",
",",
"trigger",
")",
")"
] |
https://github.com/gklz1982/caffe-yolov2/blob/ebb27029db4ddc0d40e520634633b0fa9cdcc10d/scripts/cpp_lint.py#L836-L859
|
||
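The error level in the check above grows by one with each doubling of the function length past the trigger, as the inline comment notes. A standalone sketch of just that arithmetic (the base trigger of 50 is illustrative; cpplint derives the real trigger from _NORMAL_TRIGGER/_TEST_TRIGGER and the verbosity level):
import math

def fn_size_error_level(lines_in_function, base_trigger=50):
    # 50 => 0, 100 => 1, 200 => 2, 400 => 3, ... capped at 5, as in Check() above.
    level = int(math.log(lines_in_function / base_trigger, 2))
    return min(level, 5)

assert fn_size_error_level(50) == 0
assert fn_size_error_level(400) == 3
assert fn_size_error_level(10000) == 5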
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scikit-learn/py3/sklearn/mixture/_base.py
|
python
|
BaseMixture._estimate_weighted_log_prob
|
(self, X)
|
return self._estimate_log_prob(X) + self._estimate_log_weights()
|
Estimate the weighted log-probabilities, log P(X | Z) + log weights.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Returns
-------
weighted_log_prob : array, shape (n_samples, n_component)
|
Estimate the weighted log-probabilities, log P(X | Z) + log weights.
|
[
"Estimate",
"the",
"weighted",
"log",
"-",
"probabilities",
"log",
"P",
"(",
"X",
"|",
"Z",
")",
"+",
"log",
"weights",
"."
] |
def _estimate_weighted_log_prob(self, X):
"""Estimate the weighted log-probabilities, log P(X | Z) + log weights.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Returns
-------
weighted_log_prob : array, shape (n_samples, n_component)
"""
return self._estimate_log_prob(X) + self._estimate_log_weights()
|
[
"def",
"_estimate_weighted_log_prob",
"(",
"self",
",",
"X",
")",
":",
"return",
"self",
".",
"_estimate_log_prob",
"(",
"X",
")",
"+",
"self",
".",
"_estimate_log_weights",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py3/sklearn/mixture/_base.py#L443-L454
|
|
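The addition in the return statement above relies on NumPy broadcasting: per-sample, per-component log densities of shape (n_samples, n_components) plus per-component log weights of shape (n_components,). A small standalone sketch of that shape arithmetic with made-up numbers:
import numpy as np

log_prob = np.array([[-1.0, -2.0],
                     [-0.5, -3.0]])          # shape (n_samples=2, n_components=2)
log_weights = np.log(np.array([0.7, 0.3]))   # shape (n_components=2,)

weighted_log_prob = log_prob + log_weights   # broadcasts across the sample axis
print(weighted_log_prob.shape)               # (2, 2)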
llvm/llvm-project
|
ffa6262cb4e2a335d26416fad39a581b4f98c5f4
|
lldb/examples/python/symbolication.py
|
python
|
Image.add_module
|
(self, target)
|
Add the Image described in this object to "target" and load the sections if "load" is True.
|
Add the Image described in this object to "target" and load the sections if "load" is True.
|
[
"Add",
"the",
"Image",
"described",
"in",
"this",
"object",
"to",
"target",
"and",
"load",
"the",
"sections",
"if",
"load",
"is",
"True",
"."
] |
def add_module(self, target):
'''Add the Image described in this object to "target" and load the sections if "load" is True.'''
if target:
# Try and find using UUID only first so that paths need not match
# up
uuid_str = self.get_normalized_uuid_string()
if uuid_str:
self.module = target.AddModule(None, None, uuid_str)
if not self.module:
self.locate_module_and_debug_symbols()
if self.unavailable:
return None
resolved_path = self.get_resolved_path()
self.module = target.AddModule(
resolved_path, None, uuid_str, self.symfile)
if not self.module:
return 'error: unable to get module for (%s) "%s"' % (
self.arch, self.get_resolved_path())
if self.has_section_load_info():
return self.load_module(target)
else:
return None # No sections, the module was added to the target, so success
else:
return 'error: invalid target'
|
[
"def",
"add_module",
"(",
"self",
",",
"target",
")",
":",
"if",
"target",
":",
"# Try and find using UUID only first so that paths need not match",
"# up",
"uuid_str",
"=",
"self",
".",
"get_normalized_uuid_string",
"(",
")",
"if",
"uuid_str",
":",
"self",
".",
"module",
"=",
"target",
".",
"AddModule",
"(",
"None",
",",
"None",
",",
"uuid_str",
")",
"if",
"not",
"self",
".",
"module",
":",
"self",
".",
"locate_module_and_debug_symbols",
"(",
")",
"if",
"self",
".",
"unavailable",
":",
"return",
"None",
"resolved_path",
"=",
"self",
".",
"get_resolved_path",
"(",
")",
"self",
".",
"module",
"=",
"target",
".",
"AddModule",
"(",
"resolved_path",
",",
"None",
",",
"uuid_str",
",",
"self",
".",
"symfile",
")",
"if",
"not",
"self",
".",
"module",
":",
"return",
"'error: unable to get module for (%s) \"%s\"'",
"%",
"(",
"self",
".",
"arch",
",",
"self",
".",
"get_resolved_path",
"(",
")",
")",
"if",
"self",
".",
"has_section_load_info",
"(",
")",
":",
"return",
"self",
".",
"load_module",
"(",
"target",
")",
"else",
":",
"return",
"None",
"# No sections, the module was added to the target, so success",
"else",
":",
"return",
"'error: invalid target'"
] |
https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/lldb/examples/python/symbolication.py#L368-L391
|
||
H-uru/Plasma
|
c2140ea046e82e9c199e257a7f2e7edb42602871
|
Scripts/Python/xDialogToggle.py
|
python
|
xDialogToggle.OnNotify
|
(self,state,id,events)
|
Activated...
|
Activated...
|
[
"Activated",
"..."
] |
def OnNotify(self,state,id,events):
"Activated... "
global LocalAvatar
if state and id == Activate.id and PtWasLocallyNotified(self.key):
LocalAvatar = PtFindAvatar(events)
self.IStartDialog()
|
[
"def",
"OnNotify",
"(",
"self",
",",
"state",
",",
"id",
",",
"events",
")",
":",
"global",
"LocalAvatar",
"if",
"state",
"and",
"id",
"==",
"Activate",
".",
"id",
"and",
"PtWasLocallyNotified",
"(",
"self",
".",
"key",
")",
":",
"LocalAvatar",
"=",
"PtFindAvatar",
"(",
"events",
")",
"self",
".",
"IStartDialog",
"(",
")"
] |
https://github.com/H-uru/Plasma/blob/c2140ea046e82e9c199e257a7f2e7edb42602871/Scripts/Python/xDialogToggle.py#L107-L112
|
||
eventql/eventql
|
7ca0dbb2e683b525620ea30dc40540a22d5eb227
|
deps/3rdparty/spidermonkey/mozjs/python/mozbuild/mozbuild/backend/android_eclipse.py
|
python
|
AndroidEclipseBackend._Element_for_filtered_resources
|
(self, filtered_resources)
|
return filteredResources
|
Turn a list of filtered resource arguments like
['1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**']
into an XML Element, like:
<filteredResources>
<filter>
<id>1393009101322</id>
<name></name>
<type>30</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**</arguments>
</matcher>
</filter>
</filteredResources>
The id is random; the values are magic.
|
Turn a list of filtered resource arguments like
['1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**']
into an XML Element, like:
<filteredResources>
<filter>
<id>1393009101322</id>
<name></name>
<type>30</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**</arguments>
</matcher>
</filter>
</filteredResources>
|
[
"Turn",
"a",
"list",
"of",
"filtered",
"resource",
"arguments",
"like",
"[",
"1",
".",
"0",
"-",
"projectRelativePath",
"-",
"matches",
"-",
"false",
"-",
"false",
"-",
"*",
"org",
"/",
"mozilla",
"/",
"gecko",
"/",
"resources",
"/",
"**",
"]",
"into",
"an",
"XML",
"Element",
"like",
":",
"<filteredResources",
">",
"<filter",
">",
"<id",
">",
"1393009101322<",
"/",
"id",
">",
"<name",
">",
"<",
"/",
"name",
">",
"<type",
">",
"30<",
"/",
"type",
">",
"<matcher",
">",
"<id",
">",
"org",
".",
"eclipse",
".",
"ui",
".",
"ide",
".",
"multiFilter<",
"/",
"id",
">",
"<arguments",
">",
"1",
".",
"0",
"-",
"projectRelativePath",
"-",
"matches",
"-",
"false",
"-",
"false",
"-",
"*",
"org",
"/",
"mozilla",
"/",
"gecko",
"/",
"resources",
"/",
"**",
"<",
"/",
"arguments",
">",
"<",
"/",
"matcher",
">",
"<",
"/",
"filter",
">",
"<",
"/",
"filteredResources",
">"
] |
def _Element_for_filtered_resources(self, filtered_resources):
"""Turn a list of filtered resource arguments like
['1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**']
into an XML Element, like:
<filteredResources>
<filter>
<id>1393009101322</id>
<name></name>
<type>30</type>
<matcher>
<id>org.eclipse.ui.ide.multiFilter</id>
<arguments>1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**</arguments>
</matcher>
</filter>
</filteredResources>
The id is random; the values are magic."""
id = int(1000 * time.time())
filteredResources = ET.Element('filteredResources')
for arg in sorted(filtered_resources):
e = ET.SubElement(filteredResources, 'filter')
ET.SubElement(e, 'id').text = str(id)
id += 1
ET.SubElement(e, 'name')
ET.SubElement(e, 'type').text = '30' # It's magic!
matcher = ET.SubElement(e, 'matcher')
ET.SubElement(matcher, 'id').text = 'org.eclipse.ui.ide.multiFilter'
ET.SubElement(matcher, 'arguments').text = str(arg)
return filteredResources
|
[
"def",
"_Element_for_filtered_resources",
"(",
"self",
",",
"filtered_resources",
")",
":",
"id",
"=",
"int",
"(",
"1000",
"*",
"time",
".",
"time",
"(",
")",
")",
"filteredResources",
"=",
"ET",
".",
"Element",
"(",
"'filteredResources'",
")",
"for",
"arg",
"in",
"sorted",
"(",
"filtered_resources",
")",
":",
"e",
"=",
"ET",
".",
"SubElement",
"(",
"filteredResources",
",",
"'filter'",
")",
"ET",
".",
"SubElement",
"(",
"e",
",",
"'id'",
")",
".",
"text",
"=",
"str",
"(",
"id",
")",
"id",
"+=",
"1",
"ET",
".",
"SubElement",
"(",
"e",
",",
"'name'",
")",
"ET",
".",
"SubElement",
"(",
"e",
",",
"'type'",
")",
".",
"text",
"=",
"'30'",
"# It's magic!",
"matcher",
"=",
"ET",
".",
"SubElement",
"(",
"e",
",",
"'matcher'",
")",
"ET",
".",
"SubElement",
"(",
"matcher",
",",
"'id'",
")",
".",
"text",
"=",
"'org.eclipse.ui.ide.multiFilter'",
"ET",
".",
"SubElement",
"(",
"matcher",
",",
"'arguments'",
")",
".",
"text",
"=",
"str",
"(",
"arg",
")",
"return",
"filteredResources"
] |
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/python/mozbuild/mozbuild/backend/android_eclipse.py#L127-L156
|
|
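The element construction above uses only the standard xml.etree.ElementTree API. A self-contained sketch that builds the same XML shape for one filter argument and prints it (the id below is a fixed illustrative value rather than one derived from time.time()):
import xml.etree.ElementTree as ET

arg = '1.0-projectRelativePath-matches-false-false-*org/mozilla/gecko/resources/**'

filteredResources = ET.Element('filteredResources')
flt = ET.SubElement(filteredResources, 'filter')
ET.SubElement(flt, 'id').text = '1393009101322'
ET.SubElement(flt, 'name')                      # intentionally left empty
ET.SubElement(flt, 'type').text = '30'          # the "magic" Eclipse filter type
matcher = ET.SubElement(flt, 'matcher')
ET.SubElement(matcher, 'id').text = 'org.eclipse.ui.ide.multiFilter'
ET.SubElement(matcher, 'arguments').text = arg

print(ET.tostring(filteredResources, encoding='unicode'))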
eclipse/sumo
|
7132a9b8b6eea734bdec38479026b4d8c4336d03
|
tools/contributed/sumopy/agilepy/lib_wx/objpanel.py
|
python
|
ChecklistWidgetContainer.get_valuewidget_write
|
(self)
|
return widget
|
Return widget to edit numeric value of attribute
This is effectively the parametrisation of the masked.NumCtrl widget.
|
Return widget to edit numeric value of attribute
This is effectively the parametrisation of the masked.NumCtrl widget.
|
[
"Return",
"widget",
"to",
"edit",
"numeric",
"value",
"of",
"attribute",
"This",
"is",
"effectively",
"the",
"parametrisation",
"of",
"the",
"masked",
".",
"NumCtrl",
"widget",
"."
] |
def get_valuewidget_write(self):
"""
Return widget to edit numeric value of attribute
This is effectively the parametrisation of the masked.NumCtrl widget.
"""
value = self.get_value_obj()
# print 'ChoiceWidgetContainer.get_valuewidget_write',self._attrconf.attrname, value,type(value),self.immediate_apply
# print ' choices',self._attrconf.choices
# print ' self._choicenames',self._choicenames
# print ' self._choicevalues',self._choicevalues
widget = wx.CheckListBox(self.parent, -1, (80, 50), wx.DefaultSize, self._choicenames)
#wx.ComboBox(self.parent,choices = self._choicenames)
# print 'widget',widget,'dir:',dir(widget)
if self.immediate_apply:
# ATTENTION: this does not work because self.parent is not
# a panel, but a windoe, without EvtListBox !!!
#self.parent.Bind(wx.EVT_LISTBOX, self.parent.EvtListBox, widget)
self.parent.Bind(wx.EVT_CHECKLISTBOX, self.on_apply_immediate, widget)
self.set_checkbox(widget, value)
return widget
|
[
"def",
"get_valuewidget_write",
"(",
"self",
")",
":",
"value",
"=",
"self",
".",
"get_value_obj",
"(",
")",
"# print 'ChoiceWidgetContainer.get_valuewidget_write',self._attrconf.attrname, value,type(value),self.immediate_apply",
"# print ' choices',self._attrconf.choices",
"# print ' self._choicenames',self._choicenames",
"# print ' self._choicevalues',self._choicevalues",
"widget",
"=",
"wx",
".",
"CheckListBox",
"(",
"self",
".",
"parent",
",",
"-",
"1",
",",
"(",
"80",
",",
"50",
")",
",",
"wx",
".",
"DefaultSize",
",",
"self",
".",
"_choicenames",
")",
"#wx.ComboBox(self.parent,choices = self._choicenames)",
"# print 'widget',widget,'dir:',dir(widget)",
"if",
"self",
".",
"immediate_apply",
":",
"# ATTENTION: this does not work because self.parent is not",
"# a panel, but a windoe, without EvtListBox !!!",
"#self.parent.Bind(wx.EVT_LISTBOX, self.parent.EvtListBox, widget)",
"self",
".",
"parent",
".",
"Bind",
"(",
"wx",
".",
"EVT_CHECKLISTBOX",
",",
"self",
".",
"on_apply_immediate",
",",
"widget",
")",
"self",
".",
"set_checkbox",
"(",
"widget",
",",
"value",
")",
"return",
"widget"
] |
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/contributed/sumopy/agilepy/lib_wx/objpanel.py#L951-L976
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/numpy/py2/numpy/distutils/ccompiler.py
|
python
|
CCompiler_get_version
|
(self, force=False, ok_status=[0])
|
return version
|
Return compiler version, or None if compiler is not available.
Parameters
----------
force : bool, optional
If True, force a new determination of the version, even if the
compiler already has a version attribute. Default is False.
ok_status : list of int, optional
The list of status values returned by the version look-up process
for which a version string is returned. If the status value is not
in `ok_status`, None is returned. Default is ``[0]``.
Returns
-------
version : str or None
Version string, in the format of `distutils.version.LooseVersion`.
|
Return compiler version, or None if compiler is not available.
|
[
"Return",
"compiler",
"version",
"or",
"None",
"if",
"compiler",
"is",
"not",
"available",
"."
] |
def CCompiler_get_version(self, force=False, ok_status=[0]):
"""
Return compiler version, or None if compiler is not available.
Parameters
----------
force : bool, optional
If True, force a new determination of the version, even if the
compiler already has a version attribute. Default is False.
ok_status : list of int, optional
The list of status values returned by the version look-up process
for which a version string is returned. If the status value is not
in `ok_status`, None is returned. Default is ``[0]``.
Returns
-------
version : str or None
Version string, in the format of `distutils.version.LooseVersion`.
"""
if not force and hasattr(self, 'version'):
return self.version
self.find_executables()
try:
version_cmd = self.version_cmd
except AttributeError:
return None
if not version_cmd or not version_cmd[0]:
return None
try:
matcher = self.version_match
except AttributeError:
try:
pat = self.version_pattern
except AttributeError:
return None
def matcher(version_string):
m = re.match(pat, version_string)
if not m:
return None
version = m.group('version')
return version
try:
output = subprocess.check_output(version_cmd, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as exc:
output = exc.output
status = exc.returncode
except OSError:
# match the historical returns for a parent
# exception class caught by exec_command()
status = 127
output = b''
else:
# output isn't actually a filepath but we do this
# for now to match previous distutils behavior
output = filepath_from_subprocess_output(output)
status = 0
version = None
if status in ok_status:
version = matcher(output)
if version:
version = LooseVersion(version)
self.version = version
return version
|
[
"def",
"CCompiler_get_version",
"(",
"self",
",",
"force",
"=",
"False",
",",
"ok_status",
"=",
"[",
"0",
"]",
")",
":",
"if",
"not",
"force",
"and",
"hasattr",
"(",
"self",
",",
"'version'",
")",
":",
"return",
"self",
".",
"version",
"self",
".",
"find_executables",
"(",
")",
"try",
":",
"version_cmd",
"=",
"self",
".",
"version_cmd",
"except",
"AttributeError",
":",
"return",
"None",
"if",
"not",
"version_cmd",
"or",
"not",
"version_cmd",
"[",
"0",
"]",
":",
"return",
"None",
"try",
":",
"matcher",
"=",
"self",
".",
"version_match",
"except",
"AttributeError",
":",
"try",
":",
"pat",
"=",
"self",
".",
"version_pattern",
"except",
"AttributeError",
":",
"return",
"None",
"def",
"matcher",
"(",
"version_string",
")",
":",
"m",
"=",
"re",
".",
"match",
"(",
"pat",
",",
"version_string",
")",
"if",
"not",
"m",
":",
"return",
"None",
"version",
"=",
"m",
".",
"group",
"(",
"'version'",
")",
"return",
"version",
"try",
":",
"output",
"=",
"subprocess",
".",
"check_output",
"(",
"version_cmd",
",",
"stderr",
"=",
"subprocess",
".",
"STDOUT",
")",
"except",
"subprocess",
".",
"CalledProcessError",
"as",
"exc",
":",
"output",
"=",
"exc",
".",
"output",
"status",
"=",
"exc",
".",
"returncode",
"except",
"OSError",
":",
"# match the historical returns for a parent",
"# exception class caught by exec_command()",
"status",
"=",
"127",
"output",
"=",
"b''",
"else",
":",
"# output isn't actually a filepath but we do this",
"# for now to match previous distutils behavior",
"output",
"=",
"filepath_from_subprocess_output",
"(",
"output",
")",
"status",
"=",
"0",
"version",
"=",
"None",
"if",
"status",
"in",
"ok_status",
":",
"version",
"=",
"matcher",
"(",
"output",
")",
"if",
"version",
":",
"version",
"=",
"LooseVersion",
"(",
"version",
")",
"self",
".",
"version",
"=",
"version",
"return",
"version"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/distutils/ccompiler.py#L598-L663
|
|
Samsung/veles
|
95ed733c2e49bc011ad98ccf2416ecec23fbf352
|
veles/genetics/core.py
|
python
|
bin_to_num
|
(binaries, delimeter, accuracy, codes)
|
return num
|
Convert gray codes of chromosomes to arrays of floats.
TODO(a.kazantsev): examine for correctness and possible optimizations.
|
Convert gray codes of chromosomes to arrays of floats.
|
[
"Convert",
"gray",
"codes",
"of",
"chromosomes",
"to",
"arrays",
"of",
"floats",
"."
] |
def bin_to_num(binaries, delimeter, accuracy, codes):
"""Convert gray codes of chromosomes to arrays of floats.
TODO(a.kazantsev): examine for correctness and possible optimizations.
"""
num = ([], [])
delimiter1 = 0
delimiter2 = delimeter
chromo_length = len(binaries[0])
binaries_num = len(binaries)
while delimiter1 < chromo_length:
for i in range(binaries_num):
cut = binaries[i][delimiter1:delimiter2]
# Gray codes to dec numbers
num[i].append(codes.index(cut[1:]) * accuracy
* (-1 if cut[0] == '0' else 1))
delimiter1 = delimiter2
delimiter2 += delimeter
return num
|
[
"def",
"bin_to_num",
"(",
"binaries",
",",
"delimeter",
",",
"accuracy",
",",
"codes",
")",
":",
"num",
"=",
"(",
"[",
"]",
",",
"[",
"]",
")",
"delimiter1",
"=",
"0",
"delimiter2",
"=",
"delimeter",
"chromo_length",
"=",
"len",
"(",
"binaries",
"[",
"0",
"]",
")",
"binaries_num",
"=",
"len",
"(",
"binaries",
")",
"while",
"delimiter1",
"<",
"chromo_length",
":",
"for",
"i",
"in",
"range",
"(",
"binaries_num",
")",
":",
"cut",
"=",
"binaries",
"[",
"i",
"]",
"[",
"delimiter1",
":",
"delimiter2",
"]",
"# Gray codes to dec numbers",
"num",
"[",
"i",
"]",
".",
"append",
"(",
"codes",
".",
"index",
"(",
"cut",
"[",
"1",
":",
"]",
")",
"*",
"accuracy",
"*",
"(",
"-",
"1",
"if",
"cut",
"[",
"0",
"]",
"==",
"'0'",
"else",
"1",
")",
")",
"delimiter1",
"=",
"delimiter2",
"delimiter2",
"+=",
"delimeter",
"return",
"num"
] |
https://github.com/Samsung/veles/blob/95ed733c2e49bc011ad98ccf2416ecec23fbf352/veles/genetics/core.py#L86-L104
|
|
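A tiny usage sketch for the decoder above, assuming bin_to_num is in scope. Each 4-character slice is a sign bit followed by 3 code bits that are looked up in a caller-supplied Gray-code table; the table and chromosomes below are illustrative:
# Standard 3-bit Gray-code sequence; its index gives the decoded magnitude.
codes = ['000', '001', '011', '010', '110', '111', '101', '100']

binaries = [
    '1011' + '0110',   # chromosome 0: two 4-bit slices
    '0001' + '1100',   # chromosome 1
]

values = bin_to_num(binaries, delimeter=4, accuracy=0.5, codes=codes)
print(values)   # ([1.0, -2.0], [-0.5, 3.5])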
CMU-Perceptual-Computing-Lab/caffe_rtpose
|
a4778bb1c3eb74d7250402016047216f77b4dba6
|
scripts/cpp_lint.py
|
python
|
FindNextMultiLineCommentStart
|
(lines, lineix)
|
return len(lines)
|
Find the beginning marker for a multiline comment.
|
Find the beginning marker for a multiline comment.
|
[
"Find",
"the",
"beginning",
"marker",
"for",
"a",
"multiline",
"comment",
"."
] |
def FindNextMultiLineCommentStart(lines, lineix):
"""Find the beginning marker for a multiline comment."""
while lineix < len(lines):
if lines[lineix].strip().startswith('/*'):
# Only return this marker if the comment goes beyond this line
if lines[lineix].strip().find('*/', 2) < 0:
return lineix
lineix += 1
return len(lines)
|
[
"def",
"FindNextMultiLineCommentStart",
"(",
"lines",
",",
"lineix",
")",
":",
"while",
"lineix",
"<",
"len",
"(",
"lines",
")",
":",
"if",
"lines",
"[",
"lineix",
"]",
".",
"strip",
"(",
")",
".",
"startswith",
"(",
"'/*'",
")",
":",
"# Only return this marker if the comment goes beyond this line",
"if",
"lines",
"[",
"lineix",
"]",
".",
"strip",
"(",
")",
".",
"find",
"(",
"'*/'",
",",
"2",
")",
"<",
"0",
":",
"return",
"lineix",
"lineix",
"+=",
"1",
"return",
"len",
"(",
"lines",
")"
] |
https://github.com/CMU-Perceptual-Computing-Lab/caffe_rtpose/blob/a4778bb1c3eb74d7250402016047216f77b4dba6/scripts/cpp_lint.py#L1123-L1131
|
|
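A quick sketch of the scan above, assuming the function is in scope; note that it only reports a '/*' line whose comment does not also close on that same line:
lines = [
    'int x = 0;',
    '/* start of a',
    '   multiline comment */',
    'int y = 1;',
]

start = FindNextMultiLineCommentStart(lines, 0)
print(start)   # 1: the '/*' line whose comment continues onto later lines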
PX4/PX4-Autopilot
|
0b9f60a0370be53d683352c63fd92db3d6586e18
|
src/lib/mixer/MultirotorMixer/mixer_multirotor.py
|
python
|
mix_yaw
|
(m_sp, u, P, u_min, u_max)
|
return u_ppp
|
Mix yaw by adding it to an existing output vector u
Desaturation behavior: thrust is allowed to be decreased up to 15% in order to allow
some yaw control on the upper end. On the lower end thrust will never be increased,
but yaw is decreased as much as required.
|
Mix yaw by adding it to an existing output vector u
|
[
"Mix",
"yaw",
"by",
"adding",
"it",
"to",
"an",
"existing",
"output",
"vector",
"u"
] |
def mix_yaw(m_sp, u, P, u_min, u_max):
"""
Mix yaw by adding it to an existing output vector u
Desaturation behavior: thrust is allowed to be decreased up to 15% in order to allow
some yaw control on the upper end. On the lower end thrust will never be increased,
but yaw is decreased as much as required.
"""
m_sp_yaw_only = np.matlib.zeros(m_sp.size).T
m_sp_yaw_only[2, 0] = m_sp[2, 0]
u_p = u + P * m_sp_yaw_only
# Change yaw acceleration to unsaturate the outputs if needed (do not change roll/pitch),
# and allow some yaw response at maximum thrust
u_r_dot = P[:,2]
u_pp = minimize_sat(u_p, u_min, u_max+0.15, u_r_dot)
u_T = P[:, 3]
u_ppp = minimize_sat(u_pp, 0, u_max, u_T)
# reduce thrust only
if (u_ppp > (u_pp)).any():
u_ppp = u_pp
return u_ppp
|
[
"def",
"mix_yaw",
"(",
"m_sp",
",",
"u",
",",
"P",
",",
"u_min",
",",
"u_max",
")",
":",
"m_sp_yaw_only",
"=",
"np",
".",
"matlib",
".",
"zeros",
"(",
"m_sp",
".",
"size",
")",
".",
"T",
"m_sp_yaw_only",
"[",
"2",
",",
"0",
"]",
"=",
"m_sp",
"[",
"2",
",",
"0",
"]",
"u_p",
"=",
"u",
"+",
"P",
"*",
"m_sp_yaw_only",
"# Change yaw acceleration to unsaturate the outputs if needed (do not change roll/pitch),",
"# and allow some yaw response at maximum thrust",
"u_r_dot",
"=",
"P",
"[",
":",
",",
"2",
"]",
"u_pp",
"=",
"minimize_sat",
"(",
"u_p",
",",
"u_min",
",",
"u_max",
"+",
"0.15",
",",
"u_r_dot",
")",
"u_T",
"=",
"P",
"[",
":",
",",
"3",
"]",
"u_ppp",
"=",
"minimize_sat",
"(",
"u_pp",
",",
"0",
",",
"u_max",
",",
"u_T",
")",
"# reduce thrust only",
"if",
"(",
"u_ppp",
">",
"(",
"u_pp",
")",
")",
".",
"any",
"(",
")",
":",
"u_ppp",
"=",
"u_pp",
"return",
"u_ppp"
] |
https://github.com/PX4/PX4-Autopilot/blob/0b9f60a0370be53d683352c63fd92db3d6586e18/src/lib/mixer/MultirotorMixer/mixer_multirotor.py#L78-L99
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/py3/scipy/signal/filter_design.py
|
python
|
freqz_zpk
|
(z, p, k, worN=512, whole=False, fs=2*pi)
|
return w, h
|
r"""
Compute the frequency response of a digital filter in ZPK form.
Given the Zeros, Poles and Gain of a digital filter, compute its frequency
response:
:math:`H(z)=k \prod_i (z - Z[i]) / \prod_j (z - P[j])`
where :math:`k` is the `gain`, :math:`Z` are the `zeros` and :math:`P` are
the `poles`.
Parameters
----------
z : array_like
Zeroes of a linear filter
p : array_like
Poles of a linear filter
k : scalar
Gain of a linear filter
worN : {None, int, array_like}, optional
If a single integer, then compute at that many frequencies (default is
N=512).
If an array_like, compute the response at the frequencies given.
These are in the same units as `fs`.
whole : bool, optional
Normally, frequencies are computed from 0 to the Nyquist frequency,
fs/2 (upper-half of unit-circle). If `whole` is True, compute
frequencies from 0 to fs. Ignored if w is array_like.
fs : float, optional
The sampling frequency of the digital system. Defaults to 2*pi
radians/sample (so w is from 0 to pi).
.. versionadded:: 1.2.0
Returns
-------
w : ndarray
The frequencies at which `h` was computed, in the same units as `fs`.
By default, `w` is normalized to the range [0, pi) (radians/sample).
h : ndarray
The frequency response, as complex numbers.
See Also
--------
freqs : Compute the frequency response of an analog filter in TF form
freqs_zpk : Compute the frequency response of an analog filter in ZPK form
freqz : Compute the frequency response of a digital filter in TF form
Notes
-----
.. versionadded:: 0.19.0
Examples
--------
Design a 4th-order digital Butterworth filter with cut-off of 100 Hz in a
system with sample rate of 1000 Hz, and plot the frequency response:
>>> from scipy import signal
>>> z, p, k = signal.butter(4, 100, output='zpk', fs=1000)
>>> w, h = signal.freqz_zpk(z, p, k, fs=1000)
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(1, 1, 1)
>>> ax1.set_title('Digital filter frequency response')
>>> ax1.plot(w, 20 * np.log10(abs(h)), 'b')
>>> ax1.set_ylabel('Amplitude [dB]', color='b')
>>> ax1.set_xlabel('Frequency [Hz]')
>>> ax1.grid()
>>> ax2 = ax1.twinx()
>>> angles = np.unwrap(np.angle(h))
>>> ax2.plot(w, angles, 'g')
>>> ax2.set_ylabel('Angle [radians]', color='g')
>>> plt.axis('tight')
>>> plt.show()
|
r"""
Compute the frequency response of a digital filter in ZPK form.
|
[
"r",
"Compute",
"the",
"frequency",
"response",
"of",
"a",
"digital",
"filter",
"in",
"ZPK",
"form",
"."
] |
def freqz_zpk(z, p, k, worN=512, whole=False, fs=2*pi):
r"""
Compute the frequency response of a digital filter in ZPK form.
Given the Zeros, Poles and Gain of a digital filter, compute its frequency
response:
:math:`H(z)=k \prod_i (z - Z[i]) / \prod_j (z - P[j])`
where :math:`k` is the `gain`, :math:`Z` are the `zeros` and :math:`P` are
the `poles`.
Parameters
----------
z : array_like
Zeroes of a linear filter
p : array_like
Poles of a linear filter
k : scalar
Gain of a linear filter
worN : {None, int, array_like}, optional
If a single integer, then compute at that many frequencies (default is
N=512).
If an array_like, compute the response at the frequencies given.
These are in the same units as `fs`.
whole : bool, optional
Normally, frequencies are computed from 0 to the Nyquist frequency,
fs/2 (upper-half of unit-circle). If `whole` is True, compute
frequencies from 0 to fs. Ignored if w is array_like.
fs : float, optional
The sampling frequency of the digital system. Defaults to 2*pi
radians/sample (so w is from 0 to pi).
.. versionadded:: 1.2.0
Returns
-------
w : ndarray
The frequencies at which `h` was computed, in the same units as `fs`.
By default, `w` is normalized to the range [0, pi) (radians/sample).
h : ndarray
The frequency response, as complex numbers.
See Also
--------
freqs : Compute the frequency response of an analog filter in TF form
freqs_zpk : Compute the frequency response of an analog filter in ZPK form
freqz : Compute the frequency response of a digital filter in TF form
Notes
-----
.. versionadded:: 0.19.0
Examples
--------
Design a 4th-order digital Butterworth filter with cut-off of 100 Hz in a
system with sample rate of 1000 Hz, and plot the frequency response:
>>> from scipy import signal
>>> z, p, k = signal.butter(4, 100, output='zpk', fs=1000)
>>> w, h = signal.freqz_zpk(z, p, k, fs=1000)
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> ax1 = fig.add_subplot(1, 1, 1)
>>> ax1.set_title('Digital filter frequency response')
>>> ax1.plot(w, 20 * np.log10(abs(h)), 'b')
>>> ax1.set_ylabel('Amplitude [dB]', color='b')
>>> ax1.set_xlabel('Frequency [Hz]')
>>> ax1.grid()
>>> ax2 = ax1.twinx()
>>> angles = np.unwrap(np.angle(h))
>>> ax2.plot(w, angles, 'g')
>>> ax2.set_ylabel('Angle [radians]', color='g')
>>> plt.axis('tight')
>>> plt.show()
"""
z, p = map(atleast_1d, (z, p))
if whole:
lastpoint = 2 * pi
else:
lastpoint = pi
if worN is None:
# For backwards compatibility
w = numpy.linspace(0, lastpoint, 512, endpoint=False)
elif _is_int_type(worN):
w = numpy.linspace(0, lastpoint, worN, endpoint=False)
else:
w = atleast_1d(worN)
w = 2*pi*w/fs
zm1 = exp(1j * w)
h = k * polyvalfromroots(zm1, z) / polyvalfromroots(zm1, p)
w = w*fs/(2*pi)
return w, h
|
[
"def",
"freqz_zpk",
"(",
"z",
",",
"p",
",",
"k",
",",
"worN",
"=",
"512",
",",
"whole",
"=",
"False",
",",
"fs",
"=",
"2",
"*",
"pi",
")",
":",
"z",
",",
"p",
"=",
"map",
"(",
"atleast_1d",
",",
"(",
"z",
",",
"p",
")",
")",
"if",
"whole",
":",
"lastpoint",
"=",
"2",
"*",
"pi",
"else",
":",
"lastpoint",
"=",
"pi",
"if",
"worN",
"is",
"None",
":",
"# For backwards compatibility",
"w",
"=",
"numpy",
".",
"linspace",
"(",
"0",
",",
"lastpoint",
",",
"512",
",",
"endpoint",
"=",
"False",
")",
"elif",
"_is_int_type",
"(",
"worN",
")",
":",
"w",
"=",
"numpy",
".",
"linspace",
"(",
"0",
",",
"lastpoint",
",",
"worN",
",",
"endpoint",
"=",
"False",
")",
"else",
":",
"w",
"=",
"atleast_1d",
"(",
"worN",
")",
"w",
"=",
"2",
"*",
"pi",
"*",
"w",
"/",
"fs",
"zm1",
"=",
"exp",
"(",
"1j",
"*",
"w",
")",
"h",
"=",
"k",
"*",
"polyvalfromroots",
"(",
"zm1",
",",
"z",
")",
"/",
"polyvalfromroots",
"(",
"zm1",
",",
"p",
")",
"w",
"=",
"w",
"*",
"fs",
"/",
"(",
"2",
"*",
"pi",
")",
"return",
"w",
",",
"h"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/signal/filter_design.py#L480-L583
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
tools/python/google/httpd_utils.py
|
python
|
GetCygserverPath
|
(start_dir, apache2=False)
|
return cygserver_path
|
Returns the path to the directory holding cygserver.exe file.
|
Returns the path to the directory holding cygserver.exe file.
|
[
"Returns",
"the",
"path",
"to",
"the",
"directory",
"holding",
"cygserver",
".",
"exe",
"file",
"."
] |
def GetCygserverPath(start_dir, apache2=False):
"""Returns the path to the directory holding cygserver.exe file."""
cygserver_path = None
if apache2:
cygserver_path = google.path_utils.FindUpward(start_dir, 'third_party',
'cygwin', 'usr', 'sbin')
return cygserver_path
|
[
"def",
"GetCygserverPath",
"(",
"start_dir",
",",
"apache2",
"=",
"False",
")",
":",
"cygserver_path",
"=",
"None",
"if",
"apache2",
":",
"cygserver_path",
"=",
"google",
".",
"path_utils",
".",
"FindUpward",
"(",
"start_dir",
",",
"'third_party'",
",",
"'cygwin'",
",",
"'usr'",
",",
"'sbin'",
")",
"return",
"cygserver_path"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/tools/python/google/httpd_utils.py#L52-L58
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
qt/python/mantidqtinterfaces/mantidqtinterfaces/drill/view/DrillHeaderView.py
|
python
|
DrillHeaderView.mouseReleaseEvent
|
(self, event)
|
Deal with mouse release event. Override of
QTableView::mouseReleaseEvent. This function change the state of a
eventual currently pressed button.
Args:
event (QMouseEvent): mouse release event
|
Deal with mouse release event. Override of
QTableView::mouseReleaseEvent. This function change the state of a
eventual currently pressed button.
|
[
"Deal",
"with",
"mouse",
"release",
"event",
".",
"Override",
"of",
"QTableView",
"::",
"mouseReleaseEvent",
".",
"This",
"function",
"change",
"the",
"state",
"of",
"a",
"eventual",
"currently",
"pressed",
"button",
"."
] |
def mouseReleaseEvent(self, event):
"""
Deal with mouse release event. Override of
QTableView::mouseReleaseEvent. This function change the state of a
eventual currently pressed button.
Args:
event (QMouseEvent): mouse release event
"""
if self.buttonPressed is not None:
li = self.buttonPressed
self.buttonPressed = None
self.changeSectionFolding(li)
self.updateSection(li)
else:
super(DrillHeaderView, self).mouseReleaseEvent(event)
|
[
"def",
"mouseReleaseEvent",
"(",
"self",
",",
"event",
")",
":",
"if",
"self",
".",
"buttonPressed",
"is",
"not",
"None",
":",
"li",
"=",
"self",
".",
"buttonPressed",
"self",
".",
"buttonPressed",
"=",
"None",
"self",
".",
"changeSectionFolding",
"(",
"li",
")",
"self",
".",
"updateSection",
"(",
"li",
")",
"else",
":",
"super",
"(",
"DrillHeaderView",
",",
"self",
")",
".",
"mouseReleaseEvent",
"(",
"event",
")"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/drill/view/DrillHeaderView.py#L191-L206
|
||
microsoft/TSS.MSR
|
0f2516fca2cd9929c31d5450e39301c9bde43688
|
TSS.Py/src/TpmTypes.py
|
python
|
TPMT_PUBLIC_PARMS.type
|
(self)
|
return parameters.GetUnionSelector()
|
The algorithm to be tested
|
The algorithm to be tested
|
[
"The",
"algorithm",
"to",
"be",
"tested"
] |
def type(self): # TPM_ALG_ID
""" The algorithm to be tested """
return parameters.GetUnionSelector()
|
[
"def",
"type",
"(",
"self",
")",
":",
"# TPM_ALG_ID",
"return",
"parameters",
".",
"GetUnionSelector",
"(",
")"
] |
https://github.com/microsoft/TSS.MSR/blob/0f2516fca2cd9929c31d5450e39301c9bde43688/TSS.Py/src/TpmTypes.py#L8138-L8140
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tix.py
|
python
|
Grid.move_row
|
(self, from_, to, offset)
|
Moves the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5.
|
Moves the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5.
|
[
"Moves",
"the",
"range",
"of",
"rows",
"from",
"position",
"FROM",
"through",
"TO",
"by",
"the",
"distance",
"indicated",
"by",
"OFFSET",
".",
"For",
"example",
"move_row",
"(",
"2",
"4",
"1",
")",
"moves",
"the",
"rows",
"2",
"3",
"4",
"to",
"rows",
"3",
"4",
"5",
"."
] |
def move_row(self, from_, to, offset):
"""Moves the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5."""
self.tk.call(self, 'move', 'row', from_, to, offset)
|
[
"def",
"move_row",
"(",
"self",
",",
"from_",
",",
"to",
",",
"offset",
")",
":",
"self",
".",
"tk",
".",
"call",
"(",
"self",
",",
"'move'",
",",
"'row'",
",",
"from_",
",",
"to",
",",
"offset",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/lib-tk/Tix.py#L1882-L1886
|
||
PaddlePaddle/Paddle
|
1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
|
python/paddle/reader/decorator.py
|
python
|
buffered
|
(reader, size)
|
return data_reader
|
Creates a buffered data reader.
The buffered data reader will read and save data entries into a
buffer. Reading from the buffered data reader will proceed as long
as the buffer is not empty.
Args:
reader(generator): the data reader to read from.
size(int): max buffer size.
Returns:
generator: the buffered data reader.
Examples:
.. code-block:: python
import paddle
def reader():
for i in range(3):
yield i
# Create a buffered reader, and the buffer size is 2.
buffered_reader = paddle.io.buffered(reader, 2)
# Output: 0 1 2
for i in buffered_reader():
print(i)
|
Creates a buffered data reader.
|
[
"Creates",
"a",
"buffered",
"data",
"reader",
"."
] |
def buffered(reader, size):
"""
Creates a buffered data reader.
The buffered data reader will read and save data entries into a
buffer. Reading from the buffered data reader will proceed as long
as the buffer is not empty.
Args:
reader(generator): the data reader to read from.
size(int): max buffer size.
Returns:
generator: the buffered data reader.
Examples:
.. code-block:: python
import paddle
def reader():
for i in range(3):
yield i
# Create a buffered reader, and the buffer size is 2.
buffered_reader = paddle.io.buffered(reader, 2)
# Output: 0 1 2
for i in buffered_reader():
print(i)
"""
class EndSignal():
pass
end = EndSignal()
def read_worker(r, q):
for d in r:
q.put(d)
q.put(end)
def data_reader():
r = reader()
q = Queue(maxsize=size)
t = Thread(
target=read_worker, args=(
r,
q, ))
t.daemon = True
t.start()
e = q.get()
while e != end:
yield e
e = q.get()
return data_reader
|
[
"def",
"buffered",
"(",
"reader",
",",
"size",
")",
":",
"class",
"EndSignal",
"(",
")",
":",
"pass",
"end",
"=",
"EndSignal",
"(",
")",
"def",
"read_worker",
"(",
"r",
",",
"q",
")",
":",
"for",
"d",
"in",
"r",
":",
"q",
".",
"put",
"(",
"d",
")",
"q",
".",
"put",
"(",
"end",
")",
"def",
"data_reader",
"(",
")",
":",
"r",
"=",
"reader",
"(",
")",
"q",
"=",
"Queue",
"(",
"maxsize",
"=",
"size",
")",
"t",
"=",
"Thread",
"(",
"target",
"=",
"read_worker",
",",
"args",
"=",
"(",
"r",
",",
"q",
",",
")",
")",
"t",
".",
"daemon",
"=",
"True",
"t",
".",
"start",
"(",
")",
"e",
"=",
"q",
".",
"get",
"(",
")",
"while",
"e",
"!=",
"end",
":",
"yield",
"e",
"e",
"=",
"q",
".",
"get",
"(",
")",
"return",
"data_reader"
] |
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/reader/decorator.py#L308-L364
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/urllib3/contrib/securetransport.py
|
python
|
WrappedSocket._custom_validate
|
(self, verify, trust_bundle)
|
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
|
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
|
[
"Called",
"when",
"we",
"have",
"set",
"custom",
"validation",
".",
"We",
"do",
"this",
"in",
"two",
"cases",
":",
"first",
"when",
"cert",
"validation",
"is",
"entirely",
"disabled",
";",
"and",
"second",
"when",
"using",
"a",
"custom",
"trust",
"DB",
"."
] |
def _custom_validate(self, verify, trust_bundle):
"""
Called when we have set custom validation. We do this in two cases:
first, when cert validation is entirely disabled; and second, when
using a custom trust DB.
"""
# If we disabled cert validation, just say: cool.
if not verify:
return
# We want data in memory, so load it up.
if os.path.isfile(trust_bundle):
with open(trust_bundle, "rb") as f:
trust_bundle = f.read()
cert_array = None
trust = Security.SecTrustRef()
try:
# Get a CFArray that contains the certs we want.
cert_array = _cert_array_from_pem(trust_bundle)
# Ok, now the hard part. We want to get the SecTrustRef that ST has
# created for this connection, shove our CAs into it, tell ST to
# ignore everything else it knows, and then ask if it can build a
# chain. This is a buuuunch of code.
result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
_assert_no_error(result)
if not trust:
raise ssl.SSLError("Failed to copy trust reference")
result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
_assert_no_error(result)
result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
_assert_no_error(result)
trust_result = Security.SecTrustResultType()
result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
_assert_no_error(result)
finally:
if trust:
CoreFoundation.CFRelease(trust)
if cert_array is not None:
CoreFoundation.CFRelease(cert_array)
# Ok, now we can look at what the result was.
successes = (
SecurityConst.kSecTrustResultUnspecified,
SecurityConst.kSecTrustResultProceed,
)
if trust_result.value not in successes:
raise ssl.SSLError(
"certificate verify failed, error code: %d" % trust_result.value
)
|
[
"def",
"_custom_validate",
"(",
"self",
",",
"verify",
",",
"trust_bundle",
")",
":",
"# If we disabled cert validation, just say: cool.",
"if",
"not",
"verify",
":",
"return",
"# We want data in memory, so load it up.",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"trust_bundle",
")",
":",
"with",
"open",
"(",
"trust_bundle",
",",
"\"rb\"",
")",
"as",
"f",
":",
"trust_bundle",
"=",
"f",
".",
"read",
"(",
")",
"cert_array",
"=",
"None",
"trust",
"=",
"Security",
".",
"SecTrustRef",
"(",
")",
"try",
":",
"# Get a CFArray that contains the certs we want.",
"cert_array",
"=",
"_cert_array_from_pem",
"(",
"trust_bundle",
")",
"# Ok, now the hard part. We want to get the SecTrustRef that ST has",
"# created for this connection, shove our CAs into it, tell ST to",
"# ignore everything else it knows, and then ask if it can build a",
"# chain. This is a buuuunch of code.",
"result",
"=",
"Security",
".",
"SSLCopyPeerTrust",
"(",
"self",
".",
"context",
",",
"ctypes",
".",
"byref",
"(",
"trust",
")",
")",
"_assert_no_error",
"(",
"result",
")",
"if",
"not",
"trust",
":",
"raise",
"ssl",
".",
"SSLError",
"(",
"\"Failed to copy trust reference\"",
")",
"result",
"=",
"Security",
".",
"SecTrustSetAnchorCertificates",
"(",
"trust",
",",
"cert_array",
")",
"_assert_no_error",
"(",
"result",
")",
"result",
"=",
"Security",
".",
"SecTrustSetAnchorCertificatesOnly",
"(",
"trust",
",",
"True",
")",
"_assert_no_error",
"(",
"result",
")",
"trust_result",
"=",
"Security",
".",
"SecTrustResultType",
"(",
")",
"result",
"=",
"Security",
".",
"SecTrustEvaluate",
"(",
"trust",
",",
"ctypes",
".",
"byref",
"(",
"trust_result",
")",
")",
"_assert_no_error",
"(",
"result",
")",
"finally",
":",
"if",
"trust",
":",
"CoreFoundation",
".",
"CFRelease",
"(",
"trust",
")",
"if",
"cert_array",
"is",
"not",
"None",
":",
"CoreFoundation",
".",
"CFRelease",
"(",
"cert_array",
")",
"# Ok, now we can look at what the result was.",
"successes",
"=",
"(",
"SecurityConst",
".",
"kSecTrustResultUnspecified",
",",
"SecurityConst",
".",
"kSecTrustResultProceed",
",",
")",
"if",
"trust_result",
".",
"value",
"not",
"in",
"successes",
":",
"raise",
"ssl",
".",
"SSLError",
"(",
"\"certificate verify failed, error code: %d\"",
"%",
"trust_result",
".",
"value",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/urllib3/contrib/securetransport.py#L377-L432
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/boosted_trees/python/ops/stats_accumulator_ops.py
|
python
|
StatsAccumulatorSaveable.serialize
|
(self)
|
Serializes the stats accumulator state.
|
Serializes the stats accumulator state.
|
[
"Serializes",
"the",
"stats",
"accumulator",
"state",
"."
] |
def serialize(self):
"""Serializes the stats accumulator state."""
if self._is_scalar:
return gen_stats_accumulator_ops.stats_accumulator_scalar_serialize(
self._resource_handle)
else:
return gen_stats_accumulator_ops.stats_accumulator_tensor_serialize(
self._resource_handle)
|
[
"def",
"serialize",
"(",
"self",
")",
":",
"if",
"self",
".",
"_is_scalar",
":",
"return",
"gen_stats_accumulator_ops",
".",
"stats_accumulator_scalar_serialize",
"(",
"self",
".",
"_resource_handle",
")",
"else",
":",
"return",
"gen_stats_accumulator_ops",
".",
"stats_accumulator_tensor_serialize",
"(",
"self",
".",
"_resource_handle",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/boosted_trees/python/ops/stats_accumulator_ops.py#L62-L69
|
||
nnrg/opennero
|
43e12a1bcba6e228639db3886fec1dc47ddc24cb
|
mods/Roomba/module.py
|
python
|
SandboxMod.remove_bots
|
(self)
|
remove all existing bots from the environment
|
remove all existing bots from the environment
|
[
"remove",
"all",
"existing",
"bots",
"from",
"the",
"environment"
] |
def remove_bots(self):
""" remove all existing bots from the environment """
OpenNero.disable_ai()
for id in self.agent_ids:
common.removeObject(id) # delete id from Registry, not from list
self.agent_ids = []
|
[
"def",
"remove_bots",
"(",
"self",
")",
":",
"OpenNero",
".",
"disable_ai",
"(",
")",
"for",
"id",
"in",
"self",
".",
"agent_ids",
":",
"common",
".",
"removeObject",
"(",
"id",
")",
"# delete id from Registry, not from list",
"self",
".",
"agent_ids",
"=",
"[",
"]"
] |
https://github.com/nnrg/opennero/blob/43e12a1bcba6e228639db3886fec1dc47ddc24cb/mods/Roomba/module.py#L79-L84
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cgutils.py
|
python
|
gep_inbounds
|
(builder, ptr, *inds, **kws)
|
return gep(builder, ptr, *inds, inbounds=True, **kws)
|
Same as *gep*, but add the `inbounds` keyword.
|
Same as *gep*, but add the `inbounds` keyword.
|
[
"Same",
"as",
"*",
"gep",
"*",
"but",
"add",
"the",
"inbounds",
"keyword",
"."
] |
def gep_inbounds(builder, ptr, *inds, **kws):
"""
Same as *gep*, but add the `inbounds` keyword.
"""
return gep(builder, ptr, *inds, inbounds=True, **kws)
|
[
"def",
"gep_inbounds",
"(",
"builder",
",",
"ptr",
",",
"*",
"inds",
",",
"*",
"*",
"kws",
")",
":",
"return",
"gep",
"(",
"builder",
",",
"ptr",
",",
"*",
"inds",
",",
"inbounds",
"=",
"True",
",",
"*",
"*",
"kws",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/cgutils.py#L859-L863
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py2/pandas/core/base.py
|
python
|
PandasObject._reset_cache
|
(self, key=None)
|
Reset cached properties. If ``key`` is passed, only clears that key.
|
Reset cached properties. If ``key`` is passed, only clears that key.
|
[
"Reset",
"cached",
"properties",
".",
"If",
"key",
"is",
"passed",
"only",
"clears",
"that",
"key",
"."
] |
def _reset_cache(self, key=None):
"""
Reset cached properties. If ``key`` is passed, only clears that key.
"""
if getattr(self, '_cache', None) is None:
return
if key is None:
self._cache.clear()
else:
self._cache.pop(key, None)
|
[
"def",
"_reset_cache",
"(",
"self",
",",
"key",
"=",
"None",
")",
":",
"if",
"getattr",
"(",
"self",
",",
"'_cache'",
",",
"None",
")",
"is",
"None",
":",
"return",
"if",
"key",
"is",
"None",
":",
"self",
".",
"_cache",
".",
"clear",
"(",
")",
"else",
":",
"self",
".",
"_cache",
".",
"pop",
"(",
"key",
",",
"None",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/core/base.py#L100-L109
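A minimal, self-contained sketch of the caching pattern that `_reset_cache` belongs to; the `Prices` class and its fields are hypothetical stand-ins for a PandasObject subclass, with `_cache` laid out the same way:

class Prices:
    # Hypothetical stand-in for a PandasObject subclass; _cache mirrors pandas' layout.
    def __init__(self, values):
        self._values = list(values)
        self._cache = {}

    @property
    def total(self):
        # Compute once, then reuse the cached value until it is invalidated.
        if "total" not in self._cache:
            self._cache["total"] = sum(self._values)
        return self._cache["total"]

    def _reset_cache(self, key=None):
        # Same logic as the pandas method above.
        if getattr(self, "_cache", None) is None:
            return
        if key is None:
            self._cache.clear()
        else:
            self._cache.pop(key, None)

p = Prices([1, 2, 3])
assert p.total == 6
p._values.append(10)
p._reset_cache("total")   # invalidate just that one cached value
assert p.total == 16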
|
||
hughperkins/tf-coriander
|
970d3df6c11400ad68405f22b0c42a52374e94ca
|
tensorflow/python/debug/framework.py
|
python
|
OnRunStartResponse.__init__
|
(self, action, debug_urls)
|
Constructor of OnRunStartResponse.
Args:
action: (OnRunStartAction) the action actually taken by the wrapped
session for the run() call.
debug_urls: (list of str) debug_urls used in watching the tensors during
the run() call.
|
Constructor of OnRunStartResponse.
|
[
"Constructor",
"of",
"OnRunStartResponse",
"."
] |
def __init__(self, action, debug_urls):
"""Constructor of OnRunStartResponse.
Args:
action: (OnRunStartAction) the action actually taken by the wrapped
session for the run() call.
debug_urls: (list of str) debug_urls used in watching the tensors during
the run() call.
"""
_check_type(action, str)
self.action = action
_check_type(debug_urls, list)
self.debug_urls = debug_urls
|
[
"def",
"__init__",
"(",
"self",
",",
"action",
",",
"debug_urls",
")",
":",
"_check_type",
"(",
"action",
",",
"str",
")",
"self",
".",
"action",
"=",
"action",
"_check_type",
"(",
"debug_urls",
",",
"list",
")",
"self",
".",
"debug_urls",
"=",
"debug_urls"
] |
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/debug/framework.py#L233-L247
|
||
SpenceKonde/megaTinyCore
|
1c4a70b18a149fe6bcb551dfa6db11ca50b8997b
|
megaavr/tools/libs/serial/serialutil.py
|
python
|
SerialBase.__repr__
|
(self)
|
return '{name}<id=0x{id:x}, open={p.is_open}>(port={p.portstr!r}, ' \
'baudrate={p.baudrate!r}, bytesize={p.bytesize!r}, parity={p.parity!r}, ' \
'stopbits={p.stopbits!r}, timeout={p.timeout!r}, xonxoff={p.xonxoff!r}, ' \
'rtscts={p.rtscts!r}, dsrdtr={p.dsrdtr!r})'.format(
name=self.__class__.__name__, id=id(self), p=self)
|
String representation of the current port settings and its state.
|
String representation of the current port settings and its state.
|
[
"String",
"representation",
"of",
"the",
"current",
"port",
"settings",
"and",
"its",
"state",
"."
] |
def __repr__(self):
"""String representation of the current port settings and its state."""
return '{name}<id=0x{id:x}, open={p.is_open}>(port={p.portstr!r}, ' \
'baudrate={p.baudrate!r}, bytesize={p.bytesize!r}, parity={p.parity!r}, ' \
'stopbits={p.stopbits!r}, timeout={p.timeout!r}, xonxoff={p.xonxoff!r}, ' \
'rtscts={p.rtscts!r}, dsrdtr={p.dsrdtr!r})'.format(
name=self.__class__.__name__, id=id(self), p=self)
|
[
"def",
"__repr__",
"(",
"self",
")",
":",
"return",
"'{name}<id=0x{id:x}, open={p.is_open}>(port={p.portstr!r}, '",
"'baudrate={p.baudrate!r}, bytesize={p.bytesize!r}, parity={p.parity!r}, '",
"'stopbits={p.stopbits!r}, timeout={p.timeout!r}, xonxoff={p.xonxoff!r}, '",
"'rtscts={p.rtscts!r}, dsrdtr={p.dsrdtr!r})'",
".",
"format",
"(",
"name",
"=",
"self",
".",
"__class__",
".",
"__name__",
",",
"id",
"=",
"id",
"(",
"self",
")",
",",
"p",
"=",
"self",
")"
] |
https://github.com/SpenceKonde/megaTinyCore/blob/1c4a70b18a149fe6bcb551dfa6db11ca50b8997b/megaavr/tools/libs/serial/serialutil.py#L523-L529
|
|
facebookincubator/BOLT
|
88c70afe9d388ad430cc150cc158641701397f70
|
llvm/bindings/python/llvm/object.py
|
python
|
ObjectFile.get_symbols
|
(self, cache=False)
|
Obtain the symbols in this object file.
This is a generator for llvm.object.Symbol instances.
Each Symbol instance is a limited-use object. See this module's
documentation on iterators for more.
|
Obtain the symbols in this object file.
|
[
"Obtain",
"the",
"symbols",
"in",
"this",
"object",
"file",
"."
] |
def get_symbols(self, cache=False):
"""Obtain the symbols in this object file.
This is a generator for llvm.object.Symbol instances.
Each Symbol instance is a limited-use object. See this module's
documentation on iterators for more.
"""
symbols = lib.LLVMGetSymbols(self)
last = None
while True:
if lib.LLVMIsSymbolIteratorAtEnd(self, symbols):
break
last = Symbol(symbols, self)
if cache:
last.cache()
yield last
lib.LLVMMoveToNextSymbol(symbols)
last.expire()
if last is not None:
last.expire()
lib.LLVMDisposeSymbolIterator(symbols)
|
[
"def",
"get_symbols",
"(",
"self",
",",
"cache",
"=",
"False",
")",
":",
"symbols",
"=",
"lib",
".",
"LLVMGetSymbols",
"(",
"self",
")",
"last",
"=",
"None",
"while",
"True",
":",
"if",
"lib",
".",
"LLVMIsSymbolIteratorAtEnd",
"(",
"self",
",",
"symbols",
")",
":",
"break",
"last",
"=",
"Symbol",
"(",
"symbols",
",",
"self",
")",
"if",
"cache",
":",
"last",
".",
"cache",
"(",
")",
"yield",
"last",
"lib",
".",
"LLVMMoveToNextSymbol",
"(",
"symbols",
")",
"last",
".",
"expire",
"(",
")",
"if",
"last",
"is",
"not",
"None",
":",
"last",
".",
"expire",
"(",
")",
"lib",
".",
"LLVMDisposeSymbolIterator",
"(",
"symbols",
")"
] |
https://github.com/facebookincubator/BOLT/blob/88c70afe9d388ad430cc150cc158641701397f70/llvm/bindings/python/llvm/object.py#L150-L176
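A short usage sketch for the generator above, following the pattern documented in the same bindings module; the path "/bin/ls" is only an example, and the snippet assumes the llvm Python bindings are importable:

from llvm.object import ObjectFile

obj = ObjectFile(filename="/bin/ls")       # any native object file or binary
for symbol in obj.get_symbols(cache=True):
    # cache=True keeps each Symbol's data usable after the iterator advances.
    print(symbol.name, symbol.address)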
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py3/pandas/core/arrays/categorical.py
|
python
|
Categorical._repr_categories_info
|
(self)
|
return levheader + "[" + levstring.replace(" < ... < ", " ... ") + "]"
|
Returns a string representation of the footer.
|
Returns a string representation of the footer.
|
[
"Returns",
"a",
"string",
"representation",
"of",
"the",
"footer",
"."
] |
def _repr_categories_info(self) -> str:
"""
Returns a string representation of the footer.
"""
category_strs = self._repr_categories()
dtype = str(self.categories.dtype)
levheader = f"Categories ({len(self.categories)}, {dtype}): "
width, height = get_terminal_size()
max_width = get_option("display.width") or width
if console.in_ipython_frontend():
# 0 = no breaks
max_width = 0
levstring = ""
start = True
cur_col_len = len(levheader) # header
sep_len, sep = (3, " < ") if self.ordered else (2, ", ")
linesep = sep.rstrip() + "\n" # remove whitespace
for val in category_strs:
if max_width != 0 and cur_col_len + sep_len + len(val) > max_width:
levstring += linesep + (" " * (len(levheader) + 1))
cur_col_len = len(levheader) + 1 # header + a whitespace
elif not start:
levstring += sep
cur_col_len += len(val)
levstring += val
start = False
# replace to simple save space by
return levheader + "[" + levstring.replace(" < ... < ", " ... ") + "]"
|
[
"def",
"_repr_categories_info",
"(",
"self",
")",
"->",
"str",
":",
"category_strs",
"=",
"self",
".",
"_repr_categories",
"(",
")",
"dtype",
"=",
"str",
"(",
"self",
".",
"categories",
".",
"dtype",
")",
"levheader",
"=",
"f\"Categories ({len(self.categories)}, {dtype}): \"",
"width",
",",
"height",
"=",
"get_terminal_size",
"(",
")",
"max_width",
"=",
"get_option",
"(",
"\"display.width\"",
")",
"or",
"width",
"if",
"console",
".",
"in_ipython_frontend",
"(",
")",
":",
"# 0 = no breaks",
"max_width",
"=",
"0",
"levstring",
"=",
"\"\"",
"start",
"=",
"True",
"cur_col_len",
"=",
"len",
"(",
"levheader",
")",
"# header",
"sep_len",
",",
"sep",
"=",
"(",
"3",
",",
"\" < \"",
")",
"if",
"self",
".",
"ordered",
"else",
"(",
"2",
",",
"\", \"",
")",
"linesep",
"=",
"sep",
".",
"rstrip",
"(",
")",
"+",
"\"\\n\"",
"# remove whitespace",
"for",
"val",
"in",
"category_strs",
":",
"if",
"max_width",
"!=",
"0",
"and",
"cur_col_len",
"+",
"sep_len",
"+",
"len",
"(",
"val",
")",
">",
"max_width",
":",
"levstring",
"+=",
"linesep",
"+",
"(",
"\" \"",
"*",
"(",
"len",
"(",
"levheader",
")",
"+",
"1",
")",
")",
"cur_col_len",
"=",
"len",
"(",
"levheader",
")",
"+",
"1",
"# header + a whitespace",
"elif",
"not",
"start",
":",
"levstring",
"+=",
"sep",
"cur_col_len",
"+=",
"len",
"(",
"val",
")",
"levstring",
"+=",
"val",
"start",
"=",
"False",
"# replace to simple save space by",
"return",
"levheader",
"+",
"\"[\"",
"+",
"levstring",
".",
"replace",
"(",
"\" < ... < \"",
",",
"\" ... \"",
")",
"+",
"\"]\""
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/arrays/categorical.py#L1950-L1977
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scikit-learn/py2/sklearn/mixture/dpgmm.py
|
python
|
_DPGMMBase._do_mstep
|
(self, X, z, params)
|
Maximize the variational lower bound
Update each of the parameters to maximize the lower bound.
|
Maximize the variational lower bound
|
[
"Maximize",
"the",
"variational",
"lower",
"bound"
] |
def _do_mstep(self, X, z, params):
"""Maximize the variational lower bound
Update each of the parameters to maximize the lower bound."""
self._monitor(X, z, "z")
self._update_concentration(z)
self._monitor(X, z, "gamma")
if 'm' in params:
self._update_means(X, z)
self._monitor(X, z, "mu")
if 'c' in params:
self._update_precisions(X, z)
self._monitor(X, z, "a and b", end=True)
|
[
"def",
"_do_mstep",
"(",
"self",
",",
"X",
",",
"z",
",",
"params",
")",
":",
"self",
".",
"_monitor",
"(",
"X",
",",
"z",
",",
"\"z\"",
")",
"self",
".",
"_update_concentration",
"(",
"z",
")",
"self",
".",
"_monitor",
"(",
"X",
",",
"z",
",",
"\"gamma\"",
")",
"if",
"'m'",
"in",
"params",
":",
"self",
".",
"_update_means",
"(",
"X",
",",
"z",
")",
"self",
".",
"_monitor",
"(",
"X",
",",
"z",
",",
"\"mu\"",
")",
"if",
"'c'",
"in",
"params",
":",
"self",
".",
"_update_precisions",
"(",
"X",
",",
"z",
")",
"self",
".",
"_monitor",
"(",
"X",
",",
"z",
",",
"\"a and b\"",
",",
"end",
"=",
"True",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/mixture/dpgmm.py#L394-L406
|
||
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/P4.py
|
python
|
P4.run_login
|
(self, *args)
|
return self.run("login", *args)
|
Simple interface to make login easier
|
Simple interface to make login easier
|
[
"Simple",
"interface",
"to",
"make",
"login",
"easier"
] |
def run_login(self, *args):
"Simple interface to make login easier"
self.input = self.password
return self.run("login", *args)
|
[
"def",
"run_login",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"input",
"=",
"self",
".",
"password",
"return",
"self",
".",
"run",
"(",
"\"login\"",
",",
"*",
"args",
")"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/Maya_AnimationRiggingTools/ARTv1/MayaTools/General/Scripts/P4.py#L1097-L1103
|
|
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2.py
|
python
|
xmlNs.copyNamespaceList
|
(self)
|
return __tmp
|
Do a copy of a namespace list.
|
Do a copy of a namespace list.
|
[
"Do",
"a",
"copy",
"of",
"an",
"namespace",
"list",
"."
] |
def copyNamespaceList(self):
"""Do a copy of an namespace list. """
ret = libxml2mod.xmlCopyNamespaceList(self._o)
if ret is None:raise treeError('xmlCopyNamespaceList() failed')
__tmp = xmlNs(_obj=ret)
return __tmp
|
[
"def",
"copyNamespaceList",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlCopyNamespaceList",
"(",
"self",
".",
"_o",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlCopyNamespaceList() failed'",
")",
"__tmp",
"=",
"xmlNs",
"(",
"_obj",
"=",
"ret",
")",
"return",
"__tmp"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L5874-L5879
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/dictobject.py
|
python
|
_iterator_codegen
|
(resty)
|
return codegen
|
The common codegen for iterator intrinsics.
Populates the iterator struct and increfs.
|
The common codegen for iterator intrinsics.
|
[
"The",
"common",
"codegen",
"for",
"iterator",
"intrinsics",
"."
] |
def _iterator_codegen(resty):
"""The common codegen for iterator intrinsics.
Populates the iterator struct and increfs.
"""
def codegen(context, builder, sig, args):
[d] = args
[td] = sig.args
iterhelper = context.make_helper(builder, resty)
iterhelper.parent = d
iterhelper.state = iterhelper.state.type(None)
return impl_ret_borrowed(
context,
builder,
resty,
iterhelper._getvalue(),
)
return codegen
|
[
"def",
"_iterator_codegen",
"(",
"resty",
")",
":",
"def",
"codegen",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
")",
":",
"[",
"d",
"]",
"=",
"args",
"[",
"td",
"]",
"=",
"sig",
".",
"args",
"iterhelper",
"=",
"context",
".",
"make_helper",
"(",
"builder",
",",
"resty",
")",
"iterhelper",
".",
"parent",
"=",
"d",
"iterhelper",
".",
"state",
"=",
"iterhelper",
".",
"state",
".",
"type",
"(",
"None",
")",
"return",
"impl_ret_borrowed",
"(",
"context",
",",
"builder",
",",
"resty",
",",
"iterhelper",
".",
"_getvalue",
"(",
")",
",",
")",
"return",
"codegen"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/dictobject.py#L549-L568
|
|
sideeffects/HoudiniEngineForUnreal
|
a52be617d90495bda6072fe732f0d2eec33b54f3
|
Content/Python/HoudiniEngineV2/asyncprocessor.py
|
python
|
ProcessHDA.spawn_in_level_override
|
(self)
|
return self._spawn_in_level_override
|
The level to spawn in. If both this and world_context_object is not set, spawn in the editor context's level.
|
The level to spawn in. If both this and world_context_object is not set, spawn in the editor context's level.
|
[
"The",
"level",
"to",
"spawn",
"in",
".",
"If",
"both",
"this",
"and",
"world_context_object",
"is",
"not",
"set",
"spawn",
"in",
"the",
"editor",
"context",
"s",
"level",
"."
] |
def spawn_in_level_override(self):
""" The level to spawn in. If both this and world_context_object is not set, spawn in the editor context's level. """
return self._spawn_in_level_override
|
[
"def",
"spawn_in_level_override",
"(",
"self",
")",
":",
"return",
"self",
".",
"_spawn_in_level_override"
] |
https://github.com/sideeffects/HoudiniEngineForUnreal/blob/a52be617d90495bda6072fe732f0d2eec33b54f3/Content/Python/HoudiniEngineV2/asyncprocessor.py#L180-L182
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/dateutil/tz/tz.py
|
python
|
datetime_exists
|
(dt, tz=None)
|
return dt == dt_rt
|
Given a datetime and a time zone, determine whether or not a given datetime
would fall in a gap.
:param dt:
A :class:`datetime.datetime` (whose time zone will be ignored if ``tz``
is provided.)
:param tz:
A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If
``None`` or not provided, the datetime's own time zone will be used.
:return:
Returns a boolean value whether or not the "wall time" exists in
``tz``.
.. versionadded:: 2.7.0
|
Given a datetime and a time zone, determine whether or not a given datetime
would fall in a gap.
|
[
"Given",
"a",
"datetime",
"and",
"a",
"time",
"zone",
"determine",
"whether",
"or",
"not",
"a",
"given",
"datetime",
"would",
"fall",
"in",
"a",
"gap",
"."
] |
def datetime_exists(dt, tz=None):
"""
Given a datetime and a time zone, determine whether or not a given datetime
would fall in a gap.
:param dt:
A :class:`datetime.datetime` (whose time zone will be ignored if ``tz``
is provided.)
:param tz:
A :class:`datetime.tzinfo` with support for the ``fold`` attribute. If
``None`` or not provided, the datetime's own time zone will be used.
:return:
Returns a boolean value whether or not the "wall time" exists in
``tz``.
.. versionadded:: 2.7.0
"""
if tz is None:
if dt.tzinfo is None:
raise ValueError('Datetime is naive and no time zone provided.')
tz = dt.tzinfo
dt = dt.replace(tzinfo=None)
# This is essentially a test of whether or not the datetime can survive
# a round trip to UTC.
dt_rt = dt.replace(tzinfo=tz).astimezone(UTC).astimezone(tz)
dt_rt = dt_rt.replace(tzinfo=None)
return dt == dt_rt
|
[
"def",
"datetime_exists",
"(",
"dt",
",",
"tz",
"=",
"None",
")",
":",
"if",
"tz",
"is",
"None",
":",
"if",
"dt",
".",
"tzinfo",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Datetime is naive and no time zone provided.'",
")",
"tz",
"=",
"dt",
".",
"tzinfo",
"dt",
"=",
"dt",
".",
"replace",
"(",
"tzinfo",
"=",
"None",
")",
"# This is essentially a test of whether or not the datetime can survive",
"# a round trip to UTC.",
"dt_rt",
"=",
"dt",
".",
"replace",
"(",
"tzinfo",
"=",
"tz",
")",
".",
"astimezone",
"(",
"UTC",
")",
".",
"astimezone",
"(",
"tz",
")",
"dt_rt",
"=",
"dt_rt",
".",
"replace",
"(",
"tzinfo",
"=",
"None",
")",
"return",
"dt",
"==",
"dt_rt"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/dateutil/tz/tz.py#L1683-L1714
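A small usage sketch for `datetime_exists`, using a US spring-forward gap as the example (the zone and timestamps are illustrative only):

from datetime import datetime
from dateutil import tz

eastern = tz.gettz("America/New_York")

# 2:30 AM on 2018-03-11 was skipped when clocks jumped forward, so it does not exist.
print(tz.datetime_exists(datetime(2018, 3, 11, 2, 30), eastern))   # False
print(tz.datetime_exists(datetime(2018, 3, 11, 3, 30), eastern))   # True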
|
|
cornell-zhang/heterocl
|
6d9e4b4acc2ee2707b2d25b27298c0335bccedfd
|
python/heterocl/tvm/_ffi/ndarray.py
|
python
|
numpyasarray
|
(np_data)
|
return arr, shape
|
Return a TVMArray representation of a numpy array.
|
Return a TVMArray representation of a numpy array.
|
[
"Return",
"a",
"TVMArray",
"representation",
"of",
"a",
"numpy",
"array",
"."
] |
def numpyasarray(np_data):
"""Return a TVMArray representation of a numpy array.
"""
data = np_data
assert data.flags['C_CONTIGUOUS']
arr = TVMArray()
shape = c_array(tvm_shape_index_t, data.shape)
arr.data = data.ctypes.data_as(ctypes.c_void_p)
arr.shape = shape
arr.strides = None
arr.dtype = TVMType(np.dtype(data.dtype).name)
arr.ndim = data.ndim
# CPU device
arr.ctx = context(1, 0)
return arr, shape
|
[
"def",
"numpyasarray",
"(",
"np_data",
")",
":",
"data",
"=",
"np_data",
"assert",
"data",
".",
"flags",
"[",
"'C_CONTIGUOUS'",
"]",
"arr",
"=",
"TVMArray",
"(",
")",
"shape",
"=",
"c_array",
"(",
"tvm_shape_index_t",
",",
"data",
".",
"shape",
")",
"arr",
".",
"data",
"=",
"data",
".",
"ctypes",
".",
"data_as",
"(",
"ctypes",
".",
"c_void_p",
")",
"arr",
".",
"shape",
"=",
"shape",
"arr",
".",
"strides",
"=",
"None",
"arr",
".",
"dtype",
"=",
"TVMType",
"(",
"np",
".",
"dtype",
"(",
"data",
".",
"dtype",
")",
".",
"name",
")",
"arr",
".",
"ndim",
"=",
"data",
".",
"ndim",
"# CPU device",
"arr",
".",
"ctx",
"=",
"context",
"(",
"1",
",",
"0",
")",
"return",
"arr",
",",
"shape"
] |
https://github.com/cornell-zhang/heterocl/blob/6d9e4b4acc2ee2707b2d25b27298c0335bccedfd/python/heterocl/tvm/_ffi/ndarray.py#L65-L79
|
|
miyosuda/TensorFlowAndroidDemo
|
35903e0221aa5f109ea2dbef27f20b52e317f42d
|
jni-build/jni/include/tensorflow/models/embedding/word2vec_optimized.py
|
python
|
main
|
(_)
|
Train a word2vec model.
|
Train a word2vec model.
|
[
"Train",
"a",
"word2vec",
"model",
"."
] |
def main(_):
"""Train a word2vec model."""
if not FLAGS.train_data or not FLAGS.eval_data or not FLAGS.save_path:
print("--train_data --eval_data and --save_path must be specified.")
sys.exit(1)
opts = Options()
with tf.Graph().as_default(), tf.Session() as session:
with tf.device("/cpu:0"):
model = Word2Vec(opts, session)
for _ in xrange(opts.epochs_to_train):
model.train() # Process one epoch
model.eval() # Eval analogies.
# Perform a final save.
model.saver.save(session, os.path.join(opts.save_path, "model.ckpt"),
global_step=model.step)
if FLAGS.interactive:
# E.g.,
# [0]: model.analogy(b'france', b'paris', b'russia')
# [1]: model.nearby([b'proton', b'elephant', b'maxwell'])
_start_shell(locals())
|
[
"def",
"main",
"(",
"_",
")",
":",
"if",
"not",
"FLAGS",
".",
"train_data",
"or",
"not",
"FLAGS",
".",
"eval_data",
"or",
"not",
"FLAGS",
".",
"save_path",
":",
"print",
"(",
"\"--train_data --eval_data and --save_path must be specified.\"",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"opts",
"=",
"Options",
"(",
")",
"with",
"tf",
".",
"Graph",
"(",
")",
".",
"as_default",
"(",
")",
",",
"tf",
".",
"Session",
"(",
")",
"as",
"session",
":",
"with",
"tf",
".",
"device",
"(",
"\"/cpu:0\"",
")",
":",
"model",
"=",
"Word2Vec",
"(",
"opts",
",",
"session",
")",
"for",
"_",
"in",
"xrange",
"(",
"opts",
".",
"epochs_to_train",
")",
":",
"model",
".",
"train",
"(",
")",
"# Process one epoch",
"model",
".",
"eval",
"(",
")",
"# Eval analogies.",
"# Perform a final save.",
"model",
".",
"saver",
".",
"save",
"(",
"session",
",",
"os",
".",
"path",
".",
"join",
"(",
"opts",
".",
"save_path",
",",
"\"model.ckpt\"",
")",
",",
"global_step",
"=",
"model",
".",
"step",
")",
"if",
"FLAGS",
".",
"interactive",
":",
"# E.g.,",
"# [0]: model.analogy(b'france', b'paris', b'russia')",
"# [1]: model.nearby([b'proton', b'elephant', b'maxwell'])",
"_start_shell",
"(",
"locals",
"(",
")",
")"
] |
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/models/embedding/word2vec_optimized.py#L408-L427
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/gsutil/third_party/rsa/rsa/_version133.py
|
python
|
randomized_primality_testing
|
(n, k)
|
return True
|
Calculates whether n is composite (which is always correct) or
prime (which is incorrect with error probability 2**-k)
Returns False if the number is composite, and True if it's
probably prime.
|
Calculates whether n is composite (which is always correct) or
prime (which is incorrect with error probability 2**-k)
|
[
"Calculates",
"whether",
"n",
"is",
"composite",
"(",
"which",
"is",
"always",
"correct",
")",
"or",
"prime",
"(",
"which",
"is",
"incorrect",
"with",
"error",
"probability",
"2",
"**",
"-",
"k",
")"
] |
def randomized_primality_testing(n, k):
"""Calculates whether n is composite (which is always correct) or
prime (which is incorrect with error probability 2**-k)
Returns False if the number is composite, and True if it's
probably prime.
"""
q = 0.5 # Property of the jacobi_witness function
# t = int(math.ceil(k / math.log(1/q, 2)))
t = ceil(k / math.log(1/q, 2))
for i in range(t+1):
x = randint(1, n-1)
if jacobi_witness(x, n): return False
return True
|
[
"def",
"randomized_primality_testing",
"(",
"n",
",",
"k",
")",
":",
"q",
"=",
"0.5",
"# Property of the jacobi_witness function",
"# t = int(math.ceil(k / math.log(1/q, 2)))",
"t",
"=",
"ceil",
"(",
"k",
"/",
"math",
".",
"log",
"(",
"1",
"/",
"q",
",",
"2",
")",
")",
"for",
"i",
"in",
"range",
"(",
"t",
"+",
"1",
")",
":",
"x",
"=",
"randint",
"(",
"1",
",",
"n",
"-",
"1",
")",
"if",
"jacobi_witness",
"(",
"x",
",",
"n",
")",
":",
"return",
"False",
"return",
"True"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/rsa/rsa/_version133.py#L171-L187
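Purely to illustrate the contract in the docstring (a "composite" answer is always correct, a "prime" answer is wrong with probability at most 2**-k), a hedged usage sketch; the import path is an assumption based on the file location above:

from rsa._version133 import randomized_primality_testing

# 221 = 13 * 17 is composite, so False is guaranteed.
print(randomized_primality_testing(221, 10))      # False
# 104729 is prime, so True here is wrong with probability at most 2**-10.
print(randomized_primality_testing(104729, 10))   # True (with high probability)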
|
|
v8mips/v8mips
|
f0c9cc0bbfd461c7f516799d9a58e9a7395f737e
|
tools/push-to-trunk/push_to_trunk.py
|
python
|
PrepareChangeLog.Reload
|
(self, body)
|
return body
|
Attempts to reload the commit message from rietveld in order to allow
late changes to the LOG flag. Note: This is brittle to future changes of
the web page name or structure.
|
Attempts to reload the commit message from rietveld in order to allow
late changes to the LOG flag. Note: This is brittle to future changes of
the web page name or structure.
|
[
"Attempts",
"to",
"reload",
"the",
"commit",
"message",
"from",
"rietveld",
"in",
"order",
"to",
"allow",
"late",
"changes",
"to",
"the",
"LOG",
"flag",
".",
"Note",
":",
"This",
"is",
"brittle",
"to",
"future",
"changes",
"of",
"the",
"web",
"page",
"name",
"or",
"structure",
"."
] |
def Reload(self, body):
"""Attempts to reload the commit message from rietveld in order to allow
late changes to the LOG flag. Note: This is brittle to future changes of
the web page name or structure.
"""
match = re.search(r"^Review URL: https://codereview\.chromium\.org/(\d+)$",
body, flags=re.M)
if match:
cl_url = ("https://codereview.chromium.org/%s/description"
% match.group(1))
try:
# Fetch from Rietveld but only retry once with one second delay since
# there might be many revisions.
body = self.ReadURL(cl_url, wait_plan=[1])
except urllib2.URLError: # pragma: no cover
pass
return body
|
[
"def",
"Reload",
"(",
"self",
",",
"body",
")",
":",
"match",
"=",
"re",
".",
"search",
"(",
"r\"^Review URL: https://codereview\\.chromium\\.org/(\\d+)$\"",
",",
"body",
",",
"flags",
"=",
"re",
".",
"M",
")",
"if",
"match",
":",
"cl_url",
"=",
"(",
"\"https://codereview.chromium.org/%s/description\"",
"%",
"match",
".",
"group",
"(",
"1",
")",
")",
"try",
":",
"# Fetch from Rietveld but only retry once with one second delay since",
"# there might be many revisions.",
"body",
"=",
"self",
".",
"ReadURL",
"(",
"cl_url",
",",
"wait_plan",
"=",
"[",
"1",
"]",
")",
"except",
"urllib2",
".",
"URLError",
":",
"# pragma: no cover",
"pass",
"return",
"body"
] |
https://github.com/v8mips/v8mips/blob/f0c9cc0bbfd461c7f516799d9a58e9a7395f737e/tools/push-to-trunk/push_to_trunk.py#L184-L200
|
|
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
|
python
|
uCSIsCatLu
|
(code)
|
return ret
|
Check whether the character is part of Lu UCS Category
|
Check whether the character is part of Lu UCS Category
|
[
"Check",
"whether",
"the",
"character",
"is",
"part",
"of",
"Lu",
"UCS",
"Category"
] |
def uCSIsCatLu(code):
"""Check whether the character is part of Lu UCS Category """
ret = libxml2mod.xmlUCSIsCatLu(code)
return ret
|
[
"def",
"uCSIsCatLu",
"(",
"code",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlUCSIsCatLu",
"(",
"code",
")",
"return",
"ret"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L1508-L1511
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/utilities/load_utils.py
|
python
|
get_table_workspace_names_from_ADS
|
()
|
return table_names
|
Return a list of names of TableWorkspace objects which are in the ADS.
|
Return a list of names of TableWorkspace objects which are in the ADS.
|
[
"Return",
"a",
"list",
"of",
"names",
"of",
"TableWorkspace",
"objects",
"which",
"are",
"in",
"the",
"ADS",
"."
] |
def get_table_workspace_names_from_ADS():
"""
Return a list of names of TableWorkspace objects which are in the ADS.
"""
names = api.AnalysisDataService.Instance().getObjectNames()
table_names = [name for name in names if isinstance(mtd[name], ITableWorkspace)]
return table_names
|
[
"def",
"get_table_workspace_names_from_ADS",
"(",
")",
":",
"names",
"=",
"api",
".",
"AnalysisDataService",
".",
"Instance",
"(",
")",
".",
"getObjectNames",
"(",
")",
"table_names",
"=",
"[",
"name",
"for",
"name",
"in",
"names",
"if",
"isinstance",
"(",
"mtd",
"[",
"name",
"]",
",",
"ITableWorkspace",
")",
"]",
"return",
"table_names"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/utilities/load_utils.py#L261-L267
|
|
miyosuda/TensorFlowAndroidMNIST
|
7b5a4603d2780a8a2834575706e9001977524007
|
jni-build/jni/include/tensorflow/python/summary/event_file_inspector.py
|
python
|
generators_from_logdir
|
(logdir)
|
return generators
|
Returns a list of event generators for subdirectories with event files.
The number of generators returned should equal the number of directories
within logdir that contain event files. If only logdir contains event files,
returns a list of length one.
Args:
logdir: A log directory that contains event files.
Returns:
List of event generators for each subdirectory with event files.
|
Returns a list of event generators for subdirectories with event files.
|
[
"Returns",
"a",
"list",
"of",
"event",
"generators",
"for",
"subdirectories",
"with",
"event",
"files",
"."
] |
def generators_from_logdir(logdir):
"""Returns a list of event generators for subdirectories with event files.
The number of generators returned should equal the number of directories
within logdir that contain event files. If only logdir contains event files,
returns a list of length one.
Args:
logdir: A log directory that contains event files.
Returns:
List of event generators for each subdirectory with event files.
"""
subdirs = event_multiplexer.GetLogdirSubdirectories(logdir)
generators = [itertools.chain(*[
generator_from_event_file(os.path.join(subdir, f))
for f in gfile.ListDirectory(subdir)
if event_accumulator.IsTensorFlowEventsFile(os.path.join(subdir, f))
]) for subdir in subdirs]
return generators
|
[
"def",
"generators_from_logdir",
"(",
"logdir",
")",
":",
"subdirs",
"=",
"event_multiplexer",
".",
"GetLogdirSubdirectories",
"(",
"logdir",
")",
"generators",
"=",
"[",
"itertools",
".",
"chain",
"(",
"*",
"[",
"generator_from_event_file",
"(",
"os",
".",
"path",
".",
"join",
"(",
"subdir",
",",
"f",
")",
")",
"for",
"f",
"in",
"gfile",
".",
"ListDirectory",
"(",
"subdir",
")",
"if",
"event_accumulator",
".",
"IsTensorFlowEventsFile",
"(",
"os",
".",
"path",
".",
"join",
"(",
"subdir",
",",
"f",
")",
")",
"]",
")",
"for",
"subdir",
"in",
"subdirs",
"]",
"return",
"generators"
] |
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/summary/event_file_inspector.py#L314-L333
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/ipython/py2/IPython/core/interactiveshell.py
|
python
|
InteractiveShell.drop_by_id
|
(self, variables)
|
Remove a dict of variables from the user namespace, if they are the
same as the values in the dictionary.
This is intended for use by extensions: variables that they've added can
be taken back out if they are unloaded, without removing any that the
user has overwritten.
Parameters
----------
variables : dict
A dictionary mapping object names (as strings) to the objects.
|
Remove a dict of variables from the user namespace, if they are the
same as the values in the dictionary.
This is intended for use by extensions: variables that they've added can
be taken back out if they are unloaded, without removing any that the
user has overwritten.
Parameters
----------
variables : dict
A dictionary mapping object names (as strings) to the objects.
|
[
"Remove",
"a",
"dict",
"of",
"variables",
"from",
"the",
"user",
"namespace",
"if",
"they",
"are",
"the",
"same",
"as",
"the",
"values",
"in",
"the",
"dictionary",
".",
"This",
"is",
"intended",
"for",
"use",
"by",
"extensions",
":",
"variables",
"that",
"they",
"ve",
"added",
"can",
"be",
"taken",
"back",
"out",
"if",
"they",
"are",
"unloaded",
"without",
"removing",
"any",
"that",
"the",
"user",
"has",
"overwritten",
".",
"Parameters",
"----------",
"variables",
":",
"dict",
"A",
"dictionary",
"mapping",
"object",
"names",
"(",
"as",
"strings",
")",
"to",
"the",
"objects",
"."
] |
def drop_by_id(self, variables):
"""Remove a dict of variables from the user namespace, if they are the
same as the values in the dictionary.
This is intended for use by extensions: variables that they've added can
be taken back out if they are unloaded, without removing any that the
user has overwritten.
Parameters
----------
variables : dict
A dictionary mapping object names (as strings) to the objects.
"""
for name, obj in iteritems(variables):
if name in self.user_ns and self.user_ns[name] is obj:
del self.user_ns[name]
self.user_ns_hidden.pop(name, None)
|
[
"def",
"drop_by_id",
"(",
"self",
",",
"variables",
")",
":",
"for",
"name",
",",
"obj",
"in",
"iteritems",
"(",
"variables",
")",
":",
"if",
"name",
"in",
"self",
".",
"user_ns",
"and",
"self",
".",
"user_ns",
"[",
"name",
"]",
"is",
"obj",
":",
"del",
"self",
".",
"user_ns",
"[",
"name",
"]",
"self",
".",
"user_ns_hidden",
".",
"pop",
"(",
"name",
",",
"None",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py2/IPython/core/interactiveshell.py#L1358-L1374
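A sketch of the extension workflow `drop_by_id` is meant for: names are pushed into the user namespace on load and removed by identity on unload, so user rebindings survive. The extension and variable names are made up; `push()` is the standard InteractiveShell counterpart:

# my_extension.py -- hypothetical IPython extension
_injected = {"answer": 42, "double": lambda x: x * 2}

def load_ipython_extension(ip):
    ip.push(_injected)          # add the names to the interactive namespace

def unload_ipython_extension(ip):
    ip.drop_by_id(_injected)    # remove them only if the user has not rebound them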
|
||
swift/swift
|
12d031cf8177fdec0137f9aa7e2912fa23c4416b
|
3rdParty/SCons/scons-3.0.1/engine/SCons/Node/FS.py
|
python
|
EntryProxy.__get_posix_path
|
(self)
|
Return the path with / as the path separator,
regardless of platform.
|
Return the path with / as the path separator,
regardless of platform.
|
[
"Return",
"the",
"path",
"with",
"/",
"as",
"the",
"path",
"separator",
"regardless",
"of",
"platform",
"."
] |
def __get_posix_path(self):
"""Return the path with / as the path separator,
regardless of platform."""
if os_sep_is_slash:
return self
else:
entry = self.get()
r = entry.get_path().replace(OS_SEP, '/')
return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")
|
[
"def",
"__get_posix_path",
"(",
"self",
")",
":",
"if",
"os_sep_is_slash",
":",
"return",
"self",
"else",
":",
"entry",
"=",
"self",
".",
"get",
"(",
")",
"r",
"=",
"entry",
".",
"get_path",
"(",
")",
".",
"replace",
"(",
"OS_SEP",
",",
"'/'",
")",
"return",
"SCons",
".",
"Subst",
".",
"SpecialAttrWrapper",
"(",
"r",
",",
"entry",
".",
"name",
"+",
"\"_posix\"",
")"
] |
https://github.com/swift/swift/blob/12d031cf8177fdec0137f9aa7e2912fa23c4416b/3rdParty/SCons/scons-3.0.1/engine/SCons/Node/FS.py#L455-L463
|
||
sdhash/sdhash
|
b9eff63e4e5867e910f41fd69032bbb1c94a2a5e
|
sdhash-ui/cherrypy/lib/static.py
|
python
|
staticdir
|
(section, dir, root="", match="", content_types=None, index="",
debug=False)
|
return handled
|
Serve a static resource from the given (root +) dir.
match
If given, request.path_info will be searched for the given
regular expression before attempting to serve static content.
content_types
If given, it should be a Python dictionary of
{file-extension: content-type} pairs, where 'file-extension' is
a string (e.g. "gif") and 'content-type' is the value to write
out in the Content-Type response header (e.g. "image/gif").
index
If provided, it should be the (relative) name of a file to
serve for directory requests. For example, if the dir argument is
'/home/me', the Request-URI is 'myapp', and the index arg is
'index.html', the file '/home/me/myapp/index.html' will be sought.
|
Serve a static resource from the given (root +) dir.
match
If given, request.path_info will be searched for the given
regular expression before attempting to serve static content.
content_types
If given, it should be a Python dictionary of
{file-extension: content-type} pairs, where 'file-extension' is
a string (e.g. "gif") and 'content-type' is the value to write
out in the Content-Type response header (e.g. "image/gif").
index
If provided, it should be the (relative) name of a file to
serve for directory requests. For example, if the dir argument is
'/home/me', the Request-URI is 'myapp', and the index arg is
'index.html', the file '/home/me/myapp/index.html' will be sought.
|
[
"Serve",
"a",
"static",
"resource",
"from",
"the",
"given",
"(",
"root",
"+",
")",
"dir",
".",
"match",
"If",
"given",
"request",
".",
"path_info",
"will",
"be",
"searched",
"for",
"the",
"given",
"regular",
"expression",
"before",
"attempting",
"to",
"serve",
"static",
"content",
".",
"content_types",
"If",
"given",
"it",
"should",
"be",
"a",
"Python",
"dictionary",
"of",
"{",
"file",
"-",
"extension",
":",
"content",
"-",
"type",
"}",
"pairs",
"where",
"file",
"-",
"extension",
"is",
"a",
"string",
"(",
"e",
".",
"g",
".",
"gif",
")",
"and",
"content",
"-",
"type",
"is",
"the",
"value",
"to",
"write",
"out",
"in",
"the",
"Content",
"-",
"Type",
"response",
"header",
"(",
"e",
".",
"g",
".",
"image",
"/",
"gif",
")",
".",
"index",
"If",
"provided",
"it",
"should",
"be",
"the",
"(",
"relative",
")",
"name",
"of",
"a",
"file",
"to",
"serve",
"for",
"directory",
"requests",
".",
"For",
"example",
"if",
"the",
"dir",
"argument",
"is",
"/",
"home",
"/",
"me",
"the",
"Request",
"-",
"URI",
"is",
"myapp",
"and",
"the",
"index",
"arg",
"is",
"index",
".",
"html",
"the",
"file",
"/",
"home",
"/",
"me",
"/",
"myapp",
"/",
"index",
".",
"html",
"will",
"be",
"sought",
"."
] |
def staticdir(section, dir, root="", match="", content_types=None, index="",
debug=False):
"""Serve a static resource from the given (root +) dir.
match
If given, request.path_info will be searched for the given
regular expression before attempting to serve static content.
content_types
If given, it should be a Python dictionary of
{file-extension: content-type} pairs, where 'file-extension' is
a string (e.g. "gif") and 'content-type' is the value to write
out in the Content-Type response header (e.g. "image/gif").
index
If provided, it should be the (relative) name of a file to
serve for directory requests. For example, if the dir argument is
'/home/me', the Request-URI is 'myapp', and the index arg is
'index.html', the file '/home/me/myapp/index.html' will be sought.
"""
request = cherrypy.serving.request
if request.method not in ('GET', 'HEAD'):
if debug:
cherrypy.log('request.method not GET or HEAD', 'TOOLS.STATICDIR')
return False
if match and not re.search(match, request.path_info):
if debug:
cherrypy.log('request.path_info %r does not match pattern %r' %
(request.path_info, match), 'TOOLS.STATICDIR')
return False
# Allow the use of '~' to refer to a user's home directory.
dir = os.path.expanduser(dir)
# If dir is relative, make absolute using "root".
if not os.path.isabs(dir):
if not root:
msg = "Static dir requires an absolute dir (or root)."
if debug:
cherrypy.log(msg, 'TOOLS.STATICDIR')
raise ValueError(msg)
dir = os.path.join(root, dir)
# Determine where we are in the object tree relative to 'section'
# (where the static tool was defined).
if section == 'global':
section = "/"
section = section.rstrip(r"\/")
branch = request.path_info[len(section) + 1:]
branch = unquote(branch.lstrip(r"\/"))
# If branch is "", filename will end in a slash
filename = os.path.join(dir, branch)
if debug:
cherrypy.log('Checking file %r to fulfill %r' %
(filename, request.path_info), 'TOOLS.STATICDIR')
# There's a chance that the branch pulled from the URL might
# have ".." or similar uplevel attacks in it. Check that the final
# filename is a child of dir.
if not os.path.normpath(filename).startswith(os.path.normpath(dir)):
raise cherrypy.HTTPError(403) # Forbidden
handled = _attempt(filename, content_types)
if not handled:
# Check for an index file if a folder was requested.
if index:
handled = _attempt(os.path.join(filename, index), content_types)
if handled:
request.is_index = filename[-1] in (r"\/")
return handled
|
[
"def",
"staticdir",
"(",
"section",
",",
"dir",
",",
"root",
"=",
"\"\"",
",",
"match",
"=",
"\"\"",
",",
"content_types",
"=",
"None",
",",
"index",
"=",
"\"\"",
",",
"debug",
"=",
"False",
")",
":",
"request",
"=",
"cherrypy",
".",
"serving",
".",
"request",
"if",
"request",
".",
"method",
"not",
"in",
"(",
"'GET'",
",",
"'HEAD'",
")",
":",
"if",
"debug",
":",
"cherrypy",
".",
"log",
"(",
"'request.method not GET or HEAD'",
",",
"'TOOLS.STATICDIR'",
")",
"return",
"False",
"if",
"match",
"and",
"not",
"re",
".",
"search",
"(",
"match",
",",
"request",
".",
"path_info",
")",
":",
"if",
"debug",
":",
"cherrypy",
".",
"log",
"(",
"'request.path_info %r does not match pattern %r'",
"%",
"(",
"request",
".",
"path_info",
",",
"match",
")",
",",
"'TOOLS.STATICDIR'",
")",
"return",
"False",
"# Allow the use of '~' to refer to a user's home directory.",
"dir",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"dir",
")",
"# If dir is relative, make absolute using \"root\".",
"if",
"not",
"os",
".",
"path",
".",
"isabs",
"(",
"dir",
")",
":",
"if",
"not",
"root",
":",
"msg",
"=",
"\"Static dir requires an absolute dir (or root).\"",
"if",
"debug",
":",
"cherrypy",
".",
"log",
"(",
"msg",
",",
"'TOOLS.STATICDIR'",
")",
"raise",
"ValueError",
"(",
"msg",
")",
"dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"dir",
")",
"# Determine where we are in the object tree relative to 'section'",
"# (where the static tool was defined).",
"if",
"section",
"==",
"'global'",
":",
"section",
"=",
"\"/\"",
"section",
"=",
"section",
".",
"rstrip",
"(",
"r\"\\/\"",
")",
"branch",
"=",
"request",
".",
"path_info",
"[",
"len",
"(",
"section",
")",
"+",
"1",
":",
"]",
"branch",
"=",
"unquote",
"(",
"branch",
".",
"lstrip",
"(",
"r\"\\/\"",
")",
")",
"# If branch is \"\", filename will end in a slash",
"filename",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dir",
",",
"branch",
")",
"if",
"debug",
":",
"cherrypy",
".",
"log",
"(",
"'Checking file %r to fulfill %r'",
"%",
"(",
"filename",
",",
"request",
".",
"path_info",
")",
",",
"'TOOLS.STATICDIR'",
")",
"# There's a chance that the branch pulled from the URL might",
"# have \"..\" or similar uplevel attacks in it. Check that the final",
"# filename is a child of dir.",
"if",
"not",
"os",
".",
"path",
".",
"normpath",
"(",
"filename",
")",
".",
"startswith",
"(",
"os",
".",
"path",
".",
"normpath",
"(",
"dir",
")",
")",
":",
"raise",
"cherrypy",
".",
"HTTPError",
"(",
"403",
")",
"# Forbidden",
"handled",
"=",
"_attempt",
"(",
"filename",
",",
"content_types",
")",
"if",
"not",
"handled",
":",
"# Check for an index file if a folder was requested.",
"if",
"index",
":",
"handled",
"=",
"_attempt",
"(",
"os",
".",
"path",
".",
"join",
"(",
"filename",
",",
"index",
")",
",",
"content_types",
")",
"if",
"handled",
":",
"request",
".",
"is_index",
"=",
"filename",
"[",
"-",
"1",
"]",
"in",
"(",
"r\"\\/\"",
")",
"return",
"handled"
] |
https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/sdhash-ui/cherrypy/lib/static.py#L255-L326
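The tool above is normally switched on through CherryPy configuration rather than called directly. A minimal sketch, with placeholder paths:

import os
import cherrypy

class Root(object):
    @cherrypy.expose
    def index(self):
        return "Hello"

config = {
    '/static': {
        'tools.staticdir.on': True,
        'tools.staticdir.root': os.path.abspath(os.getcwd()),
        'tools.staticdir.dir': 'public',         # served relative to root
        'tools.staticdir.index': 'index.html',   # used for directory requests
    }
}

cherrypy.quickstart(Root(), '/', config)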
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_misc.py
|
python
|
DataObjectSimple.GetFormat
|
(*args, **kwargs)
|
return _misc_.DataObjectSimple_GetFormat(*args, **kwargs)
|
GetFormat(self) -> DataFormat
Returns the (one and only one) format supported by this object. It is
assumed that the format is supported in both directions.
|
GetFormat(self) -> DataFormat
|
[
"GetFormat",
"(",
"self",
")",
"-",
">",
"DataFormat"
] |
def GetFormat(*args, **kwargs):
"""
GetFormat(self) -> DataFormat
Returns the (one and only one) format supported by this object. It is
assumed that the format is supported in both directions.
"""
return _misc_.DataObjectSimple_GetFormat(*args, **kwargs)
|
[
"def",
"GetFormat",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"DataObjectSimple_GetFormat",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_misc.py#L5019-L5026
|
|
echronos/echronos
|
c996f1d2c8af6c6536205eb319c1bf1d4d84569c
|
prj/app/prj.py
|
python
|
Module.prepare
|
(self, system, config, **_)
|
Prepare the `system` for building based on a specific module `config`.
This method should be implemented in a sub-class. It should update the system object
making it ready to be passed to a Builder module. Additionally it may update the
filesystem to generate files.
|
Prepare the `system` for building based on a specific module `config`.
|
[
"Prepare",
"the",
"system",
"for",
"building",
"based",
"on",
"a",
"specific",
"module",
"config",
"."
] |
def prepare(self, system, config, **_):
"""Prepare the `system` for building based on a specific module `config`.
This method should be implemented in a sub-class. It should update the system object
making it ready to be passed to a Builder module. Additionally it may update the
filesystem to generate files.
"""
self._prepare_files(system, config, stage="prepare")
|
[
"def",
"prepare",
"(",
"self",
",",
"system",
",",
"config",
",",
"*",
"*",
"_",
")",
":",
"self",
".",
"_prepare_files",
"(",
"system",
",",
"config",
",",
"stage",
"=",
"\"prepare\"",
")"
] |
https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/prj/app/prj.py#L390-L398
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_core.py
|
python
|
MenuItem.IsEnabled
|
(*args, **kwargs)
|
return _core_.MenuItem_IsEnabled(*args, **kwargs)
|
IsEnabled(self) -> bool
|
IsEnabled(self) -> bool
|
[
"IsEnabled",
"(",
"self",
")",
"-",
">",
"bool"
] |
def IsEnabled(*args, **kwargs):
"""IsEnabled(self) -> bool"""
return _core_.MenuItem_IsEnabled(*args, **kwargs)
|
[
"def",
"IsEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"MenuItem_IsEnabled",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L12517-L12519
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/gsutil/third_party/apitools/samples/storage_sample/storage/storage_v1.py
|
python
|
PyShell.Run
|
(self, _)
|
Run an interactive python shell with the client.
|
Run an interactive python shell with the client.
|
[
"Run",
"an",
"interactive",
"python",
"shell",
"with",
"the",
"client",
"."
] |
def Run(self, _):
"""Run an interactive python shell with the client."""
client = GetClientFromFlags()
params = GetGlobalParamsFromFlags()
for field in params.all_fields():
value = params.get_assigned_value(field.name)
if value != field.default:
client.AddGlobalParam(field.name, value)
banner = """
== storage interactive console ==
client: a storage client
apitools_base: base apitools module
messages: the generated messages module
"""
local_vars = {
'apitools_base': apitools_base,
'client': client,
'client_lib': client_lib,
'messages': messages,
}
if platform.system() == 'Linux':
console = apitools_base_cli.ConsoleWithReadline(
local_vars, histfile=FLAGS.history_file)
else:
console = code.InteractiveConsole(local_vars)
try:
console.interact(banner)
except SystemExit as e:
return e.code
|
[
"def",
"Run",
"(",
"self",
",",
"_",
")",
":",
"client",
"=",
"GetClientFromFlags",
"(",
")",
"params",
"=",
"GetGlobalParamsFromFlags",
"(",
")",
"for",
"field",
"in",
"params",
".",
"all_fields",
"(",
")",
":",
"value",
"=",
"params",
".",
"get_assigned_value",
"(",
"field",
".",
"name",
")",
"if",
"value",
"!=",
"field",
".",
"default",
":",
"client",
".",
"AddGlobalParam",
"(",
"field",
".",
"name",
",",
"value",
")",
"banner",
"=",
"\"\"\"\n == storage interactive console ==\n client: a storage client\n apitools_base: base apitools module\n messages: the generated messages module\n \"\"\"",
"local_vars",
"=",
"{",
"'apitools_base'",
":",
"apitools_base",
",",
"'client'",
":",
"client",
",",
"'client_lib'",
":",
"client_lib",
",",
"'messages'",
":",
"messages",
",",
"}",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"'Linux'",
":",
"console",
"=",
"apitools_base_cli",
".",
"ConsoleWithReadline",
"(",
"local_vars",
",",
"histfile",
"=",
"FLAGS",
".",
"history_file",
")",
"else",
":",
"console",
"=",
"code",
".",
"InteractiveConsole",
"(",
"local_vars",
")",
"try",
":",
"console",
".",
"interact",
"(",
"banner",
")",
"except",
"SystemExit",
"as",
"e",
":",
"return",
"e",
".",
"code"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/apitools/samples/storage_sample/storage/storage_v1.py#L135-L163
|
||
protocolbuffers/protobuf
|
b5ab0b7a18b7336c60130f4ddb2d97c51792f896
|
python/google/protobuf/text_format.py
|
python
|
_ConsumeInt32
|
(tokenizer)
|
return _ConsumeInteger(tokenizer, is_signed=True, is_long=False)
|
Consumes a signed 32bit integer number from tokenizer.
Args:
tokenizer: A tokenizer used to parse the number.
Returns:
The integer parsed.
Raises:
ParseError: If a signed 32bit integer couldn't be consumed.
|
Consumes a signed 32bit integer number from tokenizer.
|
[
"Consumes",
"a",
"signed",
"32bit",
"integer",
"number",
"from",
"tokenizer",
"."
] |
def _ConsumeInt32(tokenizer):
"""Consumes a signed 32bit integer number from tokenizer.
Args:
tokenizer: A tokenizer used to parse the number.
Returns:
The integer parsed.
Raises:
ParseError: If a signed 32bit integer couldn't be consumed.
"""
return _ConsumeInteger(tokenizer, is_signed=True, is_long=False)
|
[
"def",
"_ConsumeInt32",
"(",
"tokenizer",
")",
":",
"return",
"_ConsumeInteger",
"(",
"tokenizer",
",",
"is_signed",
"=",
"True",
",",
"is_long",
"=",
"False",
")"
] |
https://github.com/protocolbuffers/protobuf/blob/b5ab0b7a18b7336c60130f4ddb2d97c51792f896/python/google/protobuf/text_format.py#L1561-L1573
|
|
netket/netket
|
0d534e54ecbf25b677ea72af6b85947979420652
|
netket/vqs/mc/kernels.py
|
python
|
local_value_op_op_cost
|
(logpsi: Callable, pars: PyTree, σ: Array, args: PyTree):
|
return jnp.sum(mel * jnp.exp(logpsi(pars, σ_σp) - logpsi(pars, σ_σ)))
|
local_value kernel for MCMixedState and generic operators
|
local_value kernel for MCMixedState and generic operators
|
[
"local_value",
"kernel",
"for",
"MCMixedState",
"and",
"generic",
"operators"
] |
def local_value_op_op_cost(logpsi: Callable, pars: PyTree, σ: Array, args: PyTree):
"""
local_value kernel for MCMixedState and generic operators
"""
σp, mel = args
σ_σp = jax.vmap(lambda σp, σ: jnp.hstack((σp, σ)), in_axes=(0, None))(σp, σ)
σ_σ = jnp.hstack((σ, σ))
return jnp.sum(mel * jnp.exp(logpsi(pars, σ_σp) - logpsi(pars, σ_σ)))
|
[
"def",
"local_value_op_op_cost",
"(",
"logpsi",
":",
"Callable",
",",
"pars",
":",
"PyTree",
",",
"σ:",
" ",
"rray,",
" ",
"rgs:",
" ",
"yTree)",
":",
"",
"σp,",
" ",
"el ",
" ",
"rgs",
"σ_σp =",
"j",
"x.v",
"m",
"ap(l",
"a",
"mbda σ",
", σ",
":",
"jn",
"p",
"hst",
"a",
"ck((σp",
",",
" ",
"σ))",
",",
"in",
"_",
"a",
"x",
"s=(0, N",
"o",
"n",
"e",
")",
"(σp,",
" ",
"σ",
")",
"",
"",
"",
"",
"σ_σ =",
"j",
"p.h",
"s",
"tack((",
"σ",
",",
" σ",
")",
"",
"",
"",
"return",
"jnp",
".",
"sum",
"(",
"mel",
"*",
"jnp",
".",
"exp",
"(",
"logpsi",
"(",
"pars",
",",
"σ_σp) ",
"-",
"l",
"gpsi(p",
"a",
"rs, ",
"σ",
"σ)))",
"",
"",
""
] |
https://github.com/netket/netket/blob/0d534e54ecbf25b677ea72af6b85947979420652/netket/vqs/mc/kernels.py#L70-L78
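A minimal, self-contained shape check for the kernel above. The toy log-amplitude, parameter pytree, and the sampled/connected configurations below are all illustrative stand-ins, not NetKet's real calling convention; the kernel is copied verbatim so the sketch runs on its own.

import jax
import jax.numpy as jnp
from typing import Callable

# Copy of the kernel above so the example is standalone.
def local_value_op_op_cost(logpsi: Callable, pars, σ, args):
    σp, mel = args
    σ_σp = jax.vmap(lambda σp, σ: jnp.hstack((σp, σ)), in_axes=(0, None))(σp, σ)
    σ_σ = jnp.hstack((σ, σ))
    return jnp.sum(mel * jnp.exp(logpsi(pars, σ_σp) - logpsi(pars, σ_σ)))

# Toy log-amplitude acting on the doubled (row plus column) configuration.
def logpsi(pars, x):
    return x @ pars["w"]

pars = {"w": jnp.linspace(0.1, 0.8, 8)}          # accepts length-8 inputs
σ = jnp.array([1.0, -1.0, 1.0, -1.0])            # sampled configuration
σp = jnp.array([[1.0, 1.0, 1.0, -1.0],            # two connected configurations
                [-1.0, -1.0, 1.0, -1.0]])
mel = jnp.array([0.5, 0.25])                      # their matrix elements

# Returns a single scalar: sum_i mel_i * exp(logpsi(σp_i ⊕ σ) - logpsi(σ ⊕ σ)).
print(local_value_op_op_cost(logpsi, pars, σ, (σp, mel)))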
|
|
giuspen/cherrytree
|
84712f206478fcf9acf30174009ad28c648c6344
|
pygtk2/modules/core.py
|
python
|
CherryTree.image_cut
|
(self, *args)
|
Cut Image
|
Cut Image
|
[
"Cut",
"Image"
] |
def image_cut(self, *args):
"""Cut Image"""
self.object_set_selection(self.curr_image_anchor)
self.sourceview.emit("cut-clipboard")
|
[
"def",
"image_cut",
"(",
"self",
",",
"*",
"args",
")",
":",
"self",
".",
"object_set_selection",
"(",
"self",
".",
"curr_image_anchor",
")",
"self",
".",
"sourceview",
".",
"emit",
"(",
"\"cut-clipboard\"",
")"
] |
https://github.com/giuspen/cherrytree/blob/84712f206478fcf9acf30174009ad28c648c6344/pygtk2/modules/core.py#L4357-L4360
|
||
citizenfx/fivem
|
88276d40cc7baf8285d02754cc5ae42ec7a8563f
|
vendor/chromium/base/win/embedded_i18n/create_string_rc.py
|
python
|
StringRcMaker.__ReadSourceAndTranslatedStrings
|
(self)
|
return translated_strings
|
Reads the source strings and translations from all inputs.
|
Reads the source strings and translations from all inputs.
|
[
"Reads",
"the",
"source",
"strings",
"and",
"translations",
"from",
"all",
"inputs",
"."
] |
def __ReadSourceAndTranslatedStrings(self):
"""Reads the source strings and translations from all inputs."""
translated_strings = []
all_xtb_files = []
for grd_file, xtb_dir in self.inputs:
# Get the name of the grd file sans extension.
source_name = os.path.splitext(os.path.basename(grd_file))[0]
# Compute a glob for the translation files.
xtb_pattern = os.path.join(os.path.dirname(grd_file), xtb_dir,
'%s*.xtb' % source_name)
local_xtb_files = [x.replace('\\', '/') for x in glob.glob(xtb_pattern)]
all_xtb_files.extend(local_xtb_files)
translated_strings.extend(
self.__ReadSourceAndTranslationsFrom(grd_file, local_xtb_files))
translated_strings.sort()
all_xtb_files.sort()
if self.expected_xtb_input_files != all_xtb_files:
extra = list(set(all_xtb_files) - set(self.expected_xtb_input_files))
missing = list(set(self.expected_xtb_input_files) - set(all_xtb_files))
error = '''Asserted file list does not match.
Expected input files:
{}
Actual input files:
{}
Missing input files:
{}
Extra input files:
{}
'''
print(error.format('\n'.join(self.expected_xtb_input_files),
'\n'.join(all_xtb_files), '\n'.join(missing),
'\n'.join(extra)))
sys.exit(1)
return translated_strings
|
[
"def",
"__ReadSourceAndTranslatedStrings",
"(",
"self",
")",
":",
"translated_strings",
"=",
"[",
"]",
"all_xtb_files",
"=",
"[",
"]",
"for",
"grd_file",
",",
"xtb_dir",
"in",
"self",
".",
"inputs",
":",
"# Get the name of the grd file sans extension.",
"source_name",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"grd_file",
")",
")",
"[",
"0",
"]",
"# Compute a glob for the translation files.",
"xtb_pattern",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"grd_file",
")",
",",
"xtb_dir",
",",
"'%s*.xtb'",
"%",
"source_name",
")",
"local_xtb_files",
"=",
"[",
"x",
".",
"replace",
"(",
"'\\\\'",
",",
"'/'",
")",
"for",
"x",
"in",
"glob",
".",
"glob",
"(",
"xtb_pattern",
")",
"]",
"all_xtb_files",
".",
"extend",
"(",
"local_xtb_files",
")",
"translated_strings",
".",
"extend",
"(",
"self",
".",
"__ReadSourceAndTranslationsFrom",
"(",
"grd_file",
",",
"local_xtb_files",
")",
")",
"translated_strings",
".",
"sort",
"(",
")",
"all_xtb_files",
".",
"sort",
"(",
")",
"if",
"self",
".",
"expected_xtb_input_files",
"!=",
"all_xtb_files",
":",
"extra",
"=",
"list",
"(",
"set",
"(",
"all_xtb_files",
")",
"-",
"set",
"(",
"self",
".",
"expected_xtb_input_files",
")",
")",
"missing",
"=",
"list",
"(",
"set",
"(",
"self",
".",
"expected_xtb_input_files",
")",
"-",
"set",
"(",
"all_xtb_files",
")",
")",
"error",
"=",
"'''Asserted file list does not match.\n\nExpected input files:\n{}\nActual input files:\n{}\nMissing input files:\n{}\nExtra input files:\n{}\n'''",
"print",
"(",
"error",
".",
"format",
"(",
"'\\n'",
".",
"join",
"(",
"self",
".",
"expected_xtb_input_files",
")",
",",
"'\\n'",
".",
"join",
"(",
"all_xtb_files",
")",
",",
"'\\n'",
".",
"join",
"(",
"missing",
")",
",",
"'\\n'",
".",
"join",
"(",
"extra",
")",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"return",
"translated_strings"
] |
https://github.com/citizenfx/fivem/blob/88276d40cc7baf8285d02754cc5ae42ec7a8563f/vendor/chromium/base/win/embedded_i18n/create_string_rc.py#L291-L326
|
|
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/profiler/parser/integrator.py
|
python
|
GpuTimelineGenerator.init_timeline
|
(self)
|
Init timeline metadata, adding all collected info.
|
Init timeline metadata, adding all collected info.
|
[
"Init",
"timeline",
"metadata",
"adding",
"all",
"collected",
"info",
"."
] |
def init_timeline(self):
"""Init timeline metadata, adding all collected info."""
timeline_list = self._load_timeline_data()
# Init a dict for counting the num of streams.
stream_count_dict = {}
for timeline in timeline_list:
self._parse_timeline_data(timeline, 0)
# Updating the collection of streams.
if len(timeline) == 4:
self._update_num_of_streams(timeline, stream_count_dict)
# Add format thread meta data.
self._format_meta_data_list.extend(self._timeline_meta)
self._timeline_meta = self._format_meta_data_list
# Update timeline summary info
self._timeline_summary['num_of_streams'] += len(stream_count_dict.keys())
|
[
"def",
"init_timeline",
"(",
"self",
")",
":",
"timeline_list",
"=",
"self",
".",
"_load_timeline_data",
"(",
")",
"# Init a dict for counting the num of streams.",
"stream_count_dict",
"=",
"{",
"}",
"for",
"timeline",
"in",
"timeline_list",
":",
"self",
".",
"_parse_timeline_data",
"(",
"timeline",
",",
"0",
")",
"# Updating the collection of streams.",
"if",
"len",
"(",
"timeline",
")",
"==",
"4",
":",
"self",
".",
"_update_num_of_streams",
"(",
"timeline",
",",
"stream_count_dict",
")",
"# Add format thread meta data.",
"self",
".",
"_format_meta_data_list",
".",
"extend",
"(",
"self",
".",
"_timeline_meta",
")",
"self",
".",
"_timeline_meta",
"=",
"self",
".",
"_format_meta_data_list",
"# Update timeline summary info",
"self",
".",
"_timeline_summary",
"[",
"'num_of_streams'",
"]",
"+=",
"len",
"(",
"stream_count_dict",
".",
"keys",
"(",
")",
")"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/profiler/parser/integrator.py#L964-L981
|
||
ukoethe/vigra
|
093d57d15c8c237adf1704d96daa6393158ce299
|
vigranumpy/lib/arraytypes.py
|
python
|
VigraArray.permutationFromVigraOrder
|
(self)
|
return list(self.axistags.permutationFromVigraOrder())
|
Create the permutation that would transpose an array that is
in VIGRA order (ascending spatial order, but with the channel
axis at the last position) into the axis order of this array.
(e.g. 'x y c' into 'c x y').
|
Create the permutation that would transpose an array that is
in VIGRA order (ascending spatial order, but with the channel
axis at the last position) into the axis order of this array.
(e.g. 'x y c' into 'c x y').
|
[
"Create",
"the",
"permutation",
"that",
"would",
"transpose",
"an",
"array",
"that",
"is",
"in",
"VIGRA",
"order",
"(",
"ascending",
"spatial",
"order",
"but",
"with",
"the",
"channel",
"axis",
"at",
"the",
"last",
"position",
")",
"into",
"the",
"axis",
"order",
"of",
"this",
"array",
".",
"(",
"e",
".",
"g",
".",
"x",
"y",
"c",
"into",
"c",
"x",
"y",
")",
"."
] |
def permutationFromVigraOrder(self):
'''Create the permutation that would transpose an array that is
in VIGRA order (ascending spatial order, but with the channel
axis at the last position) into the axis order of this array.
(e.g. 'x y c' into 'c x y').
'''
return list(self.axistags.permutationFromVigraOrder())
|
[
"def",
"permutationFromVigraOrder",
"(",
"self",
")",
":",
"return",
"list",
"(",
"self",
".",
"axistags",
".",
"permutationFromVigraOrder",
"(",
")",
")"
] |
https://github.com/ukoethe/vigra/blob/093d57d15c8c237adf1704d96daa6393158ce299/vigranumpy/lib/arraytypes.py#L1192-L1198
|
|
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2class.py
|
python
|
xmlTextReader.MoveToElement
|
(self)
|
return ret
|
Moves the position of the current instance to the node that
contains the current Attribute node.
|
Moves the position of the current instance to the node that
contains the current Attribute node.
|
[
"Moves",
"the",
"position",
"of",
"the",
"current",
"instance",
"to",
"the",
"node",
"that",
"contains",
"the",
"current",
"Attribute",
"node",
"."
] |
def MoveToElement(self):
"""Moves the position of the current instance to the node that
contains the current Attribute node. """
ret = libxml2mod.xmlTextReaderMoveToElement(self._o)
return ret
|
[
"def",
"MoveToElement",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlTextReaderMoveToElement",
"(",
"self",
".",
"_o",
")",
"return",
"ret"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L5916-L5920
|
|
gemrb/gemrb
|
730206eed8d1dd358ca5e69a62f9e099aa22ffc6
|
gemrb/GUIScripts/DualClass.py
|
python
|
DCClassDonePress
|
()
|
return
|
Stores the selected class and moves to the next step.
|
Stores the selected class and moves to the next step.
|
[
"Stores",
"the",
"selected",
"class",
"and",
"moves",
"to",
"the",
"next",
"step",
"."
] |
def DCClassDonePress ():
"""Stores the selected class and moves to the next step."""
global DCMainStep, ClassName, NewClassId
# unload our class selection window
if DCClassWindow:
DCClassWindow.Unload ()
DCMainWindow.ShowModal (MODAL_SHADOW_GRAY)
# enable the skills button and disable the class selection button
DCMainClassButton.SetState (IE_GUI_BUTTON_DISABLED)
DCMainSkillsButton.SetState (IE_GUI_BUTTON_ENABLED)
# save the class
ClassName = DCClasses[DCClass]
NewClassId = CommonTables.Classes.GetValue (ClassName, "ID", GTV_INT)
# set our step to 2 so that the back button knows where we are
DCMainStep = 2
return
|
[
"def",
"DCClassDonePress",
"(",
")",
":",
"global",
"DCMainStep",
",",
"ClassName",
",",
"NewClassId",
"# unload our class selection window",
"if",
"DCClassWindow",
":",
"DCClassWindow",
".",
"Unload",
"(",
")",
"DCMainWindow",
".",
"ShowModal",
"(",
"MODAL_SHADOW_GRAY",
")",
"# enable the skills button and disable the class selection button",
"DCMainClassButton",
".",
"SetState",
"(",
"IE_GUI_BUTTON_DISABLED",
")",
"DCMainSkillsButton",
".",
"SetState",
"(",
"IE_GUI_BUTTON_ENABLED",
")",
"# save the class",
"ClassName",
"=",
"DCClasses",
"[",
"DCClass",
"]",
"NewClassId",
"=",
"CommonTables",
".",
"Classes",
".",
"GetValue",
"(",
"ClassName",
",",
"\"ID\"",
",",
"GTV_INT",
")",
"# set our step to 2 so that the back button knows where we are",
"DCMainStep",
"=",
"2",
"return"
] |
https://github.com/gemrb/gemrb/blob/730206eed8d1dd358ca5e69a62f9e099aa22ffc6/gemrb/GUIScripts/DualClass.py#L395-L416
|
|
ChromiumWebApps/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
ui/gl/generate_bindings.py
|
python
|
ParseExtensionFunctionsFromHeader
|
(header_file)
|
return extensions
|
Parse a C extension header file and return a map from extension names to
a list of functions.
Args:
header_file: Line-iterable C header file.
Returns:
Map of extension name => functions.
|
Parse a C extension header file and return a map from extension names to
a list of functions.
|
[
"Parse",
"a",
"C",
"extension",
"header",
"file",
"and",
"return",
"a",
"map",
"from",
"extension",
"names",
"to",
"a",
"list",
"of",
"functions",
"."
] |
def ParseExtensionFunctionsFromHeader(header_file):
"""Parse a C extension header file and return a map from extension names to
a list of functions.
Args:
header_file: Line-iterable C header file.
Returns:
Map of extension name => functions.
"""
extension_start = re.compile(
r'#ifndef ((?:GL|EGL|WGL|GLX)_[A-Z]+_[a-zA-Z]\w+)')
extension_function = re.compile(r'.+\s+([a-z]+\w+)\s*\(')
typedef = re.compile(r'typedef .*')
macro_start = re.compile(r'^#(if|ifdef|ifndef).*')
macro_end = re.compile(r'^#endif.*')
macro_depth = 0
current_extension = None
current_extension_depth = 0
extensions = collections.defaultdict(lambda: [])
for line in header_file:
if macro_start.match(line):
macro_depth += 1
elif macro_end.match(line):
macro_depth -= 1
if macro_depth < current_extension_depth:
current_extension = None
match = extension_start.match(line)
if match:
current_extension = match.group(1)
current_extension_depth = macro_depth
assert current_extension not in extensions, \
"Duplicate extension: " + current_extension
match = extension_function.match(line)
if match and current_extension and not typedef.match(line):
extensions[current_extension].append(match.group(1))
return extensions
|
[
"def",
"ParseExtensionFunctionsFromHeader",
"(",
"header_file",
")",
":",
"extension_start",
"=",
"re",
".",
"compile",
"(",
"r'#ifndef ((?:GL|EGL|WGL|GLX)_[A-Z]+_[a-zA-Z]\\w+)'",
")",
"extension_function",
"=",
"re",
".",
"compile",
"(",
"r'.+\\s+([a-z]+\\w+)\\s*\\('",
")",
"typedef",
"=",
"re",
".",
"compile",
"(",
"r'typedef .*'",
")",
"macro_start",
"=",
"re",
".",
"compile",
"(",
"r'^#(if|ifdef|ifndef).*'",
")",
"macro_end",
"=",
"re",
".",
"compile",
"(",
"r'^#endif.*'",
")",
"macro_depth",
"=",
"0",
"current_extension",
"=",
"None",
"current_extension_depth",
"=",
"0",
"extensions",
"=",
"collections",
".",
"defaultdict",
"(",
"lambda",
":",
"[",
"]",
")",
"for",
"line",
"in",
"header_file",
":",
"if",
"macro_start",
".",
"match",
"(",
"line",
")",
":",
"macro_depth",
"+=",
"1",
"elif",
"macro_end",
".",
"match",
"(",
"line",
")",
":",
"macro_depth",
"-=",
"1",
"if",
"macro_depth",
"<",
"current_extension_depth",
":",
"current_extension",
"=",
"None",
"match",
"=",
"extension_start",
".",
"match",
"(",
"line",
")",
"if",
"match",
":",
"current_extension",
"=",
"match",
".",
"group",
"(",
"1",
")",
"current_extension_depth",
"=",
"macro_depth",
"assert",
"current_extension",
"not",
"in",
"extensions",
",",
"\"Duplicate extension: \"",
"+",
"current_extension",
"match",
"=",
"extension_function",
".",
"match",
"(",
"line",
")",
"if",
"match",
"and",
"current_extension",
"and",
"not",
"typedef",
".",
"match",
"(",
"line",
")",
":",
"extensions",
"[",
"current_extension",
"]",
".",
"append",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
"return",
"extensions"
] |
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/ui/gl/generate_bindings.py#L1796-L1831
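A hedged illustration of what the parser above produces. The header fragment is made up, and the call assumes ParseExtensionFunctionsFromHeader (and its re/collections imports) is in scope as defined in the function above.

# Hypothetical GL header fragment; any line iterable works, not just a file.
header_lines = [
    "#ifndef GL_EXT_example_extension",
    "#define GL_EXT_example_extension 1",
    "GLAPI void APIENTRY glExampleFuncEXT(GLenum target);",
    "typedef void (APIENTRYP PFNGLEXAMPLEFUNCEXTPROC)(GLenum target);",
    "#endif",
]

extensions = ParseExtensionFunctionsFromHeader(header_lines)
print(dict(extensions))
# Expected: {'GL_EXT_example_extension': ['glExampleFuncEXT']}
# The typedef line is skipped, and #endif closes the extension scope.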
|
|
ApolloAuto/apollo-platform
|
86d9dc6743b496ead18d597748ebabd34a513289
|
ros/ros_comm/rosservice/src/rosservice/__init__.py
|
python
|
get_service_headers
|
(service_name, service_uri)
|
Utility for connecting to a service and retrieving the TCPROS
headers. Services currently do not declare their type with the
master, so instead we probe the service for its headers.
@param service_name: name of service
@type service_name: str
@param service_uri: ROSRPC URI of service
@type service_uri: str
@return: map of header fields
@rtype: dict
@raise ROSServiceException: if service has invalid information
@raise ROSServiceIOException: if unable to communicate with service
|
Utility for connecting to a service and retrieving the TCPROS
headers. Services currently do not declare their type with the
master, so instead we probe the service for its headers.
|
[
"Utility",
"for",
"connecting",
"to",
"a",
"service",
"and",
"retrieving",
"the",
"TCPROS",
"headers",
".",
"Services",
"currently",
"do",
"not",
"declare",
"their",
"type",
"with",
"the",
"master",
"so",
"instead",
"we",
"probe",
"the",
"service",
"for",
"its",
"headers",
"."
] |
def get_service_headers(service_name, service_uri):
"""
Utility for connecting to a service and retrieving the TCPROS
headers. Services currently do not declare their type with the
master, so instead we probe the service for its headers.
@param service_name: name of service
@type service_name: str
@param service_uri: ROSRPC URI of service
@type service_uri: str
@return: map of header fields
@rtype: dict
@raise ROSServiceException: if service has invalid information
@raise ROSServiceIOException: if unable to communicate with service
"""
try:
dest_addr, dest_port = rospy.parse_rosrpc_uri(service_uri)
except:
raise ROSServiceException("service [%s] has an invalid RPC URI [%s]"%(service_name, service_uri))
if rosgraph.network.use_ipv6():
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
else:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
try:
# connect to service and probe it to get the headers
s.settimeout(5.0)
s.connect((dest_addr, dest_port))
header = { 'probe':'1', 'md5sum':'*',
'callerid':'/rosservice', 'service':service_name}
rosgraph.network.write_ros_handshake_header(s, header)
return rosgraph.network.read_ros_handshake_header(s, StringIO(), 2048)
except socket.error:
raise ROSServiceIOException("Unable to communicate with service [%s], address [%s]"%(service_name, service_uri))
finally:
if s is not None:
s.close()
|
[
"def",
"get_service_headers",
"(",
"service_name",
",",
"service_uri",
")",
":",
"try",
":",
"dest_addr",
",",
"dest_port",
"=",
"rospy",
".",
"parse_rosrpc_uri",
"(",
"service_uri",
")",
"except",
":",
"raise",
"ROSServiceException",
"(",
"\"service [%s] has an invalid RPC URI [%s]\"",
"%",
"(",
"service_name",
",",
"service_uri",
")",
")",
"if",
"rosgraph",
".",
"network",
".",
"use_ipv6",
"(",
")",
":",
"s",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET6",
",",
"socket",
".",
"SOCK_STREAM",
")",
"else",
":",
"s",
"=",
"socket",
".",
"socket",
"(",
"socket",
".",
"AF_INET",
",",
"socket",
".",
"SOCK_STREAM",
")",
"try",
":",
"try",
":",
"# connect to service and probe it to get the headers",
"s",
".",
"settimeout",
"(",
"5.0",
")",
"s",
".",
"connect",
"(",
"(",
"dest_addr",
",",
"dest_port",
")",
")",
"header",
"=",
"{",
"'probe'",
":",
"'1'",
",",
"'md5sum'",
":",
"'*'",
",",
"'callerid'",
":",
"'/rosservice'",
",",
"'service'",
":",
"service_name",
"}",
"rosgraph",
".",
"network",
".",
"write_ros_handshake_header",
"(",
"s",
",",
"header",
")",
"return",
"rosgraph",
".",
"network",
".",
"read_ros_handshake_header",
"(",
"s",
",",
"StringIO",
"(",
")",
",",
"2048",
")",
"except",
"socket",
".",
"error",
":",
"raise",
"ROSServiceIOException",
"(",
"\"Unable to communicate with service [%s], address [%s]\"",
"%",
"(",
"service_name",
",",
"service_uri",
")",
")",
"finally",
":",
"if",
"s",
"is",
"not",
"None",
":",
"s",
".",
"close",
"(",
")"
] |
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros_comm/rosservice/src/rosservice/__init__.py#L94-L129
|
||
Manu343726/siplasplas
|
9fae7559f87087cf8ef34f04bd1e774b84b2ea9c
|
reference/cindex.py
|
python
|
Type.is_volatile_qualified
|
(self)
|
return conf.lib.clang_isVolatileQualifiedType(self)
|
Determine whether a Type has the "volatile" qualifier set.
This does not look through typedefs that may have added "volatile"
at a different level.
|
Determine whether a Type has the "volatile" qualifier set.
|
[
"Determine",
"whether",
"a",
"Type",
"has",
"the",
"volatile",
"qualifier",
"set",
"."
] |
def is_volatile_qualified(self):
"""Determine whether a Type has the "volatile" qualifier set.
This does not look through typedefs that may have added "volatile"
at a different level.
"""
return conf.lib.clang_isVolatileQualifiedType(self)
|
[
"def",
"is_volatile_qualified",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_isVolatileQualifiedType",
"(",
"self",
")"
] |
https://github.com/Manu343726/siplasplas/blob/9fae7559f87087cf8ef34f04bd1e774b84b2ea9c/reference/cindex.py#L1869-L1875
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/pytables.py
|
python
|
HDFStore._validate_format
|
(self, format: str)
|
return format
|
validate / deprecate formats
|
validate / deprecate formats
|
[
"validate",
"/",
"deprecate",
"formats"
] |
def _validate_format(self, format: str) -> str:
""" validate / deprecate formats """
# validate
try:
format = _FORMAT_MAP[format.lower()]
except KeyError:
raise TypeError(f"invalid HDFStore format specified [{format}]")
return format
|
[
"def",
"_validate_format",
"(",
"self",
",",
"format",
":",
"str",
")",
"->",
"str",
":",
"# validate",
"try",
":",
"format",
"=",
"_FORMAT_MAP",
"[",
"format",
".",
"lower",
"(",
")",
"]",
"except",
"KeyError",
":",
"raise",
"TypeError",
"(",
"f\"invalid HDFStore format specified [{format}]\"",
")",
"return",
"format"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/pytables.py#L1524-L1533
|
|
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
torch/distributions/continuous_bernoulli.py
|
python
|
ContinuousBernoulli._log_normalizer
|
(self, x)
|
return torch.where(out_unst_reg, log_norm, taylor)
|
computes the log normalizing constant as a function of the natural parameter
|
computes the log normalizing constant as a function of the natural parameter
|
[
"computes",
"the",
"log",
"normalizing",
"constant",
"as",
"a",
"function",
"of",
"the",
"natural",
"parameter"
] |
def _log_normalizer(self, x):
"""computes the log normalizing constant as a function of the natural parameter"""
out_unst_reg = torch.max(torch.le(x, self._lims[0] - 0.5),
torch.gt(x, self._lims[1] - 0.5))
cut_nat_params = torch.where(out_unst_reg,
x,
(self._lims[0] - 0.5) * torch.ones_like(x))
log_norm = torch.log(torch.abs(torch.exp(cut_nat_params) - 1.0)) - torch.log(torch.abs(cut_nat_params))
taylor = 0.5 * x + torch.pow(x, 2) / 24.0 - torch.pow(x, 4) / 2880.0
return torch.where(out_unst_reg, log_norm, taylor)
|
[
"def",
"_log_normalizer",
"(",
"self",
",",
"x",
")",
":",
"out_unst_reg",
"=",
"torch",
".",
"max",
"(",
"torch",
".",
"le",
"(",
"x",
",",
"self",
".",
"_lims",
"[",
"0",
"]",
"-",
"0.5",
")",
",",
"torch",
".",
"gt",
"(",
"x",
",",
"self",
".",
"_lims",
"[",
"1",
"]",
"-",
"0.5",
")",
")",
"cut_nat_params",
"=",
"torch",
".",
"where",
"(",
"out_unst_reg",
",",
"x",
",",
"(",
"self",
".",
"_lims",
"[",
"0",
"]",
"-",
"0.5",
")",
"*",
"torch",
".",
"ones_like",
"(",
"x",
")",
")",
"log_norm",
"=",
"torch",
".",
"log",
"(",
"torch",
".",
"abs",
"(",
"torch",
".",
"exp",
"(",
"cut_nat_params",
")",
"-",
"1.0",
")",
")",
"-",
"torch",
".",
"log",
"(",
"torch",
".",
"abs",
"(",
"cut_nat_params",
")",
")",
"taylor",
"=",
"0.5",
"*",
"x",
"+",
"torch",
".",
"pow",
"(",
"x",
",",
"2",
")",
"/",
"24.0",
"-",
"torch",
".",
"pow",
"(",
"x",
",",
"4",
")",
"/",
"2880.0",
"return",
"torch",
".",
"where",
"(",
"out_unst_reg",
",",
"log_norm",
",",
"taylor",
")"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/distributions/continuous_bernoulli.py#L187-L196
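The branch logic above guards the closed form log((exp(x) - 1) / |x|) against its 0/0 instability at x near 0, switching to the Taylor series x/2 + x**2/24 - x**4/2880 inside the unstable region. A quick numerical sketch in plain torch (the sample points are arbitrary, not the distribution's real _lims thresholds):

import torch

x = torch.tensor([0.01, 0.1], dtype=torch.float64)   # natural parameters near zero
direct = torch.log(torch.abs(torch.exp(x) - 1.0)) - torch.log(torch.abs(x))
taylor = 0.5 * x + torch.pow(x, 2) / 24.0 - torch.pow(x, 4) / 2880.0
print(direct)
print(taylor)
# The two branches agree to several decimal places in this region; the kernel
# selects between them per element using the out_unst_reg mask.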
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/functools.py
|
python
|
cmp_to_key
|
(mycmp)
|
return K
|
Convert a cmp= function into a key= function
|
Convert a cmp= function into a key= function
|
[
"Convert",
"a",
"cmp",
"=",
"function",
"into",
"a",
"key",
"=",
"function"
] |
def cmp_to_key(mycmp):
"""Convert a cmp= function into a key= function"""
class K(object):
__slots__ = ['obj']
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
__hash__ = None
return K
|
[
"def",
"cmp_to_key",
"(",
"mycmp",
")",
":",
"class",
"K",
"(",
"object",
")",
":",
"__slots__",
"=",
"[",
"'obj'",
"]",
"def",
"__init__",
"(",
"self",
",",
"obj",
")",
":",
"self",
".",
"obj",
"=",
"obj",
"def",
"__lt__",
"(",
"self",
",",
"other",
")",
":",
"return",
"mycmp",
"(",
"self",
".",
"obj",
",",
"other",
".",
"obj",
")",
"<",
"0",
"def",
"__gt__",
"(",
"self",
",",
"other",
")",
":",
"return",
"mycmp",
"(",
"self",
".",
"obj",
",",
"other",
".",
"obj",
")",
">",
"0",
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"return",
"mycmp",
"(",
"self",
".",
"obj",
",",
"other",
".",
"obj",
")",
"==",
"0",
"def",
"__le__",
"(",
"self",
",",
"other",
")",
":",
"return",
"mycmp",
"(",
"self",
".",
"obj",
",",
"other",
".",
"obj",
")",
"<=",
"0",
"def",
"__ge__",
"(",
"self",
",",
"other",
")",
":",
"return",
"mycmp",
"(",
"self",
".",
"obj",
",",
"other",
".",
"obj",
")",
">=",
"0",
"__hash__",
"=",
"None",
"return",
"K"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/functools.py#L206-L223
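A short usage sketch for the adapter above; this is standard-library behaviour, nothing assumed beyond the function itself.

from functools import cmp_to_key

def reverse_numeric(a, b):
    """Old-style three-way comparison: positive means a sorts after b."""
    return b - a

print(sorted([5, 2, 9, 1], key=cmp_to_key(reverse_numeric)))  # [9, 5, 2, 1]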
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_windows.py
|
python
|
WindowModalDialogEvent.GetReturnCode
|
(*args, **kwargs)
|
return _windows_.WindowModalDialogEvent_GetReturnCode(*args, **kwargs)
|
GetReturnCode(self) -> int
|
GetReturnCode(self) -> int
|
[
"GetReturnCode",
"(",
"self",
")",
"-",
">",
"int"
] |
def GetReturnCode(*args, **kwargs):
"""GetReturnCode(self) -> int"""
return _windows_.WindowModalDialogEvent_GetReturnCode(*args, **kwargs)
|
[
"def",
"GetReturnCode",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"WindowModalDialogEvent_GetReturnCode",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L1057-L1059
|
|
deepmodeling/deepmd-kit
|
159e45d248b0429844fb6a8cb3b3a201987c8d79
|
deepmd/infer/deep_wfc.py
|
python
|
DeepWFC.get_dim_aparam
|
(self)
|
Unsupported in this model.
|
Unsupported in this model.
|
[
"Unsupported",
"in",
"this",
"model",
"."
] |
def get_dim_aparam(self) -> int:
"""Unsupported in this model."""
raise NotImplementedError("This model type does not support this attribute")
|
[
"def",
"get_dim_aparam",
"(",
"self",
")",
"->",
"int",
":",
"raise",
"NotImplementedError",
"(",
"\"This model type does not support this attribute\"",
")"
] |
https://github.com/deepmodeling/deepmd-kit/blob/159e45d248b0429844fb6a8cb3b3a201987c8d79/deepmd/infer/deep_wfc.py#L52-L54
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/imghdr.py
|
python
|
test_pbm
|
(h, f)
|
PBM (portable bitmap)
|
PBM (portable bitmap)
|
[
"PBM",
"(",
"portable",
"bitmap",
")"
] |
def test_pbm(h, f):
"""PBM (portable bitmap)"""
if len(h) >= 3 and \
h[0] == ord(b'P') and h[1] in b'14' and h[2] in b' \t\n\r':
return 'pbm'
|
[
"def",
"test_pbm",
"(",
"h",
",",
"f",
")",
":",
"if",
"len",
"(",
"h",
")",
">=",
"3",
"and",
"h",
"[",
"0",
"]",
"==",
"ord",
"(",
"b'P'",
")",
"and",
"h",
"[",
"1",
"]",
"in",
"b'14'",
"and",
"h",
"[",
"2",
"]",
"in",
"b' \\t\\n\\r'",
":",
"return",
"'pbm'"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/imghdr.py#L71-L75
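A quick check of the PBM sniffing rule above through the imghdr front end. The header bytes are a made-up minimal example, and note that the imghdr module is deprecated in recent Python releases.

import imghdr

header = b"P1\n# one white pixel\n1 1\n0\n"
print(imghdr.what(None, h=header))   # 'pbm', matched by test_pbm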
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Tools/pybench/pybench.py
|
python
|
Benchmark.get_timer
|
(self)
|
return get_timer(self.timer)
|
Return the timer function to use for the test.
|
Return the timer function to use for the test.
|
[
"Return",
"the",
"timer",
"function",
"to",
"use",
"for",
"the",
"test",
"."
] |
def get_timer(self):
""" Return the timer function to use for the test.
"""
return get_timer(self.timer)
|
[
"def",
"get_timer",
"(",
"self",
")",
":",
"return",
"get_timer",
"(",
"self",
".",
"timer",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Tools/pybench/pybench.py#L447-L452
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/aui/auibook.py
|
python
|
TabNavigatorWindow.CloseDialog
|
(self)
|
Closes the :class:`TabNavigatorWindow` dialog, setting selection in :class:`AuiNotebook`.
|
Closes the :class:`TabNavigatorWindow` dialog, setting selection in :class:`AuiNotebook`.
|
[
"Closes",
"the",
":",
"class",
":",
"TabNavigatorWindow",
"dialog",
"setting",
"selection",
"in",
":",
"class",
":",
"AuiNotebook",
"."
] |
def CloseDialog(self):
""" Closes the :class:`TabNavigatorWindow` dialog, setting selection in :class:`AuiNotebook`. """
bk = self.GetParent()
self._selectedItem = self._listBox.GetSelection()
self.EndModal(wx.ID_OK)
|
[
"def",
"CloseDialog",
"(",
"self",
")",
":",
"bk",
"=",
"self",
".",
"GetParent",
"(",
")",
"self",
".",
"_selectedItem",
"=",
"self",
".",
"_listBox",
".",
"GetSelection",
"(",
")",
"self",
".",
"EndModal",
"(",
"wx",
".",
"ID_OK",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/auibook.py#L758-L763
|
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/collections.py
|
python
|
OrderedDict.viewkeys
|
(self)
|
return KeysView(self)
|
od.viewkeys() -> a set-like object providing a view on od's keys
|
od.viewkeys() -> a set-like object providing a view on od's keys
|
[
"od",
".",
"viewkeys",
"()",
"-",
">",
"a",
"set",
"-",
"like",
"object",
"providing",
"a",
"view",
"on",
"od",
"s",
"keys"
] |
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
|
[
"def",
"viewkeys",
"(",
"self",
")",
":",
"return",
"KeysView",
"(",
"self",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/collections.py#L217-L219
|
|
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
src/external/coremltools_wrap/coremltools/coremltools/converters/sklearn/_converter.py
|
python
|
convert
|
(sk_obj, input_features=None, output_feature_names=None)
|
return model
|
Convert scikit-learn pipeline, classifier, or regressor to Core ML format.
Parameters
----------
sk_obj: model | [model] of scikit-learn format.
Scikit learn model(s) to convert to a Core ML format.
The input model may be a single scikit learn model, a scikit learn
pipeline model, or a list of scikit learn models.
Currently supported scikit learn models are:
- Linear and Logistic Regression
- LinearSVC and LinearSVR
- SVC and SVR
- NuSVC and NuSVR
- Gradient Boosting Classifier and Regressor
- Decision Tree Classifier and Regressor
- Random Forest Classifier and Regressor
- Normalizer
- Imputer
- Standard Scaler
- DictVectorizer
- One Hot Encoder
- KNeighborsClassifier
The input model, or the last model in a pipeline or list of models,
determines whether this is exposed as a Transformer, Regressor,
or Classifier.
Note that there may not be a one-to-one correspondence between scikit
learn models and which Core ML models are used to represent them. For
example, many scikit learn models are embedded in a pipeline to handle
processing of input features.
input_features: str | dict | list
Optional name(s) that can be given to the inputs of the scikit-learn
model. Defaults to 'input'.
Input features can be specified in a number of forms.
- Single string: In this case, the input is assumed to be a single
array, with the number of dimensions set using num_dimensions.
- List of strings: In this case, the overall input dimensions to the
scikit-learn model is assumed to be the length of the list. If
neighboring names are identical, they are assumed to be an input
array of that length. For example:
["a", "b", "c"]
resolves to
[("a", Double), ("b", Double), ("c", Double)].
And:
["a", "a", "b"]
resolves to
[("a", Array(2)), ("b", Double)].
- Dictionary: Where the keys are the names and the indices or ranges of
feature indices.
In this case, it's presented as a mapping from keys to indices or
ranges of contiguous indices. For example,
{"a" : 0, "b" : [2,3], "c" : 1}
Resolves to
[("a", Double), ("c", Double), ("b", Array(2))].
Note that the ordering is determined by the indices.
- List of tuples of the form `(name, datatype)`. Here, `name` is the
name of the exposed feature, and `datatype` is an instance of
`String`, `Double`, `Int64`, `Array`, or `Dictionary`.
output_feature_names: string or list of strings
Optional name(s) that can be given to the inputs of the scikit-learn
model.
The output_feature_names is interpreted according to the model type:
- If the scikit-learn model is a transformer, it is the name of the
array feature output by the final sequence of the transformer
(defaults to "output").
- If it is a classifier, it should be a 2-tuple of names giving the top
class prediction and the array of scores for each class (defaults to
"classLabel" and "classScores").
- If it is a regressor, it should give the name of the prediction value
(defaults to "prediction").
Returns
-------
model:MLModel
Returns an MLModel instance representing a Core ML model.
Examples
--------
.. sourcecode:: python
>>> from sklearn.linear_model import LinearRegression
>>> import pandas as pd
# Load data
>>> data = pd.read_csv('houses.csv')
# Train a model
>>> model = LinearRegression()
>>> model.fit(data[["bedroom", "bath", "size"]], data["price"])
# Convert and save the scikit-learn model
>>> import coremltools
>>> coreml_model = coremltools.converters.sklearn.convert(model,
["bedroom", "bath", "size"],
"price")
>>> coreml_model.save('HousePricer.mlmodel')
|
Convert scikit-learn pipeline, classifier, or regressor to Core ML format.
|
[
"Convert",
"scikit",
"-",
"learn",
"pipeline",
"classifier",
"or",
"regressor",
"to",
"Core",
"ML",
"format",
"."
] |
def convert(sk_obj, input_features=None, output_feature_names=None):
"""
Convert scikit-learn pipeline, classifier, or regressor to Core ML format.
Parameters
----------
sk_obj: model | [model] of scikit-learn format.
Scikit learn model(s) to convert to a Core ML format.
The input model may be a single scikit learn model, a scikit learn
pipeline model, or a list of scikit learn models.
Currently supported scikit learn models are:
- Linear and Logistic Regression
- LinearSVC and LinearSVR
- SVC and SVR
- NuSVC and NuSVR
- Gradient Boosting Classifier and Regressor
- Decision Tree Classifier and Regressor
- Random Forest Classifier and Regressor
- Normalizer
- Imputer
- Standard Scaler
- DictVectorizer
- One Hot Encoder
- KNeighborsClassifier
The input model, or the last model in a pipeline or list of models,
determines whether this is exposed as a Transformer, Regressor,
or Classifier.
Note that there may not be a one-to-one correspondence between scikit
learn models and which Core ML models are used to represent them. For
example, many scikit learn models are embedded in a pipeline to handle
processing of input features.
input_features: str | dict | list
Optional name(s) that can be given to the inputs of the scikit-learn
model. Defaults to 'input'.
Input features can be specified in a number of forms.
- Single string: In this case, the input is assumed to be a single
array, with the number of dimensions set using num_dimensions.
- List of strings: In this case, the overall input dimensions to the
scikit-learn model is assumed to be the length of the list. If
neighboring names are identical, they are assumed to be an input
array of that length. For example:
["a", "b", "c"]
resolves to
[("a", Double), ("b", Double), ("c", Double)].
And:
["a", "a", "b"]
resolves to
[("a", Array(2)), ("b", Double)].
- Dictionary: Where the keys are the names and the indices or ranges of
feature indices.
In this case, it's presented as a mapping from keys to indices or
ranges of contiguous indices. For example,
{"a" : 0, "b" : [2,3], "c" : 1}
Resolves to
[("a", Double), ("c", Double), ("b", Array(2))].
Note that the ordering is determined by the indices.
- List of tuples of the form `(name, datatype)`. Here, `name` is the
name of the exposed feature, and `datatype` is an instance of
`String`, `Double`, `Int64`, `Array`, or `Dictionary`.
output_feature_names: string or list of strings
Optional name(s) that can be given to the inputs of the scikit-learn
model.
The output_feature_names is interpreted according to the model type:
- If the scikit-learn model is a transformer, it is the name of the
array feature output by the final sequence of the transformer
(defaults to "output").
- If it is a classifier, it should be a 2-tuple of names giving the top
class prediction and the array of scores for each class (defaults to
"classLabel" and "classScores").
- If it is a regressor, it should give the name of the prediction value
(defaults to "prediction").
Returns
-------
model:MLModel
Returns an MLModel instance representing a Core ML model.
Examples
--------
.. sourcecode:: python
>>> from sklearn.linear_model import LinearRegression
>>> import pandas as pd
# Load data
>>> data = pd.read_csv('houses.csv')
# Train a model
>>> model = LinearRegression()
>>> model.fit(data[["bedroom", "bath", "size"]], data["price"])
# Convert and save the scikit-learn model
>>> import coremltools
>>> coreml_model = coremltools.converters.sklearn.convert(model,
["bedroom", "bath", "size"],
"price")
>>> coreml_model.save('HousePricer.mlmodel')
"""
# This function is just a thin wrapper around the internal converter so
# that sklearn isn't actually imported unless this function is called
from ...models import MLModel
# NOTE: Providing user-defined class labels will be enabled when
# several issues with the ordering of the classes are worked out. For now,
# to use custom class labels, directly import the internal function below.
from ._converter_internal import _convert_sklearn_model
spec = _convert_sklearn_model(
sk_obj, input_features, output_feature_names, class_labels=None
)
model = MLModel(spec)
from sklearn import __version__ as sklearn_version
model.user_defined_metadata[_METADATA_VERSION] = ct_version
model.user_defined_metadata[_METADATA_SOURCE] = "scikit-learn=={0}".format(
sklearn_version
)
return model
|
[
"def",
"convert",
"(",
"sk_obj",
",",
"input_features",
"=",
"None",
",",
"output_feature_names",
"=",
"None",
")",
":",
"# This function is just a thin wrapper around the internal converter so",
"# that sklearn isn't actually imported unless this function is called",
"from",
".",
".",
".",
"models",
"import",
"MLModel",
"# NOTE: Providing user-defined class labels will be enabled when",
"# several issues with the ordering of the classes are worked out. For now,",
"# to use custom class labels, directly import the internal function below.",
"from",
".",
"_converter_internal",
"import",
"_convert_sklearn_model",
"spec",
"=",
"_convert_sklearn_model",
"(",
"sk_obj",
",",
"input_features",
",",
"output_feature_names",
",",
"class_labels",
"=",
"None",
")",
"model",
"=",
"MLModel",
"(",
"spec",
")",
"from",
"sklearn",
"import",
"__version__",
"as",
"sklearn_version",
"model",
".",
"user_defined_metadata",
"[",
"_METADATA_VERSION",
"]",
"=",
"ct_version",
"model",
".",
"user_defined_metadata",
"[",
"_METADATA_SOURCE",
"]",
"=",
"\"scikit-learn=={0}\"",
".",
"format",
"(",
"sklearn_version",
")",
"return",
"model"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/coremltools/converters/sklearn/_converter.py#L14-L161
|
|
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/internal/encoder.py
|
python
|
GroupEncoder
|
(field_number, is_repeated, is_packed)
|
Returns an encoder for a group field.
|
Returns an encoder for a group field.
|
[
"Returns",
"an",
"encoder",
"for",
"a",
"group",
"field",
"."
] |
def GroupEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a group field."""
start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
assert not is_packed
if is_repeated:
def EncodeRepeatedField(write, value):
for element in value:
write(start_tag)
element._InternalSerialize(write)
write(end_tag)
return EncodeRepeatedField
else:
def EncodeField(write, value):
write(start_tag)
value._InternalSerialize(write)
return write(end_tag)
return EncodeField
|
[
"def",
"GroupEncoder",
"(",
"field_number",
",",
"is_repeated",
",",
"is_packed",
")",
":",
"start_tag",
"=",
"TagBytes",
"(",
"field_number",
",",
"wire_format",
".",
"WIRETYPE_START_GROUP",
")",
"end_tag",
"=",
"TagBytes",
"(",
"field_number",
",",
"wire_format",
".",
"WIRETYPE_END_GROUP",
")",
"assert",
"not",
"is_packed",
"if",
"is_repeated",
":",
"def",
"EncodeRepeatedField",
"(",
"write",
",",
"value",
")",
":",
"for",
"element",
"in",
"value",
":",
"write",
"(",
"start_tag",
")",
"element",
".",
"_InternalSerialize",
"(",
"write",
")",
"write",
"(",
"end_tag",
")",
"return",
"EncodeRepeatedField",
"else",
":",
"def",
"EncodeField",
"(",
"write",
",",
"value",
")",
":",
"write",
"(",
"start_tag",
")",
"value",
".",
"_InternalSerialize",
"(",
"write",
")",
"return",
"write",
"(",
"end_tag",
")",
"return",
"EncodeField"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/internal/encoder.py#L728-L746
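The factory above specializes an encoder closure once per field, at construction time, rather than re-checking is_repeated on every call. A stripped-down sketch of the same pattern, with made-up tags and no real protobuf wire format:

def make_group_encoder(start_tag: bytes, end_tag: bytes, is_repeated: bool):
    """Return an encode function specialized for repeated or singular fields."""
    if is_repeated:
        def encode_repeated(write, values):
            for element in values:
                write(start_tag)
                write(element)        # stand-in for element._InternalSerialize(write)
                write(end_tag)
        return encode_repeated
    def encode(write, value):
        write(start_tag)
        write(value)
        write(end_tag)
    return encode

out = bytearray()
make_group_encoder(b"<", b">", True)(out.extend, [b"a", b"b"])
print(bytes(out))   # b'<a><b>'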
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/grid.py
|
python
|
GridCellAttr.HasReadWriteMode
|
(*args, **kwargs)
|
return _grid.GridCellAttr_HasReadWriteMode(*args, **kwargs)
|
HasReadWriteMode(self) -> bool
|
HasReadWriteMode(self) -> bool
|
[
"HasReadWriteMode",
"(",
"self",
")",
"-",
">",
"bool"
] |
def HasReadWriteMode(*args, **kwargs):
"""HasReadWriteMode(self) -> bool"""
return _grid.GridCellAttr_HasReadWriteMode(*args, **kwargs)
|
[
"def",
"HasReadWriteMode",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"GridCellAttr_HasReadWriteMode",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/grid.py#L607-L609
|
|
Salensoft/thu-cst-cracker
|
f7f6b4de460aaac6da3d776ab28d9175e8b32ae2
|
大三上/软件工程/hw/2015/Homework/作业2 - 代码风格/01_code_style/my_re.py
|
python
|
subn
|
(pattern, repl, string, count=0, flags=0)
|
return _compile(pattern, flags).subn(repl, string, count)
|
Return a 2-tuple containing (new_string, number).
new_string is the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in the source
string by the replacement repl. number is the number of
substitutions that were made. repl can be either a string or a
callable; if a string, backslash escapes in it are processed.
If it is a callable, it's passed the match object and must
return a replacement string to be used.
|
Return a 2-tuple containing (new_string, number).
new_string is the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in the source
string by the replacement repl. number is the number of
substitutions that were made. repl can be either a string or a
callable; if a string, backslash escapes in it are processed.
If it is a callable, it's passed the match object and must
return a replacement string to be used.
|
[
"Return",
"a",
"2",
"-",
"tuple",
"containing",
"(",
"new_string",
"number",
")",
".",
"new_string",
"is",
"the",
"string",
"obtained",
"by",
"replacing",
"the",
"leftmost",
"non",
"-",
"overlapping",
"occurrences",
"of",
"the",
"pattern",
"in",
"the",
"source",
"string",
"by",
"the",
"replacement",
"repl",
".",
"number",
"is",
"the",
"number",
"of",
"substitutions",
"that",
"were",
"made",
".",
"repl",
"can",
"be",
"either",
"a",
"string",
"or",
"a",
"callable",
";",
"if",
"a",
"string",
"backslash",
"escapes",
"in",
"it",
"are",
"processed",
".",
"If",
"it",
"is",
"a",
"callable",
"it",
"s",
"passed",
"the",
"match",
"object",
"and",
"must",
"return",
"a",
"replacement",
"string",
"to",
"be",
"used",
"."
] |
def subn(pattern, repl, string, count=0, flags=0):
"""Return a 2-tuple containing (new_string, number).
new_string is the string obtained by replacing the leftmost
non-overlapping occurrences of the pattern in the source
string by the replacement repl. number is the number of
substitutions that were made. repl can be either a string or a
callable; if a string, backslash escapes in it are processed.
If it is a callable, it's passed the match object and must
return a replacement string to be used."""
return _compile(pattern, flags).subn(repl, string, count)
|
[
"def",
"subn",
"(",
"pattern",
",",
"repl",
",",
"string",
",",
"count",
"=",
"0",
",",
"flags",
"=",
"0",
")",
":",
"return",
"_compile",
"(",
"pattern",
",",
"flags",
")",
".",
"subn",
"(",
"repl",
",",
"string",
",",
"count",
")"
] |
https://github.com/Salensoft/thu-cst-cracker/blob/f7f6b4de460aaac6da3d776ab28d9175e8b32ae2/大三上/软件工程/hw/2015/Homework/作业2 - 代码风格/01_code_style/my_re.py#L159-L168
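Since the copy above delegates straight to the compiled pattern, its behaviour matches the standard library's re.subn; for example:

import re

print(re.subn(r"\d+", "#", "room 12, floor 3"))
# ('room #, floor #', 2)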
|
|
baidu-research/tensorflow-allreduce
|
66d5b855e90b0949e9fa5cca5599fd729a70e874
|
tensorflow/contrib/distributions/python/ops/deterministic.py
|
python
|
VectorDeterministic.__init__
|
(self,
loc,
atol=None,
rtol=None,
validate_args=False,
allow_nan_stats=True,
name="VectorDeterministic")
|
Initialize a `VectorDeterministic` distribution on `R^k`, for `k >= 0`.
Note that there is only one point in `R^0`, the "point" `[]`. So if `k = 0`
then `self.prob([]) == 1`.
The `atol` and `rtol` parameters allow for some slack in `pmf`
computations, e.g. due to floating-point error.
```
pmf(x; loc)
= 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
= 0, otherwise
```
Args:
loc: Numeric `Tensor` of shape `[B1, ..., Bb, k]`, with `b >= 0`, `k >= 0`
The point (or batch of points) on which this distribution is supported.
atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The absolute tolerance for comparing closeness to `loc`.
Default is `0`.
rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The relative tolerance for comparing closeness to `loc`.
Default is `0`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
|
Initialize a `VectorDeterministic` distribution on `R^k`, for `k >= 0`.
|
[
"Initialize",
"a",
"VectorDeterministic",
"distribution",
"on",
"R^k",
"for",
"k",
">",
"=",
"0",
"."
] |
def __init__(self,
loc,
atol=None,
rtol=None,
validate_args=False,
allow_nan_stats=True,
name="VectorDeterministic"):
"""Initialize a `VectorDeterministic` distribution on `R^k`, for `k >= 0`.
Note that there is only one point in `R^0`, the "point" `[]`. So if `k = 0`
then `self.prob([]) == 1`.
The `atol` and `rtol` parameters allow for some slack in `pmf`
computations, e.g. due to floating-point error.
```
pmf(x; loc)
= 1, if All[Abs(x - loc) <= atol + rtol * Abs(loc)],
= 0, otherwise
```
Args:
loc: Numeric `Tensor` of shape `[B1, ..., Bb, k]`, with `b >= 0`, `k >= 0`
The point (or batch of points) on which this distribution is supported.
atol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The absolute tolerance for comparing closeness to `loc`.
Default is `0`.
rtol: Non-negative `Tensor` of same `dtype` as `loc` and broadcastable
shape. The relative tolerance for comparing closeness to `loc`.
Default is `0`.
validate_args: Python `bool`, default `False`. When `True` distribution
parameters are checked for validity despite possibly degrading runtime
performance. When `False` invalid inputs may silently render incorrect
outputs.
allow_nan_stats: Python `bool`, default `True`. When `True`, statistics
(e.g., mean, mode, variance) use the value "`NaN`" to indicate the
result is undefined. When `False`, an exception is raised if one or
more of the statistic's batch members are undefined.
name: Python `str` name prefixed to Ops created by this class.
"""
super(VectorDeterministic, self).__init__(
loc,
atol=atol,
rtol=rtol,
is_vector=True,
validate_args=validate_args,
allow_nan_stats=allow_nan_stats,
name=name)
|
[
"def",
"__init__",
"(",
"self",
",",
"loc",
",",
"atol",
"=",
"None",
",",
"rtol",
"=",
"None",
",",
"validate_args",
"=",
"False",
",",
"allow_nan_stats",
"=",
"True",
",",
"name",
"=",
"\"VectorDeterministic\"",
")",
":",
"super",
"(",
"VectorDeterministic",
",",
"self",
")",
".",
"__init__",
"(",
"loc",
",",
"atol",
"=",
"atol",
",",
"rtol",
"=",
"rtol",
",",
"is_vector",
"=",
"True",
",",
"validate_args",
"=",
"validate_args",
",",
"allow_nan_stats",
"=",
"allow_nan_stats",
",",
"name",
"=",
"name",
")"
] |
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/distributions/python/ops/deterministic.py#L309-L356
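A plain-NumPy restatement of the pmf rule quoted in the docstring above, for illustration only; this is not the TensorFlow implementation, and the tolerance values are arbitrary.

import numpy as np

loc = np.array([1.0, 2.0, 3.0])
x = np.array([1.0, 2.0, 3.0 + 1e-9])
atol, rtol = 1e-8, 1e-8

# pmf(x; loc) = 1 iff every component of x lies within atol + rtol * |loc| of loc.
pmf = 1.0 if np.all(np.abs(x - loc) <= atol + rtol * np.abs(loc)) else 0.0
print(pmf)   # 1.0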
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_windows.py
|
python
|
VScrolledWindow.HitTest
|
(*args, **kwargs)
|
return _windows_.VScrolledWindow_HitTest(*args, **kwargs)
|
HitTest(self, Point pt) -> int
Test where the given (in client coords) point lies
|
HitTest(self, Point pt) -> int
|
[
"HitTest",
"(",
"self",
"Point",
"pt",
")",
"-",
">",
"int"
] |
def HitTest(*args, **kwargs):
"""
HitTest(self, Point pt) -> int
Test where the given (in client coords) point lies
"""
return _windows_.VScrolledWindow_HitTest(*args, **kwargs)
|
[
"def",
"HitTest",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"VScrolledWindow_HitTest",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L2446-L2452
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Lib/sgmllib.py
|
python
|
SGMLParser.reset
|
(self)
|
Reset this instance. Loses all unprocessed data.
|
Reset this instance. Loses all unprocessed data.
|
[
"Reset",
"this",
"instance",
".",
"Loses",
"all",
"unprocessed",
"data",
"."
] |
def reset(self):
"""Reset this instance. Loses all unprocessed data."""
self.__starttag_text = None
self.rawdata = ''
self.stack = []
self.lasttag = '???'
self.nomoretags = 0
self.literal = 0
markupbase.ParserBase.reset(self)
|
[
"def",
"reset",
"(",
"self",
")",
":",
"self",
".",
"__starttag_text",
"=",
"None",
"self",
".",
"rawdata",
"=",
"''",
"self",
".",
"stack",
"=",
"[",
"]",
"self",
".",
"lasttag",
"=",
"'???'",
"self",
".",
"nomoretags",
"=",
"0",
"self",
".",
"literal",
"=",
"0",
"markupbase",
".",
"ParserBase",
".",
"reset",
"(",
"self",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/sgmllib.py#L71-L79
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_controls.py
|
python
|
TreeCtrl.GetNextSibling
|
(*args, **kwargs)
|
return _controls_.TreeCtrl_GetNextSibling(*args, **kwargs)
|
GetNextSibling(self, TreeItemId item) -> TreeItemId
|
GetNextSibling(self, TreeItemId item) -> TreeItemId
|
[
"GetNextSibling",
"(",
"self",
"TreeItemId",
"item",
")",
"-",
">",
"TreeItemId"
] |
def GetNextSibling(*args, **kwargs):
"""GetNextSibling(self, TreeItemId item) -> TreeItemId"""
return _controls_.TreeCtrl_GetNextSibling(*args, **kwargs)
|
[
"def",
"GetNextSibling",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"TreeCtrl_GetNextSibling",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_controls.py#L5399-L5401
|
|
pmq20/node-packer
|
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
|
current/tools/inspector_protocol/jinja2/environment.py
|
python
|
Template.make_module_async
|
(self, vars=None, shared=False, locals=None)
|
As template module creation can invoke template code for
asynchronous exections this method must be used instead of the
normal :meth:`make_module` one. Likewise the module attribute
becomes unavailable in async mode.
|
As template module creation can invoke template code for
asynchronous exections this method must be used instead of the
normal :meth:`make_module` one. Likewise the module attribute
becomes unavailable in async mode.
|
[
"As",
"template",
"module",
"creation",
"can",
"invoke",
"template",
"code",
"for",
"asynchronous",
"exections",
"this",
"method",
"must",
"be",
"used",
"instead",
"of",
"the",
"normal",
":",
"meth",
":",
"make_module",
"one",
".",
"Likewise",
"the",
"module",
"attribute",
"becomes",
"unavailable",
"in",
"async",
"mode",
"."
] |
def make_module_async(self, vars=None, shared=False, locals=None):
"""As template module creation can invoke template code for
asynchronous exections this method must be used instead of the
normal :meth:`make_module` one. Likewise the module attribute
becomes unavailable in async mode.
"""
# see asyncsupport for the actual implementation
raise NotImplementedError('This feature is not available for this '
'version of Python')
|
[
"def",
"make_module_async",
"(",
"self",
",",
"vars",
"=",
"None",
",",
"shared",
"=",
"False",
",",
"locals",
"=",
"None",
")",
":",
"# see asyncsupport for the actual implementation",
"raise",
"NotImplementedError",
"(",
"'This feature is not available for this '",
"'version of Python'",
")"
] |
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/inspector_protocol/jinja2/environment.py#L1075-L1083
|
||
apache/impala
|
8ddac48f3428c86f2cbd037ced89cfb903298b12
|
docker/monitor.py
|
python
|
ContainerMonitor.__init__
|
(self, output_path, frequency_seconds=1)
|
frequency_seconds is how often metrics are gathered
|
frequency_seconds is how often metrics are gathered
|
[
"frequency_seconds",
"is",
"how",
"often",
"metrics",
"are",
"gathered"
] |
def __init__(self, output_path, frequency_seconds=1):
"""frequency_seconds is how often metrics are gathered"""
self.containers = []
self.output_path = output_path
self.keep_monitoring = None
self.monitor_thread = None
self.frequency_seconds = frequency_seconds
|
[
"def",
"__init__",
"(",
"self",
",",
"output_path",
",",
"frequency_seconds",
"=",
"1",
")",
":",
"self",
".",
"containers",
"=",
"[",
"]",
"self",
".",
"output_path",
"=",
"output_path",
"self",
".",
"keep_monitoring",
"=",
"None",
"self",
".",
"monitor_thread",
"=",
"None",
"self",
".",
"frequency_seconds",
"=",
"frequency_seconds"
] |
https://github.com/apache/impala/blob/8ddac48f3428c86f2cbd037ced89cfb903298b12/docker/monitor.py#L125-L131
|
||
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/difflib.py
|
python
|
IS_CHARACTER_JUNK
|
(ch, ws=" \t")
|
return ch in ws
|
r"""
Return 1 for ignorable character: iff `ch` is a space or tab.
Examples:
>>> IS_CHARACTER_JUNK(' ')
True
>>> IS_CHARACTER_JUNK('\t')
True
>>> IS_CHARACTER_JUNK('\n')
False
>>> IS_CHARACTER_JUNK('x')
False
|
r"""
Return 1 for ignorable character: iff `ch` is a space or tab.
|
[
"r",
"Return",
"1",
"for",
"ignorable",
"character",
":",
"iff",
"ch",
"is",
"a",
"space",
"or",
"tab",
"."
] |
def IS_CHARACTER_JUNK(ch, ws=" \t"):
r"""
Return 1 for ignorable character: iff `ch` is a space or tab.
Examples:
>>> IS_CHARACTER_JUNK(' ')
True
>>> IS_CHARACTER_JUNK('\t')
True
>>> IS_CHARACTER_JUNK('\n')
False
>>> IS_CHARACTER_JUNK('x')
False
"""
return ch in ws
|
[
"def",
"IS_CHARACTER_JUNK",
"(",
"ch",
",",
"ws",
"=",
"\" \\t\"",
")",
":",
"return",
"ch",
"in",
"ws"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/difflib.py#L1125-L1141
|
|
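`IS_CHARACTER_JUNK` from the record above is also the default `charjunk` argument of `difflib.ndiff`, so a quick way to see its effect is a diff whose only change is whitespace. A small sketch (the input strings are arbitrary):

```python
# Whitespace-only change: with IS_CHARACTER_JUNK as charjunk, the intraline
# guide marks treat the extra space as ignorable junk.
import difflib

a = ["one two three\n"]
b = ["one  two three\n"]
diff = difflib.ndiff(a, b, linejunk=None, charjunk=difflib.IS_CHARACTER_JUNK)
print("".join(diff), end="")
```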
mapnik/mapnik
|
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
|
scons/scons-local-4.1.0/SCons/Node/FS.py
|
python
|
Dir.File
|
(self, name)
|
return self.fs.File(name, self)
|
Looks up or creates a file node named 'name' relative to
this directory.
|
Looks up or creates a file node named 'name' relative to
this directory.
|
[
"Looks",
"up",
"or",
"creates",
"a",
"file",
"node",
"named",
"name",
"relative",
"to",
"this",
"directory",
"."
] |
def File(self, name):
"""
Looks up or creates a file node named 'name' relative to
this directory.
"""
return self.fs.File(name, self)
|
[
"def",
"File",
"(",
"self",
",",
"name",
")",
":",
"return",
"self",
".",
"fs",
".",
"File",
"(",
"name",
",",
"self",
")"
] |
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Node/FS.py#L1648-L1653
|
|
BitMEX/api-connectors
|
37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
|
auto-generated/python/swagger_client/models/quote_fill_ratio.py
|
python
|
QuoteFillRatio.dealt_count
|
(self)
|
return self._dealt_count
|
Gets the dealt_count of this QuoteFillRatio. # noqa: E501
:return: The dealt_count of this QuoteFillRatio. # noqa: E501
:rtype: float
|
Gets the dealt_count of this QuoteFillRatio. # noqa: E501
|
[
"Gets",
"the",
"dealt_count",
"of",
"this",
"QuoteFillRatio",
".",
"#",
"noqa",
":",
"E501"
] |
def dealt_count(self):
"""Gets the dealt_count of this QuoteFillRatio. # noqa: E501
:return: The dealt_count of this QuoteFillRatio. # noqa: E501
:rtype: float
"""
return self._dealt_count
|
[
"def",
"dealt_count",
"(",
"self",
")",
":",
"return",
"self",
".",
"_dealt_count"
] |
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/models/quote_fill_ratio.py#L145-L152
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/sunaudio.py
|
python
|
printhdr
|
(file)
|
Read and print the sound header of a named file.
|
Read and print the sound header of a named file.
|
[
"Read",
"and",
"print",
"the",
"sound",
"header",
"of",
"a",
"named",
"file",
"."
] |
def printhdr(file):
"""Read and print the sound header of a named file."""
hdr = gethdr(open(file, 'r'))
data_size, encoding, sample_rate, channels, info = hdr
while info[-1:] == '\0':
info = info[:-1]
print 'File name: ', file
print 'Data size: ', data_size
print 'Encoding: ', encoding
print 'Sample rate:', sample_rate
print 'Channels: ', channels
print 'Info: ', repr(info)
|
[
"def",
"printhdr",
"(",
"file",
")",
":",
"hdr",
"=",
"gethdr",
"(",
"open",
"(",
"file",
",",
"'r'",
")",
")",
"data_size",
",",
"encoding",
",",
"sample_rate",
",",
"channels",
",",
"info",
"=",
"hdr",
"while",
"info",
"[",
"-",
"1",
":",
"]",
"==",
"'\\0'",
":",
"info",
"=",
"info",
"[",
":",
"-",
"1",
"]",
"print",
"'File name: '",
",",
"file",
"print",
"'Data size: '",
",",
"data_size",
"print",
"'Encoding: '",
",",
"encoding",
"print",
"'Sample rate:'",
",",
"sample_rate",
"print",
"'Channels: '",
",",
"channels",
"print",
"'Info: '",
",",
"repr",
"(",
"info",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/sunaudio.py#L38-L49
|
||
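A hypothetical call to the Python 2 `sunaudio` helper above; `sample.au` is an assumed file name, and the module only exists in the Python 2 standard library.

```python
# Python 2 only: sunaudio was removed in Python 3. 'sample.au' is a placeholder path.
import sunaudio

sunaudio.printhdr('sample.au')   # prints the header fields shown in the record
data_size, encoding, sample_rate, channels, info = sunaudio.gethdr(open('sample.au', 'r'))
```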
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_manylinux.py
|
python
|
_glibc_version_string_ctypes
|
()
|
return version_str
|
Fallback implementation of glibc_version_string using ctypes.
|
Fallback implementation of glibc_version_string using ctypes.
|
[
"Fallback",
"implementation",
"of",
"glibc_version_string",
"using",
"ctypes",
"."
] |
def _glibc_version_string_ctypes() -> Optional[str]:
"""
Fallback implementation of glibc_version_string using ctypes.
"""
try:
import ctypes
except ImportError:
return None
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
#
# We must also handle the special case where the executable is not a
# dynamically linked executable. This can occur when using musl libc,
# for example. In this situation, dlopen() will error, leading to an
# OSError. Interestingly, at least in the case of musl, there is no
# errno set on the OSError. The single string argument used to construct
# OSError comes from libc itself and is therefore not portable to
# hard code here. In any case, failure to call dlopen() means we
# can proceed, so we bail on our attempt.
try:
process_namespace = ctypes.CDLL(None)
except OSError:
return None
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str: str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str
|
[
"def",
"_glibc_version_string_ctypes",
"(",
")",
"->",
"Optional",
"[",
"str",
"]",
":",
"try",
":",
"import",
"ctypes",
"except",
"ImportError",
":",
"return",
"None",
"# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen",
"# manpage says, \"If filename is NULL, then the returned handle is for the",
"# main program\". This way we can let the linker do the work to figure out",
"# which libc our process is actually using.",
"#",
"# We must also handle the special case where the executable is not a",
"# dynamically linked executable. This can occur when using musl libc,",
"# for example. In this situation, dlopen() will error, leading to an",
"# OSError. Interestingly, at least in the case of musl, there is no",
"# errno set on the OSError. The single string argument used to construct",
"# OSError comes from libc itself and is therefore not portable to",
"# hard code here. In any case, failure to call dlopen() means we",
"# can proceed, so we bail on our attempt.",
"try",
":",
"process_namespace",
"=",
"ctypes",
".",
"CDLL",
"(",
"None",
")",
"except",
"OSError",
":",
"return",
"None",
"try",
":",
"gnu_get_libc_version",
"=",
"process_namespace",
".",
"gnu_get_libc_version",
"except",
"AttributeError",
":",
"# Symbol doesn't exist -> therefore, we are not linked to",
"# glibc.",
"return",
"None",
"# Call gnu_get_libc_version, which returns a string like \"2.5\"",
"gnu_get_libc_version",
".",
"restype",
"=",
"ctypes",
".",
"c_char_p",
"version_str",
":",
"str",
"=",
"gnu_get_libc_version",
"(",
")",
"# py2 / py3 compatibility:",
"if",
"not",
"isinstance",
"(",
"version_str",
",",
"str",
")",
":",
"version_str",
"=",
"version_str",
".",
"decode",
"(",
"\"ascii\"",
")",
"return",
"version_str"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py3/pkg_resources/_vendor/packaging/_manylinux.py#L154-L195
|
|
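The helper above is private to the vendored `packaging._manylinux` module, but the ctypes trick it relies on is easy to reproduce standalone. A sketch of the same idea (not the vendored function itself):

```python
# Ask the libc already loaded into this process for its version string.
# Returns None on non-glibc platforms (musl, Windows, macOS, ...).
import ctypes

def glibc_version_via_ctypes():
    try:
        libc = ctypes.CDLL(None)                   # dlopen(NULL): handle for the main program
        gnu_get_libc_version = libc.gnu_get_libc_version
    except (OSError, AttributeError):
        return None
    gnu_get_libc_version.restype = ctypes.c_char_p
    version = gnu_get_libc_version()
    return version.decode("ascii") if isinstance(version, bytes) else version

print(glibc_version_via_ctypes())                  # e.g. "2.31", or None
```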
kamyu104/LeetCode-Solutions
|
77605708a927ea3b85aee5a479db733938c7c211
|
Python/number-of-digit-one.py
|
python
|
Solution.countDigitOne
|
(self, n)
|
return result
|
:type n: int
:rtype: int
|
:type n: int
:rtype: int
|
[
":",
"type",
"n",
":",
"int",
":",
"rtype",
":",
"int"
] |
def countDigitOne(self, n):
"""
:type n: int
:rtype: int
"""
DIGIT = 1
is_zero = int(DIGIT == 0)
result = is_zero
base = 1
while n >= base:
result += (n//(10*base)-is_zero)*base + \
min(base, max(n%(10*base) - DIGIT*base + 1, 0))
base *= 10
return result
|
[
"def",
"countDigitOne",
"(",
"self",
",",
"n",
")",
":",
"DIGIT",
"=",
"1",
"is_zero",
"=",
"int",
"(",
"DIGIT",
"==",
"0",
")",
"result",
"=",
"is_zero",
"base",
"=",
"1",
"while",
"n",
">=",
"base",
":",
"result",
"+=",
"(",
"n",
"//",
"(",
"10",
"*",
"base",
")",
"-",
"is_zero",
")",
"*",
"base",
"+",
"min",
"(",
"base",
",",
"max",
"(",
"n",
"%",
"(",
"10",
"*",
"base",
")",
"-",
"DIGIT",
"*",
"base",
"+",
"1",
",",
"0",
")",
")",
"base",
"*=",
"10",
"return",
"result"
] |
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/number-of-digit-one.py#L5-L18
|
|
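The closed-form count in the record above is easy to sanity-check against a brute-force count for small `n`. A throwaway check, not part of the solution:

```python
# Brute force: literally count the digit '1' in every number from 1 to n.
# The record's countDigitOne should return the same values.
def brute_force_count_ones(n):
    return sum(str(i).count('1') for i in range(1, n + 1))

print(brute_force_count_ones(13))   # 6  -> ones in 1, 10, 11 (twice), 12, 13
print(brute_force_count_ones(99))   # 20 -> ten in the units place, ten in the tens place
```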
openvinotoolkit/openvino
|
dedcbeafa8b84cccdc55ca64b8da516682b381c7
|
tools/pot/openvino/tools/pot/graph/editor.py
|
python
|
connect_nodes
|
(src_node, src_port, dst_node, dst_port)
|
Connects two nodes with each other
:param src_node: name of the input node
:param src_port: index of the port for input node
:param dst_node: name of the destination node
:param dst_port: index of the port for destination node
|
Connects two nodes with each other
:param src_node: name of the input node
:param src_port: index of the port for input node
:param dst_node: name of the destination node
:param dst_port: index of the port for destination node
|
[
"Connects",
"two",
"nodes",
"with",
"each",
"other",
":",
"param",
"src_node",
":",
"name",
"of",
"the",
"input",
"node",
":",
"param",
"src_port",
":",
"index",
"of",
"the",
"port",
"for",
"input",
"node",
":",
"param",
"dst_node",
":",
"name",
"of",
"the",
"destination",
"node",
":",
"param",
"dst_port",
":",
"index",
"of",
"the",
"port",
"for",
"destination",
"node"
] |
def connect_nodes(src_node, src_port, dst_node, dst_port):
""" Connects two nodes with each other
:param src_node: name of the input node
:param src_port: index of the port for input node
:param dst_node: name of the destination node
:param dst_port: index of the port for destination node
"""
src_node.out_port(src_port).connect(dst_node.in_port(dst_port))
|
[
"def",
"connect_nodes",
"(",
"src_node",
",",
"src_port",
",",
"dst_node",
",",
"dst_port",
")",
":",
"src_node",
".",
"out_port",
"(",
"src_port",
")",
".",
"connect",
"(",
"dst_node",
".",
"in_port",
"(",
"dst_port",
")",
")"
] |
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/pot/openvino/tools/pot/graph/editor.py#L79-L86
|
||
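`connect_nodes` above is a one-liner over the Model Optimizer port API. As an illustration of the interface it expects, here is a pair of stand-in classes (mocks, not real graph nodes) that make the call observable:

```python
# Mock nodes exposing the in_port/out_port/connect surface used by connect_nodes.
# Real nodes come from the openvino.tools.pot graph, not from these stand-ins.
class MockPort:
    def __init__(self):
        self.peer = None
    def connect(self, other):
        self.peer, other.peer = other, self

class MockNode:
    def __init__(self):
        self._in, self._out = {0: MockPort()}, {0: MockPort()}
    def in_port(self, idx):
        return self._in[idx]
    def out_port(self, idx):
        return self._out[idx]

def connect_nodes(src_node, src_port, dst_node, dst_port):
    src_node.out_port(src_port).connect(dst_node.in_port(dst_port))

a, b = MockNode(), MockNode()
connect_nodes(a, 0, b, 0)
assert a.out_port(0).peer is b.in_port(0)
```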
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/subprocess.py
|
python
|
list2cmdline
|
(seq)
|
return ''.join(result)
|
Translate a sequence of arguments into a command line
string, using the same rules as the MS C runtime:
1) Arguments are delimited by white space, which is either a
space or a tab.
2) A string surrounded by double quotation marks is
interpreted as a single argument, regardless of white space
contained within. A quoted string can be embedded in an
argument.
3) A double quotation mark preceded by a backslash is
interpreted as a literal double quotation mark.
4) Backslashes are interpreted literally, unless they
immediately precede a double quotation mark.
5) If backslashes immediately precede a double quotation mark,
every pair of backslashes is interpreted as a literal
backslash. If the number of backslashes is odd, the last
backslash escapes the next double quotation mark as
described in rule 3.
|
Translate a sequence of arguments into a command line
string, using the same rules as the MS C runtime:
|
[
"Translate",
"a",
"sequence",
"of",
"arguments",
"into",
"a",
"command",
"line",
"string",
"using",
"the",
"same",
"rules",
"as",
"the",
"MS",
"C",
"runtime",
":"
] |
def list2cmdline(seq):
"""
Translate a sequence of arguments into a command line
string, using the same rules as the MS C runtime:
1) Arguments are delimited by white space, which is either a
space or a tab.
2) A string surrounded by double quotation marks is
interpreted as a single argument, regardless of white space
contained within. A quoted string can be embedded in an
argument.
3) A double quotation mark preceded by a backslash is
interpreted as a literal double quotation mark.
4) Backslashes are interpreted literally, unless they
immediately precede a double quotation mark.
5) If backslashes immediately precede a double quotation mark,
every pair of backslashes is interpreted as a literal
backslash. If the number of backslashes is odd, the last
backslash escapes the next double quotation mark as
described in rule 3.
"""
# See
# http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
# or search http://msdn.microsoft.com for
# "Parsing C++ Command-Line Arguments"
result = []
needquote = False
for arg in seq:
bs_buf = []
# Add a space to separate this argument from the others
if result:
result.append(' ')
needquote = (" " in arg) or ("\t" in arg) or not arg
if needquote:
result.append('"')
for c in arg:
if c == '\\':
# Don't know if we need to double yet.
bs_buf.append(c)
elif c == '"':
# Double backslashes.
result.append('\\' * len(bs_buf)*2)
bs_buf = []
result.append('\\"')
else:
# Normal char
if bs_buf:
result.extend(bs_buf)
bs_buf = []
result.append(c)
# Add remaining backslashes, if any.
if bs_buf:
result.extend(bs_buf)
if needquote:
result.extend(bs_buf)
result.append('"')
return ''.join(result)
|
[
"def",
"list2cmdline",
"(",
"seq",
")",
":",
"# See",
"# http://msdn.microsoft.com/en-us/library/17w5ykft.aspx",
"# or search http://msdn.microsoft.com for",
"# \"Parsing C++ Command-Line Arguments\"",
"result",
"=",
"[",
"]",
"needquote",
"=",
"False",
"for",
"arg",
"in",
"seq",
":",
"bs_buf",
"=",
"[",
"]",
"# Add a space to separate this argument from the others",
"if",
"result",
":",
"result",
".",
"append",
"(",
"' '",
")",
"needquote",
"=",
"(",
"\" \"",
"in",
"arg",
")",
"or",
"(",
"\"\\t\"",
"in",
"arg",
")",
"or",
"not",
"arg",
"if",
"needquote",
":",
"result",
".",
"append",
"(",
"'\"'",
")",
"for",
"c",
"in",
"arg",
":",
"if",
"c",
"==",
"'\\\\'",
":",
"# Don't know if we need to double yet.",
"bs_buf",
".",
"append",
"(",
"c",
")",
"elif",
"c",
"==",
"'\"'",
":",
"# Double backslashes.",
"result",
".",
"append",
"(",
"'\\\\'",
"*",
"len",
"(",
"bs_buf",
")",
"*",
"2",
")",
"bs_buf",
"=",
"[",
"]",
"result",
".",
"append",
"(",
"'\\\\\"'",
")",
"else",
":",
"# Normal char",
"if",
"bs_buf",
":",
"result",
".",
"extend",
"(",
"bs_buf",
")",
"bs_buf",
"=",
"[",
"]",
"result",
".",
"append",
"(",
"c",
")",
"# Add remaining backslashes, if any.",
"if",
"bs_buf",
":",
"result",
".",
"extend",
"(",
"bs_buf",
")",
"if",
"needquote",
":",
"result",
".",
"extend",
"(",
"bs_buf",
")",
"result",
".",
"append",
"(",
"'\"'",
")",
"return",
"''",
".",
"join",
"(",
"result",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/subprocess.py#L516-L583
|
|
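`list2cmdline` is importable from `subprocess` (real but undocumented), so the quoting rules listed in the record can be checked directly. A small illustration with made-up arguments:

```python
# Arguments containing spaces get quoted; backslashes stay literal unless they
# precede a double quote.
import subprocess

args = ['copy', 'C:\\Program Files\\app\\a.txt', 'dest dir', 'plain']
print(subprocess.list2cmdline(args))
# copy "C:\Program Files\app\a.txt" "dest dir" plain
```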
ComputationalRadiationPhysics/picongpu
|
59e9b53605f9a5c1bf271eeb055bc74370a99052
|
lib/python/picongpu/plugins/data/radiation.py
|
python
|
RadiationData.get_Amplitude_z
|
(self)
|
return ((self.h5_Az_Re[...] + 1j * self.h5_Az_Im[...])[:, :, 0] *
np.sqrt(self.convert_to_SI))
|
Returns the complex amplitudes in z-axis.
|
Returns the complex amplitudes in z-axis.
|
[
"Returns",
"the",
"complex",
"amplitudes",
"in",
"z",
"-",
"axis",
"."
] |
def get_Amplitude_z(self):
"""Returns the complex amplitudes in z-axis."""
return ((self.h5_Az_Re[...] + 1j * self.h5_Az_Im[...])[:, :, 0] *
np.sqrt(self.convert_to_SI))
|
[
"def",
"get_Amplitude_z",
"(",
"self",
")",
":",
"return",
"(",
"(",
"self",
".",
"h5_Az_Re",
"[",
"...",
"]",
"+",
"1j",
"*",
"self",
".",
"h5_Az_Im",
"[",
"...",
"]",
")",
"[",
":",
",",
":",
",",
"0",
"]",
"*",
"np",
".",
"sqrt",
"(",
"self",
".",
"convert_to_SI",
")",
")"
] |
https://github.com/ComputationalRadiationPhysics/picongpu/blob/59e9b53605f9a5c1bf271eeb055bc74370a99052/lib/python/picongpu/plugins/data/radiation.py#L80-L83
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Lib/plat-mac/PixMapWrapper.py
|
python
|
PixMapWrapper.blit
|
(self, x1=0,y1=0,x2=None,y2=None, port=None)
|
Draw this pixmap into the given (default current) grafport.
|
Draw this pixmap into the given (default current) grafport.
|
[
"Draw",
"this",
"pixmap",
"into",
"the",
"given",
"(",
"default",
"current",
")",
"grafport",
"."
] |
def blit(self, x1=0,y1=0,x2=None,y2=None, port=None):
"""Draw this pixmap into the given (default current) grafport."""
src = self.bounds
dest = [x1,y1,x2,y2]
if x2 is None:
dest[2] = x1 + src[2]-src[0]
if y2 is None:
dest[3] = y1 + src[3]-src[1]
if not port: port = Qd.GetPort()
Qd.CopyBits(self.PixMap(), port.GetPortBitMapForCopyBits(), src, tuple(dest),
QuickDraw.srcCopy, None)
|
[
"def",
"blit",
"(",
"self",
",",
"x1",
"=",
"0",
",",
"y1",
"=",
"0",
",",
"x2",
"=",
"None",
",",
"y2",
"=",
"None",
",",
"port",
"=",
"None",
")",
":",
"src",
"=",
"self",
".",
"bounds",
"dest",
"=",
"[",
"x1",
",",
"y1",
",",
"x2",
",",
"y2",
"]",
"if",
"x2",
"is",
"None",
":",
"dest",
"[",
"2",
"]",
"=",
"x1",
"+",
"src",
"[",
"2",
"]",
"-",
"src",
"[",
"0",
"]",
"if",
"y2",
"is",
"None",
":",
"dest",
"[",
"3",
"]",
"=",
"y1",
"+",
"src",
"[",
"3",
"]",
"-",
"src",
"[",
"1",
"]",
"if",
"not",
"port",
":",
"port",
"=",
"Qd",
".",
"GetPort",
"(",
")",
"Qd",
".",
"CopyBits",
"(",
"self",
".",
"PixMap",
"(",
")",
",",
"port",
".",
"GetPortBitMapForCopyBits",
"(",
")",
",",
"src",
",",
"tuple",
"(",
"dest",
")",
",",
"QuickDraw",
".",
"srcCopy",
",",
"None",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/plat-mac/PixMapWrapper.py#L149-L159
|
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/x86/toolchain/lib/python2.7/idlelib/CodeContext.py
|
python
|
CodeContext.get_context
|
(self, new_topvisible, stopline=1, stopindent=0)
|
return lines, lastindent
|
Get context lines, starting at new_topvisible and working backwards.
Stop when stopline or stopindent is reached. Return a tuple of context
data and the indent level at the top of the region inspected.
|
Get context lines, starting at new_topvisible and working backwards.
|
[
"Get",
"context",
"lines",
"starting",
"at",
"new_topvisible",
"and",
"working",
"backwards",
"."
] |
def get_context(self, new_topvisible, stopline=1, stopindent=0):
"""Get context lines, starting at new_topvisible and working backwards.
Stop when stopline or stopindent is reached. Return a tuple of context
data and the indent level at the top of the region inspected.
"""
assert stopline > 0
lines = []
# The indentation level we are currently in:
lastindent = INFINITY
# For a line to be interesting, it must begin with a block opening
# keyword, and have less indentation than lastindent.
for linenum in xrange(new_topvisible, stopline-1, -1):
indent, text, opener = self.get_line_info(linenum)
if indent < lastindent:
lastindent = indent
if opener in ("else", "elif"):
# We also show the if statement
lastindent += 1
if opener and linenum < new_topvisible and indent >= stopindent:
lines.append((linenum, indent, text, opener))
if lastindent <= stopindent:
break
lines.reverse()
return lines, lastindent
|
[
"def",
"get_context",
"(",
"self",
",",
"new_topvisible",
",",
"stopline",
"=",
"1",
",",
"stopindent",
"=",
"0",
")",
":",
"assert",
"stopline",
">",
"0",
"lines",
"=",
"[",
"]",
"# The indentation level we are currently in:",
"lastindent",
"=",
"INFINITY",
"# For a line to be interesting, it must begin with a block opening",
"# keyword, and have less indentation than lastindent.",
"for",
"linenum",
"in",
"xrange",
"(",
"new_topvisible",
",",
"stopline",
"-",
"1",
",",
"-",
"1",
")",
":",
"indent",
",",
"text",
",",
"opener",
"=",
"self",
".",
"get_line_info",
"(",
"linenum",
")",
"if",
"indent",
"<",
"lastindent",
":",
"lastindent",
"=",
"indent",
"if",
"opener",
"in",
"(",
"\"else\"",
",",
"\"elif\"",
")",
":",
"# We also show the if statement",
"lastindent",
"+=",
"1",
"if",
"opener",
"and",
"linenum",
"<",
"new_topvisible",
"and",
"indent",
">=",
"stopindent",
":",
"lines",
".",
"append",
"(",
"(",
"linenum",
",",
"indent",
",",
"text",
",",
"opener",
")",
")",
"if",
"lastindent",
"<=",
"stopindent",
":",
"break",
"lines",
".",
"reverse",
"(",
")",
"return",
"lines",
",",
"lastindent"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/idlelib/CodeContext.py#L107-L132
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/py3/scipy/ndimage/filters.py
|
python
|
minimum_filter
|
(input, size=None, footprint=None, output=None,
mode="reflect", cval=0.0, origin=0)
|
return _min_or_max_filter(input, size, footprint, None, output, mode,
cval, origin, 1)
|
Calculate a multi-dimensional minimum filter.
Parameters
----------
%(input)s
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
minimum_filter : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.minimum_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
|
Calculate a multi-dimensional minimum filter.
|
[
"Calculate",
"a",
"multi",
"-",
"dimensional",
"minimum",
"filter",
"."
] |
def minimum_filter(input, size=None, footprint=None, output=None,
mode="reflect", cval=0.0, origin=0):
"""Calculate a multi-dimensional minimum filter.
Parameters
----------
%(input)s
%(size_foot)s
%(output)s
%(mode_multiple)s
%(cval)s
%(origin_multiple)s
Returns
-------
minimum_filter : ndarray
Filtered array. Has the same shape as `input`.
Examples
--------
>>> from scipy import ndimage, misc
>>> import matplotlib.pyplot as plt
>>> fig = plt.figure()
>>> plt.gray() # show the filtered result in grayscale
>>> ax1 = fig.add_subplot(121) # left side
>>> ax2 = fig.add_subplot(122) # right side
>>> ascent = misc.ascent()
>>> result = ndimage.minimum_filter(ascent, size=20)
>>> ax1.imshow(ascent)
>>> ax2.imshow(result)
>>> plt.show()
"""
return _min_or_max_filter(input, size, footprint, None, output, mode,
cval, origin, 1)
|
[
"def",
"minimum_filter",
"(",
"input",
",",
"size",
"=",
"None",
",",
"footprint",
"=",
"None",
",",
"output",
"=",
"None",
",",
"mode",
"=",
"\"reflect\"",
",",
"cval",
"=",
"0.0",
",",
"origin",
"=",
"0",
")",
":",
"return",
"_min_or_max_filter",
"(",
"input",
",",
"size",
",",
"footprint",
",",
"None",
",",
"output",
",",
"mode",
",",
"cval",
",",
"origin",
",",
"1",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/ndimage/filters.py#L1027-L1060
|
|
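A concrete, tiny run of the filter documented above (requires SciPy); the array values are arbitrary.

```python
# With size=3 every output element is the minimum over a 3x3 neighbourhood
# ('reflect' padding at the borders), so the centre of the result is the
# global minimum, 1.
import numpy as np
from scipy import ndimage

x = np.array([[9, 2, 7],
              [4, 5, 1],
              [8, 6, 3]])
print(ndimage.minimum_filter(x, size=3))
```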
ceph/ceph
|
959663007321a369c83218414a29bd9dbc8bda3a
|
qa/tasks/qemu.py
|
python
|
task
|
(ctx, config)
|
Run a test inside of QEMU on top of rbd. Only one test
is supported per client.
For example, you can specify which clients to run on::
tasks:
- ceph:
- qemu:
client.0:
test: http://download.ceph.com/qa/test.sh
client.1:
test: http://download.ceph.com/qa/test2.sh
Or use the same settings on all clients:
tasks:
- ceph:
- qemu:
all:
test: http://download.ceph.com/qa/test.sh
For tests that want to explicitly describe the RBD images to connect:
tasks:
- ceph:
- qemu:
client.0:
test: http://download.ceph.com/qa/test.sh
clone: True/False (optionally clone all created disks),
image_url: <URL> (optional default image URL)
type: filesystem / block (optional default device type)
disks: [
{
action: create / clone / none (optional, defaults to create)
image_name: <image name> (optional)
parent_name: <parent_name> (if action == clone),
type: filesystem / block (optional, defaults to filesystem)
image_url: <URL> (optional),
image_size: <MiB> (optional)
encryption_format: luks1 / luks2 / none (optional, defaults to none)
}, ...
]
You can set the amount of CPUs and memory the VM has (default is 1 CPU and
4096 MB)::
tasks:
- ceph:
- qemu:
client.0:
test: http://download.ceph.com/qa/test.sh
cpus: 4
memory: 512 # megabytes
If you need to configure additional cloud-config options, set cloud_config
to the required data set::
tasks:
- ceph
- qemu:
client.0:
test: http://ceph.com/qa/test.sh
cloud_config_archive:
- |
#/bin/bash
touch foo1
- content: |
test data
type: text/plain
filename: /tmp/data
|
Run a test inside of QEMU on top of rbd. Only one test
is supported per client.
|
[
"Run",
"a",
"test",
"inside",
"of",
"QEMU",
"on",
"top",
"of",
"rbd",
".",
"Only",
"one",
"test",
"is",
"supported",
"per",
"client",
"."
] |
def task(ctx, config):
"""
Run a test inside of QEMU on top of rbd. Only one test
is supported per client.
For example, you can specify which clients to run on::
tasks:
- ceph:
- qemu:
client.0:
test: http://download.ceph.com/qa/test.sh
client.1:
test: http://download.ceph.com/qa/test2.sh
Or use the same settings on all clients:
tasks:
- ceph:
- qemu:
all:
test: http://download.ceph.com/qa/test.sh
For tests that want to explicitly describe the RBD images to connect:
tasks:
- ceph:
- qemu:
client.0:
test: http://download.ceph.com/qa/test.sh
clone: True/False (optionally clone all created disks),
image_url: <URL> (optional default image URL)
type: filesystem / block (optional default device type)
disks: [
{
action: create / clone / none (optional, defaults to create)
image_name: <image name> (optional)
parent_name: <parent_name> (if action == clone),
type: filesystem / block (optional, defaults to filesystem)
image_url: <URL> (optional),
image_size: <MiB> (optional)
encryption_format: luks1 / luks2 / none (optional, defaults to none)
}, ...
]
You can set the amount of CPUs and memory the VM has (default is 1 CPU and
4096 MB)::
tasks:
- ceph:
- qemu:
client.0:
test: http://download.ceph.com/qa/test.sh
cpus: 4
memory: 512 # megabytes
If you need to configure additional cloud-config options, set cloud_config
to the required data set::
tasks:
- ceph
- qemu:
client.0:
test: http://ceph.com/qa/test.sh
cloud_config_archive:
- |
#/bin/bash
touch foo1
- content: |
test data
type: text/plain
filename: /tmp/data
"""
assert isinstance(config, dict), \
"task qemu only supports a dictionary for configuration"
config = teuthology.replace_all_with_clients(ctx.cluster, config)
normalize_disks(config)
managers = []
create_images(ctx=ctx, config=config, managers=managers)
managers.extend([
lambda: create_dirs(ctx=ctx, config=config),
lambda: generate_iso(ctx=ctx, config=config),
lambda: download_image(ctx=ctx, config=config),
])
create_clones(ctx=ctx, config=config, managers=managers)
create_encrypted_devices(ctx=ctx, config=config, managers=managers)
managers.append(
lambda: run_qemu(ctx=ctx, config=config),
)
with contextutil.nested(*managers):
yield
|
[
"def",
"task",
"(",
"ctx",
",",
"config",
")",
":",
"assert",
"isinstance",
"(",
"config",
",",
"dict",
")",
",",
"\"task qemu only supports a dictionary for configuration\"",
"config",
"=",
"teuthology",
".",
"replace_all_with_clients",
"(",
"ctx",
".",
"cluster",
",",
"config",
")",
"normalize_disks",
"(",
"config",
")",
"managers",
"=",
"[",
"]",
"create_images",
"(",
"ctx",
"=",
"ctx",
",",
"config",
"=",
"config",
",",
"managers",
"=",
"managers",
")",
"managers",
".",
"extend",
"(",
"[",
"lambda",
":",
"create_dirs",
"(",
"ctx",
"=",
"ctx",
",",
"config",
"=",
"config",
")",
",",
"lambda",
":",
"generate_iso",
"(",
"ctx",
"=",
"ctx",
",",
"config",
"=",
"config",
")",
",",
"lambda",
":",
"download_image",
"(",
"ctx",
"=",
"ctx",
",",
"config",
"=",
"config",
")",
",",
"]",
")",
"create_clones",
"(",
"ctx",
"=",
"ctx",
",",
"config",
"=",
"config",
",",
"managers",
"=",
"managers",
")",
"create_encrypted_devices",
"(",
"ctx",
"=",
"ctx",
",",
"config",
"=",
"config",
",",
"managers",
"=",
"managers",
")",
"managers",
".",
"append",
"(",
"lambda",
":",
"run_qemu",
"(",
"ctx",
"=",
"ctx",
",",
"config",
"=",
"config",
")",
",",
")",
"with",
"contextutil",
".",
"nested",
"(",
"*",
"managers",
")",
":",
"yield"
] |
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/qa/tasks/qemu.py#L580-L673
|
||
qt/qt
|
0a2f2382541424726168804be2c90b91381608c6
|
src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xcodeproj_file.py
|
python
|
XCObject.Comment
|
(self)
|
return self.Name()
|
Return a comment string for the object.
Most objects just use their name as the comment, but PBXProject uses
different values.
The returned comment is not escaped and does not have any comment marker
strings applied to it.
|
Return a comment string for the object.
|
[
"Return",
"a",
"comment",
"string",
"for",
"the",
"object",
"."
] |
def Comment(self):
"""Return a comment string for the object.
Most objects just use their name as the comment, but PBXProject uses
different values.
The returned comment is not escaped and does not have any comment marker
strings applied to it.
"""
return self.Name()
|
[
"def",
"Comment",
"(",
"self",
")",
":",
"return",
"self",
".",
"Name",
"(",
")"
] |
https://github.com/qt/qt/blob/0a2f2382541424726168804be2c90b91381608c6/src/3rdparty/webkit/Source/ThirdParty/gyp/pylib/gyp/xcodeproj_file.py#L374-L384
|
|
pybox2d/pybox2d
|
09643321fd363f0850087d1bde8af3f4afd82163
|
library/Box2D/examples/backends/pyglet_framework.py
|
python
|
PygletWindow.on_mouse_drag
|
(self, x, y, dx, dy, buttons, modifiers)
|
Mouse moved while clicking
|
Mouse moved while clicking
|
[
"Mouse",
"moved",
"while",
"clicking"
] |
def on_mouse_drag(self, x, y, dx, dy, buttons, modifiers):
"""
Mouse moved while clicking
"""
p = self.test.ConvertScreenToWorld(x, y)
self.test.mouseWorld = p
self.test.MouseMove(p)
if buttons & pyglet.window.mouse.RIGHT:
self.test.viewCenter -= (float(dx) / 5, float(dy) / 5)
|
[
"def",
"on_mouse_drag",
"(",
"self",
",",
"x",
",",
"y",
",",
"dx",
",",
"dy",
",",
"buttons",
",",
"modifiers",
")",
":",
"p",
"=",
"self",
".",
"test",
".",
"ConvertScreenToWorld",
"(",
"x",
",",
"y",
")",
"self",
".",
"test",
".",
"mouseWorld",
"=",
"p",
"self",
".",
"test",
".",
"MouseMove",
"(",
"p",
")",
"if",
"buttons",
"&",
"pyglet",
".",
"window",
".",
"mouse",
".",
"RIGHT",
":",
"self",
".",
"test",
".",
"viewCenter",
"-=",
"(",
"float",
"(",
"dx",
")",
"/",
"5",
",",
"float",
"(",
"dy",
")",
"/",
"5",
")"
] |
https://github.com/pybox2d/pybox2d/blob/09643321fd363f0850087d1bde8af3f4afd82163/library/Box2D/examples/backends/pyglet_framework.py#L419-L429
|
||
f4exb/sdrangel
|
fce235b2bc59b932f93d2cb8784055d51b3b8424
|
scriptsapi/freqtracking.py
|
python
|
get_device_frequency
|
(sdrangel_ip, sdrangel_port, device_index)
|
return device_frequency
|
Obtain the device center frequency from either the settings or
the report
|
Obtain the device center frequency from either the settings or
the report
|
[
"Obtain",
"the",
"device",
"center",
"frequency",
"from",
"either",
"the",
"settings",
"or",
"the",
"report"
] |
def get_device_frequency(sdrangel_ip, sdrangel_port, device_index):
""" Obtain the device center frequency from either the settings or
the report
"""
# ----------------------------------------------------------------------
base_url = f'http://{sdrangel_ip}:{sdrangel_port}/sdrangel'
device_frequency = None
# get frequency from settings
r = requests.get(url=base_url + f'/deviceset/{device_index}/device/settings')
if r.status_code // 100 == 2:
device_content = r.json()
for freq in gen_dict_extract('centerFrequency', device_content):
device_frequency = freq
# get frequency from report
if device_frequency is None:
r = requests.get(url=base_url + f'/deviceset/{device_index}/device/report')
if r.status_code // 100 != 2:
return None
device_content = r.json()
for freq in gen_dict_extract('centerFrequency', device_content):
device_frequency = freq
return device_frequency
|
[
"def",
"get_device_frequency",
"(",
"sdrangel_ip",
",",
"sdrangel_port",
",",
"device_index",
")",
":",
"# ----------------------------------------------------------------------",
"base_url",
"=",
"f'http://{sdrangel_ip}:{sdrangel_port}/sdrangel'",
"device_frequency",
"=",
"None",
"# get frequency from settings",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
"=",
"base_url",
"+",
"f'/deviceset/{device_index}/device/settings'",
")",
"if",
"r",
".",
"status_code",
"//",
"100",
"==",
"2",
":",
"device_content",
"=",
"r",
".",
"json",
"(",
")",
"for",
"freq",
"in",
"gen_dict_extract",
"(",
"'centerFrequency'",
",",
"device_content",
")",
":",
"device_frequency",
"=",
"freq",
"# get frequency from report",
"if",
"device_frequency",
"is",
"None",
":",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
"=",
"base_url",
"+",
"f'/deviceset/{device_index}/device/report'",
")",
"if",
"r",
".",
"status_code",
"//",
"100",
"!=",
"2",
":",
"return",
"None",
"device_content",
"=",
"r",
".",
"json",
"(",
")",
"for",
"freq",
"in",
"gen_dict_extract",
"(",
"'centerFrequency'",
",",
"device_content",
")",
":",
"device_frequency",
"=",
"freq",
"return",
"device_frequency"
] |
https://github.com/f4exb/sdrangel/blob/fce235b2bc59b932f93d2cb8784055d51b3b8424/scriptsapi/freqtracking.py#L101-L122
|
|
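A hypothetical call to the script helper above against a local SDRangel instance; the host, port and device index are assumptions, and `get_device_frequency` is expected to be in scope (it lives in `scriptsapi/freqtracking.py`).

```python
# Assumed local instance and device set 0; the helper returns None when neither
# the device settings nor the report expose centerFrequency.
freq = get_device_frequency('127.0.0.1', 8091, 0)
if freq is not None:
    print('device 0 centre frequency: %.6f MHz' % (freq / 1e6))
else:
    print('no centerFrequency reported')
```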
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pbr/util.py
|
python
|
resolve_name
|
(name)
|
return ret
|
Resolve a name like ``module.object`` to an object and return it.
Raise ImportError if the module or name is not found.
|
Resolve a name like ``module.object`` to an object and return it.
|
[
"Resolve",
"a",
"name",
"like",
"module",
".",
"object",
"to",
"an",
"object",
"and",
"return",
"it",
"."
] |
def resolve_name(name):
"""Resolve a name like ``module.object`` to an object and return it.
Raise ImportError if the module or name is not found.
"""
parts = name.split('.')
cursor = len(parts) - 1
module_name = parts[:cursor]
attr_name = parts[-1]
while cursor > 0:
try:
ret = __import__('.'.join(module_name), fromlist=[attr_name])
break
except ImportError:
if cursor == 0:
raise
cursor -= 1
module_name = parts[:cursor]
attr_name = parts[cursor]
ret = ''
for part in parts[cursor:]:
try:
ret = getattr(ret, part)
except AttributeError:
raise ImportError(name)
return ret
|
[
"def",
"resolve_name",
"(",
"name",
")",
":",
"parts",
"=",
"name",
".",
"split",
"(",
"'.'",
")",
"cursor",
"=",
"len",
"(",
"parts",
")",
"-",
"1",
"module_name",
"=",
"parts",
"[",
":",
"cursor",
"]",
"attr_name",
"=",
"parts",
"[",
"-",
"1",
"]",
"while",
"cursor",
">",
"0",
":",
"try",
":",
"ret",
"=",
"__import__",
"(",
"'.'",
".",
"join",
"(",
"module_name",
")",
",",
"fromlist",
"=",
"[",
"attr_name",
"]",
")",
"break",
"except",
"ImportError",
":",
"if",
"cursor",
"==",
"0",
":",
"raise",
"cursor",
"-=",
"1",
"module_name",
"=",
"parts",
"[",
":",
"cursor",
"]",
"attr_name",
"=",
"parts",
"[",
"cursor",
"]",
"ret",
"=",
"''",
"for",
"part",
"in",
"parts",
"[",
"cursor",
":",
"]",
":",
"try",
":",
"ret",
"=",
"getattr",
"(",
"ret",
",",
"part",
")",
"except",
"AttributeError",
":",
"raise",
"ImportError",
"(",
"name",
")",
"return",
"ret"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/pbr/util.py#L163-L192
|
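`resolve_name` above walks a dotted path with progressively shorter import attempts. Assuming the helper is importable (for example via `from pbr.util import resolve_name`), standard-library names make the behaviour easy to verify:

```python
# Resolves module attributes as well as plain modules; the dotted names below
# are from the standard library, so nothing beyond pbr itself is needed.
from pbr.util import resolve_name

join = resolve_name('os.path.join')
print(join('tmp', 'file.txt'))      # tmp/file.txt on POSIX

pi = resolve_name('math.pi')
print(pi)                           # 3.141592653589793
```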