nwo
stringlengths 5
86
| sha
stringlengths 40
40
| path
stringlengths 4
189
| language
stringclasses 1
value | identifier
stringlengths 1
94
| parameters
stringlengths 2
4.03k
| argument_list
stringclasses 1
value | return_statement
stringlengths 0
11.5k
| docstring
stringlengths 1
33.2k
| docstring_summary
stringlengths 0
5.15k
| docstring_tokens
list | function
stringlengths 34
151k
| function_tokens
list | url
stringlengths 90
278
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|
vmtk/vmtk
|
927331ad752265199390eabbbf2e07cdc2b4bcc6
|
vtkVmtk/Utilities/Stellar_1.0/meshconvert.py
|
python
|
writeOFF
|
(verts, tris, outFileName)
|
write out an OFF file from a list of vertices and triangles
|
write out an OFF file from a list of vertices and triangles
|
[
"write",
"out",
"an",
"OFF",
"file",
"from",
"a",
"list",
"of",
"vertices",
"and",
"triangles"
] |
def writeOFF(verts, tris, outFileName):
    """Write out an OFF file from a list of vertices and triangles.

    Args:
        verts: iterable of 3-component vertex coordinates (indexable).
        tris: iterable of 3-component vertex-index triples (indexable).
        outFileName: output path WITHOUT the '.off' extension; the
            extension is appended here.
    """
    outFileName += '.off'
    # Context manager guarantees the file is closed even if a write
    # raises (the original leaked the handle on error).
    with open(outFileName, 'w') as outfile:
        # OFF header: magic line, then "<#verts> <#faces> <#edges>"
        # (edge count is conventionally left as 0).
        outfile.write('OFF\n')
        outfile.write('%d %d 0\n' % (len(verts), len(tris)))
        for vert in verts:
            outfile.write('%g %g %g\n' % (vert[0], vert[1], vert[2]))
        for tri in tris:
            # Leading "3" is the per-face vertex count (triangles only).
            outfile.write('3 %d %d %d\n' % (tri[0], tri[1], tri[2]))
|
[
"def",
"writeOFF",
"(",
"verts",
",",
"tris",
",",
"outFileName",
")",
":",
"outFileName",
"+=",
"'.off'",
"outfile",
"=",
"open",
"(",
"outFileName",
",",
"'w'",
")",
"# write header lines",
"outfile",
".",
"write",
"(",
"'OFF\\n'",
")",
"outfile",
".",
"write",
"(",
"'%d %d 0\\n'",
"%",
"(",
"len",
"(",
"verts",
")",
",",
"len",
"(",
"tris",
")",
")",
")",
"for",
"vert",
"in",
"verts",
":",
"#print \"vert:\", vert",
"outfile",
".",
"write",
"(",
"'%g %g %g\\n'",
"%",
"(",
"vert",
"[",
"0",
"]",
",",
"vert",
"[",
"1",
"]",
",",
"vert",
"[",
"2",
"]",
")",
")",
"for",
"tri",
"in",
"tris",
":",
"outfile",
".",
"write",
"(",
"'3 %d %d %d\\n'",
"%",
"(",
"tri",
"[",
"0",
"]",
",",
"tri",
"[",
"1",
"]",
",",
"tri",
"[",
"2",
"]",
")",
")",
"outfile",
".",
"close",
"(",
")"
] |
https://github.com/vmtk/vmtk/blob/927331ad752265199390eabbbf2e07cdc2b4bcc6/vtkVmtk/Utilities/Stellar_1.0/meshconvert.py#L751-L766
|
||
limbo018/DREAMPlace
|
146c3b9fd003d1acd52c96d9fd02e3f0a05154e4
|
dreamplace/ops/dct/discrete_spectral_transform.py
|
python
|
idct_2N
|
(x, expk=None)
|
return y
|
Batch Inverse Discrete Cosine Transformation without normalization to coefficients.
Compute y_u = \sum_i x_i cos(pi*(2u+1)*i/(2N)),
Implements the 2N padding trick to solve IDCT with IFFT in the following link,
https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py
1. Multiply by 2*exp(1j*pi*u/(2N))
2. Pad x by zeros
3. Perform IFFT
4. Extract the real part
|
Batch Inverse Discrete Cosine Transformation without normalization to coefficients.
Compute y_u = \sum_i x_i cos(pi*(2u+1)*i/(2N)),
Implements the 2N padding trick to solve IDCT with IFFT in the following link,
https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py
|
[
"Batch",
"Inverse",
"Discrete",
"Cosine",
"Transformation",
"without",
"normalization",
"to",
"coefficients",
".",
"Compute",
"y_u",
"=",
"\\",
"sum_i",
"x_i",
"cos",
"(",
"pi",
"*",
"(",
"2u",
"+",
"1",
")",
"*",
"i",
"/",
"(",
"2N",
"))",
"Impelements",
"the",
"2N",
"padding",
"trick",
"to",
"solve",
"IDCT",
"with",
"IFFT",
"in",
"the",
"following",
"link",
"https",
":",
"//",
"github",
".",
"com",
"/",
"tensorflow",
"/",
"tensorflow",
"/",
"blob",
"/",
"r1",
".",
"10",
"/",
"tensorflow",
"/",
"python",
"/",
"ops",
"/",
"spectral_ops",
".",
"py"
] |
def idct_2N(x, expk=None):
    """ Batch Inverse Discrete Cosine Transformation without normalization to coefficients.
    Compute y_u = \sum_i x_i cos(pi*(2u+1)*i/(2N)),
    Implements the 2N padding trick to solve IDCT with IFFT in the following link,
    https://github.com/tensorflow/tensorflow/blob/r1.10/tensorflow/python/ops/spectral_ops.py
    1. Multiply by 2*exp(1j*pi*u/(2N))
    2. Pad x by zeros
    3. Perform IFFT
    4. Extract the real part

    Args:
        x: input tensor; the transform runs along its last dimension.
        expk: optional precomputed twiddle-factor table; when None it is
            built by get_expk(N) with x's dtype/device.

    Returns:
        Tensor of the same shape as x holding the unnormalized IDCT.
    """
    # last dimension
    N = x.size(-1)
    if expk is None:
        expk = get_expk(N, dtype=x.dtype, device=x.device)
    # multiply by 2*exp(1j*pi*u/(2N))
    # NOTE(review): the factor of 2 is presumably baked into expk by
    # get_expk — confirm against its definition.
    x_pad = x.unsqueeze(-1).mul(expk)
    # pad second last dimension, excluding the complex number dimension
    x_pad = F.pad(x_pad, (0, 0, 0, N), 'constant', 0)
    if len(x.size()) == 1:
        # irfft needs a leading batch dimension for 1-D inputs; add one
        # (removed again below).
        x_pad.unsqueeze_(0)
    # the last dimension here becomes -2 because complex numbers introduce a new dimension
    y = torch_fft_api.irfft(x_pad, signal_ndim=1, normalized=False, onesided=False, signal_sizes=[2*N])[..., 0:N]
    # Scale in place; compensates the IFFT's internal normalization so
    # the result matches the unnormalized definition above — presumably;
    # verify against the linked spectral_ops derivation.
    y.mul_(N)
    if len(x.size()) == 1:
        y.squeeze_(0)
    return y
|
[
"def",
"idct_2N",
"(",
"x",
",",
"expk",
"=",
"None",
")",
":",
"# last dimension",
"N",
"=",
"x",
".",
"size",
"(",
"-",
"1",
")",
"if",
"expk",
"is",
"None",
":",
"expk",
"=",
"get_expk",
"(",
"N",
",",
"dtype",
"=",
"x",
".",
"dtype",
",",
"device",
"=",
"x",
".",
"device",
")",
"# multiply by 2*exp(1j*pi*u/(2N))",
"x_pad",
"=",
"x",
".",
"unsqueeze",
"(",
"-",
"1",
")",
".",
"mul",
"(",
"expk",
")",
"# pad second last dimension, excluding the complex number dimension",
"x_pad",
"=",
"F",
".",
"pad",
"(",
"x_pad",
",",
"(",
"0",
",",
"0",
",",
"0",
",",
"N",
")",
",",
"'constant'",
",",
"0",
")",
"if",
"len",
"(",
"x",
".",
"size",
"(",
")",
")",
"==",
"1",
":",
"x_pad",
".",
"unsqueeze_",
"(",
"0",
")",
"# the last dimension here becomes -2 because complex numbers introduce a new dimension",
"y",
"=",
"torch_fft_api",
".",
"irfft",
"(",
"x_pad",
",",
"signal_ndim",
"=",
"1",
",",
"normalized",
"=",
"False",
",",
"onesided",
"=",
"False",
",",
"signal_sizes",
"=",
"[",
"2",
"*",
"N",
"]",
")",
"[",
"...",
",",
"0",
":",
"N",
"]",
"y",
".",
"mul_",
"(",
"N",
")",
"if",
"len",
"(",
"x",
".",
"size",
"(",
")",
")",
"==",
"1",
":",
"y",
".",
"squeeze_",
"(",
"0",
")",
"return",
"y"
] |
https://github.com/limbo018/DREAMPlace/blob/146c3b9fd003d1acd52c96d9fd02e3f0a05154e4/dreamplace/ops/dct/discrete_spectral_transform.py#L153-L185
|
|
generalized-intelligence/GAAS
|
29ab17d3e8a4ba18edef3a57c36d8db6329fac73
|
algorithms/src/LocalizationAndMapping/registration_localization/fast_gicp/thirdparty/Sophus/py/sophus/se3.py
|
python
|
Se3.__getitem__
|
(self, key)
|
We use the following convention [q0, q1, q2, q3, t0, t1, t2]
|
We use the following convention [q0, q1, q2, q3, t0, t1, t2]
|
[
"We",
"use",
"the",
"following",
"convention",
"[",
"q0",
"q1",
"q2",
"q3",
"t0",
"t1",
"t2",
"]"
] |
def __getitem__(self, key):
""" We use the following convention [q0, q1, q2, q3, t0, t1, t2] """
assert (key >= 0 and key < 7)
if key < 4:
return self.so3[key]
else:
return self.t[key - 4]
|
[
"def",
"__getitem__",
"(",
"self",
",",
"key",
")",
":",
"assert",
"(",
"key",
">=",
"0",
"and",
"key",
"<",
"7",
")",
"if",
"key",
"<",
"4",
":",
"return",
"self",
".",
"so3",
"[",
"key",
"]",
"else",
":",
"return",
"self",
".",
"t",
"[",
"key",
"-",
"4",
"]"
] |
https://github.com/generalized-intelligence/GAAS/blob/29ab17d3e8a4ba18edef3a57c36d8db6329fac73/algorithms/src/LocalizationAndMapping/registration_localization/fast_gicp/thirdparty/Sophus/py/sophus/se3.py#L96-L102
|
||
VAR-solutions/Algorithms
|
4ad6773e9675ef35aa858ca3969be5ddf6e3daea
|
LinkedList/LinkedListModule.py
|
python
|
LinkedList.getNodeCount
|
(self,value)
|
return NodeCount
|
returns the number of nodes containing a particular value
|
returns the number of nodes containing a particular value
|
[
"returns",
"the",
"number",
"of",
"nodes",
"containing",
"a",
"particular",
"value"
] |
def getNodeCount(self,value):
    """Return the number of nodes whose data equals ``value``."""
    total = 0
    node = self.head
    # Single pass over the chain, tallying matches as we go.
    while node is not None:
        if node.data == value:
            total += 1
        node = node.next
    return total
|
[
"def",
"getNodeCount",
"(",
"self",
",",
"value",
")",
":",
"CurrentNode",
"=",
"self",
".",
"head",
"NodeCount",
"=",
"0",
"while",
"CurrentNode",
"is",
"not",
"None",
":",
"if",
"CurrentNode",
".",
"data",
"==",
"value",
":",
"NodeCount",
"+=",
"1",
"CurrentNode",
"=",
"CurrentNode",
".",
"next",
"return",
"NodeCount"
] |
https://github.com/VAR-solutions/Algorithms/blob/4ad6773e9675ef35aa858ca3969be5ddf6e3daea/LinkedList/LinkedListModule.py#L147-L158
|
|
Polidea/SiriusObfuscator
|
b0e590d8130e97856afe578869b83a209e2b19be
|
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
|
python
|
SBSymbolContext.SetFunction
|
(self, *args)
|
return _lldb.SBSymbolContext_SetFunction(self, *args)
|
SetFunction(self, SBFunction function)
|
SetFunction(self, SBFunction function)
|
[
"SetFunction",
"(",
"self",
"SBFunction",
"function",
")"
] |
def SetFunction(self, *args):
    """SetFunction(self, SBFunction function)

    SWIG-generated thin wrapper: forwards all arguments to the native
    _lldb.SBSymbolContext_SetFunction binding and returns its result.
    """
    return _lldb.SBSymbolContext_SetFunction(self, *args)
|
[
"def",
"SetFunction",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_lldb",
".",
"SBSymbolContext_SetFunction",
"(",
"self",
",",
"*",
"args",
")"
] |
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L8292-L8294
|
|
swift/swift
|
12d031cf8177fdec0137f9aa7e2912fa23c4416b
|
3rdParty/SCons/scons-3.0.1/engine/SCons/Tool/lex.py
|
python
|
generate
|
(env)
|
Add Builders and construction variables for lex to an Environment.
|
Add Builders and construction variables for lex to an Environment.
|
[
"Add",
"Builders",
"and",
"construction",
"variables",
"for",
"lex",
"to",
"an",
"Environment",
"."
] |
def generate(env):
    """Add Builders and construction variables for lex to an Environment."""
    c_file, cxx_file = SCons.Tool.createCFileBuilders(env)
    # Register the lex action/emitter pair for every recognized suffix:
    # C scanners (.l, .lex), Objective-C (.lm), and C++ (.ll).
    registrations = [
        (c_file, ".l"),
        (c_file, ".lex"),
        (cxx_file, ".lm"),
        (cxx_file, ".ll"),
    ]
    for builder, suffix in registrations:
        builder.add_action(suffix, LexAction)
        builder.add_emitter(suffix, lexEmitter)
    # Prefer flex when it is on the path, falling back to plain lex.
    env["LEX"] = env.Detect("flex") or "lex"
    env["LEXFLAGS"] = SCons.Util.CLVar("")
    env["LEXCOM"] = "$LEX $LEXFLAGS -t $SOURCES > $TARGET"
|
[
"def",
"generate",
"(",
"env",
")",
":",
"c_file",
",",
"cxx_file",
"=",
"SCons",
".",
"Tool",
".",
"createCFileBuilders",
"(",
"env",
")",
"# C",
"c_file",
".",
"add_action",
"(",
"\".l\"",
",",
"LexAction",
")",
"c_file",
".",
"add_emitter",
"(",
"\".l\"",
",",
"lexEmitter",
")",
"c_file",
".",
"add_action",
"(",
"\".lex\"",
",",
"LexAction",
")",
"c_file",
".",
"add_emitter",
"(",
"\".lex\"",
",",
"lexEmitter",
")",
"# Objective-C",
"cxx_file",
".",
"add_action",
"(",
"\".lm\"",
",",
"LexAction",
")",
"cxx_file",
".",
"add_emitter",
"(",
"\".lm\"",
",",
"lexEmitter",
")",
"# C++",
"cxx_file",
".",
"add_action",
"(",
"\".ll\"",
",",
"LexAction",
")",
"cxx_file",
".",
"add_emitter",
"(",
"\".ll\"",
",",
"lexEmitter",
")",
"env",
"[",
"\"LEX\"",
"]",
"=",
"env",
".",
"Detect",
"(",
"\"flex\"",
")",
"or",
"\"lex\"",
"env",
"[",
"\"LEXFLAGS\"",
"]",
"=",
"SCons",
".",
"Util",
".",
"CLVar",
"(",
"\"\"",
")",
"env",
"[",
"\"LEXCOM\"",
"]",
"=",
"\"$LEX $LEXFLAGS -t $SOURCES > $TARGET\""
] |
https://github.com/swift/swift/blob/12d031cf8177fdec0137f9aa7e2912fa23c4416b/3rdParty/SCons/scons-3.0.1/engine/SCons/Tool/lex.py#L67-L88
|
||
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2class.py
|
python
|
xmlTextReader.BaseUri
|
(self)
|
return ret
|
The base URI of the node.
|
The base URI of the node.
|
[
"The",
"base",
"URI",
"of",
"the",
"node",
"."
] |
def BaseUri(self):
    """The base URI of the node. """
    # Thin wrapper over the C binding held in self._o; returns whatever
    # xmlTextReaderConstBaseUri yields (may be None per libxml2
    # semantics — confirm against the libxml2 reader API docs).
    ret = libxml2mod.xmlTextReaderConstBaseUri(self._o)
    return ret
|
[
"def",
"BaseUri",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlTextReaderConstBaseUri",
"(",
"self",
".",
"_o",
")",
"return",
"ret"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L5748-L5751
|
|
qgis/QGIS
|
15a77662d4bb712184f6aa60d0bd663010a76a75
|
python/utils.py
|
python
|
loadPlugin
|
(packageName: str)
|
load plugin's package
|
load plugin's package
|
[
"load",
"plugin",
"s",
"package"
] |
def loadPlugin(packageName: str) -> bool:
    """ load plugin's package

    Imports the plugin package by name, retrying once after clearing
    Python's path importer cache (so a plugin installed after startup
    can still be found).

    Args:
        packageName: importable package name of the plugin.

    Returns:
        True if the import succeeded; False after reporting the failure
        to the user via the message bar.
    """
    try:
        __import__(packageName)
        return True
    except:
        # First attempt may fail because sys.path caches predate the
        # plugin's installation; fall through to the retry below.
        pass  # continue...
    # snake in the grass, we know it's there
    sys.path_importer_cache.clear()
    # retry
    try:
        __import__(packageName)
        return True
    except:
        # Second failure is final: surface the exception in the UI.
        msg = QCoreApplication.translate("Python", "Couldn't load plugin '{0}'").format(packageName)
        showException(sys.exc_info()[0], sys.exc_info()[1], sys.exc_info()[2], msg, messagebar=True, level=Qgis.Critical)
        return False
|
[
"def",
"loadPlugin",
"(",
"packageName",
":",
"str",
")",
"->",
"bool",
":",
"try",
":",
"__import__",
"(",
"packageName",
")",
"return",
"True",
"except",
":",
"pass",
"# continue...",
"# snake in the grass, we know it's there",
"sys",
".",
"path_importer_cache",
".",
"clear",
"(",
")",
"# retry",
"try",
":",
"__import__",
"(",
"packageName",
")",
"return",
"True",
"except",
":",
"msg",
"=",
"QCoreApplication",
".",
"translate",
"(",
"\"Python\"",
",",
"\"Couldn't load plugin '{0}'\"",
")",
".",
"format",
"(",
"packageName",
")",
"showException",
"(",
"sys",
".",
"exc_info",
"(",
")",
"[",
"0",
"]",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
",",
"msg",
",",
"messagebar",
"=",
"True",
",",
"level",
"=",
"Qgis",
".",
"Critical",
")",
"return",
"False"
] |
https://github.com/qgis/QGIS/blob/15a77662d4bb712184f6aa60d0bd663010a76a75/python/utils.py#L387-L406
|
||
chromiumembedded/cef
|
80caf947f3fe2210e5344713c5281d8af9bdc295
|
tools/cef_parser.py
|
python
|
obj_header.get_class
|
(self, classname, defined_structs=None)
|
return None
|
Return the specified class or None if not found.
|
Return the specified class or None if not found.
|
[
"Return",
"the",
"specified",
"class",
"or",
"None",
"if",
"not",
"found",
"."
] |
def get_class(self, classname, defined_structs=None):
    """Return the class named ``classname`` or None if not found.

    Side effect: while scanning, every non-matching class encountered
    BEFORE the match has its C API name appended to ``defined_structs``
    (when a list is supplied) — classes after the match are not visited.
    """
    for candidate in self.classes:
        if candidate.get_name() == classname:
            return candidate
        if defined_structs is not None:
            defined_structs.append(candidate.get_capi_name())
    return None
|
[
"def",
"get_class",
"(",
"self",
",",
"classname",
",",
"defined_structs",
"=",
"None",
")",
":",
"for",
"cls",
"in",
"self",
".",
"classes",
":",
"if",
"cls",
".",
"get_name",
"(",
")",
"==",
"classname",
":",
"return",
"cls",
"elif",
"not",
"defined_structs",
"is",
"None",
":",
"defined_structs",
".",
"append",
"(",
"cls",
".",
"get_capi_name",
"(",
")",
")",
"return",
"None"
] |
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/cef_parser.py#L718-L725
|
|
SequoiaDB/SequoiaDB
|
2894ed7e5bd6fe57330afc900cf76d0ff0df9f64
|
tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py
|
python
|
newCatalog
|
(sgml)
|
return catalog(_obj=ret)
|
create a new Catalog.
|
create a new Catalog.
|
[
"create",
"a",
"new",
"Catalog",
"."
] |
def newCatalog(sgml):
    """create a new Catalog.

    Calls libxml2mod.xmlNewCatalog(sgml) and wraps the resulting
    low-level handle in a ``catalog`` object.

    Raises:
        treeError: if the underlying xmlNewCatalog() call returns None.
    """
    ret = libxml2mod.xmlNewCatalog(sgml)
    if ret is None:raise treeError('xmlNewCatalog() failed')
    return catalog(_obj=ret)
|
[
"def",
"newCatalog",
"(",
"sgml",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlNewCatalog",
"(",
"sgml",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlNewCatalog() failed'",
")",
"return",
"catalog",
"(",
"_obj",
"=",
"ret",
")"
] |
https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py#L954-L958
|
|
lukasmonk/lucaschess
|
13e2e5cb13b38a720ccf897af649054a64bcb914
|
Code/QT/Grid.py
|
python
|
Grid.recno
|
(self)
|
return n if n <= nX else nX
|
Devuelve la fila actual.
|
Devuelve la fila actual.
|
[
"Devuelve",
"la",
"fila",
"actual",
"."
] |
def recno(self):
    """
    Return the current row (original: "Devuelve la fila actual."),
    clamped so it never exceeds the index of the last data row.
    """
    current = self.currentIndex().row()
    last_row = self.cg.numDatos - 1
    # min() expresses the clamp more directly than a conditional.
    return min(current, last_row)
|
[
"def",
"recno",
"(",
"self",
")",
":",
"n",
"=",
"self",
".",
"currentIndex",
"(",
")",
".",
"row",
"(",
")",
"nX",
"=",
"self",
".",
"cg",
".",
"numDatos",
"-",
"1",
"return",
"n",
"if",
"n",
"<=",
"nX",
"else",
"nX"
] |
https://github.com/lukasmonk/lucaschess/blob/13e2e5cb13b38a720ccf897af649054a64bcb914/Code/QT/Grid.py#L441-L447
|
|
y123456yz/reading-and-annotate-mongodb-3.6
|
93280293672ca7586dc24af18132aa61e4ed7fcf
|
mongo/buildscripts/idl/idl/errors.py
|
python
|
ParserError.__str__
|
(self)
|
return msg
|
Return a formatted error.
Example error message:
test.idl: (17, 4): ID0008: Unknown IDL node 'cpp_namespac' for YAML entity 'global'.
|
Return a formatted error.
|
[
"Return",
"a",
"formatted",
"error",
"."
] |
def __str__(self):
# type: () -> str
"""
Return a formatted error.
Example error message:
test.idl: (17, 4): ID0008: Unknown IDL node 'cpp_namespac' for YAML entity 'global'.
"""
msg = "%s: (%d, %d): %s: %s" % (os.path.basename(self.file_name), self.line, self.column,
self.error_id, self.msg)
return msg
|
[
"def",
"__str__",
"(",
"self",
")",
":",
"# type: () -> str",
"msg",
"=",
"\"%s: (%d, %d): %s: %s\"",
"%",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"self",
".",
"file_name",
")",
",",
"self",
".",
"line",
",",
"self",
".",
"column",
",",
"self",
".",
"error_id",
",",
"self",
".",
"msg",
")",
"return",
"msg"
] |
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/buildscripts/idl/idl/errors.py#L113-L123
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/utilities/table_utils.py
|
python
|
DoubleItemDelegate.convert_for_display
|
(number)
|
return str(float(number))
|
need to convert to a float then
back to a string to make sure it
always has scientific notation
|
need to convert to a float then
back to a string to make sure it
always has scientific notation
|
[
"need",
"to",
"convert",
"to",
"a",
"float",
"then",
"back",
"to",
"a",
"string",
"to",
"make",
"sure",
"it",
"always",
"has",
"scientific",
"notation"
] |
def convert_for_display(number):
    """Round-trip ``number`` through float and back to a string so the
    display text always uses Python's float formatting (including
    scientific notation where Python applies it)."""
    as_float = float(number)
    return str(as_float)
|
[
"def",
"convert_for_display",
"(",
"number",
")",
":",
"return",
"str",
"(",
"float",
"(",
"number",
")",
")"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/utilities/table_utils.py#L214-L218
|
|
bh107/bohrium
|
5b83e7117285fefc7779ed0e9acb0f8e74c7e068
|
bridge/npbackend/bohrium/backend_messaging.py
|
python
|
cuda_use_current_context
|
()
|
return _backend_msg("CUDA: use current context")
|
Tell the CUDA backend to use the current CUDA context (useful for PyCUDA interop)
|
Tell the CUDA backend to use the current CUDA context (useful for PyCUDA interop)
|
[
"Tell",
"the",
"CUDA",
"backend",
"to",
"use",
"the",
"current",
"CUDA",
"context",
"(",
"useful",
"for",
"PyCUDA",
"interop",
")"
] |
def cuda_use_current_context():
    """Tell the CUDA backend to use the current CUDA context (useful for PyCUDA interop)"""
    # One-line delegation to the generic backend messaging channel;
    # returns whatever _backend_msg reports for this command string.
    return _backend_msg("CUDA: use current context")
|
[
"def",
"cuda_use_current_context",
"(",
")",
":",
"return",
"_backend_msg",
"(",
"\"CUDA: use current context\"",
")"
] |
https://github.com/bh107/bohrium/blob/5b83e7117285fefc7779ed0e9acb0f8e74c7e068/bridge/npbackend/bohrium/backend_messaging.py#L34-L36
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/pydoc.py
|
python
|
TextDoc.docmodule
|
(self, object, name=None, mod=None)
|
return result
|
Produce text documentation for a given module object.
|
Produce text documentation for a given module object.
|
[
"Produce",
"text",
"documentation",
"for",
"a",
"given",
"module",
"object",
"."
] |
def docmodule(self, object, name=None, mod=None):
    """Produce text documentation for a given module object.

    Builds the plain-text help page section by section (NAME, MODULE
    REFERENCE, DESCRIPTION, PACKAGE CONTENTS, SUBMODULES, CLASSES,
    FUNCTIONS, DATA, VERSION, DATE, AUTHOR, CREDITS, FILE) and returns
    the concatenation as one string.  ``name`` and ``mod`` are accepted
    for interface compatibility; the module's own __name__ is used.
    """
    name = object.__name__ # ignore the passed-in name
    # Split the module docstring into one-line synopsis + remainder.
    synop, desc = splitdoc(getdoc(object))
    result = self.section('NAME', name + (synop and ' - ' + synop))
    all = getattr(object, '__all__', None)
    # Location of authoritative online docs, if determinable.
    docloc = self.getdocloc(object)
    if docloc is not None:
        result = result + self.section('MODULE REFERENCE', docloc + """

The following documentation is automatically generated from the Python
source files. It may be incomplete, incorrect or include features that
are considered implementation detail and may vary between Python
implementations. When in doubt, consult the module reference at the
location listed above.
""")
    if desc:
        result = result + self.section('DESCRIPTION', desc)
    # Collect the classes, routines and data members this module exposes.
    classes = []
    for key, value in inspect.getmembers(object, inspect.isclass):
        # if __all__ exists, believe it. Otherwise use old heuristic.
        if (all is not None
            or (inspect.getmodule(value) or object) is object):
            if visiblename(key, all, object):
                classes.append((key, value))
    funcs = []
    for key, value in inspect.getmembers(object, inspect.isroutine):
        # if __all__ exists, believe it. Otherwise use old heuristic.
        if (all is not None or
            inspect.isbuiltin(value) or inspect.getmodule(value) is object):
            if visiblename(key, all, object):
                funcs.append((key, value))
    data = []
    for key, value in inspect.getmembers(object, isdata):
        if visiblename(key, all, object):
            data.append((key, value))
    # Packages: list contained modules/subpackages via pkgutil.
    modpkgs = []
    modpkgs_names = set()
    if hasattr(object, '__path__'):
        for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
            modpkgs_names.add(modname)
            if ispkg:
                modpkgs.append(modname + ' (package)')
            else:
                modpkgs.append(modname)
        modpkgs.sort()
        result = result + self.section(
            'PACKAGE CONTENTS', '\n'.join(modpkgs))
    # Detect submodules as sometimes created by C extensions
    submodules = []
    for key, value in inspect.getmembers(object, inspect.ismodule):
        if value.__name__.startswith(name + '.') and key not in modpkgs_names:
            submodules.append(key)
    if submodules:
        submodules.sort()
        result = result + self.section(
            'SUBMODULES', '\n'.join(submodules))
    if classes:
        # Class tree first, then the documentation of each class.
        classlist = [value for key, value in classes]
        contents = [self.formattree(
            inspect.getclasstree(classlist, 1), name)]
        for key, value in classes:
            contents.append(self.document(value, key, name))
        result = result + self.section('CLASSES', '\n'.join(contents))
    if funcs:
        contents = []
        for key, value in funcs:
            contents.append(self.document(value, key, name))
        result = result + self.section('FUNCTIONS', '\n'.join(contents))
    if data:
        contents = []
        for key, value in data:
            contents.append(self.docother(value, key, name, maxlen=70))
        result = result + self.section('DATA', '\n'.join(contents))
    if hasattr(object, '__version__'):
        version = str(object.__version__)
        # Strip RCS "$Revision: ... $" keyword markers from the version.
        if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
            version = version[11:-1].strip()
        result = result + self.section('VERSION', version)
    if hasattr(object, '__date__'):
        result = result + self.section('DATE', str(object.__date__))
    if hasattr(object, '__author__'):
        result = result + self.section('AUTHOR', str(object.__author__))
    if hasattr(object, '__credits__'):
        result = result + self.section('CREDITS', str(object.__credits__))
    try:
        file = inspect.getabsfile(object)
    except TypeError:
        # Built-in modules have no source file on disk.
        file = '(built-in)'
    result = result + self.section('FILE', file)
    return result
|
[
"def",
"docmodule",
"(",
"self",
",",
"object",
",",
"name",
"=",
"None",
",",
"mod",
"=",
"None",
")",
":",
"name",
"=",
"object",
".",
"__name__",
"# ignore the passed-in name",
"synop",
",",
"desc",
"=",
"splitdoc",
"(",
"getdoc",
"(",
"object",
")",
")",
"result",
"=",
"self",
".",
"section",
"(",
"'NAME'",
",",
"name",
"+",
"(",
"synop",
"and",
"' - '",
"+",
"synop",
")",
")",
"all",
"=",
"getattr",
"(",
"object",
",",
"'__all__'",
",",
"None",
")",
"docloc",
"=",
"self",
".",
"getdocloc",
"(",
"object",
")",
"if",
"docloc",
"is",
"not",
"None",
":",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'MODULE REFERENCE'",
",",
"docloc",
"+",
"\"\"\"\n\nThe following documentation is automatically generated from the Python\nsource files. It may be incomplete, incorrect or include features that\nare considered implementation detail and may vary between Python\nimplementations. When in doubt, consult the module reference at the\nlocation listed above.\n\"\"\"",
")",
"if",
"desc",
":",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'DESCRIPTION'",
",",
"desc",
")",
"classes",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"inspect",
".",
"getmembers",
"(",
"object",
",",
"inspect",
".",
"isclass",
")",
":",
"# if __all__ exists, believe it. Otherwise use old heuristic.",
"if",
"(",
"all",
"is",
"not",
"None",
"or",
"(",
"inspect",
".",
"getmodule",
"(",
"value",
")",
"or",
"object",
")",
"is",
"object",
")",
":",
"if",
"visiblename",
"(",
"key",
",",
"all",
",",
"object",
")",
":",
"classes",
".",
"append",
"(",
"(",
"key",
",",
"value",
")",
")",
"funcs",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"inspect",
".",
"getmembers",
"(",
"object",
",",
"inspect",
".",
"isroutine",
")",
":",
"# if __all__ exists, believe it. Otherwise use old heuristic.",
"if",
"(",
"all",
"is",
"not",
"None",
"or",
"inspect",
".",
"isbuiltin",
"(",
"value",
")",
"or",
"inspect",
".",
"getmodule",
"(",
"value",
")",
"is",
"object",
")",
":",
"if",
"visiblename",
"(",
"key",
",",
"all",
",",
"object",
")",
":",
"funcs",
".",
"append",
"(",
"(",
"key",
",",
"value",
")",
")",
"data",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"inspect",
".",
"getmembers",
"(",
"object",
",",
"isdata",
")",
":",
"if",
"visiblename",
"(",
"key",
",",
"all",
",",
"object",
")",
":",
"data",
".",
"append",
"(",
"(",
"key",
",",
"value",
")",
")",
"modpkgs",
"=",
"[",
"]",
"modpkgs_names",
"=",
"set",
"(",
")",
"if",
"hasattr",
"(",
"object",
",",
"'__path__'",
")",
":",
"for",
"importer",
",",
"modname",
",",
"ispkg",
"in",
"pkgutil",
".",
"iter_modules",
"(",
"object",
".",
"__path__",
")",
":",
"modpkgs_names",
".",
"add",
"(",
"modname",
")",
"if",
"ispkg",
":",
"modpkgs",
".",
"append",
"(",
"modname",
"+",
"' (package)'",
")",
"else",
":",
"modpkgs",
".",
"append",
"(",
"modname",
")",
"modpkgs",
".",
"sort",
"(",
")",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'PACKAGE CONTENTS'",
",",
"'\\n'",
".",
"join",
"(",
"modpkgs",
")",
")",
"# Detect submodules as sometimes created by C extensions",
"submodules",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"inspect",
".",
"getmembers",
"(",
"object",
",",
"inspect",
".",
"ismodule",
")",
":",
"if",
"value",
".",
"__name__",
".",
"startswith",
"(",
"name",
"+",
"'.'",
")",
"and",
"key",
"not",
"in",
"modpkgs_names",
":",
"submodules",
".",
"append",
"(",
"key",
")",
"if",
"submodules",
":",
"submodules",
".",
"sort",
"(",
")",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'SUBMODULES'",
",",
"'\\n'",
".",
"join",
"(",
"submodules",
")",
")",
"if",
"classes",
":",
"classlist",
"=",
"[",
"value",
"for",
"key",
",",
"value",
"in",
"classes",
"]",
"contents",
"=",
"[",
"self",
".",
"formattree",
"(",
"inspect",
".",
"getclasstree",
"(",
"classlist",
",",
"1",
")",
",",
"name",
")",
"]",
"for",
"key",
",",
"value",
"in",
"classes",
":",
"contents",
".",
"append",
"(",
"self",
".",
"document",
"(",
"value",
",",
"key",
",",
"name",
")",
")",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'CLASSES'",
",",
"'\\n'",
".",
"join",
"(",
"contents",
")",
")",
"if",
"funcs",
":",
"contents",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"funcs",
":",
"contents",
".",
"append",
"(",
"self",
".",
"document",
"(",
"value",
",",
"key",
",",
"name",
")",
")",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'FUNCTIONS'",
",",
"'\\n'",
".",
"join",
"(",
"contents",
")",
")",
"if",
"data",
":",
"contents",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"data",
":",
"contents",
".",
"append",
"(",
"self",
".",
"docother",
"(",
"value",
",",
"key",
",",
"name",
",",
"maxlen",
"=",
"70",
")",
")",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'DATA'",
",",
"'\\n'",
".",
"join",
"(",
"contents",
")",
")",
"if",
"hasattr",
"(",
"object",
",",
"'__version__'",
")",
":",
"version",
"=",
"str",
"(",
"object",
".",
"__version__",
")",
"if",
"version",
"[",
":",
"11",
"]",
"==",
"'$'",
"+",
"'Revision: '",
"and",
"version",
"[",
"-",
"1",
":",
"]",
"==",
"'$'",
":",
"version",
"=",
"version",
"[",
"11",
":",
"-",
"1",
"]",
".",
"strip",
"(",
")",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'VERSION'",
",",
"version",
")",
"if",
"hasattr",
"(",
"object",
",",
"'__date__'",
")",
":",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'DATE'",
",",
"str",
"(",
"object",
".",
"__date__",
")",
")",
"if",
"hasattr",
"(",
"object",
",",
"'__author__'",
")",
":",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'AUTHOR'",
",",
"str",
"(",
"object",
".",
"__author__",
")",
")",
"if",
"hasattr",
"(",
"object",
",",
"'__credits__'",
")",
":",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'CREDITS'",
",",
"str",
"(",
"object",
".",
"__credits__",
")",
")",
"try",
":",
"file",
"=",
"inspect",
".",
"getabsfile",
"(",
"object",
")",
"except",
"TypeError",
":",
"file",
"=",
"'(built-in)'",
"result",
"=",
"result",
"+",
"self",
".",
"section",
"(",
"'FILE'",
",",
"file",
")",
"return",
"result"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/pydoc.py#L1113-L1212
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/eager/context.py
|
python
|
set_global_seed
|
(seed)
|
Sets the eager mode seed.
|
Sets the eager mode seed.
|
[
"Sets",
"the",
"eager",
"mode",
"seed",
"."
] |
def set_global_seed(seed):
    """Sets the eager mode seed."""
    # Forwards to the active Context's private _set_global_seed;
    # presumably this seeds random ops for the eager context — confirm
    # against tf.random.set_seed documentation.
    context()._set_global_seed(seed)
|
[
"def",
"set_global_seed",
"(",
"seed",
")",
":",
"context",
"(",
")",
".",
"_set_global_seed",
"(",
"seed",
")"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/eager/context.py#L2105-L2107
|
||
miyosuda/TensorFlowAndroidMNIST
|
7b5a4603d2780a8a2834575706e9001977524007
|
jni-build/jni/include/tensorflow/python/client/session.py
|
python
|
_uniquify_fetches
|
(fetch_mappers)
|
return unique_fetches, value_indices
|
Uniquifies fetches from a list of fetch_mappers.
This is a utility function used by _ListFetchMapper and _DictFetchMapper. It
gathers all the unique fetches from a list of mappers and builds a list
containing all of them but without duplicates (unique_fetches).
It also returns a 2-D list of integers (values_indices) indicating at which
index in unique_fetches the fetches of the mappers are located.
This list is as follows:
values_indices[mapper_index][mapper_fetch_index] = unique_fetches_index
Args:
fetch_mappers: list of fetch mappers.
Returns:
A list of fetches.
A 2-D list of integers.
|
Uniquifies fetches from a list of fetch_mappers.
|
[
"Uniquifies",
"fetches",
"from",
"a",
"list",
"of",
"fetch_mappers",
"."
] |
def _uniquify_fetches(fetch_mappers):
"""Uniquifies fetches from a list of fetch_mappers.
This is a utility function used by _ListFetchMapper and _DictFetchMapper. It
gathers all the unique fetches from a list of mappers and builds a list
containing all of them but without duplicates (unique_fetches).
It also returns a 2-D list of integers (values_indices) indicating at which
index in unique_fetches the fetches of the mappers are located.
This list is as follows:
values_indices[mapper_index][mapper_fetch_index] = unique_fetches_index
Args:
fetch_mappers: list of fetch mappers.
Returns:
A list of fetches.
A 2-D list of integers.
"""
unique_fetches = []
value_indices = []
seen_fetches = {}
for m in fetch_mappers:
m_value_indices = []
for f in m.unique_fetches():
j = seen_fetches.get(f)
if j is None:
j = len(seen_fetches)
seen_fetches[f] = j
unique_fetches.append(f)
m_value_indices.append(j)
value_indices.append(m_value_indices)
return unique_fetches, value_indices
|
[
"def",
"_uniquify_fetches",
"(",
"fetch_mappers",
")",
":",
"unique_fetches",
"=",
"[",
"]",
"value_indices",
"=",
"[",
"]",
"seen_fetches",
"=",
"{",
"}",
"for",
"m",
"in",
"fetch_mappers",
":",
"m_value_indices",
"=",
"[",
"]",
"for",
"f",
"in",
"m",
".",
"unique_fetches",
"(",
")",
":",
"j",
"=",
"seen_fetches",
".",
"get",
"(",
"f",
")",
"if",
"j",
"is",
"None",
":",
"j",
"=",
"len",
"(",
"seen_fetches",
")",
"seen_fetches",
"[",
"f",
"]",
"=",
"j",
"unique_fetches",
".",
"append",
"(",
"f",
")",
"m_value_indices",
".",
"append",
"(",
"j",
")",
"value_indices",
".",
"append",
"(",
"m_value_indices",
")",
"return",
"unique_fetches",
",",
"value_indices"
] |
https://github.com/miyosuda/TensorFlowAndroidMNIST/blob/7b5a4603d2780a8a2834575706e9001977524007/jni-build/jni/include/tensorflow/python/client/session.py#L242-L275
|
|
ApolloAuto/apollo-platform
|
86d9dc6743b496ead18d597748ebabd34a513289
|
ros/ros_comm/rosmaster/src/rosmaster/master_api.py
|
python
|
ROSMasterHandler.subscribeParam
|
(self, caller_id, caller_api, key)
|
return 1, "Subscribed to parameter [%s]"%key, val
|
Retrieve parameter value from server and subscribe to updates to that param. See
paramUpdate() in the Node API.
@param caller_id str: ROS caller id
@type caller_id: str
@param key: parameter to lookup.
@type key: str
@param caller_api: API URI for paramUpdate callbacks.
@type caller_api: str
@return: [code, statusMessage, parameterValue]. If code is not
1, parameterValue should be ignored. parameterValue is an empty dictionary if the parameter
has not been set yet.
@rtype: [int, str, XMLRPCLegalValue]
|
Retrieve parameter value from server and subscribe to updates to that param. See
paramUpdate() in the Node API.
|
[
"Retrieve",
"parameter",
"value",
"from",
"server",
"and",
"subscribe",
"to",
"updates",
"to",
"that",
"param",
".",
"See",
"paramUpdate",
"()",
"in",
"the",
"Node",
"API",
"."
] |
def subscribeParam(self, caller_id, caller_api, key):
"""
Retrieve parameter value from server and subscribe to updates to that param. See
paramUpdate() in the Node API.
@param caller_id str: ROS caller id
@type caller_id: str
@param key: parameter to lookup.
@type key: str
@param caller_api: API URI for paramUpdate callbacks.
@type caller_api: str
@return: [code, statusMessage, parameterValue]. If code is not
1, parameterValue should be ignored. parameterValue is an empty dictionary if the parameter
has not been set yet.
@rtype: [int, str, XMLRPCLegalValue]
"""
key = resolve_name(key, caller_id)
try:
# ps_lock has precedence and is required due to
# potential self.reg_manager modification
self.ps_lock.acquire()
val = self.param_server.subscribe_param(key, (caller_id, caller_api))
finally:
self.ps_lock.release()
return 1, "Subscribed to parameter [%s]"%key, val
|
[
"def",
"subscribeParam",
"(",
"self",
",",
"caller_id",
",",
"caller_api",
",",
"key",
")",
":",
"key",
"=",
"resolve_name",
"(",
"key",
",",
"caller_id",
")",
"try",
":",
"# ps_lock has precedence and is required due to",
"# potential self.reg_manager modification",
"self",
".",
"ps_lock",
".",
"acquire",
"(",
")",
"val",
"=",
"self",
".",
"param_server",
".",
"subscribe_param",
"(",
"key",
",",
"(",
"caller_id",
",",
"caller_api",
")",
")",
"finally",
":",
"self",
".",
"ps_lock",
".",
"release",
"(",
")",
"return",
"1",
",",
"\"Subscribed to parameter [%s]\"",
"%",
"key",
",",
"val"
] |
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros_comm/rosmaster/src/rosmaster/master_api.py#L432-L455
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_core.py
|
python
|
VersionInfo.GetMajor
|
(*args, **kwargs)
|
return _core_.VersionInfo_GetMajor(*args, **kwargs)
|
GetMajor(self) -> int
|
GetMajor(self) -> int
|
[
"GetMajor",
"(",
"self",
")",
"-",
">",
"int"
] |
def GetMajor(*args, **kwargs):
"""GetMajor(self) -> int"""
return _core_.VersionInfo_GetMajor(*args, **kwargs)
|
[
"def",
"GetMajor",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"VersionInfo_GetMajor",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L16569-L16571
|
|
eventql/eventql
|
7ca0dbb2e683b525620ea30dc40540a22d5eb227
|
deps/3rdparty/spidermonkey/mozjs/build/pymake/pymake/parser.py
|
python
|
parsestring
|
(s, filename)
|
return condstack[0]
|
Parse a string containing makefile data into a parserdata.StatementList.
|
Parse a string containing makefile data into a parserdata.StatementList.
|
[
"Parse",
"a",
"string",
"containing",
"makefile",
"data",
"into",
"a",
"parserdata",
".",
"StatementList",
"."
] |
def parsestring(s, filename):
"""
Parse a string containing makefile data into a parserdata.StatementList.
"""
currule = False
condstack = [parserdata.StatementList()]
fdlines = enumeratelines(s, filename)
for d in fdlines:
assert len(condstack) > 0
offset = d.lstart
if currule and offset < d.lend and d.s[offset] == '\t':
e, token, offset = parsemakesyntax(d, offset + 1, (), itercommandchars)
assert token is None
assert offset is None
condstack[-1].append(parserdata.Command(e))
continue
# To parse Makefile syntax, we first strip leading whitespace and
# look for initial keywords. If there are no keywords, it's either
# setting a variable or writing a rule.
offset = d.skipwhitespace(offset)
if offset is None:
continue
m = _directivesre.match(d.s, offset, d.lend)
if m is not None:
kword = m.group(1)
offset = m.end(0)
if kword == 'endif':
_ensureend(d, offset, "Unexpected data after 'endif' directive")
if len(condstack) == 1:
raise SyntaxError("unmatched 'endif' directive",
d.getloc(offset))
condstack.pop().endloc = d.getloc(offset)
continue
if kword == 'else':
if len(condstack) == 1:
raise SyntaxError("unmatched 'else' directive",
d.getloc(offset))
m = _conditionre.match(d.s, offset, d.lend)
if m is None:
_ensureend(d, offset, "Unexpected data after 'else' directive.")
condstack[-1].addcondition(d.getloc(offset), parserdata.ElseCondition())
else:
kword = m.group(1)
if kword not in _conditionkeywords:
raise SyntaxError("Unexpected condition after 'else' directive.",
d.getloc(offset))
startoffset = offset
offset = d.skipwhitespace(m.end(1))
c = _conditionkeywords[kword](d, offset)
condstack[-1].addcondition(d.getloc(startoffset), c)
continue
if kword in _conditionkeywords:
c = _conditionkeywords[kword](d, offset)
cb = parserdata.ConditionBlock(d.getloc(d.lstart), c)
condstack[-1].append(cb)
condstack.append(cb)
continue
if kword == 'endef':
raise SyntaxError("endef without matching define", d.getloc(offset))
if kword == 'define':
currule = False
vname, t, i = parsemakesyntax(d, offset, (), itermakefilechars)
vname.rstrip()
startloc = d.getloc(d.lstart)
value = iterdefinelines(fdlines, startloc)
condstack[-1].append(parserdata.SetVariable(vname, value=value, valueloc=startloc, token='=', targetexp=None))
continue
if kword in ('include', '-include', 'includedeps', '-includedeps'):
if kword.startswith('-'):
required = False
kword = kword[1:]
else:
required = True
deps = kword == 'includedeps'
currule = False
incfile, t, offset = parsemakesyntax(d, offset, (), itermakefilechars)
condstack[-1].append(parserdata.Include(incfile, required, deps))
continue
if kword == 'vpath':
currule = False
e, t, offset = parsemakesyntax(d, offset, (), itermakefilechars)
condstack[-1].append(parserdata.VPathDirective(e))
continue
if kword == 'override':
currule = False
vname, token, offset = parsemakesyntax(d, offset, _varsettokens, itermakefilechars)
vname.lstrip()
vname.rstrip()
if token is None:
raise SyntaxError("Malformed override directive, need =", d.getloc(d.lstart))
value = flattenmakesyntax(d, offset).lstrip()
condstack[-1].append(parserdata.SetVariable(vname, value=value, valueloc=d.getloc(offset), token=token, targetexp=None, source=data.Variables.SOURCE_OVERRIDE))
continue
if kword == 'export':
currule = False
e, token, offset = parsemakesyntax(d, offset, _varsettokens, itermakefilechars)
e.lstrip()
e.rstrip()
if token is None:
condstack[-1].append(parserdata.ExportDirective(e, concurrent_set=False))
else:
condstack[-1].append(parserdata.ExportDirective(e, concurrent_set=True))
value = flattenmakesyntax(d, offset).lstrip()
condstack[-1].append(parserdata.SetVariable(e, value=value, valueloc=d.getloc(offset), token=token, targetexp=None))
continue
if kword == 'unexport':
e, token, offset = parsemakesyntax(d, offset, (), itermakefilechars)
condstack[-1].append(parserdata.UnexportDirective(e))
continue
e, token, offset = parsemakesyntax(d, offset, _varsettokens + ('::', ':'), itermakefilechars)
if token is None:
e.rstrip()
e.lstrip()
if not e.isempty():
condstack[-1].append(parserdata.EmptyDirective(e))
continue
# if we encountered real makefile syntax, the current rule is over
currule = False
if token in _varsettokens:
e.lstrip()
e.rstrip()
value = flattenmakesyntax(d, offset).lstrip()
condstack[-1].append(parserdata.SetVariable(e, value=value, valueloc=d.getloc(offset), token=token, targetexp=None))
else:
doublecolon = token == '::'
# `e` is targets or target patterns, which can end up as
# * a rule
# * an implicit rule
# * a static pattern rule
# * a target-specific variable definition
# * a pattern-specific variable definition
# any of the rules may have order-only prerequisites
# delimited by |, and a command delimited by ;
targets = e
e, token, offset = parsemakesyntax(d, offset,
_varsettokens + (':', '|', ';'),
itermakefilechars)
if token in (None, ';'):
condstack[-1].append(parserdata.Rule(targets, e, doublecolon))
currule = True
if token == ';':
offset = d.skipwhitespace(offset)
e, t, offset = parsemakesyntax(d, offset, (), itercommandchars)
condstack[-1].append(parserdata.Command(e))
elif token in _varsettokens:
e.lstrip()
e.rstrip()
value = flattenmakesyntax(d, offset).lstrip()
condstack[-1].append(parserdata.SetVariable(e, value=value, valueloc=d.getloc(offset), token=token, targetexp=targets))
elif token == '|':
raise SyntaxError('order-only prerequisites not implemented', d.getloc(offset))
else:
assert token == ':'
# static pattern rule
pattern = e
deps, token, offset = parsemakesyntax(d, offset, (';',), itermakefilechars)
condstack[-1].append(parserdata.StaticPatternRule(targets, pattern, deps, doublecolon))
currule = True
if token == ';':
offset = d.skipwhitespace(offset)
e, token, offset = parsemakesyntax(d, offset, (), itercommandchars)
condstack[-1].append(parserdata.Command(e))
if len(condstack) != 1:
raise SyntaxError("Condition never terminated with endif", condstack[-1].loc)
return condstack[0]
|
[
"def",
"parsestring",
"(",
"s",
",",
"filename",
")",
":",
"currule",
"=",
"False",
"condstack",
"=",
"[",
"parserdata",
".",
"StatementList",
"(",
")",
"]",
"fdlines",
"=",
"enumeratelines",
"(",
"s",
",",
"filename",
")",
"for",
"d",
"in",
"fdlines",
":",
"assert",
"len",
"(",
"condstack",
")",
">",
"0",
"offset",
"=",
"d",
".",
"lstart",
"if",
"currule",
"and",
"offset",
"<",
"d",
".",
"lend",
"and",
"d",
".",
"s",
"[",
"offset",
"]",
"==",
"'\\t'",
":",
"e",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
"+",
"1",
",",
"(",
")",
",",
"itercommandchars",
")",
"assert",
"token",
"is",
"None",
"assert",
"offset",
"is",
"None",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"Command",
"(",
"e",
")",
")",
"continue",
"# To parse Makefile syntax, we first strip leading whitespace and",
"# look for initial keywords. If there are no keywords, it's either",
"# setting a variable or writing a rule.",
"offset",
"=",
"d",
".",
"skipwhitespace",
"(",
"offset",
")",
"if",
"offset",
"is",
"None",
":",
"continue",
"m",
"=",
"_directivesre",
".",
"match",
"(",
"d",
".",
"s",
",",
"offset",
",",
"d",
".",
"lend",
")",
"if",
"m",
"is",
"not",
"None",
":",
"kword",
"=",
"m",
".",
"group",
"(",
"1",
")",
"offset",
"=",
"m",
".",
"end",
"(",
"0",
")",
"if",
"kword",
"==",
"'endif'",
":",
"_ensureend",
"(",
"d",
",",
"offset",
",",
"\"Unexpected data after 'endif' directive\"",
")",
"if",
"len",
"(",
"condstack",
")",
"==",
"1",
":",
"raise",
"SyntaxError",
"(",
"\"unmatched 'endif' directive\"",
",",
"d",
".",
"getloc",
"(",
"offset",
")",
")",
"condstack",
".",
"pop",
"(",
")",
".",
"endloc",
"=",
"d",
".",
"getloc",
"(",
"offset",
")",
"continue",
"if",
"kword",
"==",
"'else'",
":",
"if",
"len",
"(",
"condstack",
")",
"==",
"1",
":",
"raise",
"SyntaxError",
"(",
"\"unmatched 'else' directive\"",
",",
"d",
".",
"getloc",
"(",
"offset",
")",
")",
"m",
"=",
"_conditionre",
".",
"match",
"(",
"d",
".",
"s",
",",
"offset",
",",
"d",
".",
"lend",
")",
"if",
"m",
"is",
"None",
":",
"_ensureend",
"(",
"d",
",",
"offset",
",",
"\"Unexpected data after 'else' directive.\"",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"addcondition",
"(",
"d",
".",
"getloc",
"(",
"offset",
")",
",",
"parserdata",
".",
"ElseCondition",
"(",
")",
")",
"else",
":",
"kword",
"=",
"m",
".",
"group",
"(",
"1",
")",
"if",
"kword",
"not",
"in",
"_conditionkeywords",
":",
"raise",
"SyntaxError",
"(",
"\"Unexpected condition after 'else' directive.\"",
",",
"d",
".",
"getloc",
"(",
"offset",
")",
")",
"startoffset",
"=",
"offset",
"offset",
"=",
"d",
".",
"skipwhitespace",
"(",
"m",
".",
"end",
"(",
"1",
")",
")",
"c",
"=",
"_conditionkeywords",
"[",
"kword",
"]",
"(",
"d",
",",
"offset",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"addcondition",
"(",
"d",
".",
"getloc",
"(",
"startoffset",
")",
",",
"c",
")",
"continue",
"if",
"kword",
"in",
"_conditionkeywords",
":",
"c",
"=",
"_conditionkeywords",
"[",
"kword",
"]",
"(",
"d",
",",
"offset",
")",
"cb",
"=",
"parserdata",
".",
"ConditionBlock",
"(",
"d",
".",
"getloc",
"(",
"d",
".",
"lstart",
")",
",",
"c",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"cb",
")",
"condstack",
".",
"append",
"(",
"cb",
")",
"continue",
"if",
"kword",
"==",
"'endef'",
":",
"raise",
"SyntaxError",
"(",
"\"endef without matching define\"",
",",
"d",
".",
"getloc",
"(",
"offset",
")",
")",
"if",
"kword",
"==",
"'define'",
":",
"currule",
"=",
"False",
"vname",
",",
"t",
",",
"i",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"(",
")",
",",
"itermakefilechars",
")",
"vname",
".",
"rstrip",
"(",
")",
"startloc",
"=",
"d",
".",
"getloc",
"(",
"d",
".",
"lstart",
")",
"value",
"=",
"iterdefinelines",
"(",
"fdlines",
",",
"startloc",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"SetVariable",
"(",
"vname",
",",
"value",
"=",
"value",
",",
"valueloc",
"=",
"startloc",
",",
"token",
"=",
"'='",
",",
"targetexp",
"=",
"None",
")",
")",
"continue",
"if",
"kword",
"in",
"(",
"'include'",
",",
"'-include'",
",",
"'includedeps'",
",",
"'-includedeps'",
")",
":",
"if",
"kword",
".",
"startswith",
"(",
"'-'",
")",
":",
"required",
"=",
"False",
"kword",
"=",
"kword",
"[",
"1",
":",
"]",
"else",
":",
"required",
"=",
"True",
"deps",
"=",
"kword",
"==",
"'includedeps'",
"currule",
"=",
"False",
"incfile",
",",
"t",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"(",
")",
",",
"itermakefilechars",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"Include",
"(",
"incfile",
",",
"required",
",",
"deps",
")",
")",
"continue",
"if",
"kword",
"==",
"'vpath'",
":",
"currule",
"=",
"False",
"e",
",",
"t",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"(",
")",
",",
"itermakefilechars",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"VPathDirective",
"(",
"e",
")",
")",
"continue",
"if",
"kword",
"==",
"'override'",
":",
"currule",
"=",
"False",
"vname",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"_varsettokens",
",",
"itermakefilechars",
")",
"vname",
".",
"lstrip",
"(",
")",
"vname",
".",
"rstrip",
"(",
")",
"if",
"token",
"is",
"None",
":",
"raise",
"SyntaxError",
"(",
"\"Malformed override directive, need =\"",
",",
"d",
".",
"getloc",
"(",
"d",
".",
"lstart",
")",
")",
"value",
"=",
"flattenmakesyntax",
"(",
"d",
",",
"offset",
")",
".",
"lstrip",
"(",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"SetVariable",
"(",
"vname",
",",
"value",
"=",
"value",
",",
"valueloc",
"=",
"d",
".",
"getloc",
"(",
"offset",
")",
",",
"token",
"=",
"token",
",",
"targetexp",
"=",
"None",
",",
"source",
"=",
"data",
".",
"Variables",
".",
"SOURCE_OVERRIDE",
")",
")",
"continue",
"if",
"kword",
"==",
"'export'",
":",
"currule",
"=",
"False",
"e",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"_varsettokens",
",",
"itermakefilechars",
")",
"e",
".",
"lstrip",
"(",
")",
"e",
".",
"rstrip",
"(",
")",
"if",
"token",
"is",
"None",
":",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"ExportDirective",
"(",
"e",
",",
"concurrent_set",
"=",
"False",
")",
")",
"else",
":",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"ExportDirective",
"(",
"e",
",",
"concurrent_set",
"=",
"True",
")",
")",
"value",
"=",
"flattenmakesyntax",
"(",
"d",
",",
"offset",
")",
".",
"lstrip",
"(",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"SetVariable",
"(",
"e",
",",
"value",
"=",
"value",
",",
"valueloc",
"=",
"d",
".",
"getloc",
"(",
"offset",
")",
",",
"token",
"=",
"token",
",",
"targetexp",
"=",
"None",
")",
")",
"continue",
"if",
"kword",
"==",
"'unexport'",
":",
"e",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"(",
")",
",",
"itermakefilechars",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"UnexportDirective",
"(",
"e",
")",
")",
"continue",
"e",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"_varsettokens",
"+",
"(",
"'::'",
",",
"':'",
")",
",",
"itermakefilechars",
")",
"if",
"token",
"is",
"None",
":",
"e",
".",
"rstrip",
"(",
")",
"e",
".",
"lstrip",
"(",
")",
"if",
"not",
"e",
".",
"isempty",
"(",
")",
":",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"EmptyDirective",
"(",
"e",
")",
")",
"continue",
"# if we encountered real makefile syntax, the current rule is over",
"currule",
"=",
"False",
"if",
"token",
"in",
"_varsettokens",
":",
"e",
".",
"lstrip",
"(",
")",
"e",
".",
"rstrip",
"(",
")",
"value",
"=",
"flattenmakesyntax",
"(",
"d",
",",
"offset",
")",
".",
"lstrip",
"(",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"SetVariable",
"(",
"e",
",",
"value",
"=",
"value",
",",
"valueloc",
"=",
"d",
".",
"getloc",
"(",
"offset",
")",
",",
"token",
"=",
"token",
",",
"targetexp",
"=",
"None",
")",
")",
"else",
":",
"doublecolon",
"=",
"token",
"==",
"'::'",
"# `e` is targets or target patterns, which can end up as",
"# * a rule",
"# * an implicit rule",
"# * a static pattern rule",
"# * a target-specific variable definition",
"# * a pattern-specific variable definition",
"# any of the rules may have order-only prerequisites",
"# delimited by |, and a command delimited by ;",
"targets",
"=",
"e",
"e",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"_varsettokens",
"+",
"(",
"':'",
",",
"'|'",
",",
"';'",
")",
",",
"itermakefilechars",
")",
"if",
"token",
"in",
"(",
"None",
",",
"';'",
")",
":",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"Rule",
"(",
"targets",
",",
"e",
",",
"doublecolon",
")",
")",
"currule",
"=",
"True",
"if",
"token",
"==",
"';'",
":",
"offset",
"=",
"d",
".",
"skipwhitespace",
"(",
"offset",
")",
"e",
",",
"t",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"(",
")",
",",
"itercommandchars",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"Command",
"(",
"e",
")",
")",
"elif",
"token",
"in",
"_varsettokens",
":",
"e",
".",
"lstrip",
"(",
")",
"e",
".",
"rstrip",
"(",
")",
"value",
"=",
"flattenmakesyntax",
"(",
"d",
",",
"offset",
")",
".",
"lstrip",
"(",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"SetVariable",
"(",
"e",
",",
"value",
"=",
"value",
",",
"valueloc",
"=",
"d",
".",
"getloc",
"(",
"offset",
")",
",",
"token",
"=",
"token",
",",
"targetexp",
"=",
"targets",
")",
")",
"elif",
"token",
"==",
"'|'",
":",
"raise",
"SyntaxError",
"(",
"'order-only prerequisites not implemented'",
",",
"d",
".",
"getloc",
"(",
"offset",
")",
")",
"else",
":",
"assert",
"token",
"==",
"':'",
"# static pattern rule",
"pattern",
"=",
"e",
"deps",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"(",
"';'",
",",
")",
",",
"itermakefilechars",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"StaticPatternRule",
"(",
"targets",
",",
"pattern",
",",
"deps",
",",
"doublecolon",
")",
")",
"currule",
"=",
"True",
"if",
"token",
"==",
"';'",
":",
"offset",
"=",
"d",
".",
"skipwhitespace",
"(",
"offset",
")",
"e",
",",
"token",
",",
"offset",
"=",
"parsemakesyntax",
"(",
"d",
",",
"offset",
",",
"(",
")",
",",
"itercommandchars",
")",
"condstack",
"[",
"-",
"1",
"]",
".",
"append",
"(",
"parserdata",
".",
"Command",
"(",
"e",
")",
")",
"if",
"len",
"(",
"condstack",
")",
"!=",
"1",
":",
"raise",
"SyntaxError",
"(",
"\"Condition never terminated with endif\"",
",",
"condstack",
"[",
"-",
"1",
"]",
".",
"loc",
")",
"return",
"condstack",
"[",
"0",
"]"
] |
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/build/pymake/pymake/parser.py#L425-L635
|
|
trilinos/Trilinos
|
6168be6dd51e35e1cd681e9c4b24433e709df140
|
packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/phactori.py
|
python
|
ShowDataColorLegendXX
|
(inPvView,
inOnOffSetting, inColorLegendPositionAndSize, inColorSettings,
inColorLegendRepRef, inPvDataRep)
|
return newScalarBarWidgetRepresentation
|
Turns on or off the display of the color bar legend showing the mapping
between the color and the data value (and the name of the data value.
(note this is primarily to do the paraview-side work to turn bar on or
off, on to set up for rendering in the shared view, off to turn off
rendering in shared view. On or off state for rendering is stored
as a flag in the ioPhactoriImagesetBlock instance
|
Turns on or off the display of the color bar legend showing the mapping
between the color and the data value (and the name of the data value.
(note this is primarily to do the paraview-side work to turn bar on or
off, on to set up for rendering in the shared view, off to turn off
rendering in shared view. On or off state for rendering is stored
as a flag in the ioPhactoriImagesetBlock instance
|
[
"Turns",
"on",
"or",
"off",
"the",
"display",
"of",
"the",
"color",
"bar",
"legend",
"showing",
"the",
"mapping",
"between",
"the",
"color",
"and",
"the",
"data",
"value",
"(",
"and",
"the",
"name",
"of",
"the",
"data",
"value",
".",
"(",
"note",
"this",
"is",
"primarily",
"to",
"do",
"the",
"paraview",
"-",
"side",
"work",
"to",
"turn",
"bar",
"on",
"or",
"off",
"on",
"to",
"set",
"up",
"for",
"rendering",
"in",
"the",
"shared",
"view",
"off",
"to",
"turn",
"off",
"rendering",
"in",
"shared",
"view",
".",
"On",
"or",
"off",
"state",
"for",
"rendering",
"is",
"stored",
"as",
"a",
"flag",
"in",
"the",
"ioPhactoriImagesetBlock",
"instance"
] |
def ShowDataColorLegendXX(inPvView,
inOnOffSetting, inColorLegendPositionAndSize, inColorSettings,
inColorLegendRepRef, inPvDataRep):
"""Turns on or off the display of the color bar legend showing the mapping
between the color and the data value (and the name of the data value.
(note this is primarily to do the paraview-side work to turn bar on or
off, on to set up for rendering in the shared view, off to turn off
rendering in shared view. On or off state for rendering is stored
as a flag in the ioPhactoriImagesetBlock instance"""
if PhactoriDbg(100):
myDebugPrint3('phactori.ShowDataColorLegendXX entered, setting:' + \
inOnOffSetting + '\n', 100)
if(inOnOffSetting == 'on'):
myVisibility = 1
else:
myVisibility = 0
if inColorLegendRepRef != None:
if PhactoriDbg(100):
myDebugPrint3("A inColorLegendRepRef was " + \
str(inColorLegendRepRef.Visibility) + \
" now 0: " + str(inColorLegendRepRef) + "\n")
inColorLegendRepRef.Visibility = 0
myDebugPrint3(
'phactori.ShowDataColorLegendXX returing with none rep: ' + \
inOnOffSetting + '\n', 100)
return None
if gParaViewCatalystVersionFlag <= 40100:
localColorArrayName = \
inPvDataRep.ColorArrayName
else:
localColorArrayName = \
inPvDataRep.ColorArrayName[1]
if inColorLegendRepRef != None:
#myDebugPrint3(' phactori.ShowDataColorLegend using rep reference\n', 100)
#if inColorLegendRepRef.ColorArrayName == '':
# return
inPvView.OrientationAxesLabelColor = inColorSettings.mTextColor
if PhactoriDbg(100):
myDebugPrint3("B inColorLegendRepRef was " + \
str(inColorLegendRepRef.Visibility) + \
" now " + str(myVisibility) + ": " + str(inColorLegendRepRef) + "\n")
inColorLegendRepRef.Visibility = myVisibility
inColorLegendRepRef.LookupTable = inPvDataRep.LookupTable
inColorLegendRepRef.Title = localColorArrayName
#ioPhactoriImagesetBlock.mColorLegendRepRef.Color = \
# inColorSettings.mTextColor
if PhactoriDbg(100):
myDebugPrint3(
'phactori.ShowDataColorLegendXX returing with old rep: ' + \
inOnOffSetting + '\n', 100)
return inColorLegendRepRef
#else:
#myDebugPrint3(' phactori.ShowDataColorLegend have to create rep reference\n', 100)
if inColorLegendPositionAndSize[0] == 'parameters':
legendSizeMultiplier = None
legendSize = inColorLegendPositionAndSize[3]
legendFontSize = inColorLegendPositionAndSize[4]
else:
legendSizeMultiplier = inColorLegendPositionAndSize[1]
if gParaViewCatalystVersionFlag <= 40100:
#our own factor to make legends smaller generally
legendSizeMultiplier *= 0.6
else:
#legendSizeMultiplier *= 1.0
legendSizeMultiplier *= 0.7
#legendFontSize = int(12.0 * legendSizeMultiplier)
legendFontSize = int(9.0 * legendSizeMultiplier)
if gParaViewCatalystVersionFlag <= 40100:
colorLegendDefaultLongSize = 0.5
colorLegendDefaultShortSize = 0.13
else:
#[0.12, 0.43]
#[0.85, 0.05]
colorLegendDefaultLongSize = 0.43
colorLegendDefaultShortSize = 0.12
colorLegendAdjustedLongSize = colorLegendDefaultLongSize * legendSizeMultiplier
colorLegendAdjustedShortSize = colorLegendDefaultShortSize * legendSizeMultiplier
horizontalLegendSize = [colorLegendAdjustedLongSize, colorLegendAdjustedShortSize]
verticalLegendSize = [colorLegendAdjustedShortSize, colorLegendAdjustedLongSize]
xPosForBottomTop = 0.5 - 0.5 * colorLegendAdjustedLongSize
yPosForLeftRight = 0.5 - 0.5 * colorLegendAdjustedShortSize
if gParaViewCatalystVersionFlag < 50400:
if inColorLegendPositionAndSize[0] == 'top':
legendOrientation = 'Horizontal'
legendSize = horizontalLegendSize
legendPosition=[xPosForBottomTop, 0.85]
elif inColorLegendPositionAndSize[0] == 'bottom':
legendOrientation = 'Horizontal'
legendSize = horizontalLegendSize
legendPosition=[xPosForBottomTop, 0.02]
elif inColorLegendPositionAndSize[0] == 'left':
legendOrientation = 'Vertical'
legendSize = verticalLegendSize
legendPosition=[0.065, yPosForLeftRight]
elif inColorLegendPositionAndSize[0] == 'right':
legendOrientation = 'Vertical'
legendSize = verticalLegendSize
legendPosition=[0.9, yPosForLeftRight]
elif inColorLegendPositionAndSize[0] == 'top left':
legendOrientation = 'Horizontal'
legendSize = horizontalLegendSize
legendPosition=[0.065, 0.85]
elif inColorLegendPositionAndSize[0] == 'top right':
legendOrientation = 'Horizontal'
legendSize = horizontalLegendSize
legendPosition=[0.7, 0.85]
elif inColorLegendPositionAndSize[0] == 'bottom left':
legendOrientation = 'Horizontal'
legendSize = horizontalLegendSize
#legendPosition=[0.065, 0.85]
legendPosition=[0.065, 0.01]
elif inColorLegendPositionAndSize[0] == 'bottom right':
legendOrientation = 'Horizontal'
legendSize = horizontalLegendSize
#legendPosition=[0.7, 0.05]
legendPosition=[0.7, 0.01]
elif inColorLegendPositionAndSize[0] == 'parameters':
legendOrientation = inColorLegendPositionAndSize[1]
legendSize = horizontalLegendSize
legendPosition = inColorLegendPositionAndSize[2]
else:
legendOrientation = 'Horizontal'
legendSize = horizontalLegendSize
#legendPosition=[xPosForBottomTop, 0.05]
legendPosition=[xPosForBottomTop, 0.01]
if PhactoriDbg():
myDebugPrint3("legend info:\n legendSizeMultiplier: " + str(legendSizeMultiplier) + "\n" \
" legendSize: " + str(legendSize) + "\n" \
" legendPos: " + str(legendPosition) + "\n"\
" legendOrientation: " + str(legendOrientation) + "\n"\
" legendFontSize: " + str(legendFontSize) + "\n")
else:
defaultLegendLength = 0.33
defaultMidPos = 0.5 - 0.5*defaultLegendLength
#legendFontSize = 16
#legendSize = 1.0
#validPositions = ['UpperLeftCorner', 'UpperRightCorner',
# 'LowerLeftCorner', 'LowerRightCorner',
# 'UpperCenter', 'LowerCenter']
legendPosition=[0.0, 0.0]
if inColorLegendPositionAndSize[0] == 'top':
legendOrientation = 'Horizontal'
legendWindowLocation = 'UpperCenter'
elif inColorLegendPositionAndSize[0] == 'bottom':
legendOrientation = 'Horizontal'
legendWindowLocation = 'LowerCenter'
elif inColorLegendPositionAndSize[0] == 'left':
legendOrientation = 'Vertical'
legendPosition=[0.02, defaultMidPos]
legendWindowLocation = 'AnyLocation'
elif inColorLegendPositionAndSize[0] == 'right':
legendOrientation = 'Vertical'
legendPosition=[0.89, defaultMidPos]
legendWindowLocation = 'AnyLocation'
elif inColorLegendPositionAndSize[0] == 'top left':
legendOrientation = 'Vertical'
legendWindowLocation = 'UpperLeftCorner'
elif inColorLegendPositionAndSize[0] == 'top right':
legendOrientation = 'Vertical'
legendWindowLocation = 'UpperRightCorner'
elif inColorLegendPositionAndSize[0] == 'bottom left':
legendOrientation = 'Vertical'
legendWindowLocation = 'LowerLeftCorner'
elif inColorLegendPositionAndSize[0] == 'bottom right':
legendOrientation = 'Vertical'
legendWindowLocation = 'LowerRightCorner'
elif inColorLegendPositionAndSize[0] == 'parameters':
legendOrientation = inColorLegendPositionAndSize[1]
legendPosition = inColorLegendPositionAndSize[2]
legendWindowLocation = 'AnyLocation'
else:
legendOrientation = 'Vertical'
legendWindowLocation = 'LowerRightCorner'
#newScalarBarWidgetRepresentation = CreateScalarBar( Title=inPvDataRep.ColorArrayName, Position2=[0.13, 0.5], TitleOpacity=1.0, TitleShadow=0, AutomaticLabelFormat=1, TitleFontSize=12, TitleColor=[1.0, 1.0, 1.0], AspectRatio=20.0, NumberOfLabels=5, ComponentTitle='', Resizable=1, TitleFontFamily='Arial', Visibility=myVisibility, LabelFontSize=12, LabelFontFamily='Arial', TitleItalic=0, Selectable=0, LabelItalic=0, Enabled=0, LabelColor=[1.0, 1.0, 1.0], Position=[0.9, 0.31396255850234012], LabelBold=0, UseNonCompositedRenderer=1, LabelOpacity=1.0, TitleBold=0, LabelFormat='%-#6.3g', Orientation='Vertical', LabelShadow=0, LookupTable=inPvDataRep.LookupTable, Repositionable=1 )
if gParaViewCatalystVersionFlag <= 40100:
newScalarBarWidgetRepresentation = CreateScalarBar(Title=localColorArrayName,
Orientation=legendOrientation,
Position=legendPosition,
Position2 = legendSize,
Visibility=myVisibility,
LookupTable=inPvDataRep.LookupTable,
LabelFontSize=legendFontSize,
TitleOpacity=1.0,
TitleShadow=0,
AutomaticLabelFormat=1,
TitleFontSize=legendFontSize,
TitleColor=inColorSettings.mTextColor,
AspectRatio=20.0,
NumberOfLabels=5,
ComponentTitle='',
Resizable=1,
TitleFontFamily='Arial',
LabelFontFamily='Arial',
TitleItalic=0,
Selectable=0,
LabelItalic=0,
Enabled=0,
LabelColor=inColorSettings.mTextColor,
LabelBold=0,
UseNonCompositedRenderer=1,
LabelOpacity=1.0,
TitleBold=0,
LabelFormat='%-#6.3g',
LabelShadow=0,
Repositionable=1)
elif gParaViewCatalystVersionFlag < 50400:
newScalarBarWidgetRepresentation = CreateScalarBar(Title=localColorArrayName,
Orientation=legendOrientation,
Position=legendPosition,
Position2 = legendSize,
Visibility=myVisibility,
LookupTable=inPvDataRep.LookupTable,
LabelFontSize=legendFontSize,
#TitleOpacity=1.0,
#TitleShadow=0,
#AutomaticLabelFormat=1,
TitleFontSize=legendFontSize,
TitleColor=inColorSettings.mTextColor,
AspectRatio=20.0,
#NumberOfLabels=5,
ComponentTitle='',
#Resizable=1,
#TitleFontFamily='Arial',
#LabelFontFamily='Arial',
#TitleItalic=0,
#Selectable=0,
#LabelItalic=0,
#Enabled=0,
LabelColor=inColorSettings.mTextColor,
#LabelBold=0,
#UseNonCompositedRenderer=1,
#LabelOpacity=1.0,
#TitleBold=0,
#LabelFormat='%-#6.3g',
#LabelShadow=0,
#Repositionable=1
)
else:
newScalarBarWidgetRepresentation = CreateScalarBar(
Title=localColorArrayName, ComponentTitle='')
newScalarBarWidgetRepresentation.Orientation = legendOrientation
newScalarBarWidgetRepresentation.WindowLocation = legendWindowLocation
if legendWindowLocation == 'AnyLocation':
newScalarBarWidgetRepresentation.Position = legendPosition
if PhactoriDbg():
nbwr = newScalarBarWidgetRepresentation
myDebugPrint3("newScalarBarWidgetRepresentation:\n" +\
str(nbwr) + "\n" +\
" Title: " + str(nbwr.Title) + "\n" +\
" ComponentTitle: " + str(nbwr.ComponentTitle) + "\n" +\
" WindowLocation: " + str(nbwr.WindowLocation) + "\n" +\
" Position: " + str(nbwr.Position) + "\n" +\
" ScalarBarLength: " + str(nbwr.ScalarBarLength) + "\n" +\
" ScalarBarThickness: " + str(nbwr.ScalarBarThickness) + "\n" +\
" Orientation: " + str(nbwr.Orientation) + "\n" +\
" LabelFontSize: " + str(nbwr.LabelFontSize) + "\n" +\
" TitleFontSize: " + str(nbwr.TitleFontSize) + "\n" +\
" LabelFontFamily: " + str(nbwr.LabelFontFamily) + "\n" +\
" TitleFontFamily: " + str(nbwr.TitleFontSize) + "\n")
#" LockPosition: " + str(nbwr.LockPosition) + "\n" +\
#" Repositionable: " + str(nbwr.Repositionable) + "\n" +\
#" AutoOrient: " + str(nbwr.AutoOrient) + "\n" +\
inPvView.OrientationAxesLabelColor = inColorSettings.mTextColor
inPvView.Representations.append(newScalarBarWidgetRepresentation)
#ioPhactoriImagesetBlock.mColorLegendRepRef = \
# newScalarBarWidgetRepresentation
if PhactoriDbg():
myDebugPrint3("current lookup table:\n");
if PhactoriDbg():
myDebugPrint3(str(inPvDataRep.LookupTable) + '\n')
if PhactoriDbg():
myDebugPrint3("RGBPoints:\n");
if PhactoriDbg():
if gParaViewCatalystVersionFlag <= 40100:
myLocalRGBPoints = inPvDataRep.LookupTable.RGBPoints
else:
pv_4_3_LUT = GetColorTransferFunction(
inPvDataRep.ColorArrayName[1])
myLocalRGBPoints = pv_4_3_LUT.RGBPoints
myDebugPrint3(str(myLocalRGBPoints) + '\n')
if PhactoriDbg():
myDebugPrint3("widget:\n");
if PhactoriDbg():
myDebugPrint3(str(newScalarBarWidgetRepresentation) + '\n')
if PhactoriDbg(100):
myDebugPrint3('phactori.ShowDataColorLegendXX returing with new rep: ' + \
inOnOffSetting + '\n', 100)
return newScalarBarWidgetRepresentation
|
[
"def",
"ShowDataColorLegendXX",
"(",
"inPvView",
",",
"inOnOffSetting",
",",
"inColorLegendPositionAndSize",
",",
"inColorSettings",
",",
"inColorLegendRepRef",
",",
"inPvDataRep",
")",
":",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"'phactori.ShowDataColorLegendXX entered, setting:'",
"+",
"inOnOffSetting",
"+",
"'\\n'",
",",
"100",
")",
"if",
"(",
"inOnOffSetting",
"==",
"'on'",
")",
":",
"myVisibility",
"=",
"1",
"else",
":",
"myVisibility",
"=",
"0",
"if",
"inColorLegendRepRef",
"!=",
"None",
":",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"\"A inColorLegendRepRef was \"",
"+",
"str",
"(",
"inColorLegendRepRef",
".",
"Visibility",
")",
"+",
"\" now 0: \"",
"+",
"str",
"(",
"inColorLegendRepRef",
")",
"+",
"\"\\n\"",
")",
"inColorLegendRepRef",
".",
"Visibility",
"=",
"0",
"myDebugPrint3",
"(",
"'phactori.ShowDataColorLegendXX returing with none rep: '",
"+",
"inOnOffSetting",
"+",
"'\\n'",
",",
"100",
")",
"return",
"None",
"if",
"gParaViewCatalystVersionFlag",
"<=",
"40100",
":",
"localColorArrayName",
"=",
"inPvDataRep",
".",
"ColorArrayName",
"else",
":",
"localColorArrayName",
"=",
"inPvDataRep",
".",
"ColorArrayName",
"[",
"1",
"]",
"if",
"inColorLegendRepRef",
"!=",
"None",
":",
"#myDebugPrint3(' phactori.ShowDataColorLegend using rep reference\\n', 100)",
"#if inColorLegendRepRef.ColorArrayName == '':",
"# return",
"inPvView",
".",
"OrientationAxesLabelColor",
"=",
"inColorSettings",
".",
"mTextColor",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"\"B inColorLegendRepRef was \"",
"+",
"str",
"(",
"inColorLegendRepRef",
".",
"Visibility",
")",
"+",
"\" now \"",
"+",
"str",
"(",
"myVisibility",
")",
"+",
"\": \"",
"+",
"str",
"(",
"inColorLegendRepRef",
")",
"+",
"\"\\n\"",
")",
"inColorLegendRepRef",
".",
"Visibility",
"=",
"myVisibility",
"inColorLegendRepRef",
".",
"LookupTable",
"=",
"inPvDataRep",
".",
"LookupTable",
"inColorLegendRepRef",
".",
"Title",
"=",
"localColorArrayName",
"#ioPhactoriImagesetBlock.mColorLegendRepRef.Color = \\",
"# inColorSettings.mTextColor",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"'phactori.ShowDataColorLegendXX returing with old rep: '",
"+",
"inOnOffSetting",
"+",
"'\\n'",
",",
"100",
")",
"return",
"inColorLegendRepRef",
"#else:",
"#myDebugPrint3(' phactori.ShowDataColorLegend have to create rep reference\\n', 100)",
"if",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'parameters'",
":",
"legendSizeMultiplier",
"=",
"None",
"legendSize",
"=",
"inColorLegendPositionAndSize",
"[",
"3",
"]",
"legendFontSize",
"=",
"inColorLegendPositionAndSize",
"[",
"4",
"]",
"else",
":",
"legendSizeMultiplier",
"=",
"inColorLegendPositionAndSize",
"[",
"1",
"]",
"if",
"gParaViewCatalystVersionFlag",
"<=",
"40100",
":",
"#our own factor to make legends smaller generally",
"legendSizeMultiplier",
"*=",
"0.6",
"else",
":",
"#legendSizeMultiplier *= 1.0",
"legendSizeMultiplier",
"*=",
"0.7",
"#legendFontSize = int(12.0 * legendSizeMultiplier)",
"legendFontSize",
"=",
"int",
"(",
"9.0",
"*",
"legendSizeMultiplier",
")",
"if",
"gParaViewCatalystVersionFlag",
"<=",
"40100",
":",
"colorLegendDefaultLongSize",
"=",
"0.5",
"colorLegendDefaultShortSize",
"=",
"0.13",
"else",
":",
"#[0.12, 0.43]",
"#[0.85, 0.05]",
"colorLegendDefaultLongSize",
"=",
"0.43",
"colorLegendDefaultShortSize",
"=",
"0.12",
"colorLegendAdjustedLongSize",
"=",
"colorLegendDefaultLongSize",
"*",
"legendSizeMultiplier",
"colorLegendAdjustedShortSize",
"=",
"colorLegendDefaultShortSize",
"*",
"legendSizeMultiplier",
"horizontalLegendSize",
"=",
"[",
"colorLegendAdjustedLongSize",
",",
"colorLegendAdjustedShortSize",
"]",
"verticalLegendSize",
"=",
"[",
"colorLegendAdjustedShortSize",
",",
"colorLegendAdjustedLongSize",
"]",
"xPosForBottomTop",
"=",
"0.5",
"-",
"0.5",
"*",
"colorLegendAdjustedLongSize",
"yPosForLeftRight",
"=",
"0.5",
"-",
"0.5",
"*",
"colorLegendAdjustedShortSize",
"if",
"gParaViewCatalystVersionFlag",
"<",
"50400",
":",
"if",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'top'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendSize",
"=",
"horizontalLegendSize",
"legendPosition",
"=",
"[",
"xPosForBottomTop",
",",
"0.85",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'bottom'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendSize",
"=",
"horizontalLegendSize",
"legendPosition",
"=",
"[",
"xPosForBottomTop",
",",
"0.02",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'left'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendSize",
"=",
"verticalLegendSize",
"legendPosition",
"=",
"[",
"0.065",
",",
"yPosForLeftRight",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'right'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendSize",
"=",
"verticalLegendSize",
"legendPosition",
"=",
"[",
"0.9",
",",
"yPosForLeftRight",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'top left'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendSize",
"=",
"horizontalLegendSize",
"legendPosition",
"=",
"[",
"0.065",
",",
"0.85",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'top right'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendSize",
"=",
"horizontalLegendSize",
"legendPosition",
"=",
"[",
"0.7",
",",
"0.85",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'bottom left'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendSize",
"=",
"horizontalLegendSize",
"#legendPosition=[0.065, 0.85]",
"legendPosition",
"=",
"[",
"0.065",
",",
"0.01",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'bottom right'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendSize",
"=",
"horizontalLegendSize",
"#legendPosition=[0.7, 0.05]",
"legendPosition",
"=",
"[",
"0.7",
",",
"0.01",
"]",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'parameters'",
":",
"legendOrientation",
"=",
"inColorLegendPositionAndSize",
"[",
"1",
"]",
"legendSize",
"=",
"horizontalLegendSize",
"legendPosition",
"=",
"inColorLegendPositionAndSize",
"[",
"2",
"]",
"else",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendSize",
"=",
"horizontalLegendSize",
"#legendPosition=[xPosForBottomTop, 0.05]",
"legendPosition",
"=",
"[",
"xPosForBottomTop",
",",
"0.01",
"]",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"legend info:\\n legendSizeMultiplier: \"",
"+",
"str",
"(",
"legendSizeMultiplier",
")",
"+",
"\"\\n\"",
"\" legendSize: \"",
"+",
"str",
"(",
"legendSize",
")",
"+",
"\"\\n\"",
"\" legendPos: \"",
"+",
"str",
"(",
"legendPosition",
")",
"+",
"\"\\n\"",
"\" legendOrientation: \"",
"+",
"str",
"(",
"legendOrientation",
")",
"+",
"\"\\n\"",
"\" legendFontSize: \"",
"+",
"str",
"(",
"legendFontSize",
")",
"+",
"\"\\n\"",
")",
"else",
":",
"defaultLegendLength",
"=",
"0.33",
"defaultMidPos",
"=",
"0.5",
"-",
"0.5",
"*",
"defaultLegendLength",
"#legendFontSize = 16",
"#legendSize = 1.0",
"#validPositions = ['UpperLeftCorner', 'UpperRightCorner', ",
"# 'LowerLeftCorner', 'LowerRightCorner',",
"# 'UpperCenter', 'LowerCenter']",
"legendPosition",
"=",
"[",
"0.0",
",",
"0.0",
"]",
"if",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'top'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendWindowLocation",
"=",
"'UpperCenter'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'bottom'",
":",
"legendOrientation",
"=",
"'Horizontal'",
"legendWindowLocation",
"=",
"'LowerCenter'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'left'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendPosition",
"=",
"[",
"0.02",
",",
"defaultMidPos",
"]",
"legendWindowLocation",
"=",
"'AnyLocation'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'right'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendPosition",
"=",
"[",
"0.89",
",",
"defaultMidPos",
"]",
"legendWindowLocation",
"=",
"'AnyLocation'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'top left'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendWindowLocation",
"=",
"'UpperLeftCorner'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'top right'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendWindowLocation",
"=",
"'UpperRightCorner'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'bottom left'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendWindowLocation",
"=",
"'LowerLeftCorner'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'bottom right'",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendWindowLocation",
"=",
"'LowerRightCorner'",
"elif",
"inColorLegendPositionAndSize",
"[",
"0",
"]",
"==",
"'parameters'",
":",
"legendOrientation",
"=",
"inColorLegendPositionAndSize",
"[",
"1",
"]",
"legendPosition",
"=",
"inColorLegendPositionAndSize",
"[",
"2",
"]",
"legendWindowLocation",
"=",
"'AnyLocation'",
"else",
":",
"legendOrientation",
"=",
"'Vertical'",
"legendWindowLocation",
"=",
"'LowerRightCorner'",
"#newScalarBarWidgetRepresentation = CreateScalarBar( Title=inPvDataRep.ColorArrayName, Position2=[0.13, 0.5], TitleOpacity=1.0, TitleShadow=0, AutomaticLabelFormat=1, TitleFontSize=12, TitleColor=[1.0, 1.0, 1.0], AspectRatio=20.0, NumberOfLabels=5, ComponentTitle='', Resizable=1, TitleFontFamily='Arial', Visibility=myVisibility, LabelFontSize=12, LabelFontFamily='Arial', TitleItalic=0, Selectable=0, LabelItalic=0, Enabled=0, LabelColor=[1.0, 1.0, 1.0], Position=[0.9, 0.31396255850234012], LabelBold=0, UseNonCompositedRenderer=1, LabelOpacity=1.0, TitleBold=0, LabelFormat='%-#6.3g', Orientation='Vertical', LabelShadow=0, LookupTable=inPvDataRep.LookupTable, Repositionable=1 )",
"if",
"gParaViewCatalystVersionFlag",
"<=",
"40100",
":",
"newScalarBarWidgetRepresentation",
"=",
"CreateScalarBar",
"(",
"Title",
"=",
"localColorArrayName",
",",
"Orientation",
"=",
"legendOrientation",
",",
"Position",
"=",
"legendPosition",
",",
"Position2",
"=",
"legendSize",
",",
"Visibility",
"=",
"myVisibility",
",",
"LookupTable",
"=",
"inPvDataRep",
".",
"LookupTable",
",",
"LabelFontSize",
"=",
"legendFontSize",
",",
"TitleOpacity",
"=",
"1.0",
",",
"TitleShadow",
"=",
"0",
",",
"AutomaticLabelFormat",
"=",
"1",
",",
"TitleFontSize",
"=",
"legendFontSize",
",",
"TitleColor",
"=",
"inColorSettings",
".",
"mTextColor",
",",
"AspectRatio",
"=",
"20.0",
",",
"NumberOfLabels",
"=",
"5",
",",
"ComponentTitle",
"=",
"''",
",",
"Resizable",
"=",
"1",
",",
"TitleFontFamily",
"=",
"'Arial'",
",",
"LabelFontFamily",
"=",
"'Arial'",
",",
"TitleItalic",
"=",
"0",
",",
"Selectable",
"=",
"0",
",",
"LabelItalic",
"=",
"0",
",",
"Enabled",
"=",
"0",
",",
"LabelColor",
"=",
"inColorSettings",
".",
"mTextColor",
",",
"LabelBold",
"=",
"0",
",",
"UseNonCompositedRenderer",
"=",
"1",
",",
"LabelOpacity",
"=",
"1.0",
",",
"TitleBold",
"=",
"0",
",",
"LabelFormat",
"=",
"'%-#6.3g'",
",",
"LabelShadow",
"=",
"0",
",",
"Repositionable",
"=",
"1",
")",
"elif",
"gParaViewCatalystVersionFlag",
"<",
"50400",
":",
"newScalarBarWidgetRepresentation",
"=",
"CreateScalarBar",
"(",
"Title",
"=",
"localColorArrayName",
",",
"Orientation",
"=",
"legendOrientation",
",",
"Position",
"=",
"legendPosition",
",",
"Position2",
"=",
"legendSize",
",",
"Visibility",
"=",
"myVisibility",
",",
"LookupTable",
"=",
"inPvDataRep",
".",
"LookupTable",
",",
"LabelFontSize",
"=",
"legendFontSize",
",",
"#TitleOpacity=1.0,",
"#TitleShadow=0,",
"#AutomaticLabelFormat=1,",
"TitleFontSize",
"=",
"legendFontSize",
",",
"TitleColor",
"=",
"inColorSettings",
".",
"mTextColor",
",",
"AspectRatio",
"=",
"20.0",
",",
"#NumberOfLabels=5,",
"ComponentTitle",
"=",
"''",
",",
"#Resizable=1,",
"#TitleFontFamily='Arial',",
"#LabelFontFamily='Arial',",
"#TitleItalic=0,",
"#Selectable=0,",
"#LabelItalic=0,",
"#Enabled=0,",
"LabelColor",
"=",
"inColorSettings",
".",
"mTextColor",
",",
"#LabelBold=0,",
"#UseNonCompositedRenderer=1,",
"#LabelOpacity=1.0,",
"#TitleBold=0,",
"#LabelFormat='%-#6.3g',",
"#LabelShadow=0,",
"#Repositionable=1",
")",
"else",
":",
"newScalarBarWidgetRepresentation",
"=",
"CreateScalarBar",
"(",
"Title",
"=",
"localColorArrayName",
",",
"ComponentTitle",
"=",
"''",
")",
"newScalarBarWidgetRepresentation",
".",
"Orientation",
"=",
"legendOrientation",
"newScalarBarWidgetRepresentation",
".",
"WindowLocation",
"=",
"legendWindowLocation",
"if",
"legendWindowLocation",
"==",
"'AnyLocation'",
":",
"newScalarBarWidgetRepresentation",
".",
"Position",
"=",
"legendPosition",
"if",
"PhactoriDbg",
"(",
")",
":",
"nbwr",
"=",
"newScalarBarWidgetRepresentation",
"myDebugPrint3",
"(",
"\"newScalarBarWidgetRepresentation:\\n\"",
"+",
"str",
"(",
"nbwr",
")",
"+",
"\"\\n\"",
"+",
"\" Title: \"",
"+",
"str",
"(",
"nbwr",
".",
"Title",
")",
"+",
"\"\\n\"",
"+",
"\" ComponentTitle: \"",
"+",
"str",
"(",
"nbwr",
".",
"ComponentTitle",
")",
"+",
"\"\\n\"",
"+",
"\" WindowLocation: \"",
"+",
"str",
"(",
"nbwr",
".",
"WindowLocation",
")",
"+",
"\"\\n\"",
"+",
"\" Position: \"",
"+",
"str",
"(",
"nbwr",
".",
"Position",
")",
"+",
"\"\\n\"",
"+",
"\" ScalarBarLength: \"",
"+",
"str",
"(",
"nbwr",
".",
"ScalarBarLength",
")",
"+",
"\"\\n\"",
"+",
"\" ScalarBarThickness: \"",
"+",
"str",
"(",
"nbwr",
".",
"ScalarBarThickness",
")",
"+",
"\"\\n\"",
"+",
"\" Orientation: \"",
"+",
"str",
"(",
"nbwr",
".",
"Orientation",
")",
"+",
"\"\\n\"",
"+",
"\" LabelFontSize: \"",
"+",
"str",
"(",
"nbwr",
".",
"LabelFontSize",
")",
"+",
"\"\\n\"",
"+",
"\" TitleFontSize: \"",
"+",
"str",
"(",
"nbwr",
".",
"TitleFontSize",
")",
"+",
"\"\\n\"",
"+",
"\" LabelFontFamily: \"",
"+",
"str",
"(",
"nbwr",
".",
"LabelFontFamily",
")",
"+",
"\"\\n\"",
"+",
"\" TitleFontFamily: \"",
"+",
"str",
"(",
"nbwr",
".",
"TitleFontSize",
")",
"+",
"\"\\n\"",
")",
"#\" LockPosition: \" + str(nbwr.LockPosition) + \"\\n\" +\\",
"#\" Repositionable: \" + str(nbwr.Repositionable) + \"\\n\" +\\",
"#\" AutoOrient: \" + str(nbwr.AutoOrient) + \"\\n\" +\\",
"inPvView",
".",
"OrientationAxesLabelColor",
"=",
"inColorSettings",
".",
"mTextColor",
"inPvView",
".",
"Representations",
".",
"append",
"(",
"newScalarBarWidgetRepresentation",
")",
"#ioPhactoriImagesetBlock.mColorLegendRepRef = \\",
"# newScalarBarWidgetRepresentation",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"current lookup table:\\n\"",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"str",
"(",
"inPvDataRep",
".",
"LookupTable",
")",
"+",
"'\\n'",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"RGBPoints:\\n\"",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"if",
"gParaViewCatalystVersionFlag",
"<=",
"40100",
":",
"myLocalRGBPoints",
"=",
"inPvDataRep",
".",
"LookupTable",
".",
"RGBPoints",
"else",
":",
"pv_4_3_LUT",
"=",
"GetColorTransferFunction",
"(",
"inPvDataRep",
".",
"ColorArrayName",
"[",
"1",
"]",
")",
"myLocalRGBPoints",
"=",
"pv_4_3_LUT",
".",
"RGBPoints",
"myDebugPrint3",
"(",
"str",
"(",
"myLocalRGBPoints",
")",
"+",
"'\\n'",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"\"widget:\\n\"",
")",
"if",
"PhactoriDbg",
"(",
")",
":",
"myDebugPrint3",
"(",
"str",
"(",
"newScalarBarWidgetRepresentation",
")",
"+",
"'\\n'",
")",
"if",
"PhactoriDbg",
"(",
"100",
")",
":",
"myDebugPrint3",
"(",
"'phactori.ShowDataColorLegendXX returing with new rep: '",
"+",
"inOnOffSetting",
"+",
"'\\n'",
",",
"100",
")",
"return",
"newScalarBarWidgetRepresentation"
] |
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/libraries/ioss/src/visualization/catalyst/phactori/phactori.py#L12516-L12817
|
|
hughperkins/tf-coriander
|
970d3df6c11400ad68405f22b0c42a52374e94ca
|
tensorflow/contrib/graph_editor/select.py
|
python
|
get_walks_union_ops
|
(forward_seed_ops,
backward_seed_ops,
forward_inclusive=True,
backward_inclusive=True,
within_ops=None,
control_inputs=False,
control_outputs=None,
control_ios=None)
|
return util.concatenate_unique(forward_ops, backward_ops)
|
Return the union of a forward and a backward walk.
Args:
forward_seed_ops: an iterable of operations from which the forward graph
walk starts. If a list of tensors is given instead, the seed_ops are set
to be the consumers of those tensors.
backward_seed_ops: an iterable of operations from which the backward graph
walk starts. If a list of tensors is given instead, the seed_ops are set
to be the generators of those tensors.
forward_inclusive: if True the given forward_seed_ops are also part of the
resulting set.
backward_inclusive: if True the given backward_seed_ops are also part of the
resulting set.
within_ops: restrict the search within those operations. If within_ops is
None, the search is done within the whole graph.
control_inputs: A boolean indicating whether control inputs are enabled.
control_outputs: An instance of util.ControlOutputs or None. If not None,
control outputs are enabled.
control_ios: An instance of util.ControlOutputs or None. If not None, both
control inputs and control outputs are enabled. This is equivalent to set
control_inputs to True and control_outputs to the util.ControlOutputs
instance.
Returns:
A Python set of all the tf.Operation in the union of a forward and a
backward walk.
Raises:
TypeError: if forward_seed_ops or backward_seed_ops or within_ops cannot be
converted to a list of tf.Operation.
|
Return the union of a forward and a backward walk.
|
[
"Return",
"the",
"union",
"of",
"a",
"forward",
"and",
"a",
"backward",
"walk",
"."
] |
def get_walks_union_ops(forward_seed_ops,
                        backward_seed_ops,
                        forward_inclusive=True,
                        backward_inclusive=True,
                        within_ops=None,
                        control_inputs=False,
                        control_outputs=None,
                        control_ios=None):
  """Return the union of a forward and a backward graph walk.

  Args:
    forward_seed_ops: an iterable of operations from which the forward graph
      walk starts. If a list of tensors is given instead, the seed_ops are set
      to be the consumers of those tensors.
    backward_seed_ops: an iterable of operations from which the backward graph
      walk starts. If a list of tensors is given instead, the seed_ops are set
      to be the generators of those tensors.
    forward_inclusive: if True the given forward_seed_ops are also part of the
      resulting set.
    backward_inclusive: if True the given backward_seed_ops are also part of
      the resulting set.
    within_ops: restrict the search within those operations. If within_ops is
      None, the search is done within the whole graph.
    control_inputs: A boolean indicating whether control inputs are enabled.
    control_outputs: An instance of util.ControlOutputs or None. If not None,
      control outputs are enabled.
    control_ios: An instance of util.ControlOutputs or None. If not None, both
      control inputs and control outputs are enabled. This is equivalent to
      setting control_inputs to True and control_outputs to the
      util.ControlOutputs instance.
  Returns:
    A Python set of all the tf.Operation in the union of a forward and a
    backward walk.
  Raises:
    TypeError: if forward_seed_ops or backward_seed_ops or within_ops cannot
      be converted to a list of tf.Operation.
  """
  # Normalize the control-flow flags first; check_cios resolves the
  # control_ios shorthand into the two individual settings.
  control_inputs, control_outputs = check_cios(control_inputs,
                                               control_outputs,
                                               control_ios)
  fwd = get_forward_walk_ops(forward_seed_ops,
                             inclusive=forward_inclusive,
                             within_ops=within_ops,
                             control_outputs=control_outputs)
  bwd = get_backward_walk_ops(backward_seed_ops,
                              inclusive=backward_inclusive,
                              within_ops=within_ops,
                              control_inputs=control_inputs)
  # Merge the two walks, de-duplicating while preserving order.
  return util.concatenate_unique(fwd, bwd)
|
[
"def",
"get_walks_union_ops",
"(",
"forward_seed_ops",
",",
"backward_seed_ops",
",",
"forward_inclusive",
"=",
"True",
",",
"backward_inclusive",
"=",
"True",
",",
"within_ops",
"=",
"None",
",",
"control_inputs",
"=",
"False",
",",
"control_outputs",
"=",
"None",
",",
"control_ios",
"=",
"None",
")",
":",
"control_inputs",
",",
"control_outputs",
"=",
"check_cios",
"(",
"control_inputs",
",",
"control_outputs",
",",
"control_ios",
")",
"forward_ops",
"=",
"get_forward_walk_ops",
"(",
"forward_seed_ops",
",",
"inclusive",
"=",
"forward_inclusive",
",",
"within_ops",
"=",
"within_ops",
",",
"control_outputs",
"=",
"control_outputs",
")",
"backward_ops",
"=",
"get_backward_walk_ops",
"(",
"backward_seed_ops",
",",
"inclusive",
"=",
"backward_inclusive",
",",
"within_ops",
"=",
"within_ops",
",",
"control_inputs",
"=",
"control_inputs",
")",
"return",
"util",
".",
"concatenate_unique",
"(",
"forward_ops",
",",
"backward_ops",
")"
] |
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/graph_editor/select.py#L562-L611
|
|
pytorch/pytorch
|
7176c92687d3cc847cc046bf002269c6949a21c2
|
torch/nn/modules/utils.py
|
python
|
consume_prefix_in_state_dict_if_present
|
(
state_dict: Dict[str, Any], prefix: str
)
|
r"""Strip the prefix in state_dict in place, if any.
..note::
Given a `state_dict` from a DP/DDP model, a local model can load it by applying
`consume_prefix_in_state_dict_if_present(state_dict, "module.")` before calling
:meth:`torch.nn.Module.load_state_dict`.
Args:
state_dict (OrderedDict): a state-dict to be loaded to the model.
prefix (str): prefix.
|
r"""Strip the prefix in state_dict in place, if any.
|
[
"r",
"Strip",
"the",
"prefix",
"in",
"state_dict",
"in",
"place",
"if",
"any",
"."
] |
def consume_prefix_in_state_dict_if_present(
    state_dict: Dict[str, Any], prefix: str
) -> None:
    r"""Strip ``prefix`` from the keys of ``state_dict`` in place, if present.

    ..note::
        Given a `state_dict` from a DP/DDP model, a local model can load it by
        applying `consume_prefix_in_state_dict_if_present(state_dict, "module.")`
        before calling :meth:`torch.nn.Module.load_state_dict`.

    Args:
        state_dict (OrderedDict): a state-dict to be loaded to the model.
        prefix (str): prefix.
    """
    keys = sorted(state_dict.keys())
    for key in keys:
        if key.startswith(prefix):
            newkey = key[len(prefix) :]
            state_dict[newkey] = state_dict.pop(key)

    # also strip the prefix in metadata if any.
    if "_metadata" in state_dict:
        metadata = state_dict["_metadata"]
        for key in list(metadata.keys()):
            # for the metadata dict, the key can be:
            # '': for the DDP module, which we want to remove.
            # 'module': for the actual model.
            # 'module.xx.xx': for the rest.
            if len(key) == 0:
                continue
            # Bug fix: only rewrite keys that actually carry the prefix.
            # The previous version sliced len(prefix) characters off every
            # non-empty key, corrupting keys unrelated to the prefix.
            # The bare prefix without its trailing dot (e.g. 'module') maps
            # to '' since key[len(prefix):] is empty for it.
            if key == prefix.replace(".", "") or key.startswith(prefix):
                newkey = key[len(prefix) :]
                metadata[newkey] = metadata.pop(key)
|
[
"def",
"consume_prefix_in_state_dict_if_present",
"(",
"state_dict",
":",
"Dict",
"[",
"str",
",",
"Any",
"]",
",",
"prefix",
":",
"str",
")",
"->",
"None",
":",
"keys",
"=",
"sorted",
"(",
"state_dict",
".",
"keys",
"(",
")",
")",
"for",
"key",
"in",
"keys",
":",
"if",
"key",
".",
"startswith",
"(",
"prefix",
")",
":",
"newkey",
"=",
"key",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"state_dict",
"[",
"newkey",
"]",
"=",
"state_dict",
".",
"pop",
"(",
"key",
")",
"# also strip the prefix in metadata if any.",
"if",
"\"_metadata\"",
"in",
"state_dict",
":",
"metadata",
"=",
"state_dict",
"[",
"\"_metadata\"",
"]",
"for",
"key",
"in",
"list",
"(",
"metadata",
".",
"keys",
"(",
")",
")",
":",
"# for the metadata dict, the key can be:",
"# '': for the DDP module, which we want to remove.",
"# 'module': for the actual model.",
"# 'module.xx.xx': for the rest.",
"if",
"len",
"(",
"key",
")",
"==",
"0",
":",
"continue",
"newkey",
"=",
"key",
"[",
"len",
"(",
"prefix",
")",
":",
"]",
"metadata",
"[",
"newkey",
"]",
"=",
"metadata",
".",
"pop",
"(",
"key",
")"
] |
https://github.com/pytorch/pytorch/blob/7176c92687d3cc847cc046bf002269c6949a21c2/torch/nn/modules/utils.py#L43-L75
|
||
microsoft/DirectXShaderCompiler
|
8348ff8d9e0287610ba05d3a828e10af981a1c05
|
tools/clang/bindings/python/clang/cindex.py
|
python
|
Type.get_array_size
|
(self)
|
return conf.lib.clang_getArraySize(self)
|
Retrieve the size of the constant array.
|
Retrieve the size of the constant array.
|
[
"Retrieve",
"the",
"size",
"of",
"the",
"constant",
"array",
"."
] |
def get_array_size(self):
    """
    Retrieve the size of the constant array.

    Delegates to libclang's clang_getArraySize for this Type object.
    NOTE(review): for non-constant-array types the underlying C API
    presumably returns a sentinel (-1) -- confirm against the libclang
    documentation before relying on it.
    """
    return conf.lib.clang_getArraySize(self)
|
[
"def",
"get_array_size",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_getArraySize",
"(",
"self",
")"
] |
https://github.com/microsoft/DirectXShaderCompiler/blob/8348ff8d9e0287610ba05d3a828e10af981a1c05/tools/clang/bindings/python/clang/cindex.py#L1862-L1866
|
|
BestSonny/SSTD
|
174d452189f6bf9cf4b6957719392008bd974069
|
python/caffe/io.py
|
python
|
datum_to_array
|
(datum)
|
Converts a datum to an array. Note that the label is not returned,
as one can easily get it by calling datum.label.
|
Converts a datum to an array. Note that the label is not returned,
as one can easily get it by calling datum.label.
|
[
"Converts",
"a",
"datum",
"to",
"an",
"array",
".",
"Note",
"that",
"the",
"label",
"is",
"not",
"returned",
"as",
"one",
"can",
"easily",
"get",
"it",
"by",
"calling",
"datum",
".",
"label",
"."
] |
def datum_to_array(datum):
    """Converts a datum to an array. Note that the label is not returned,
    as one can easily get it by calling datum.label.

    If the datum carries raw bytes in ``datum.data`` they are decoded as
    uint8; otherwise ``datum.float_data`` is used. The result is reshaped
    to (channels, height, width).
    """
    if len(datum.data):
        # np.fromstring is deprecated (and removed for binary input in
        # modern NumPy); np.frombuffer is the supported replacement.
        # frombuffer yields a read-only view, so copy to keep the result
        # writable as the original implementation's output was.
        return np.frombuffer(datum.data, dtype=np.uint8).reshape(
            datum.channels, datum.height, datum.width).copy()
    else:
        return np.array(datum.float_data).astype(float).reshape(
            datum.channels, datum.height, datum.width)
|
[
"def",
"datum_to_array",
"(",
"datum",
")",
":",
"if",
"len",
"(",
"datum",
".",
"data",
")",
":",
"return",
"np",
".",
"fromstring",
"(",
"datum",
".",
"data",
",",
"dtype",
"=",
"np",
".",
"uint8",
")",
".",
"reshape",
"(",
"datum",
".",
"channels",
",",
"datum",
".",
"height",
",",
"datum",
".",
"width",
")",
"else",
":",
"return",
"np",
".",
"array",
"(",
"datum",
".",
"float_data",
")",
".",
"astype",
"(",
"float",
")",
".",
"reshape",
"(",
"datum",
".",
"channels",
",",
"datum",
".",
"height",
",",
"datum",
".",
"width",
")"
] |
https://github.com/BestSonny/SSTD/blob/174d452189f6bf9cf4b6957719392008bd974069/python/caffe/io.py#L84-L93
|
||
grpc/grpc
|
27bc6fe7797e43298dc931b96dc57322d0852a9f
|
src/python/grpcio/grpc/framework/interfaces/face/utilities.py
|
python
|
unary_unary_event
|
(behavior)
|
return _MethodImplementation(cardinality.Cardinality.UNARY_UNARY,
style.Service.EVENT, None, None, None, None,
behavior, None, None, None)
|
Creates an face.MethodImplementation for the given behavior.
Args:
behavior: The implementation of a unary-unary RPC method as a callable
value that takes a request value, a response callback to which to pass
the response value of the RPC, and an face.ServicerContext.
Returns:
An face.MethodImplementation derived from the given behavior.
|
Creates an face.MethodImplementation for the given behavior.
|
[
"Creates",
"an",
"face",
".",
"MethodImplementation",
"for",
"the",
"given",
"behavior",
"."
] |
def unary_unary_event(behavior):
    """Creates an face.MethodImplementation for the given behavior.

    Args:
      behavior: The implementation of a unary-unary RPC method as a callable
        value that takes a request value, a response callback to which to pass
        the response value of the RPC, and an face.ServicerContext.

    Returns:
      An face.MethodImplementation derived from the given behavior.
    """
    # Only the event-style unary-unary slot is populated; every other
    # behavior slot of the implementation record is left as None.
    implementation = _MethodImplementation(
        cardinality.Cardinality.UNARY_UNARY, style.Service.EVENT,
        None, None, None, None, behavior, None, None, None)
    return implementation
|
[
"def",
"unary_unary_event",
"(",
"behavior",
")",
":",
"return",
"_MethodImplementation",
"(",
"cardinality",
".",
"Cardinality",
".",
"UNARY_UNARY",
",",
"style",
".",
"Service",
".",
"EVENT",
",",
"None",
",",
"None",
",",
"None",
",",
"None",
",",
"behavior",
",",
"None",
",",
"None",
",",
"None",
")"
] |
https://github.com/grpc/grpc/blob/27bc6fe7797e43298dc931b96dc57322d0852a9f/src/python/grpcio/grpc/framework/interfaces/face/utilities.py#L105-L118
|
|
perilouswithadollarsign/cstrike15_src
|
f82112a2388b841d72cb62ca48ab1846dfcc11c8
|
thirdparty/protobuf-2.5.0/python/mox.py
|
python
|
In.equals
|
(self, rhs)
|
return self._key in rhs
|
Check to see whether key is in rhs.
Args:
rhs: dict
Returns:
bool
|
Check to see whether key is in rhs.
|
[
"Check",
"to",
"see",
"whether",
"key",
"is",
"in",
"rhs",
"."
] |
def equals(self, rhs):
"""Check to see whether key is in rhs.
Args:
rhs: dict
Returns:
bool
"""
return self._key in rhs
|
[
"def",
"equals",
"(",
"self",
",",
"rhs",
")",
":",
"return",
"self",
".",
"_key",
"in",
"rhs"
] |
https://github.com/perilouswithadollarsign/cstrike15_src/blob/f82112a2388b841d72cb62ca48ab1846dfcc11c8/thirdparty/protobuf-2.5.0/python/mox.py#L955-L965
|
|
wy1iu/LargeMargin_Softmax_Loss
|
c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec
|
scripts/cpp_lint.py
|
python
|
_NestingState.SeenOpenBrace
|
(self)
|
return (not self.stack) or self.stack[-1].seen_open_brace
|
Check if we have seen the opening brace for the innermost block.
Returns:
True if we have seen the opening brace, False if the innermost
block is still expecting an opening brace.
|
Check if we have seen the opening brace for the innermost block.
|
[
"Check",
"if",
"we",
"have",
"seen",
"the",
"opening",
"brace",
"for",
"the",
"innermost",
"block",
"."
] |
def SeenOpenBrace(self):
"""Check if we have seen the opening brace for the innermost block.
Returns:
True if we have seen the opening brace, False if the innermost
block is still expecting an opening brace.
"""
return (not self.stack) or self.stack[-1].seen_open_brace
|
[
"def",
"SeenOpenBrace",
"(",
"self",
")",
":",
"return",
"(",
"not",
"self",
".",
"stack",
")",
"or",
"self",
".",
"stack",
"[",
"-",
"1",
"]",
".",
"seen_open_brace"
] |
https://github.com/wy1iu/LargeMargin_Softmax_Loss/blob/c3e9f20e4f16e2b4daf7d358a614366b9b39a6ec/scripts/cpp_lint.py#L1931-L1938
|
|
thalium/icebox
|
99d147d5b9269222225443ce171b4fd46d8985d4
|
third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py
|
python
|
outputBuffer.htmlDocContentDumpFormatOutput
|
(self, cur, encoding, format)
|
Dump an HTML document.
|
Dump an HTML document.
|
[
"Dump",
"an",
"HTML",
"document",
"."
] |
def htmlDocContentDumpFormatOutput(self, cur, encoding, format):
"""Dump an HTML document. """
if cur is None: cur__o = None
else: cur__o = cur._o
libxml2mod.htmlDocContentDumpFormatOutput(self._o, cur__o, encoding, format)
|
[
"def",
"htmlDocContentDumpFormatOutput",
"(",
"self",
",",
"cur",
",",
"encoding",
",",
"format",
")",
":",
"if",
"cur",
"is",
"None",
":",
"cur__o",
"=",
"None",
"else",
":",
"cur__o",
"=",
"cur",
".",
"_o",
"libxml2mod",
".",
"htmlDocContentDumpFormatOutput",
"(",
"self",
".",
"_o",
",",
"cur__o",
",",
"encoding",
",",
"format",
")"
] |
https://github.com/thalium/icebox/blob/99d147d5b9269222225443ce171b4fd46d8985d4/third_party/virtualbox/src/libs/libxml2-2.9.4/python/libxml2class.py#L5251-L5255
|
||
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
src/external/coremltools_wrap/coremltools/coremltools/converters/onnx/_operators_nd.py
|
python
|
_convert_size
|
(builder, node, graph, err)
|
convert to CoreML GetShape and ReduceProd Layer:
https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L5131
https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L4722
|
convert to CoreML GetShape and ReduceProd Layer:
https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L5131
https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L4722
|
[
"convert",
"to",
"CoreML",
"GetShape",
"and",
"ReduceProd",
"Layer",
":",
"https",
":",
"//",
"github",
".",
"com",
"/",
"apple",
"/",
"coremltools",
"/",
"blob",
"/",
"655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492",
"/",
"mlmodel",
"/",
"format",
"/",
"NeuralNetwork",
".",
"proto#L5131",
"https",
":",
"//",
"github",
".",
"com",
"/",
"apple",
"/",
"coremltools",
"/",
"blob",
"/",
"655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492",
"/",
"mlmodel",
"/",
"format",
"/",
"NeuralNetwork",
".",
"proto#L4722"
] |
def _convert_size(builder, node, graph, err):
"""
convert to CoreML GetShape and ReduceProd Layer:
https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L5131
https://github.com/apple/coremltools/blob/655b3be5cc0d42c3c4fa49f0f0e4a93a26b3e492/mlmodel/format/NeuralNetwork.proto#L4722
"""
builder.add_get_shape(
name=node.name,
input_name=node.inputs[0],
output_name=node.inputs[0] + "_getshape",
)
builder.add_reduce_prod(
name=node.name + "_reduce_prod",
input_name=node.inputs[0] + "_getshape",
output_name=node.outputs[0],
)
|
[
"def",
"_convert_size",
"(",
"builder",
",",
"node",
",",
"graph",
",",
"err",
")",
":",
"builder",
".",
"add_get_shape",
"(",
"name",
"=",
"node",
".",
"name",
",",
"input_name",
"=",
"node",
".",
"inputs",
"[",
"0",
"]",
",",
"output_name",
"=",
"node",
".",
"inputs",
"[",
"0",
"]",
"+",
"\"_getshape\"",
",",
")",
"builder",
".",
"add_reduce_prod",
"(",
"name",
"=",
"node",
".",
"name",
"+",
"\"_reduce_prod\"",
",",
"input_name",
"=",
"node",
".",
"inputs",
"[",
"0",
"]",
"+",
"\"_getshape\"",
",",
"output_name",
"=",
"node",
".",
"outputs",
"[",
"0",
"]",
",",
")"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/coremltools/converters/onnx/_operators_nd.py#L2214-L2229
|
||
BYVoid/OpenCC
|
f3bafc4d016acfba5d5aef0cf5c98fa3c0090e0c
|
deps/google-benchmark/mingw.py
|
python
|
repository
|
(urls = urls, log = EmptyLogger())
|
return versions
|
Downloads and parse mingw-build repository files and parses them
|
Downloads and parse mingw-build repository files and parses them
|
[
"Downloads",
"and",
"parse",
"mingw",
"-",
"build",
"repository",
"files",
"and",
"parses",
"them"
] |
def repository(urls = urls, log = EmptyLogger()):
'''
Downloads and parse mingw-build repository files and parses them
'''
log.info('getting mingw-builds repository')
versions = {}
re_sourceforge = re.compile(r'http://sourceforge.net/projects/([^/]+)/files')
re_sub = r'http://downloads.sourceforge.net/project/\1'
for url in urls:
log.debug(' - requesting: %s', url)
socket = request.urlopen(url)
repo = socket.read()
if not isinstance(repo, str):
repo = repo.decode();
socket.close()
for entry in repo.split('\n')[:-1]:
value = entry.split('|')
version = tuple([int(n) for n in value[0].strip().split('.')])
version = versions.setdefault(version, {})
arch = value[1].strip()
if arch == 'x32':
arch = 'i686'
elif arch == 'x64':
arch = 'x86_64'
arch = version.setdefault(arch, {})
threading = arch.setdefault(value[2].strip(), {})
exceptions = threading.setdefault(value[3].strip(), {})
revision = exceptions.setdefault(int(value[4].strip()[3:]),
re_sourceforge.sub(re_sub, value[5].strip()))
return versions
|
[
"def",
"repository",
"(",
"urls",
"=",
"urls",
",",
"log",
"=",
"EmptyLogger",
"(",
")",
")",
":",
"log",
".",
"info",
"(",
"'getting mingw-builds repository'",
")",
"versions",
"=",
"{",
"}",
"re_sourceforge",
"=",
"re",
".",
"compile",
"(",
"r'http://sourceforge.net/projects/([^/]+)/files'",
")",
"re_sub",
"=",
"r'http://downloads.sourceforge.net/project/\\1'",
"for",
"url",
"in",
"urls",
":",
"log",
".",
"debug",
"(",
"' - requesting: %s'",
",",
"url",
")",
"socket",
"=",
"request",
".",
"urlopen",
"(",
"url",
")",
"repo",
"=",
"socket",
".",
"read",
"(",
")",
"if",
"not",
"isinstance",
"(",
"repo",
",",
"str",
")",
":",
"repo",
"=",
"repo",
".",
"decode",
"(",
")",
"socket",
".",
"close",
"(",
")",
"for",
"entry",
"in",
"repo",
".",
"split",
"(",
"'\\n'",
")",
"[",
":",
"-",
"1",
"]",
":",
"value",
"=",
"entry",
".",
"split",
"(",
"'|'",
")",
"version",
"=",
"tuple",
"(",
"[",
"int",
"(",
"n",
")",
"for",
"n",
"in",
"value",
"[",
"0",
"]",
".",
"strip",
"(",
")",
".",
"split",
"(",
"'.'",
")",
"]",
")",
"version",
"=",
"versions",
".",
"setdefault",
"(",
"version",
",",
"{",
"}",
")",
"arch",
"=",
"value",
"[",
"1",
"]",
".",
"strip",
"(",
")",
"if",
"arch",
"==",
"'x32'",
":",
"arch",
"=",
"'i686'",
"elif",
"arch",
"==",
"'x64'",
":",
"arch",
"=",
"'x86_64'",
"arch",
"=",
"version",
".",
"setdefault",
"(",
"arch",
",",
"{",
"}",
")",
"threading",
"=",
"arch",
".",
"setdefault",
"(",
"value",
"[",
"2",
"]",
".",
"strip",
"(",
")",
",",
"{",
"}",
")",
"exceptions",
"=",
"threading",
".",
"setdefault",
"(",
"value",
"[",
"3",
"]",
".",
"strip",
"(",
")",
",",
"{",
"}",
")",
"revision",
"=",
"exceptions",
".",
"setdefault",
"(",
"int",
"(",
"value",
"[",
"4",
"]",
".",
"strip",
"(",
")",
"[",
"3",
":",
"]",
")",
",",
"re_sourceforge",
".",
"sub",
"(",
"re_sub",
",",
"value",
"[",
"5",
"]",
".",
"strip",
"(",
")",
")",
")",
"return",
"versions"
] |
https://github.com/BYVoid/OpenCC/blob/f3bafc4d016acfba5d5aef0cf5c98fa3c0090e0c/deps/google-benchmark/mingw.py#L55-L84
|
|
baidu-research/tensorflow-allreduce
|
66d5b855e90b0949e9fa5cca5599fd729a70e874
|
tensorflow/python/ops/metrics_impl.py
|
python
|
_streaming_sparse_false_positive_at_k
|
(labels,
predictions_idx,
k=None,
class_id=None,
weights=None,
name=None)
|
Calculates weighted per step false positives for precision@k.
If `class_id` is specified, calculate binary true positives for `class_id`
only.
If `class_id` is not specified, calculate metrics for `k` predicted vs
`n` label classes, where `n` is the 2nd dimension of `labels`.
If `weights` is `None`, weights default to 1. Use weights of 0 to mask values.
Args:
labels: `int64` `Tensor` or `SparseTensor` with shape
[D1, ... DN, num_labels], where N >= 1 and num_labels is the number of
target classes for the associated prediction. Commonly, N=1 and `labels`
has shape [batch_size, num_labels]. [D1, ... DN] must match
`predictions_idx`.
predictions_idx: 1-D or higher `int64` `Tensor` with last dimension `k`,
top `k` predicted classes. For rank `n`, the first `n-1` dimensions must
match `labels`.
k: Integer, k for @k metric. This is only used for default op name.
class_id: Class for which we want binary metrics.
weights: `Tensor` whose rank is either 0, or n-1, where n is the rank of
`labels`. If the latter, it must be broadcastable to `labels` (i.e., all
dimensions must be either `1`, or the same as the corresponding `labels`
dimension).
name: Name of new variable, and namespace for other dependent ops.
Returns:
A tuple of `Variable` and update `Operation`.
Raises:
ValueError: If `weights` is not `None` and has an incompatible shape.
|
Calculates weighted per step false positives for precision@k.
|
[
"Calculates",
"weighted",
"per",
"step",
"false",
"positives",
"for",
"precision@k",
"."
] |
def _streaming_sparse_false_positive_at_k(labels,
predictions_idx,
k=None,
class_id=None,
weights=None,
name=None):
"""Calculates weighted per step false positives for precision@k.
If `class_id` is specified, calculate binary true positives for `class_id`
only.
If `class_id` is not specified, calculate metrics for `k` predicted vs
`n` label classes, where `n` is the 2nd dimension of `labels`.
If `weights` is `None`, weights default to 1. Use weights of 0 to mask values.
Args:
labels: `int64` `Tensor` or `SparseTensor` with shape
[D1, ... DN, num_labels], where N >= 1 and num_labels is the number of
target classes for the associated prediction. Commonly, N=1 and `labels`
has shape [batch_size, num_labels]. [D1, ... DN] must match
`predictions_idx`.
predictions_idx: 1-D or higher `int64` `Tensor` with last dimension `k`,
top `k` predicted classes. For rank `n`, the first `n-1` dimensions must
match `labels`.
k: Integer, k for @k metric. This is only used for default op name.
class_id: Class for which we want binary metrics.
weights: `Tensor` whose rank is either 0, or n-1, where n is the rank of
`labels`. If the latter, it must be broadcastable to `labels` (i.e., all
dimensions must be either `1`, or the same as the corresponding `labels`
dimension).
name: Name of new variable, and namespace for other dependent ops.
Returns:
A tuple of `Variable` and update `Operation`.
Raises:
ValueError: If `weights` is not `None` and has an incompatible shape.
"""
with ops.name_scope(
name, _at_k_name('false_positive', k, class_id=class_id),
(predictions_idx, labels, weights)) as scope:
fp = _sparse_false_positive_at_k(
predictions_idx=predictions_idx, labels=labels, class_id=class_id,
weights=weights)
batch_total_fp = math_ops.to_double(math_ops.reduce_sum(fp))
var = _local_variable(array_ops.zeros([], dtype=dtypes.float64), name=scope)
return var, state_ops.assign_add(var, batch_total_fp, name='update')
|
[
"def",
"_streaming_sparse_false_positive_at_k",
"(",
"labels",
",",
"predictions_idx",
",",
"k",
"=",
"None",
",",
"class_id",
"=",
"None",
",",
"weights",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"_at_k_name",
"(",
"'false_positive'",
",",
"k",
",",
"class_id",
"=",
"class_id",
")",
",",
"(",
"predictions_idx",
",",
"labels",
",",
"weights",
")",
")",
"as",
"scope",
":",
"fp",
"=",
"_sparse_false_positive_at_k",
"(",
"predictions_idx",
"=",
"predictions_idx",
",",
"labels",
"=",
"labels",
",",
"class_id",
"=",
"class_id",
",",
"weights",
"=",
"weights",
")",
"batch_total_fp",
"=",
"math_ops",
".",
"to_double",
"(",
"math_ops",
".",
"reduce_sum",
"(",
"fp",
")",
")",
"var",
"=",
"_local_variable",
"(",
"array_ops",
".",
"zeros",
"(",
"[",
"]",
",",
"dtype",
"=",
"dtypes",
".",
"float64",
")",
",",
"name",
"=",
"scope",
")",
"return",
"var",
",",
"state_ops",
".",
"assign_add",
"(",
"var",
",",
"batch_total_fp",
",",
"name",
"=",
"'update'",
")"
] |
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/metrics_impl.py#L2621-L2668
|
||
rusty1s/pytorch_cluster
|
576e0bbfa13cbe9c2fdc49de2cde5e2bf28f1e01
|
torch_cluster/radius.py
|
python
|
radius_graph
|
(x: torch.Tensor, r: float,
batch: Optional[torch.Tensor] = None, loop: bool = False,
max_num_neighbors: int = 32, flow: str = 'source_to_target',
num_workers: int = 1)
|
return torch.stack([row, col], dim=0)
|
r"""Computes graph edges to all points within a given distance.
Args:
x (Tensor): Node feature matrix
:math:`\mathbf{X} \in \mathbb{R}^{N \times F}`.
r (float): The radius.
batch (LongTensor, optional): Batch vector
:math:`\mathbf{b} \in {\{ 0, \ldots, B-1\}}^N`, which assigns each
node to a specific example. :obj:`batch` needs to be sorted.
(default: :obj:`None`)
loop (bool, optional): If :obj:`True`, the graph will contain
self-loops. (default: :obj:`False`)
max_num_neighbors (int, optional): The maximum number of neighbors to
return for each element.
If the number of actual neighbors is greater than
:obj:`max_num_neighbors`, returned neighbors are picked randomly.
(default: :obj:`32`)
flow (string, optional): The flow direction when used in combination
with message passing (:obj:`"source_to_target"` or
:obj:`"target_to_source"`). (default: :obj:`"source_to_target"`)
num_workers (int): Number of workers to use for computation. Has no
effect in case :obj:`batch` is not :obj:`None`, or the input lies
on the GPU. (default: :obj:`1`)
:rtype: :class:`LongTensor`
.. code-block:: python
import torch
from torch_cluster import radius_graph
x = torch.Tensor([[-1, -1], [-1, 1], [1, -1], [1, 1]])
batch = torch.tensor([0, 0, 0, 0])
edge_index = radius_graph(x, r=1.5, batch=batch, loop=False)
|
r"""Computes graph edges to all points within a given distance.
|
[
"r",
"Computes",
"graph",
"edges",
"to",
"all",
"points",
"within",
"a",
"given",
"distance",
"."
] |
def radius_graph(x: torch.Tensor, r: float,
batch: Optional[torch.Tensor] = None, loop: bool = False,
max_num_neighbors: int = 32, flow: str = 'source_to_target',
num_workers: int = 1) -> torch.Tensor:
r"""Computes graph edges to all points within a given distance.
Args:
x (Tensor): Node feature matrix
:math:`\mathbf{X} \in \mathbb{R}^{N \times F}`.
r (float): The radius.
batch (LongTensor, optional): Batch vector
:math:`\mathbf{b} \in {\{ 0, \ldots, B-1\}}^N`, which assigns each
node to a specific example. :obj:`batch` needs to be sorted.
(default: :obj:`None`)
loop (bool, optional): If :obj:`True`, the graph will contain
self-loops. (default: :obj:`False`)
max_num_neighbors (int, optional): The maximum number of neighbors to
return for each element.
If the number of actual neighbors is greater than
:obj:`max_num_neighbors`, returned neighbors are picked randomly.
(default: :obj:`32`)
flow (string, optional): The flow direction when used in combination
with message passing (:obj:`"source_to_target"` or
:obj:`"target_to_source"`). (default: :obj:`"source_to_target"`)
num_workers (int): Number of workers to use for computation. Has no
effect in case :obj:`batch` is not :obj:`None`, or the input lies
on the GPU. (default: :obj:`1`)
:rtype: :class:`LongTensor`
.. code-block:: python
import torch
from torch_cluster import radius_graph
x = torch.Tensor([[-1, -1], [-1, 1], [1, -1], [1, 1]])
batch = torch.tensor([0, 0, 0, 0])
edge_index = radius_graph(x, r=1.5, batch=batch, loop=False)
"""
assert flow in ['source_to_target', 'target_to_source']
edge_index = radius(x, x, r, batch, batch,
max_num_neighbors if loop else max_num_neighbors + 1,
num_workers)
if flow == 'source_to_target':
row, col = edge_index[1], edge_index[0]
else:
row, col = edge_index[0], edge_index[1]
if not loop:
mask = row != col
row, col = row[mask], col[mask]
return torch.stack([row, col], dim=0)
|
[
"def",
"radius_graph",
"(",
"x",
":",
"torch",
".",
"Tensor",
",",
"r",
":",
"float",
",",
"batch",
":",
"Optional",
"[",
"torch",
".",
"Tensor",
"]",
"=",
"None",
",",
"loop",
":",
"bool",
"=",
"False",
",",
"max_num_neighbors",
":",
"int",
"=",
"32",
",",
"flow",
":",
"str",
"=",
"'source_to_target'",
",",
"num_workers",
":",
"int",
"=",
"1",
")",
"->",
"torch",
".",
"Tensor",
":",
"assert",
"flow",
"in",
"[",
"'source_to_target'",
",",
"'target_to_source'",
"]",
"edge_index",
"=",
"radius",
"(",
"x",
",",
"x",
",",
"r",
",",
"batch",
",",
"batch",
",",
"max_num_neighbors",
"if",
"loop",
"else",
"max_num_neighbors",
"+",
"1",
",",
"num_workers",
")",
"if",
"flow",
"==",
"'source_to_target'",
":",
"row",
",",
"col",
"=",
"edge_index",
"[",
"1",
"]",
",",
"edge_index",
"[",
"0",
"]",
"else",
":",
"row",
",",
"col",
"=",
"edge_index",
"[",
"0",
"]",
",",
"edge_index",
"[",
"1",
"]",
"if",
"not",
"loop",
":",
"mask",
"=",
"row",
"!=",
"col",
"row",
",",
"col",
"=",
"row",
"[",
"mask",
"]",
",",
"col",
"[",
"mask",
"]",
"return",
"torch",
".",
"stack",
"(",
"[",
"row",
",",
"col",
"]",
",",
"dim",
"=",
"0",
")"
] |
https://github.com/rusty1s/pytorch_cluster/blob/576e0bbfa13cbe9c2fdc49de2cde5e2bf28f1e01/torch_cluster/radius.py#L75-L128
|
|
synfig/synfig
|
a5ec91db5b751dc12e4400ccfb5c063fd6d2d928
|
synfig-studio/plugins/lottie-exporter/common/misc.py
|
python
|
get_vector
|
(waypoint)
|
return Vector(x, y)
|
Given a waypoint, it parses the string vector into Vector class defined in
this converter
Args:
waypoint (lxml.etree._Element) : Synfig format waypoint
Returns:
(common.Vector.Vector) : x and y axis values stores in Vector format
|
Given a waypoint, it parses the string vector into Vector class defined in
this converter
|
[
"Given",
"a",
"waypoint",
"it",
"parses",
"the",
"string",
"vector",
"into",
"Vector",
"class",
"defined",
"in",
"this",
"converter"
] |
def get_vector(waypoint):
"""
Given a waypoint, it parses the string vector into Vector class defined in
this converter
Args:
waypoint (lxml.etree._Element) : Synfig format waypoint
Returns:
(common.Vector.Vector) : x and y axis values stores in Vector format
"""
# converting radius and angle to a vector
if waypoint.tag == "radial_composite":
for child in waypoint:
if child.tag == "radius":
radius = float(child[0].attrib["value"])
radius *= settings.PIX_PER_UNIT
elif child.tag == "theta":
angle = float(child[0].attrib["value"])
x, y = radial_to_tangent(radius, angle)
else:
x = float(waypoint[0][0].text)
y = float(waypoint[0][1].text)
return Vector(x, y)
|
[
"def",
"get_vector",
"(",
"waypoint",
")",
":",
"# converting radius and angle to a vector",
"if",
"waypoint",
".",
"tag",
"==",
"\"radial_composite\"",
":",
"for",
"child",
"in",
"waypoint",
":",
"if",
"child",
".",
"tag",
"==",
"\"radius\"",
":",
"radius",
"=",
"float",
"(",
"child",
"[",
"0",
"]",
".",
"attrib",
"[",
"\"value\"",
"]",
")",
"radius",
"*=",
"settings",
".",
"PIX_PER_UNIT",
"elif",
"child",
".",
"tag",
"==",
"\"theta\"",
":",
"angle",
"=",
"float",
"(",
"child",
"[",
"0",
"]",
".",
"attrib",
"[",
"\"value\"",
"]",
")",
"x",
",",
"y",
"=",
"radial_to_tangent",
"(",
"radius",
",",
"angle",
")",
"else",
":",
"x",
"=",
"float",
"(",
"waypoint",
"[",
"0",
"]",
"[",
"0",
"]",
".",
"text",
")",
"y",
"=",
"float",
"(",
"waypoint",
"[",
"0",
"]",
"[",
"1",
"]",
".",
"text",
")",
"return",
"Vector",
"(",
"x",
",",
"y",
")"
] |
https://github.com/synfig/synfig/blob/a5ec91db5b751dc12e4400ccfb5c063fd6d2d928/synfig-studio/plugins/lottie-exporter/common/misc.py#L363-L386
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/stc.py
|
python
|
StyledTextCtrl.WordLeftExtend
|
(*args, **kwargs)
|
return _stc.StyledTextCtrl_WordLeftExtend(*args, **kwargs)
|
WordLeftExtend(self)
Move caret left one word extending selection to new caret position.
|
WordLeftExtend(self)
|
[
"WordLeftExtend",
"(",
"self",
")"
] |
def WordLeftExtend(*args, **kwargs):
"""
WordLeftExtend(self)
Move caret left one word extending selection to new caret position.
"""
return _stc.StyledTextCtrl_WordLeftExtend(*args, **kwargs)
|
[
"def",
"WordLeftExtend",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_stc",
".",
"StyledTextCtrl_WordLeftExtend",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/stc.py#L4400-L4406
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_core.py
|
python
|
DropFilesEvent.GetPosition
|
(*args, **kwargs)
|
return _core_.DropFilesEvent_GetPosition(*args, **kwargs)
|
GetPosition(self) -> Point
Returns the position at which the files were dropped.
|
GetPosition(self) -> Point
|
[
"GetPosition",
"(",
"self",
")",
"-",
">",
"Point"
] |
def GetPosition(*args, **kwargs):
"""
GetPosition(self) -> Point
Returns the position at which the files were dropped.
"""
return _core_.DropFilesEvent_GetPosition(*args, **kwargs)
|
[
"def",
"GetPosition",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"DropFilesEvent_GetPosition",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L6654-L6660
|
|
gnina/gnina
|
b9ae032f52fc7a8153987bde09c0efa3620d8bb6
|
caffe/examples/pycaffe/layers/pascal_multilabel_datalayers.py
|
python
|
PascalMultilabelDataLayerSync.reshape
|
(self, bottom, top)
|
There is no need to reshape the data, since the input is of fixed size
(rows and columns)
|
There is no need to reshape the data, since the input is of fixed size
(rows and columns)
|
[
"There",
"is",
"no",
"need",
"to",
"reshape",
"the",
"data",
"since",
"the",
"input",
"is",
"of",
"fixed",
"size",
"(",
"rows",
"and",
"columns",
")"
] |
def reshape(self, bottom, top):
"""
There is no need to reshape the data, since the input is of fixed size
(rows and columns)
"""
pass
|
[
"def",
"reshape",
"(",
"self",
",",
"bottom",
",",
"top",
")",
":",
"pass"
] |
https://github.com/gnina/gnina/blob/b9ae032f52fc7a8153987bde09c0efa3620d8bb6/caffe/examples/pycaffe/layers/pascal_multilabel_datalayers.py#L67-L72
|
||
idaholab/moose
|
9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
|
python/MooseDocs/common/log.py
|
python
|
MultiprocessingHandler.aquire
|
(self)
|
Disable.
|
Disable.
|
[
"Disable",
"."
] |
def aquire(self):
"""Disable."""
pass
|
[
"def",
"aquire",
"(",
"self",
")",
":",
"pass"
] |
https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/MooseDocs/common/log.py#L66-L68
|
||
Polidea/SiriusObfuscator
|
b0e590d8130e97856afe578869b83a209e2b19be
|
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
|
python
|
SBTypeCategory.AddLanguage
|
(self, *args)
|
return _lldb.SBTypeCategory_AddLanguage(self, *args)
|
AddLanguage(self, LanguageType language)
|
AddLanguage(self, LanguageType language)
|
[
"AddLanguage",
"(",
"self",
"LanguageType",
"language",
")"
] |
def AddLanguage(self, *args):
"""AddLanguage(self, LanguageType language)"""
return _lldb.SBTypeCategory_AddLanguage(self, *args)
|
[
"def",
"AddLanguage",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_lldb",
".",
"SBTypeCategory_AddLanguage",
"(",
"self",
",",
"*",
"args",
")"
] |
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L10760-L10762
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pkg_resources/_vendor/packaging/version.py
|
python
|
_parse_local_version
|
(local)
|
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
|
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
|
[
"Takes",
"a",
"string",
"like",
"abc",
".",
"1",
".",
"twelve",
"and",
"turns",
"it",
"into",
"(",
"abc",
"1",
"twelve",
")",
"."
] |
def _parse_local_version(local):
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_seperators.split(local)
)
|
[
"def",
"_parse_local_version",
"(",
"local",
")",
":",
"if",
"local",
"is",
"not",
"None",
":",
"return",
"tuple",
"(",
"part",
".",
"lower",
"(",
")",
"if",
"not",
"part",
".",
"isdigit",
"(",
")",
"else",
"int",
"(",
"part",
")",
"for",
"part",
"in",
"_local_version_seperators",
".",
"split",
"(",
"local",
")",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pkg_resources/_vendor/packaging/version.py#L332-L340
|
||
PX4/PX4-Autopilot
|
0b9f60a0370be53d683352c63fd92db3d6586e18
|
Tools/mavlink_px4.py
|
python
|
MAVLink.scaled_pressure_send
|
(self, time_boot_ms, press_abs, press_diff, temperature)
|
return self.send(self.scaled_pressure_encode(time_boot_ms, press_abs, press_diff, temperature))
|
The pressure readings for the typical setup of one absolute and
differential pressure sensor. The units are as
specified in each field.
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t)
press_abs : Absolute pressure (hectopascal) (float)
press_diff : Differential pressure 1 (hectopascal) (float)
temperature : Temperature measurement (0.01 degrees celsius) (int16_t)
|
The pressure readings for the typical setup of one absolute and
differential pressure sensor. The units are as
specified in each field.
|
[
"The",
"pressure",
"readings",
"for",
"the",
"typical",
"setup",
"of",
"one",
"absolute",
"and",
"differential",
"pressure",
"sensor",
".",
"The",
"units",
"are",
"as",
"specified",
"in",
"each",
"field",
"."
] |
def scaled_pressure_send(self, time_boot_ms, press_abs, press_diff, temperature):
'''
The pressure readings for the typical setup of one absolute and
differential pressure sensor. The units are as
specified in each field.
time_boot_ms : Timestamp (milliseconds since system boot) (uint32_t)
press_abs : Absolute pressure (hectopascal) (float)
press_diff : Differential pressure 1 (hectopascal) (float)
temperature : Temperature measurement (0.01 degrees celsius) (int16_t)
'''
return self.send(self.scaled_pressure_encode(time_boot_ms, press_abs, press_diff, temperature))
|
[
"def",
"scaled_pressure_send",
"(",
"self",
",",
"time_boot_ms",
",",
"press_abs",
",",
"press_diff",
",",
"temperature",
")",
":",
"return",
"self",
".",
"send",
"(",
"self",
".",
"scaled_pressure_encode",
"(",
"time_boot_ms",
",",
"press_abs",
",",
"press_diff",
",",
"temperature",
")",
")"
] |
https://github.com/PX4/PX4-Autopilot/blob/0b9f60a0370be53d683352c63fd92db3d6586e18/Tools/mavlink_px4.py#L3029-L3041
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py3/pandas/io/formats/printing.py
|
python
|
format_object_attrs
|
(
obj: Sized, include_dtype: bool = True
)
|
return attrs
|
Return a list of tuples of the (attr, formatted_value)
for common attrs, including dtype, name, length
Parameters
----------
obj : object
Must be sized.
include_dtype : bool
If False, dtype won't be in the returned list
Returns
-------
list of 2-tuple
|
Return a list of tuples of the (attr, formatted_value)
for common attrs, including dtype, name, length
|
[
"Return",
"a",
"list",
"of",
"tuples",
"of",
"the",
"(",
"attr",
"formatted_value",
")",
"for",
"common",
"attrs",
"including",
"dtype",
"name",
"length"
] |
def format_object_attrs(
obj: Sized, include_dtype: bool = True
) -> list[tuple[str, str | int]]:
"""
Return a list of tuples of the (attr, formatted_value)
for common attrs, including dtype, name, length
Parameters
----------
obj : object
Must be sized.
include_dtype : bool
If False, dtype won't be in the returned list
Returns
-------
list of 2-tuple
"""
attrs: list[tuple[str, str | int]] = []
if hasattr(obj, "dtype") and include_dtype:
# error: "Sized" has no attribute "dtype"
attrs.append(("dtype", f"'{obj.dtype}'")) # type: ignore[attr-defined]
if getattr(obj, "name", None) is not None:
# error: "Sized" has no attribute "name"
attrs.append(("name", default_pprint(obj.name))) # type: ignore[attr-defined]
# error: "Sized" has no attribute "names"
elif getattr(obj, "names", None) is not None and any(
obj.names # type: ignore[attr-defined]
):
# error: "Sized" has no attribute "names"
attrs.append(("names", default_pprint(obj.names))) # type: ignore[attr-defined]
max_seq_items = get_option("display.max_seq_items") or len(obj)
if len(obj) > max_seq_items:
attrs.append(("length", len(obj)))
return attrs
|
[
"def",
"format_object_attrs",
"(",
"obj",
":",
"Sized",
",",
"include_dtype",
":",
"bool",
"=",
"True",
")",
"->",
"list",
"[",
"tuple",
"[",
"str",
",",
"str",
"|",
"int",
"]",
"]",
":",
"attrs",
":",
"list",
"[",
"tuple",
"[",
"str",
",",
"str",
"|",
"int",
"]",
"]",
"=",
"[",
"]",
"if",
"hasattr",
"(",
"obj",
",",
"\"dtype\"",
")",
"and",
"include_dtype",
":",
"# error: \"Sized\" has no attribute \"dtype\"",
"attrs",
".",
"append",
"(",
"(",
"\"dtype\"",
",",
"f\"'{obj.dtype}'\"",
")",
")",
"# type: ignore[attr-defined]",
"if",
"getattr",
"(",
"obj",
",",
"\"name\"",
",",
"None",
")",
"is",
"not",
"None",
":",
"# error: \"Sized\" has no attribute \"name\"",
"attrs",
".",
"append",
"(",
"(",
"\"name\"",
",",
"default_pprint",
"(",
"obj",
".",
"name",
")",
")",
")",
"# type: ignore[attr-defined]",
"# error: \"Sized\" has no attribute \"names\"",
"elif",
"getattr",
"(",
"obj",
",",
"\"names\"",
",",
"None",
")",
"is",
"not",
"None",
"and",
"any",
"(",
"obj",
".",
"names",
"# type: ignore[attr-defined]",
")",
":",
"# error: \"Sized\" has no attribute \"names\"",
"attrs",
".",
"append",
"(",
"(",
"\"names\"",
",",
"default_pprint",
"(",
"obj",
".",
"names",
")",
")",
")",
"# type: ignore[attr-defined]",
"max_seq_items",
"=",
"get_option",
"(",
"\"display.max_seq_items\"",
")",
"or",
"len",
"(",
"obj",
")",
"if",
"len",
"(",
"obj",
")",
">",
"max_seq_items",
":",
"attrs",
".",
"append",
"(",
"(",
"\"length\"",
",",
"len",
"(",
"obj",
")",
")",
")",
"return",
"attrs"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/io/formats/printing.py#L508-L543
|
|
eldar/deepcut-cnn
|
928bf2f224fce132f6e4404b4c95fb017297a5e0
|
scripts/cpp_lint.py
|
python
|
GetPreviousNonBlankLine
|
(clean_lines, linenum)
|
return ('', -1)
|
Return the most recent non-blank line and its line number.
Args:
clean_lines: A CleansedLines instance containing the file contents.
linenum: The number of the line to check.
Returns:
A tuple with two elements. The first element is the contents of the last
non-blank line before the current line, or the empty string if this is the
first non-blank line. The second is the line number of that line, or -1
if this is the first non-blank line.
|
Return the most recent non-blank line and its line number.
|
[
"Return",
"the",
"most",
"recent",
"non",
"-",
"blank",
"line",
"and",
"its",
"line",
"number",
"."
] |
def GetPreviousNonBlankLine(clean_lines, linenum):
"""Return the most recent non-blank line and its line number.
Args:
clean_lines: A CleansedLines instance containing the file contents.
linenum: The number of the line to check.
Returns:
A tuple with two elements. The first element is the contents of the last
non-blank line before the current line, or the empty string if this is the
first non-blank line. The second is the line number of that line, or -1
if this is the first non-blank line.
"""
prevlinenum = linenum - 1
while prevlinenum >= 0:
prevline = clean_lines.elided[prevlinenum]
if not IsBlankLine(prevline): # if not a blank line...
return (prevline, prevlinenum)
prevlinenum -= 1
return ('', -1)
|
[
"def",
"GetPreviousNonBlankLine",
"(",
"clean_lines",
",",
"linenum",
")",
":",
"prevlinenum",
"=",
"linenum",
"-",
"1",
"while",
"prevlinenum",
">=",
"0",
":",
"prevline",
"=",
"clean_lines",
".",
"elided",
"[",
"prevlinenum",
"]",
"if",
"not",
"IsBlankLine",
"(",
"prevline",
")",
":",
"# if not a blank line...",
"return",
"(",
"prevline",
",",
"prevlinenum",
")",
"prevlinenum",
"-=",
"1",
"return",
"(",
"''",
",",
"-",
"1",
")"
] |
https://github.com/eldar/deepcut-cnn/blob/928bf2f224fce132f6e4404b4c95fb017297a5e0/scripts/cpp_lint.py#L3046-L3066
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py3/pandas/core/computation/expressions.py
|
python
|
_can_use_numexpr
|
(op, op_str, a, b, dtype_check)
|
return False
|
return a boolean if we WILL be using numexpr
|
return a boolean if we WILL be using numexpr
|
[
"return",
"a",
"boolean",
"if",
"we",
"WILL",
"be",
"using",
"numexpr"
] |
def _can_use_numexpr(op, op_str, a, b, dtype_check):
"""return a boolean if we WILL be using numexpr"""
if op_str is not None:
# required min elements (otherwise we are adding overhead)
if a.size > _MIN_ELEMENTS:
# check for dtype compatibility
dtypes: set[str] = set()
for o in [a, b]:
# ndarray and Series Case
if hasattr(o, "dtype"):
dtypes |= {o.dtype.name}
# allowed are a superset
if not len(dtypes) or _ALLOWED_DTYPES[dtype_check] >= dtypes:
return True
return False
|
[
"def",
"_can_use_numexpr",
"(",
"op",
",",
"op_str",
",",
"a",
",",
"b",
",",
"dtype_check",
")",
":",
"if",
"op_str",
"is",
"not",
"None",
":",
"# required min elements (otherwise we are adding overhead)",
"if",
"a",
".",
"size",
">",
"_MIN_ELEMENTS",
":",
"# check for dtype compatibility",
"dtypes",
":",
"set",
"[",
"str",
"]",
"=",
"set",
"(",
")",
"for",
"o",
"in",
"[",
"a",
",",
"b",
"]",
":",
"# ndarray and Series Case",
"if",
"hasattr",
"(",
"o",
",",
"\"dtype\"",
")",
":",
"dtypes",
"|=",
"{",
"o",
".",
"dtype",
".",
"name",
"}",
"# allowed are a superset",
"if",
"not",
"len",
"(",
"dtypes",
")",
"or",
"_ALLOWED_DTYPES",
"[",
"dtype_check",
"]",
">=",
"dtypes",
":",
"return",
"True",
"return",
"False"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/core/computation/expressions.py#L72-L89
|
|
deepmodeling/deepmd-kit
|
159e45d248b0429844fb6a8cb3b3a201987c8d79
|
deepmd/descriptor/descriptor.py
|
python
|
Descriptor.get_dim_rot_mat_1
|
(self)
|
Returns the first dimension of the rotation matrix. The rotation is of shape
dim_1 x 3
Returns
-------
int
the first dimension of the rotation matrix
|
Returns the first dimension of the rotation matrix. The rotation is of shape
dim_1 x 3
|
[
"Returns",
"the",
"first",
"dimension",
"of",
"the",
"rotation",
"matrix",
".",
"The",
"rotation",
"is",
"of",
"shape",
"dim_1",
"x",
"3"
] |
def get_dim_rot_mat_1(self) -> int:
"""
Returns the first dimension of the rotation matrix. The rotation is of shape
dim_1 x 3
Returns
-------
int
the first dimension of the rotation matrix
"""
# TODO: I think this method should be implemented as it's called by dipole and
# polar fitting network. However, currently not all descriptors have this
# method.
raise NotImplementedError
|
[
"def",
"get_dim_rot_mat_1",
"(",
"self",
")",
"->",
"int",
":",
"# TODO: I think this method should be implemented as it's called by dipole and",
"# polar fitting network. However, currently not all descriptors have this",
"# method.",
"raise",
"NotImplementedError"
] |
https://github.com/deepmodeling/deepmd-kit/blob/159e45d248b0429844fb6a8cb3b3a201987c8d79/deepmd/descriptor/descriptor.py#L109-L122
|
||
weolar/miniblink49
|
1c4678db0594a4abde23d3ebbcc7cd13c3170777
|
third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/dispatch.py
|
python
|
Dispatcher.transfer_data
|
(self, request)
|
Let a handler transfer_data with a WebSocket client.
Select a handler based on request.ws_resource and call its
web_socket_transfer_data function.
Args:
request: mod_python request.
Raises:
DispatchException: when handler was not found
AbortedByUserException: when user handler abort connection
|
Let a handler transfer_data with a WebSocket client.
|
[
"Let",
"a",
"handler",
"transfer_data",
"with",
"a",
"WebSocket",
"client",
"."
] |
def transfer_data(self, request):
"""Let a handler transfer_data with a WebSocket client.
Select a handler based on request.ws_resource and call its
web_socket_transfer_data function.
Args:
request: mod_python request.
Raises:
DispatchException: when handler was not found
AbortedByUserException: when user handler abort connection
"""
# TODO(tyoshino): Terminate underlying TCP connection if possible.
try:
if mux.use_mux(request):
mux.start(request, self)
else:
handler_suite = self.get_handler_suite(request.ws_resource)
if handler_suite is None:
raise DispatchException('No handler for: %r' %
request.ws_resource)
transfer_data_ = handler_suite.transfer_data
transfer_data_(request)
if not request.server_terminated:
request.ws_stream.close_connection()
# Catch non-critical exceptions the handler didn't handle.
except handshake.AbortedByUserException, e:
self._logger.debug('%s', util.get_stack_trace())
raise
except msgutil.BadOperationException, e:
self._logger.debug('%s', e)
request.ws_stream.close_connection(
common.STATUS_INTERNAL_ENDPOINT_ERROR)
except msgutil.InvalidFrameException, e:
# InvalidFrameException must be caught before
# ConnectionTerminatedException that catches InvalidFrameException.
self._logger.debug('%s', e)
request.ws_stream.close_connection(common.STATUS_PROTOCOL_ERROR)
except msgutil.UnsupportedFrameException, e:
self._logger.debug('%s', e)
request.ws_stream.close_connection(common.STATUS_UNSUPPORTED_DATA)
except stream.InvalidUTF8Exception, e:
self._logger.debug('%s', e)
request.ws_stream.close_connection(
common.STATUS_INVALID_FRAME_PAYLOAD_DATA)
except msgutil.ConnectionTerminatedException, e:
self._logger.debug('%s', e)
except Exception, e:
# Any other exceptions are forwarded to the caller of this
# function.
util.prepend_message_to_exception(
'%s raised exception for %s: ' % (
_TRANSFER_DATA_HANDLER_NAME, request.ws_resource),
e)
raise
|
[
"def",
"transfer_data",
"(",
"self",
",",
"request",
")",
":",
"# TODO(tyoshino): Terminate underlying TCP connection if possible.",
"try",
":",
"if",
"mux",
".",
"use_mux",
"(",
"request",
")",
":",
"mux",
".",
"start",
"(",
"request",
",",
"self",
")",
"else",
":",
"handler_suite",
"=",
"self",
".",
"get_handler_suite",
"(",
"request",
".",
"ws_resource",
")",
"if",
"handler_suite",
"is",
"None",
":",
"raise",
"DispatchException",
"(",
"'No handler for: %r'",
"%",
"request",
".",
"ws_resource",
")",
"transfer_data_",
"=",
"handler_suite",
".",
"transfer_data",
"transfer_data_",
"(",
"request",
")",
"if",
"not",
"request",
".",
"server_terminated",
":",
"request",
".",
"ws_stream",
".",
"close_connection",
"(",
")",
"# Catch non-critical exceptions the handler didn't handle.",
"except",
"handshake",
".",
"AbortedByUserException",
",",
"e",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"'%s'",
",",
"util",
".",
"get_stack_trace",
"(",
")",
")",
"raise",
"except",
"msgutil",
".",
"BadOperationException",
",",
"e",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"'%s'",
",",
"e",
")",
"request",
".",
"ws_stream",
".",
"close_connection",
"(",
"common",
".",
"STATUS_INTERNAL_ENDPOINT_ERROR",
")",
"except",
"msgutil",
".",
"InvalidFrameException",
",",
"e",
":",
"# InvalidFrameException must be caught before",
"# ConnectionTerminatedException that catches InvalidFrameException.",
"self",
".",
"_logger",
".",
"debug",
"(",
"'%s'",
",",
"e",
")",
"request",
".",
"ws_stream",
".",
"close_connection",
"(",
"common",
".",
"STATUS_PROTOCOL_ERROR",
")",
"except",
"msgutil",
".",
"UnsupportedFrameException",
",",
"e",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"'%s'",
",",
"e",
")",
"request",
".",
"ws_stream",
".",
"close_connection",
"(",
"common",
".",
"STATUS_UNSUPPORTED_DATA",
")",
"except",
"stream",
".",
"InvalidUTF8Exception",
",",
"e",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"'%s'",
",",
"e",
")",
"request",
".",
"ws_stream",
".",
"close_connection",
"(",
"common",
".",
"STATUS_INVALID_FRAME_PAYLOAD_DATA",
")",
"except",
"msgutil",
".",
"ConnectionTerminatedException",
",",
"e",
":",
"self",
".",
"_logger",
".",
"debug",
"(",
"'%s'",
",",
"e",
")",
"except",
"Exception",
",",
"e",
":",
"# Any other exceptions are forwarded to the caller of this",
"# function.",
"util",
".",
"prepend_message_to_exception",
"(",
"'%s raised exception for %s: '",
"%",
"(",
"_TRANSFER_DATA_HANDLER_NAME",
",",
"request",
".",
"ws_resource",
")",
",",
"e",
")",
"raise"
] |
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/dispatch.py#L270-L327
|
||
rrwick/Porechop
|
109e437280436d1ec27e5a5b7a34ffb752176390
|
ez_setup.py
|
python
|
has_powershell
|
()
|
return True
|
Determine if Powershell is available.
|
Determine if Powershell is available.
|
[
"Determine",
"if",
"Powershell",
"is",
"available",
"."
] |
def has_powershell():
"""Determine if Powershell is available."""
if platform.system() != 'Windows':
return False
cmd = ['powershell', '-Command', 'echo test']
with open(os.path.devnull, 'wb') as devnull:
try:
subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
except Exception:
return False
return True
|
[
"def",
"has_powershell",
"(",
")",
":",
"if",
"platform",
".",
"system",
"(",
")",
"!=",
"'Windows'",
":",
"return",
"False",
"cmd",
"=",
"[",
"'powershell'",
",",
"'-Command'",
",",
"'echo test'",
"]",
"with",
"open",
"(",
"os",
".",
"path",
".",
"devnull",
",",
"'wb'",
")",
"as",
"devnull",
":",
"try",
":",
"subprocess",
".",
"check_call",
"(",
"cmd",
",",
"stdout",
"=",
"devnull",
",",
"stderr",
"=",
"devnull",
")",
"except",
"Exception",
":",
"return",
"False",
"return",
"True"
] |
https://github.com/rrwick/Porechop/blob/109e437280436d1ec27e5a5b7a34ffb752176390/ez_setup.py#L259-L269
|
|
baidu/bigflow
|
449245016c0df7d1252e85581e588bfc60cefad3
|
bigflow_python/python/bigflow/transforms.py
|
python
|
transform
|
(pcollection, first_arg, *other_args, **options)
|
return bigflow.transform_impls.transform.transform(pcollection, first_arg, *other_args,
**options)
|
对给定PCollection进行任意的变换,结果为另一个PCollection
transform有两种形式,形式一:
基本原型为`transform(pcollection, initializer, transformer, finalizer, *side_inputs, **options)`
transform将PCollection的处理分为3个阶段: 初始化,遍历及结束,分别对应于
initializer, transformer和finalizer三个处理函数。三个函数之间有一个状态
status(也可以理解为上下文context),同时有一个emitter参数可以向输出PCollection发送数据
假定输入数据类型为I,输出数据类型为O,initializer, transformer, finalizer各自的期望签名为:
initializer(emitter, *side_inputs) => status(object)
transformer(status, emitter, I, *side_inputs) => status(object)
(transformer的第一个参数允许被修改)
finalizer(status, emitter, *side_inputs) => None (finalizer的第一个参数允许被修改)
emitter.emit(O)
Args:
pcollection (PCollection): 输入PCollection
initializer (callable): 初始化函数
transformer (callable): 变换函数
finalizer (callable): 结束函数
*side_inputs: 参与计算的SideInputs
**options: 可配置选项
Returns:
PCollection: 表示返回结果的PCollection
>>> from bigflow import transforms
>>> import copy
>>> def initializer(emitter):
>>> return []
>>>
>>> def transformer(status, emitter, inp):
>>> status.append(copy.deepcopy(inp)) #如果要缓存一个数据,最好复制一份。
>>> return status
>>>
>>> def finalizer(status, emitter):
>>> emitter.emit(status)
>>>
>>> _p = _pipeline.parallelize([1, 2, 3])
>>> _plist = transforms.transform(_p, initializer, transformer, finalizer)
>>> print _plist.count().get() # 只有一个元素,元素的内容是[1, 2, 3]这样一个列表。
1
>>> print _plist.get()
[[1, 2, 3]]
形式二:
基本原型为`transform(pcollection, transformer, *side_inputs, **options)`
其中transformer应为
:class:`bigflow.base.Transformer <bigflow.base.Transformer>`
类的子类,
Transformer.begin_process在数据开始处理前会被调用。
Transformer.process在数据开始处理时,每条数据调用一次,传入需要的数据。
Transformer.end_process在数据处理完成后被调用。
用户需要输出的数据以列表或其它可迭代对象的形式返回,其中所有元素都会被作为输出PCollection中的一个元素。
(注意,如果不需要输出请返回一个空的[],而不要返回None)
>>> class SumTransformer(base.Transformer):
...
... def begin_process(self):
... self._sum = 0
... return []
...
... def process(self, record):
... self._sum += record
... return []
...
... def end_process(self):
... yield self._sum
...
>>> p1 = _pipeline.parallelize([1, 2, 3])
>>> transforms.transform(p1, SumTransformer).get()
6
>>> class PartialSumTransformer(base.Transformer):
...
... def begin_process(self):
... self._sum = 0
... return []
...
... def process(self, record):
... self._sum += record
... yield self._sum
...
>>> transforms.transform(p1, PartialSumTransformer()),get()
[1, 3, 6]
>>> class ZipTransformer(base.Transformer):
...
... def begin_process(self, *si):
... self.index = 0
... lens = map(len, si)
... self.min_len = min(lens)
... return []
...
... def process(self, inp, *si):
... if self.index < self.min_len:
... yield (inp, ) + tuple(map(lambda x: x[self.index], si))
... self.index += 1
...
>>> p2 = _pipeline.parallelize([4, 5]).sort()
>>> transforms.transform(p1, ZipTransformer(), p2).get()
[(1, 4), (2, 5)]
本方法为Bigflow所提供的最底层和最复杂的变换方法,它可以表达对PCollection
的任意变换。
在有其它函数(如aggregate)能完成同样功能时,尽量不要使用该函数,框架无法了解该函数内部实现,
无法进行许多深层次的优化工作。
|
[] |
def transform(pcollection, first_arg, *other_args, **options):
"""
对给定PCollection进行任意的变换,结果为另一个PCollection
transform有两种形式,形式一:
基本原型为`transform(pcollection, initializer, transformer, finalizer, *side_inputs, **options)`
transform将PCollection的处理分为3个阶段: 初始化,遍历及结束,分别对应于
initializer, transformer和finalizer三个处理函数。三个函数之间有一个状态
status(也可以理解为上下文context),同时有一个emitter参数可以向输出PCollection发送数据
假定输入数据类型为I,输出数据类型为O,initializer, transformer, finalizer各自的期望签名为:
initializer(emitter, *side_inputs) => status(object)
transformer(status, emitter, I, *side_inputs) => status(object)
(transformer的第一个参数允许被修改)
finalizer(status, emitter, *side_inputs) => None (finalizer的第一个参数允许被修改)
emitter.emit(O)
Args:
pcollection (PCollection): 输入PCollection
initializer (callable): 初始化函数
transformer (callable): 变换函数
finalizer (callable): 结束函数
*side_inputs: 参与计算的SideInputs
**options: 可配置选项
Returns:
PCollection: 表示返回结果的PCollection
>>> from bigflow import transforms
>>> import copy
>>> def initializer(emitter):
>>> return []
>>>
>>> def transformer(status, emitter, inp):
>>> status.append(copy.deepcopy(inp)) #如果要缓存一个数据,最好复制一份。
>>> return status
>>>
>>> def finalizer(status, emitter):
>>> emitter.emit(status)
>>>
>>> _p = _pipeline.parallelize([1, 2, 3])
>>> _plist = transforms.transform(_p, initializer, transformer, finalizer)
>>> print _plist.count().get() # 只有一个元素,元素的内容是[1, 2, 3]这样一个列表。
1
>>> print _plist.get()
[[1, 2, 3]]
形式二:
基本原型为`transform(pcollection, transformer, *side_inputs, **options)`
其中transformer应为
:class:`bigflow.base.Transformer <bigflow.base.Transformer>`
类的子类,
Transformer.begin_process在数据开始处理前会被调用。
Transformer.process在数据开始处理时,每条数据调用一次,传入需要的数据。
Transformer.end_process在数据处理完成后被调用。
用户需要输出的数据以列表或其它可迭代对象的形式返回,其中所有元素都会被作为输出PCollection中的一个元素。
(注意,如果不需要输出请返回一个空的[],而不要返回None)
>>> class SumTransformer(base.Transformer):
...
... def begin_process(self):
... self._sum = 0
... return []
...
... def process(self, record):
... self._sum += record
... return []
...
... def end_process(self):
... yield self._sum
...
>>> p1 = _pipeline.parallelize([1, 2, 3])
>>> transforms.transform(p1, SumTransformer).get()
6
>>> class PartialSumTransformer(base.Transformer):
...
... def begin_process(self):
... self._sum = 0
... return []
...
... def process(self, record):
... self._sum += record
... yield self._sum
...
>>> transforms.transform(p1, PartialSumTransformer()),get()
[1, 3, 6]
>>> class ZipTransformer(base.Transformer):
...
... def begin_process(self, *si):
... self.index = 0
... lens = map(len, si)
... self.min_len = min(lens)
... return []
...
... def process(self, inp, *si):
... if self.index < self.min_len:
... yield (inp, ) + tuple(map(lambda x: x[self.index], si))
... self.index += 1
...
>>> p2 = _pipeline.parallelize([4, 5]).sort()
>>> transforms.transform(p1, ZipTransformer(), p2).get()
[(1, 4), (2, 5)]
本方法为Bigflow所提供的最底层和最复杂的变换方法,它可以表达对PCollection
的任意变换。
在有其它函数(如aggregate)能完成同样功能时,尽量不要使用该函数,框架无法了解该函数内部实现,
无法进行许多深层次的优化工作。
"""
import bigflow.transform_impls.transform
return bigflow.transform_impls.transform.transform(pcollection, first_arg, *other_args,
**options)
|
[
"def",
"transform",
"(",
"pcollection",
",",
"first_arg",
",",
"*",
"other_args",
",",
"*",
"*",
"options",
")",
":",
"import",
"bigflow",
".",
"transform_impls",
".",
"transform",
"return",
"bigflow",
".",
"transform_impls",
".",
"transform",
".",
"transform",
"(",
"pcollection",
",",
"first_arg",
",",
"*",
"other_args",
",",
"*",
"*",
"options",
")"
] |
https://github.com/baidu/bigflow/blob/449245016c0df7d1252e85581e588bfc60cefad3/bigflow_python/python/bigflow/transforms.py#L986-L1109
|
||
apache/arrow
|
af33dd1157eb8d7d9bfac25ebf61445b793b7943
|
python/benchmarks/common.py
|
python
|
BuiltinsGenerator._generate_varying_sequences
|
(self, random_factory, n, min_size,
max_size, none_prob)
|
return data
|
Generate a list of *n* sequences of varying size between *min_size*
and *max_size*, with *none_prob* probability of an entry being None.
The base material for each sequence is obtained by calling
`random_factory(<some size>)`
|
Generate a list of *n* sequences of varying size between *min_size*
and *max_size*, with *none_prob* probability of an entry being None.
The base material for each sequence is obtained by calling
`random_factory(<some size>)`
|
[
"Generate",
"a",
"list",
"of",
"*",
"n",
"*",
"sequences",
"of",
"varying",
"size",
"between",
"*",
"min_size",
"*",
"and",
"*",
"max_size",
"*",
"with",
"*",
"none_prob",
"*",
"probability",
"of",
"an",
"entry",
"being",
"None",
".",
"The",
"base",
"material",
"for",
"each",
"sequence",
"is",
"obtained",
"by",
"calling",
"random_factory",
"(",
"<some",
"size",
">",
")"
] |
def _generate_varying_sequences(self, random_factory, n, min_size,
max_size, none_prob):
"""
Generate a list of *n* sequences of varying size between *min_size*
and *max_size*, with *none_prob* probability of an entry being None.
The base material for each sequence is obtained by calling
`random_factory(<some size>)`
"""
base_size = 10000
base = random_factory(base_size + max_size)
data = []
for i in range(n):
off = self.rnd.randint(base_size)
if min_size == max_size:
size = min_size
else:
size = self.rnd.randint(min_size, max_size + 1)
data.append(base[off:off + size])
self.sprinkle_nones(data, none_prob)
assert len(data) == n
return data
|
[
"def",
"_generate_varying_sequences",
"(",
"self",
",",
"random_factory",
",",
"n",
",",
"min_size",
",",
"max_size",
",",
"none_prob",
")",
":",
"base_size",
"=",
"10000",
"base",
"=",
"random_factory",
"(",
"base_size",
"+",
"max_size",
")",
"data",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"n",
")",
":",
"off",
"=",
"self",
".",
"rnd",
".",
"randint",
"(",
"base_size",
")",
"if",
"min_size",
"==",
"max_size",
":",
"size",
"=",
"min_size",
"else",
":",
"size",
"=",
"self",
".",
"rnd",
".",
"randint",
"(",
"min_size",
",",
"max_size",
"+",
"1",
")",
"data",
".",
"append",
"(",
"base",
"[",
"off",
":",
"off",
"+",
"size",
"]",
")",
"self",
".",
"sprinkle_nones",
"(",
"data",
",",
"none_prob",
")",
"assert",
"len",
"(",
"data",
")",
"==",
"n",
"return",
"data"
] |
https://github.com/apache/arrow/blob/af33dd1157eb8d7d9bfac25ebf61445b793b7943/python/benchmarks/common.py#L181-L201
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_windows.py
|
python
|
Dialog.SetReturnCode
|
(*args, **kwargs)
|
return _windows_.Dialog_SetReturnCode(*args, **kwargs)
|
SetReturnCode(self, int returnCode)
|
SetReturnCode(self, int returnCode)
|
[
"SetReturnCode",
"(",
"self",
"int",
"returnCode",
")"
] |
def SetReturnCode(*args, **kwargs):
"""SetReturnCode(self, int returnCode)"""
return _windows_.Dialog_SetReturnCode(*args, **kwargs)
|
[
"def",
"SetReturnCode",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"Dialog_SetReturnCode",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L745-L747
|
|
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Source/ThirdParty/CEF3/cef_source/tools/file_util.py
|
python
|
read_version_file
|
(file, args)
|
Read and parse a version file (key=value pairs, one per line).
|
Read and parse a version file (key=value pairs, one per line).
|
[
"Read",
"and",
"parse",
"a",
"version",
"file",
"(",
"key",
"=",
"value",
"pairs",
"one",
"per",
"line",
")",
"."
] |
def read_version_file(file, args):
""" Read and parse a version file (key=value pairs, one per line). """
lines = read_file(file).split("\n")
for line in lines:
parts = line.split('=', 1)
if len(parts) == 2:
args[parts[0]] = parts[1]
|
[
"def",
"read_version_file",
"(",
"file",
",",
"args",
")",
":",
"lines",
"=",
"read_file",
"(",
"file",
")",
".",
"split",
"(",
"\"\\n\"",
")",
"for",
"line",
"in",
"lines",
":",
"parts",
"=",
"line",
".",
"split",
"(",
"'='",
",",
"1",
")",
"if",
"len",
"(",
"parts",
")",
"==",
"2",
":",
"args",
"[",
"parts",
"[",
"0",
"]",
"]",
"=",
"parts",
"[",
"1",
"]"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/cef_source/tools/file_util.py#L125-L131
|
||
nest/nest-simulator
|
f2623eb78518cdbd55e77e0ed486bf1111bcb62f
|
pynest/nest/lib/hl_api_helper.py
|
python
|
__is_executable
|
(path, candidate)
|
return os.access(candidate, os.X_OK) and os.path.isfile(candidate)
|
Returns true for executable files.
|
Returns true for executable files.
|
[
"Returns",
"true",
"for",
"executable",
"files",
"."
] |
def __is_executable(path, candidate):
"""Returns true for executable files."""
candidate = os.path.join(path, candidate)
return os.access(candidate, os.X_OK) and os.path.isfile(candidate)
|
[
"def",
"__is_executable",
"(",
"path",
",",
"candidate",
")",
":",
"candidate",
"=",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"candidate",
")",
"return",
"os",
".",
"access",
"(",
"candidate",
",",
"os",
".",
"X_OK",
")",
"and",
"os",
".",
"path",
".",
"isfile",
"(",
"candidate",
")"
] |
https://github.com/nest/nest-simulator/blob/f2623eb78518cdbd55e77e0ed486bf1111bcb62f/pynest/nest/lib/hl_api_helper.py#L442-L446
|
|
netket/netket
|
0d534e54ecbf25b677ea72af6b85947979420652
|
netket/vqs/mc/mc_mixed_state/state.py
|
python
|
MCMixedState.__init__
|
(
self,
sampler,
model=None,
*,
sampler_diag: Sampler = None,
n_samples_diag: int = None,
n_samples_per_rank_diag: Optional[int] = None,
n_discard_per_chain_diag: Optional[int] = None,
n_discard_diag: Optional[int] = None, # deprecated
seed=None,
sampler_seed: Optional[int] = None,
variables=None,
**kwargs,
)
|
Constructs the MCMixedState.
Arguments are the same as :class:`MCState`.
Arguments:
sampler: The sampler
model: (Optional) The model. If not provided, you must provide init_fun and apply_fun.
n_samples: the total number of samples across chains and processes when sampling (default=1000).
n_samples_per_rank: the total number of samples across chains on one process when sampling. Cannot be
specified together with n_samples (default=None).
n_discard_per_chain: number of discarded samples at the beginning of each monte-carlo chain (default=n_samples/10).
n_samples_diag: the total number of samples across chains and processes when sampling the diagonal
of the density matrix (default=1000).
n_samples_per_rank_diag: the total number of samples across chains on one process when sampling the diagonal.
Cannot be specified together with `n_samples_diag` (default=None).
n_discard_per_chain_diag: number of discarded samples at the beginning of each monte-carlo chain used when sampling
the diagonal of the density matrix for observables (default=n_samples_diag/10).
parameters: Optional PyTree of weights from which to start.
seed: rng seed used to generate a set of parameters (only if parameters is not passed). Defaults to a random one.
sampler_seed: rng seed used to initialise the sampler. Defaults to a random one.
mutable: Dict specifing mutable arguments. Use it to specify if the model has a state that can change
during evaluation, but that should not be optimised. See also flax.linen.module.apply documentation
(default=False)
init_fun: Function of the signature f(model, shape, rng_key, dtype) -> Optional_state, parameters used to
initialise the parameters. Defaults to the standard flax initialiser. Only specify if your network has
a non-standard init method.
apply_fun: Function of the signature f(model, variables, σ) that should evaluate the model. Defafults to
`model.apply(variables, σ)`. specify only if your network has a non-standard apply method.
training_kwargs: a dict containing the optionaal keyword arguments to be passed to the apply_fun during training.
Useful for example when you have a batchnorm layer that constructs the average/mean only during training.
|
Constructs the MCMixedState.
Arguments are the same as :class:`MCState`.
|
[
"Constructs",
"the",
"MCMixedState",
".",
"Arguments",
"are",
"the",
"same",
"as",
":",
"class",
":",
"MCState",
"."
] |
def __init__(
self,
sampler,
model=None,
*,
sampler_diag: Sampler = None,
n_samples_diag: int = None,
n_samples_per_rank_diag: Optional[int] = None,
n_discard_per_chain_diag: Optional[int] = None,
n_discard_diag: Optional[int] = None, # deprecated
seed=None,
sampler_seed: Optional[int] = None,
variables=None,
**kwargs,
):
"""
Constructs the MCMixedState.
Arguments are the same as :class:`MCState`.
Arguments:
sampler: The sampler
model: (Optional) The model. If not provided, you must provide init_fun and apply_fun.
n_samples: the total number of samples across chains and processes when sampling (default=1000).
n_samples_per_rank: the total number of samples across chains on one process when sampling. Cannot be
specified together with n_samples (default=None).
n_discard_per_chain: number of discarded samples at the beginning of each monte-carlo chain (default=n_samples/10).
n_samples_diag: the total number of samples across chains and processes when sampling the diagonal
of the density matrix (default=1000).
n_samples_per_rank_diag: the total number of samples across chains on one process when sampling the diagonal.
Cannot be specified together with `n_samples_diag` (default=None).
n_discard_per_chain_diag: number of discarded samples at the beginning of each monte-carlo chain used when sampling
the diagonal of the density matrix for observables (default=n_samples_diag/10).
parameters: Optional PyTree of weights from which to start.
seed: rng seed used to generate a set of parameters (only if parameters is not passed). Defaults to a random one.
sampler_seed: rng seed used to initialise the sampler. Defaults to a random one.
mutable: Dict specifing mutable arguments. Use it to specify if the model has a state that can change
during evaluation, but that should not be optimised. See also flax.linen.module.apply documentation
(default=False)
init_fun: Function of the signature f(model, shape, rng_key, dtype) -> Optional_state, parameters used to
initialise the parameters. Defaults to the standard flax initialiser. Only specify if your network has
a non-standard init method.
apply_fun: Function of the signature f(model, variables, σ) that should evaluate the model. Defafults to
`model.apply(variables, σ)`. specify only if your network has a non-standard apply method.
training_kwargs: a dict containing the optionaal keyword arguments to be passed to the apply_fun during training.
Useful for example when you have a batchnorm layer that constructs the average/mean only during training.
"""
seed, seed_diag = jax.random.split(nkjax.PRNGKey(seed))
if sampler_seed is None:
sampler_seed_diag = None
else:
sampler_seed, sampler_seed_diag = jax.random.split(
nkjax.PRNGKey(sampler_seed)
)
self._diagonal = None
hilbert_physical = sampler.hilbert.physical
super().__init__(
sampler.hilbert.physical,
sampler,
model,
**kwargs,
seed=seed,
sampler_seed=sampler_seed,
variables=variables,
)
if sampler_diag is None:
sampler_diag = sampler.replace(hilbert=hilbert_physical)
sampler_diag = sampler_diag.replace(machine_pow=1)
diagonal_apply_fun = nkjax.HashablePartial(apply_diagonal, self._apply_fun)
for kw in [
"n_samples",
"n_discard",
"n_discard_per_chain",
]: # TODO remove n_discard after deprecation.
if kw in kwargs:
kwargs.pop(kw)
# TODO: remove deprecation.
if n_discard_diag is not None and n_discard_per_chain_diag is not None:
raise ValueError(
"`n_discard_diag` has been renamed to `n_discard_per_chain_diag` and deprecated."
"Specify only `n_discard_per_chain_diag`."
)
elif n_discard_diag is not None:
warn_deprecation(
"`n_discard_diag` has been renamed to `n_discard_per_chain_diag` and deprecated."
"Please update your code to `n_discard_per_chain_diag`."
)
n_discard_per_chain_diag = n_discard_diag
self._diagonal = MCState(
sampler_diag,
apply_fun=diagonal_apply_fun,
n_samples=n_samples_diag,
n_samples_per_rank=n_samples_per_rank_diag,
n_discard_per_chain=n_discard_per_chain_diag,
variables=self.variables,
seed=seed_diag,
sampler_seed=sampler_seed_diag,
**kwargs,
)
|
[
"def",
"__init__",
"(",
"self",
",",
"sampler",
",",
"model",
"=",
"None",
",",
"*",
",",
"sampler_diag",
":",
"Sampler",
"=",
"None",
",",
"n_samples_diag",
":",
"int",
"=",
"None",
",",
"n_samples_per_rank_diag",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"n_discard_per_chain_diag",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"n_discard_diag",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"# deprecated",
"seed",
"=",
"None",
",",
"sampler_seed",
":",
"Optional",
"[",
"int",
"]",
"=",
"None",
",",
"variables",
"=",
"None",
",",
"*",
"*",
"kwargs",
",",
")",
":",
"seed",
",",
"seed_diag",
"=",
"jax",
".",
"random",
".",
"split",
"(",
"nkjax",
".",
"PRNGKey",
"(",
"seed",
")",
")",
"if",
"sampler_seed",
"is",
"None",
":",
"sampler_seed_diag",
"=",
"None",
"else",
":",
"sampler_seed",
",",
"sampler_seed_diag",
"=",
"jax",
".",
"random",
".",
"split",
"(",
"nkjax",
".",
"PRNGKey",
"(",
"sampler_seed",
")",
")",
"self",
".",
"_diagonal",
"=",
"None",
"hilbert_physical",
"=",
"sampler",
".",
"hilbert",
".",
"physical",
"super",
"(",
")",
".",
"__init__",
"(",
"sampler",
".",
"hilbert",
".",
"physical",
",",
"sampler",
",",
"model",
",",
"*",
"*",
"kwargs",
",",
"seed",
"=",
"seed",
",",
"sampler_seed",
"=",
"sampler_seed",
",",
"variables",
"=",
"variables",
",",
")",
"if",
"sampler_diag",
"is",
"None",
":",
"sampler_diag",
"=",
"sampler",
".",
"replace",
"(",
"hilbert",
"=",
"hilbert_physical",
")",
"sampler_diag",
"=",
"sampler_diag",
".",
"replace",
"(",
"machine_pow",
"=",
"1",
")",
"diagonal_apply_fun",
"=",
"nkjax",
".",
"HashablePartial",
"(",
"apply_diagonal",
",",
"self",
".",
"_apply_fun",
")",
"for",
"kw",
"in",
"[",
"\"n_samples\"",
",",
"\"n_discard\"",
",",
"\"n_discard_per_chain\"",
",",
"]",
":",
"# TODO remove n_discard after deprecation.",
"if",
"kw",
"in",
"kwargs",
":",
"kwargs",
".",
"pop",
"(",
"kw",
")",
"# TODO: remove deprecation.",
"if",
"n_discard_diag",
"is",
"not",
"None",
"and",
"n_discard_per_chain_diag",
"is",
"not",
"None",
":",
"raise",
"ValueError",
"(",
"\"`n_discard_diag` has been renamed to `n_discard_per_chain_diag` and deprecated.\"",
"\"Specify only `n_discard_per_chain_diag`.\"",
")",
"elif",
"n_discard_diag",
"is",
"not",
"None",
":",
"warn_deprecation",
"(",
"\"`n_discard_diag` has been renamed to `n_discard_per_chain_diag` and deprecated.\"",
"\"Please update your code to `n_discard_per_chain_diag`.\"",
")",
"n_discard_per_chain_diag",
"=",
"n_discard_diag",
"self",
".",
"_diagonal",
"=",
"MCState",
"(",
"sampler_diag",
",",
"apply_fun",
"=",
"diagonal_apply_fun",
",",
"n_samples",
"=",
"n_samples_diag",
",",
"n_samples_per_rank",
"=",
"n_samples_per_rank_diag",
",",
"n_discard_per_chain",
"=",
"n_discard_per_chain_diag",
",",
"variables",
"=",
"self",
".",
"variables",
",",
"seed",
"=",
"seed_diag",
",",
"sampler_seed",
"=",
"sampler_seed_diag",
",",
"*",
"*",
"kwargs",
",",
")"
] |
https://github.com/netket/netket/blob/0d534e54ecbf25b677ea72af6b85947979420652/netket/vqs/mc/mc_mixed_state/state.py#L47-L155
|
||
KDE/krita
|
10ea63984e00366865769c193ab298de73a59c5c
|
plugins/python/scripter/ui_scripter/editor/debugarea.py
|
python
|
DebugArea.paintEvent
|
(self, event)
|
It Invokes the draw method(debugAreaPaintEvent) in CodeEditor
|
It Invokes the draw method(debugAreaPaintEvent) in CodeEditor
|
[
"It",
"Invokes",
"the",
"draw",
"method",
"(",
"debugAreaPaintEvent",
")",
"in",
"CodeEditor"
] |
def paintEvent(self, event):
"""It Invokes the draw method(debugAreaPaintEvent) in CodeEditor"""
self.codeEditor.debugAreaPaintEvent(event)
|
[
"def",
"paintEvent",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"codeEditor",
".",
"debugAreaPaintEvent",
"(",
"event",
")"
] |
https://github.com/KDE/krita/blob/10ea63984e00366865769c193ab298de73a59c5c/plugins/python/scripter/ui_scripter/editor/debugarea.py#L19-L21
|
||
BitMEX/api-connectors
|
37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
|
auto-generated/python/swagger_client/models/user_event.py
|
python
|
UserEvent.status
|
(self, status)
|
Sets the status of this UserEvent.
:param status: The status of this UserEvent. # noqa: E501
:type: str
|
Sets the status of this UserEvent.
|
[
"Sets",
"the",
"status",
"of",
"this",
"UserEvent",
"."
] |
def status(self, status):
"""Sets the status of this UserEvent.
:param status: The status of this UserEvent. # noqa: E501
:type: str
"""
if status is None:
raise ValueError("Invalid value for `status`, must not be `None`") # noqa: E501
allowed_values = ["success", "failure"] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}" # noqa: E501
.format(status, allowed_values)
)
self._status = status
|
[
"def",
"status",
"(",
"self",
",",
"status",
")",
":",
"if",
"status",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `status`, must not be `None`\"",
")",
"# noqa: E501",
"allowed_values",
"=",
"[",
"\"success\"",
",",
"\"failure\"",
"]",
"# noqa: E501",
"if",
"status",
"not",
"in",
"allowed_values",
":",
"raise",
"ValueError",
"(",
"\"Invalid value for `status` ({0}), must be one of {1}\"",
"# noqa: E501",
".",
"format",
"(",
"status",
",",
"allowed_values",
")",
")",
"self",
".",
"_status",
"=",
"status"
] |
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/models/user_event.py#L156-L172
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/logging/handlers.py
|
python
|
NTEventLogHandler.emit
|
(self, record)
|
Emit a record.
Determine the message ID, event category and event type. Then
log the message in the NT event log.
|
Emit a record.
|
[
"Emit",
"a",
"record",
"."
] |
def emit(self, record):
"""
Emit a record.
Determine the message ID, event category and event type. Then
log the message in the NT event log.
"""
if self._welu:
try:
id = self.getMessageID(record)
cat = self.getEventCategory(record)
type = self.getEventType(record)
msg = self.format(record)
self._welu.ReportEvent(self.appname, id, cat, type, [msg])
except Exception:
self.handleError(record)
|
[
"def",
"emit",
"(",
"self",
",",
"record",
")",
":",
"if",
"self",
".",
"_welu",
":",
"try",
":",
"id",
"=",
"self",
".",
"getMessageID",
"(",
"record",
")",
"cat",
"=",
"self",
".",
"getEventCategory",
"(",
"record",
")",
"type",
"=",
"self",
".",
"getEventType",
"(",
"record",
")",
"msg",
"=",
"self",
".",
"format",
"(",
"record",
")",
"self",
".",
"_welu",
".",
"ReportEvent",
"(",
"self",
".",
"appname",
",",
"id",
",",
"cat",
",",
"type",
",",
"[",
"msg",
"]",
")",
"except",
"Exception",
":",
"self",
".",
"handleError",
"(",
"record",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/logging/handlers.py#L1094-L1109
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/aui/auibar.py
|
python
|
AuiToolBar.AddCheckTool
|
(self, tool_id, label, bitmap, disabled_bitmap, short_help_string="", long_help_string="", client_data=None)
|
return self.AddTool(tool_id, label, bitmap, disabled_bitmap, ITEM_CHECK, short_help_string, long_help_string, client_data)
|
Adds a new check (or toggle) tool to the :class:`AuiToolBar`.
:see: :meth:`AddTool` for an explanation of the input parameters.
|
Adds a new check (or toggle) tool to the :class:`AuiToolBar`.
:see: :meth:`AddTool` for an explanation of the input parameters.
|
[
"Adds",
"a",
"new",
"check",
"(",
"or",
"toggle",
")",
"tool",
"to",
"the",
":",
"class",
":",
"AuiToolBar",
".",
":",
"see",
":",
":",
"meth",
":",
"AddTool",
"for",
"an",
"explanation",
"of",
"the",
"input",
"parameters",
"."
] |
def AddCheckTool(self, tool_id, label, bitmap, disabled_bitmap, short_help_string="", long_help_string="", client_data=None):
"""
Adds a new check (or toggle) tool to the :class:`AuiToolBar`.
:see: :meth:`AddTool` for an explanation of the input parameters.
"""
return self.AddTool(tool_id, label, bitmap, disabled_bitmap, ITEM_CHECK, short_help_string, long_help_string, client_data)
|
[
"def",
"AddCheckTool",
"(",
"self",
",",
"tool_id",
",",
"label",
",",
"bitmap",
",",
"disabled_bitmap",
",",
"short_help_string",
"=",
"\"\"",
",",
"long_help_string",
"=",
"\"\"",
",",
"client_data",
"=",
"None",
")",
":",
"return",
"self",
".",
"AddTool",
"(",
"tool_id",
",",
"label",
",",
"bitmap",
",",
"disabled_bitmap",
",",
"ITEM_CHECK",
",",
"short_help_string",
",",
"long_help_string",
",",
"client_data",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/aui/auibar.py#L1832-L1839
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/swagger_spec_validator/validator20.py
|
python
|
validate_definitions
|
(definitions, deref)
|
Validates the semantic errors in #/definitions.
:param definitions: dict of all the definitions
:param deref: callable that dereferences $refs
:raises: :py:class:`swagger_spec_validator.SwaggerValidationError`
:raises: :py:class:`jsonschema.exceptions.ValidationError`
|
Validates the semantic errors in #/definitions.
|
[
"Validates",
"the",
"semantic",
"errors",
"in",
"#",
"/",
"definitions",
"."
] |
def validate_definitions(definitions, deref):
"""Validates the semantic errors in #/definitions.
:param definitions: dict of all the definitions
:param deref: callable that dereferences $refs
:raises: :py:class:`swagger_spec_validator.SwaggerValidationError`
:raises: :py:class:`jsonschema.exceptions.ValidationError`
"""
visited_definitions_ids = set()
for def_name, definition in iteritems(definitions):
validate_definition(
definition=definition,
deref=deref,
def_name='#/definitions/{}'.format(def_name),
visited_definitions_ids=visited_definitions_ids,
)
|
[
"def",
"validate_definitions",
"(",
"definitions",
",",
"deref",
")",
":",
"visited_definitions_ids",
"=",
"set",
"(",
")",
"for",
"def_name",
",",
"definition",
"in",
"iteritems",
"(",
"definitions",
")",
":",
"validate_definition",
"(",
"definition",
"=",
"definition",
",",
"deref",
"=",
"deref",
",",
"def_name",
"=",
"'#/definitions/{}'",
".",
"format",
"(",
"def_name",
")",
",",
"visited_definitions_ids",
"=",
"visited_definitions_ids",
",",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemFramework/v1/AWS/resource-manager-code/lib/swagger_spec_validator/validator20.py#L515-L531
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/build/waf-1.7.13/waflib/Scripting.py
|
python
|
Dist.get_tar_path
|
(self, node)
|
return node.abspath()
|
return the path to use for a node in the tar archive, the purpose of this
is to let subclases resolve symbolic links or to change file names
|
return the path to use for a node in the tar archive, the purpose of this
is to let subclases resolve symbolic links or to change file names
|
[
"return",
"the",
"path",
"to",
"use",
"for",
"a",
"node",
"in",
"the",
"tar",
"archive",
"the",
"purpose",
"of",
"this",
"is",
"to",
"let",
"subclases",
"resolve",
"symbolic",
"links",
"or",
"to",
"change",
"file",
"names"
] |
def get_tar_path(self, node):
"""
return the path to use for a node in the tar archive, the purpose of this
is to let subclases resolve symbolic links or to change file names
"""
return node.abspath()
|
[
"def",
"get_tar_path",
"(",
"self",
",",
"node",
")",
":",
"return",
"node",
".",
"abspath",
"(",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/waflib/Scripting.py#L483-L488
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/compiler/pyassem.py
|
python
|
PyFlowGraph.getConsts
|
(self)
|
return tuple(l)
|
Return a tuple for the const slot of the code object
Must convert references to code (MAKE_FUNCTION) to code
objects recursively.
|
Return a tuple for the const slot of the code object
|
[
"Return",
"a",
"tuple",
"for",
"the",
"const",
"slot",
"of",
"the",
"code",
"object"
] |
def getConsts(self):
"""Return a tuple for the const slot of the code object
Must convert references to code (MAKE_FUNCTION) to code
objects recursively.
"""
l = []
for elt in self.consts:
if isinstance(elt, PyFlowGraph):
elt = elt.getCode()
l.append(elt)
return tuple(l)
|
[
"def",
"getConsts",
"(",
"self",
")",
":",
"l",
"=",
"[",
"]",
"for",
"elt",
"in",
"self",
".",
"consts",
":",
"if",
"isinstance",
"(",
"elt",
",",
"PyFlowGraph",
")",
":",
"elt",
"=",
"elt",
".",
"getCode",
"(",
")",
"l",
".",
"append",
"(",
"elt",
")",
"return",
"tuple",
"(",
"l",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/compiler/pyassem.py#L546-L557
|
|
v8/v8
|
fee3bf095260bf657a3eea4d3d41f90c42c6c857
|
tools/grokdump.py
|
python
|
InspectionShell.do_list_modules
|
(self, arg)
|
List details for all loaded modules in the minidump.
An argument can be passed to limit the output to only those modules that
contain the argument as a substring (case insensitive match).
|
List details for all loaded modules in the minidump.
|
[
"List",
"details",
"for",
"all",
"loaded",
"modules",
"in",
"the",
"minidump",
"."
] |
def do_list_modules(self, arg):
"""
List details for all loaded modules in the minidump.
An argument can be passed to limit the output to only those modules that
contain the argument as a substring (case insensitive match).
"""
for module in self.reader.module_list.modules:
if arg:
name = GetModuleName(self.reader, module).lower()
if name.find(arg.lower()) >= 0:
PrintModuleDetails(self.reader, module)
else:
PrintModuleDetails(self.reader, module)
print()
|
[
"def",
"do_list_modules",
"(",
"self",
",",
"arg",
")",
":",
"for",
"module",
"in",
"self",
".",
"reader",
".",
"module_list",
".",
"modules",
":",
"if",
"arg",
":",
"name",
"=",
"GetModuleName",
"(",
"self",
".",
"reader",
",",
"module",
")",
".",
"lower",
"(",
")",
"if",
"name",
".",
"find",
"(",
"arg",
".",
"lower",
"(",
")",
")",
">=",
"0",
":",
"PrintModuleDetails",
"(",
"self",
".",
"reader",
",",
"module",
")",
"else",
":",
"PrintModuleDetails",
"(",
"self",
".",
"reader",
",",
"module",
")",
"print",
"(",
")"
] |
https://github.com/v8/v8/blob/fee3bf095260bf657a3eea4d3d41f90c42c6c857/tools/grokdump.py#L3762-L3776
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_core.py
|
python
|
TextEntryBase.AutoComplete
|
(*args, **kwargs)
|
return _core_.TextEntryBase_AutoComplete(*args, **kwargs)
|
AutoComplete(self, wxArrayString choices) -> bool
|
AutoComplete(self, wxArrayString choices) -> bool
|
[
"AutoComplete",
"(",
"self",
"wxArrayString",
"choices",
")",
"-",
">",
"bool"
] |
def AutoComplete(*args, **kwargs):
"""AutoComplete(self, wxArrayString choices) -> bool"""
return _core_.TextEntryBase_AutoComplete(*args, **kwargs)
|
[
"def",
"AutoComplete",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"TextEntryBase_AutoComplete",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L13316-L13318
|
|
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/encodings/base64_codec.py
|
python
|
base64_encode
|
(input,errors='strict')
|
return (output, len(input))
|
Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
|
Encodes the object input and returns a tuple (output
object, length consumed).
|
[
"Encodes",
"the",
"object",
"input",
"and",
"returns",
"a",
"tuple",
"(",
"output",
"object",
"length",
"consumed",
")",
"."
] |
def base64_encode(input,errors='strict'):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
output = base64.encodestring(input)
return (output, len(input))
|
[
"def",
"base64_encode",
"(",
"input",
",",
"errors",
"=",
"'strict'",
")",
":",
"assert",
"errors",
"==",
"'strict'",
"output",
"=",
"base64",
".",
"encodestring",
"(",
"input",
")",
"return",
"(",
"output",
",",
"len",
"(",
"input",
")",
")"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/encodings/base64_codec.py#L13-L25
|
|
mickem/nscp
|
79f89fdbb6da63f91bc9dedb7aea202fe938f237
|
scripts/python/lib/google/protobuf/internal/containers.py
|
python
|
RepeatedScalarFieldContainer.extend
|
(self, elem_seq)
|
Extends by appending the given sequence. Similar to list.extend().
|
Extends by appending the given sequence. Similar to list.extend().
|
[
"Extends",
"by",
"appending",
"the",
"given",
"sequence",
".",
"Similar",
"to",
"list",
".",
"extend",
"()",
"."
] |
def extend(self, elem_seq):
"""Extends by appending the given sequence. Similar to list.extend()."""
if not elem_seq:
return
new_values = []
for elem in elem_seq:
self._type_checker.CheckValue(elem)
new_values.append(elem)
self._values.extend(new_values)
self._message_listener.Modified()
|
[
"def",
"extend",
"(",
"self",
",",
"elem_seq",
")",
":",
"if",
"not",
"elem_seq",
":",
"return",
"new_values",
"=",
"[",
"]",
"for",
"elem",
"in",
"elem_seq",
":",
"self",
".",
"_type_checker",
".",
"CheckValue",
"(",
"elem",
")",
"new_values",
".",
"append",
"(",
"elem",
")",
"self",
".",
"_values",
".",
"extend",
"(",
"new_values",
")",
"self",
".",
"_message_listener",
".",
"Modified",
"(",
")"
] |
https://github.com/mickem/nscp/blob/79f89fdbb6da63f91bc9dedb7aea202fe938f237/scripts/python/lib/google/protobuf/internal/containers.py#L118-L128
|
||
zju3dv/clean-pvnet
|
5870c509e3cc205e1bb28910a7b1a9a3c8add9a8
|
lib/utils/pysixd/transform.py
|
python
|
Arcball.drag
|
(self, point)
|
Update current cursor window coordinates.
|
Update current cursor window coordinates.
|
[
"Update",
"current",
"cursor",
"window",
"coordinates",
"."
] |
def drag(self, point):
"""Update current cursor window coordinates."""
vnow = arcball_map_to_sphere(point, self._center, self._radius)
if self._axis is not None:
vnow = arcball_constrain_to_axis(vnow, self._axis)
self._qpre = self._qnow
t = numpy.cross(self._vdown, vnow)
if numpy.dot(t, t) < _EPS:
self._qnow = self._qdown
else:
q = [numpy.dot(self._vdown, vnow), t[0], t[1], t[2]]
self._qnow = quaternion_multiply(q, self._qdown)
|
[
"def",
"drag",
"(",
"self",
",",
"point",
")",
":",
"vnow",
"=",
"arcball_map_to_sphere",
"(",
"point",
",",
"self",
".",
"_center",
",",
"self",
".",
"_radius",
")",
"if",
"self",
".",
"_axis",
"is",
"not",
"None",
":",
"vnow",
"=",
"arcball_constrain_to_axis",
"(",
"vnow",
",",
"self",
".",
"_axis",
")",
"self",
".",
"_qpre",
"=",
"self",
".",
"_qnow",
"t",
"=",
"numpy",
".",
"cross",
"(",
"self",
".",
"_vdown",
",",
"vnow",
")",
"if",
"numpy",
".",
"dot",
"(",
"t",
",",
"t",
")",
"<",
"_EPS",
":",
"self",
".",
"_qnow",
"=",
"self",
".",
"_qdown",
"else",
":",
"q",
"=",
"[",
"numpy",
".",
"dot",
"(",
"self",
".",
"_vdown",
",",
"vnow",
")",
",",
"t",
"[",
"0",
"]",
",",
"t",
"[",
"1",
"]",
",",
"t",
"[",
"2",
"]",
"]",
"self",
".",
"_qnow",
"=",
"quaternion_multiply",
"(",
"q",
",",
"self",
".",
"_qdown",
")"
] |
https://github.com/zju3dv/clean-pvnet/blob/5870c509e3cc205e1bb28910a7b1a9a3c8add9a8/lib/utils/pysixd/transform.py#L1594-L1605
|
||
adobe/chromium
|
cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7
|
gpu/command_buffer/build_gles2_cmd_buffer.py
|
python
|
CustomHandler.WriteImmediateServiceImplementation
|
(self, func, file)
|
Overrriden from TypeHandler.
|
Overrriden from TypeHandler.
|
[
"Overrriden",
"from",
"TypeHandler",
"."
] |
def WriteImmediateServiceImplementation(self, func, file):
"""Overrriden from TypeHandler."""
pass
|
[
"def",
"WriteImmediateServiceImplementation",
"(",
"self",
",",
"func",
",",
"file",
")",
":",
"pass"
] |
https://github.com/adobe/chromium/blob/cfe5bf0b51b1f6b9fe239c2a3c2f2364da9967d7/gpu/command_buffer/build_gles2_cmd_buffer.py#L2337-L2339
|
||
okex/V3-Open-API-SDK
|
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
|
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/urllib3/util/retry.py
|
python
|
Retry.is_retry
|
(self, method, status_code, has_retry_after=False)
|
return (self.total and self.respect_retry_after_header and
has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES))
|
Is this method/status code retryable? (Based on whitelists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
be retried upon on the presence of the aforementioned header)
|
Is this method/status code retryable? (Based on whitelists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
be retried upon on the presence of the aforementioned header)
|
[
"Is",
"this",
"method",
"/",
"status",
"code",
"retryable?",
"(",
"Based",
"on",
"whitelists",
"and",
"control",
"variables",
"such",
"as",
"the",
"number",
"of",
"total",
"retries",
"to",
"allow",
"whether",
"to",
"respect",
"the",
"Retry",
"-",
"After",
"header",
"whether",
"this",
"header",
"is",
"present",
"and",
"whether",
"the",
"returned",
"status",
"code",
"is",
"on",
"the",
"list",
"of",
"status",
"codes",
"to",
"be",
"retried",
"upon",
"on",
"the",
"presence",
"of",
"the",
"aforementioned",
"header",
")"
] |
def is_retry(self, method, status_code, has_retry_after=False):
""" Is this method/status code retryable? (Based on whitelists and control
variables such as the number of total retries to allow, whether to
respect the Retry-After header, whether this header is present, and
whether the returned status code is on the list of status codes to
be retried upon on the presence of the aforementioned header)
"""
if not self._is_method_retryable(method):
return False
if self.status_forcelist and status_code in self.status_forcelist:
return True
return (self.total and self.respect_retry_after_header and
has_retry_after and (status_code in self.RETRY_AFTER_STATUS_CODES))
|
[
"def",
"is_retry",
"(",
"self",
",",
"method",
",",
"status_code",
",",
"has_retry_after",
"=",
"False",
")",
":",
"if",
"not",
"self",
".",
"_is_method_retryable",
"(",
"method",
")",
":",
"return",
"False",
"if",
"self",
".",
"status_forcelist",
"and",
"status_code",
"in",
"self",
".",
"status_forcelist",
":",
"return",
"True",
"return",
"(",
"self",
".",
"total",
"and",
"self",
".",
"respect_retry_after_header",
"and",
"has_retry_after",
"and",
"(",
"status_code",
"in",
"self",
".",
"RETRY_AFTER_STATUS_CODES",
")",
")"
] |
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/urllib3/util/retry.py#L304-L318
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/Blast/houdini/python2.7libs/blastExport/slice.py
|
python
|
Slice.metaDataEntityId
|
(self, value)
|
:return: str
|
:return: str
|
[
":",
"return",
":",
"str"
] |
def metaDataEntityId(self, value):
"""
:return: str
"""
if self.__metaDataEntityId == value:
return
self.__metaDataEntityId = value
|
[
"def",
"metaDataEntityId",
"(",
"self",
",",
"value",
")",
":",
"if",
"self",
".",
"__metaDataEntityId",
"==",
"value",
":",
"return",
"self",
".",
"__metaDataEntityId",
"=",
"value"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/Blast/houdini/python2.7libs/blastExport/slice.py#L262-L270
|
||
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/nn/layer/container.py
|
python
|
_get_prefix_and_index
|
(cells)
|
return prefix, index
|
get prefix and index of parameter name in sequential cell or cell list.
|
get prefix and index of parameter name in sequential cell or cell list.
|
[
"get",
"prefix",
"and",
"index",
"of",
"parameter",
"name",
"in",
"sequential",
"cell",
"or",
"cell",
"list",
"."
] |
def _get_prefix_and_index(cells):
"""get prefix and index of parameter name in sequential cell or cell list."""
prefix = ""
index = 0
if not cells:
return prefix, index
cell_list = list(cells.items())
first_param, first_key = None, None
second_param, second_key = None, None
for key, cell in cell_list:
try:
_, param = next(cell.parameters_and_names())
except StopIteration:
continue
if first_param is None:
first_param = param
first_key = key
continue
second_param = param
second_key = key
break
if first_param is None:
return prefix, index
split_names = first_param.name.split(".")
for idx, name in enumerate(split_names):
if name == first_key:
prefix = ".".join(split_names[:idx])
prefix = prefix + "." if prefix else prefix
index = idx
if second_param is not None and second_param.name.split(".")[idx] == second_key:
break
return prefix, index
|
[
"def",
"_get_prefix_and_index",
"(",
"cells",
")",
":",
"prefix",
"=",
"\"\"",
"index",
"=",
"0",
"if",
"not",
"cells",
":",
"return",
"prefix",
",",
"index",
"cell_list",
"=",
"list",
"(",
"cells",
".",
"items",
"(",
")",
")",
"first_param",
",",
"first_key",
"=",
"None",
",",
"None",
"second_param",
",",
"second_key",
"=",
"None",
",",
"None",
"for",
"key",
",",
"cell",
"in",
"cell_list",
":",
"try",
":",
"_",
",",
"param",
"=",
"next",
"(",
"cell",
".",
"parameters_and_names",
"(",
")",
")",
"except",
"StopIteration",
":",
"continue",
"if",
"first_param",
"is",
"None",
":",
"first_param",
"=",
"param",
"first_key",
"=",
"key",
"continue",
"second_param",
"=",
"param",
"second_key",
"=",
"key",
"break",
"if",
"first_param",
"is",
"None",
":",
"return",
"prefix",
",",
"index",
"split_names",
"=",
"first_param",
".",
"name",
".",
"split",
"(",
"\".\"",
")",
"for",
"idx",
",",
"name",
"in",
"enumerate",
"(",
"split_names",
")",
":",
"if",
"name",
"==",
"first_key",
":",
"prefix",
"=",
"\".\"",
".",
"join",
"(",
"split_names",
"[",
":",
"idx",
"]",
")",
"prefix",
"=",
"prefix",
"+",
"\".\"",
"if",
"prefix",
"else",
"prefix",
"index",
"=",
"idx",
"if",
"second_param",
"is",
"not",
"None",
"and",
"second_param",
".",
"name",
".",
"split",
"(",
"\".\"",
")",
"[",
"idx",
"]",
"==",
"second_key",
":",
"break",
"return",
"prefix",
",",
"index"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/layer/container.py#L42-L76
|
|
francinexue/xuefu
|
b6ff79747a42e020588c0c0a921048e08fe4680c
|
api/ctpx/ctptd.py
|
python
|
CtpTd.onRtnOrder
|
(self, OrderField)
|
报单通知
|
报单通知
|
[
"报单通知"
] |
def onRtnOrder(self, OrderField):
"""报单通知"""
# #如果委托成功,OrderSubmitStatus先是0(不用管),则OrderSubmitStatus = 3,且orderStatus = 3,等待排队.
# 紧接着成交则OrderSubmitStatus = 3,且orderStatus = 0-1
## 如果撤单成功,OrderSubmitStatus先是1(不用管),则OrderSubmitStatus = 3,且orderStatus = *.等待撤单
# 紧接着成交则OrderSubmitStatus = 3,且orderStatus = 5 ??
for listener in self._barEventListeners:
listener.onRtnOrderEvent(OrderField)
|
[
"def",
"onRtnOrder",
"(",
"self",
",",
"OrderField",
")",
":",
"# #如果委托成功,OrderSubmitStatus先是0(不用管),则OrderSubmitStatus = 3,且orderStatus = 3,等待排队.",
"# 紧接着成交则OrderSubmitStatus = 3,且orderStatus = 0-1",
"## 如果撤单成功,OrderSubmitStatus先是1(不用管),则OrderSubmitStatus = 3,且orderStatus = *.等待撤单",
"# 紧接着成交则OrderSubmitStatus = 3,且orderStatus = 5 ??",
"for",
"listener",
"in",
"self",
".",
"_barEventListeners",
":",
"listener",
".",
"onRtnOrderEvent",
"(",
"OrderField",
")"
] |
https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/api/ctpx/ctptd.py#L753-L760
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/map_job_control.py
|
python
|
Job.__update_state
|
(self)
|
Fetches most up to date state from db.
|
Fetches most up to date state from db.
|
[
"Fetches",
"most",
"up",
"to",
"date",
"state",
"from",
"db",
"."
] |
def __update_state(self):
"""Fetches most up to date state from db."""
# Only if the job was not in a terminal state.
if self._state.active:
self._state = self.__get_state_by_id(self.job_config.job_id)
|
[
"def",
"__update_state",
"(",
"self",
")",
":",
"# Only if the job was not in a terminal state.",
"if",
"self",
".",
"_state",
".",
"active",
":",
"self",
".",
"_state",
"=",
"self",
".",
"__get_state_by_id",
"(",
"self",
".",
"job_config",
".",
"job_id",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/api/map_job/map_job_control.py#L157-L161
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/agw/ultimatelistctrl.py
|
python
|
UltimateListItemData.SetImage
|
(self, image)
|
Sets the zero-based indexes of the images associated with the item into the
image list.
:param `image`: a Python list with the zero-based indexes of the images
associated with the item into the image list.
|
Sets the zero-based indexes of the images associated with the item into the
image list.
|
[
"Sets",
"the",
"zero",
"-",
"based",
"indexes",
"of",
"the",
"images",
"associated",
"with",
"the",
"item",
"into",
"the",
"image",
"list",
"."
] |
def SetImage(self, image):
"""
Sets the zero-based indexes of the images associated with the item into the
image list.
:param `image`: a Python list with the zero-based indexes of the images
associated with the item into the image list.
"""
self._image = to_list(image)
|
[
"def",
"SetImage",
"(",
"self",
",",
"image",
")",
":",
"self",
".",
"_image",
"=",
"to_list",
"(",
"image",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/agw/ultimatelistctrl.py#L2553-L2562
|
||
lammps/lammps
|
b75c3065430a75b1b5543a10e10f46d9b4c91913
|
python/lammps/core.py
|
python
|
lammps.get_os_info
|
(self)
|
return sb.value.decode()
|
Return a string with information about the OS and compiler runtime
This is a wrapper around the :cpp:func:`lammps_get_os_info` function of the C-library interface.
:return: OS info string
:rtype: string
|
Return a string with information about the OS and compiler runtime
|
[
"Return",
"a",
"string",
"with",
"information",
"about",
"the",
"OS",
"and",
"compiler",
"runtime"
] |
def get_os_info(self):
"""Return a string with information about the OS and compiler runtime
This is a wrapper around the :cpp:func:`lammps_get_os_info` function of the C-library interface.
:return: OS info string
:rtype: string
"""
sb = create_string_buffer(512)
self.lib.lammps_get_os_info(sb,512)
return sb.value.decode()
|
[
"def",
"get_os_info",
"(",
"self",
")",
":",
"sb",
"=",
"create_string_buffer",
"(",
"512",
")",
"self",
".",
"lib",
".",
"lammps_get_os_info",
"(",
"sb",
",",
"512",
")",
"return",
"sb",
".",
"value",
".",
"decode",
"(",
")"
] |
https://github.com/lammps/lammps/blob/b75c3065430a75b1b5543a10e10f46d9b4c91913/python/lammps/core.py#L505-L516
|
|
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/nn/probability/distribution/gamma.py
|
python
|
Gamma._log_prob
|
(self, value, concentration=None, rate=None)
|
return unnormalized_log_prob - log_normalization
|
r"""
Evaluate log probability.
Args:
value (Tensor): The value to be evaluated.
concentration (Tensor): The concentration of the distribution. Default: self._concentration.
rate (Tensor): The rate the distribution. Default: self._rate.
.. math::
L(x) = (\alpha - 1) * \log(x) - \beta * x - \log(\gamma(\alpha)) - \alpha * \log(\beta)
|
r"""
Evaluate log probability.
|
[
"r",
"Evaluate",
"log",
"probability",
"."
] |
def _log_prob(self, value, concentration=None, rate=None):
r"""
Evaluate log probability.
Args:
value (Tensor): The value to be evaluated.
concentration (Tensor): The concentration of the distribution. Default: self._concentration.
rate (Tensor): The rate the distribution. Default: self._rate.
.. math::
L(x) = (\alpha - 1) * \log(x) - \beta * x - \log(\gamma(\alpha)) - \alpha * \log(\beta)
"""
value = self._check_value(value, 'value')
value = self.cast(value, self.dtype)
concentration, rate = self._check_param_type(concentration, rate)
unnormalized_log_prob = (concentration - 1.) * \
self.log(value) - rate * value
log_normalization = self.lgamma(
concentration) - concentration * self.log(rate)
return unnormalized_log_prob - log_normalization
|
[
"def",
"_log_prob",
"(",
"self",
",",
"value",
",",
"concentration",
"=",
"None",
",",
"rate",
"=",
"None",
")",
":",
"value",
"=",
"self",
".",
"_check_value",
"(",
"value",
",",
"'value'",
")",
"value",
"=",
"self",
".",
"cast",
"(",
"value",
",",
"self",
".",
"dtype",
")",
"concentration",
",",
"rate",
"=",
"self",
".",
"_check_param_type",
"(",
"concentration",
",",
"rate",
")",
"unnormalized_log_prob",
"=",
"(",
"concentration",
"-",
"1.",
")",
"*",
"self",
".",
"log",
"(",
"value",
")",
"-",
"rate",
"*",
"value",
"log_normalization",
"=",
"self",
".",
"lgamma",
"(",
"concentration",
")",
"-",
"concentration",
"*",
"self",
".",
"log",
"(",
"rate",
")",
"return",
"unnormalized_log_prob",
"-",
"log_normalization"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/probability/distribution/gamma.py#L312-L331
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scipy/py3/scipy/ndimage/interpolation.py
|
python
|
shift
|
(input, shift, output=None, order=3, mode='constant', cval=0.0,
prefilter=True)
|
return output
|
Shift an array.
The array is shifted using spline interpolation of the requested order.
Points outside the boundaries of the input are filled according to the
given mode.
Parameters
----------
%(input)s
shift : float or sequence
The shift along the axes. If a float, `shift` is the same for each
axis. If a sequence, `shift` should contain one value for each axis.
%(output)s
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
%(mode)s
%(cval)s
%(prefilter)s
Returns
-------
shift : ndarray
The shifted input.
|
Shift an array.
|
[
"Shift",
"an",
"array",
"."
] |
def shift(input, shift, output=None, order=3, mode='constant', cval=0.0,
prefilter=True):
"""
Shift an array.
The array is shifted using spline interpolation of the requested order.
Points outside the boundaries of the input are filled according to the
given mode.
Parameters
----------
%(input)s
shift : float or sequence
The shift along the axes. If a float, `shift` is the same for each
axis. If a sequence, `shift` should contain one value for each axis.
%(output)s
order : int, optional
The order of the spline interpolation, default is 3.
The order has to be in the range 0-5.
%(mode)s
%(cval)s
%(prefilter)s
Returns
-------
shift : ndarray
The shifted input.
"""
if order < 0 or order > 5:
raise RuntimeError('spline order not supported')
input = numpy.asarray(input)
if numpy.iscomplexobj(input):
raise TypeError('Complex type not supported')
if input.ndim < 1:
raise RuntimeError('input and output rank must be > 0')
mode = _ni_support._extend_mode_to_code(mode)
if prefilter and order > 1:
filtered = spline_filter(input, order, output=numpy.float64)
else:
filtered = input
output = _ni_support._get_output(output, input)
shift = _ni_support._normalize_sequence(shift, input.ndim)
shift = [-ii for ii in shift]
shift = numpy.asarray(shift, dtype=numpy.float64)
if not shift.flags.contiguous:
shift = shift.copy()
_nd_image.zoom_shift(filtered, None, shift, output, order, mode, cval)
return output
|
[
"def",
"shift",
"(",
"input",
",",
"shift",
",",
"output",
"=",
"None",
",",
"order",
"=",
"3",
",",
"mode",
"=",
"'constant'",
",",
"cval",
"=",
"0.0",
",",
"prefilter",
"=",
"True",
")",
":",
"if",
"order",
"<",
"0",
"or",
"order",
">",
"5",
":",
"raise",
"RuntimeError",
"(",
"'spline order not supported'",
")",
"input",
"=",
"numpy",
".",
"asarray",
"(",
"input",
")",
"if",
"numpy",
".",
"iscomplexobj",
"(",
"input",
")",
":",
"raise",
"TypeError",
"(",
"'Complex type not supported'",
")",
"if",
"input",
".",
"ndim",
"<",
"1",
":",
"raise",
"RuntimeError",
"(",
"'input and output rank must be > 0'",
")",
"mode",
"=",
"_ni_support",
".",
"_extend_mode_to_code",
"(",
"mode",
")",
"if",
"prefilter",
"and",
"order",
">",
"1",
":",
"filtered",
"=",
"spline_filter",
"(",
"input",
",",
"order",
",",
"output",
"=",
"numpy",
".",
"float64",
")",
"else",
":",
"filtered",
"=",
"input",
"output",
"=",
"_ni_support",
".",
"_get_output",
"(",
"output",
",",
"input",
")",
"shift",
"=",
"_ni_support",
".",
"_normalize_sequence",
"(",
"shift",
",",
"input",
".",
"ndim",
")",
"shift",
"=",
"[",
"-",
"ii",
"for",
"ii",
"in",
"shift",
"]",
"shift",
"=",
"numpy",
".",
"asarray",
"(",
"shift",
",",
"dtype",
"=",
"numpy",
".",
"float64",
")",
"if",
"not",
"shift",
".",
"flags",
".",
"contiguous",
":",
"shift",
"=",
"shift",
".",
"copy",
"(",
")",
"_nd_image",
".",
"zoom_shift",
"(",
"filtered",
",",
"None",
",",
"shift",
",",
"output",
",",
"order",
",",
"mode",
",",
"cval",
")",
"return",
"output"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/ndimage/interpolation.py#L485-L533
|
|
okex/V3-Open-API-SDK
|
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
|
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/ipaddress.py
|
python
|
ip_interface
|
(address)
|
Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Interface or IPv6Interface object.
Raises:
ValueError: if the string passed isn't either a v4 or a v6
address.
Notes:
The IPv?Interface classes describe an Address on a particular
Network, so they're basically a combination of both the Address
and Network classes.
|
Take an IP string/int and return an object of the correct type.
|
[
"Take",
"an",
"IP",
"string",
"/",
"int",
"and",
"return",
"an",
"object",
"of",
"the",
"correct",
"type",
"."
] |
def ip_interface(address):
"""Take an IP string/int and return an object of the correct type.
Args:
address: A string or integer, the IP address. Either IPv4 or
IPv6 addresses may be supplied; integers less than 2**32 will
be considered to be IPv4 by default.
Returns:
An IPv4Interface or IPv6Interface object.
Raises:
ValueError: if the string passed isn't either a v4 or a v6
address.
Notes:
The IPv?Interface classes describe an Address on a particular
Network, so they're basically a combination of both the Address
and Network classes.
"""
try:
return IPv4Interface(address)
except (AddressValueError, NetmaskValueError):
pass
try:
return IPv6Interface(address)
except (AddressValueError, NetmaskValueError):
pass
raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
address)
|
[
"def",
"ip_interface",
"(",
"address",
")",
":",
"try",
":",
"return",
"IPv4Interface",
"(",
"address",
")",
"except",
"(",
"AddressValueError",
",",
"NetmaskValueError",
")",
":",
"pass",
"try",
":",
"return",
"IPv6Interface",
"(",
"address",
")",
"except",
"(",
"AddressValueError",
",",
"NetmaskValueError",
")",
":",
"pass",
"raise",
"ValueError",
"(",
"'%r does not appear to be an IPv4 or IPv6 interface'",
"%",
"address",
")"
] |
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/ipaddress.py#L207-L239
|
||
tensorflow/serving
|
3b29e18ab57c68604f599d0b3e1f8df417d22427
|
tensorflow_serving/apis/prediction_service_pb2_grpc.py
|
python
|
PredictionServiceServicer.Predict
|
(self, request, context)
|
Predict -- provides access to loaded TensorFlow model.
|
Predict -- provides access to loaded TensorFlow model.
|
[
"Predict",
"--",
"provides",
"access",
"to",
"loaded",
"TensorFlow",
"model",
"."
] |
def Predict(self, request, context):
"""Predict -- provides access to loaded TensorFlow model.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
|
[
"def",
"Predict",
"(",
"self",
",",
"request",
",",
"context",
")",
":",
"context",
".",
"set_code",
"(",
"grpc",
".",
"StatusCode",
".",
"UNIMPLEMENTED",
")",
"context",
".",
"set_details",
"(",
"'Method not implemented!'",
")",
"raise",
"NotImplementedError",
"(",
"'Method not implemented!'",
")"
] |
https://github.com/tensorflow/serving/blob/3b29e18ab57c68604f599d0b3e1f8df417d22427/tensorflow_serving/apis/prediction_service_pb2_grpc.py#L87-L92
|
||
Yelp/MOE
|
5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c
|
moe/views/optimizable_gp_pretty_view.py
|
python
|
OptimizableGpPrettyView._get_default_optimizer_params
|
(self, params)
|
return OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS[optimizer_parameters_lookup]
|
Get the default optimizer parameters associated with the desired ``optimizer_type`` and REST endpoint.
:param params: a (partially) deserialized REST request with everything except possibly
``params['optimizer_info']``
:type params: dict
:return: default multistart and optimizer parameters to use with this REST request
:rtype: :class:`moe.optimal_learning.python.constant.DefaultOptimizerInfoTuple`
|
Get the default optimizer parameters associated with the desired ``optimizer_type`` and REST endpoint.
|
[
"Get",
"the",
"default",
"optimizer",
"parameters",
"associated",
"with",
"the",
"desired",
"optimizer_type",
"and",
"REST",
"endpoint",
"."
] |
def _get_default_optimizer_params(self, params):
"""Get the default optimizer parameters associated with the desired ``optimizer_type`` and REST endpoint.
:param params: a (partially) deserialized REST request with everything except possibly
``params['optimizer_info']``
:type params: dict
:return: default multistart and optimizer parameters to use with this REST request
:rtype: :class:`moe.optimal_learning.python.constant.DefaultOptimizerInfoTuple`
"""
optimizer_type = params['optimizer_info']['optimizer_type']
optimizer_parameters_lookup = (optimizer_type, self._route_name)
return OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS[optimizer_parameters_lookup]
|
[
"def",
"_get_default_optimizer_params",
"(",
"self",
",",
"params",
")",
":",
"optimizer_type",
"=",
"params",
"[",
"'optimizer_info'",
"]",
"[",
"'optimizer_type'",
"]",
"optimizer_parameters_lookup",
"=",
"(",
"optimizer_type",
",",
"self",
".",
"_route_name",
")",
"return",
"OPTIMIZER_TYPE_AND_OBJECTIVE_TO_DEFAULT_PARAMETERS",
"[",
"optimizer_parameters_lookup",
"]"
] |
https://github.com/Yelp/MOE/blob/5b5a6a2c6c3cf47320126f7f5894e2a83e347f5c/moe/views/optimizable_gp_pretty_view.py#L26-L38
|
|
rrwick/Porechop
|
109e437280436d1ec27e5a5b7a34ffb752176390
|
ez_setup.py
|
python
|
_parse_args
|
()
|
return options
|
Parse the command line for options.
|
Parse the command line for options.
|
[
"Parse",
"the",
"command",
"line",
"for",
"options",
"."
] |
def _parse_args():
"""Parse the command line for options."""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the setuptools package')
parser.add_option(
'--insecure', dest='downloader_factory', action='store_const',
const=lambda: download_file_insecure, default=get_best_downloader,
help='Use internal, non-validating downloader'
)
parser.add_option(
'--version', help="Specify which version to download",
default=DEFAULT_VERSION,
)
parser.add_option(
'--to-dir',
help="Directory to save (and re-use) package",
default=DEFAULT_SAVE_DIR,
)
options, args = parser.parse_args()
# positional arguments are ignored
return options
|
[
"def",
"_parse_args",
"(",
")",
":",
"parser",
"=",
"optparse",
".",
"OptionParser",
"(",
")",
"parser",
".",
"add_option",
"(",
"'--user'",
",",
"dest",
"=",
"'user_install'",
",",
"action",
"=",
"'store_true'",
",",
"default",
"=",
"False",
",",
"help",
"=",
"'install in user site package'",
")",
"parser",
".",
"add_option",
"(",
"'--download-base'",
",",
"dest",
"=",
"'download_base'",
",",
"metavar",
"=",
"\"URL\"",
",",
"default",
"=",
"DEFAULT_URL",
",",
"help",
"=",
"'alternative URL from where to download the setuptools package'",
")",
"parser",
".",
"add_option",
"(",
"'--insecure'",
",",
"dest",
"=",
"'downloader_factory'",
",",
"action",
"=",
"'store_const'",
",",
"const",
"=",
"lambda",
":",
"download_file_insecure",
",",
"default",
"=",
"get_best_downloader",
",",
"help",
"=",
"'Use internal, non-validating downloader'",
")",
"parser",
".",
"add_option",
"(",
"'--version'",
",",
"help",
"=",
"\"Specify which version to download\"",
",",
"default",
"=",
"DEFAULT_VERSION",
",",
")",
"parser",
".",
"add_option",
"(",
"'--to-dir'",
",",
"help",
"=",
"\"Directory to save (and re-use) package\"",
",",
"default",
"=",
"DEFAULT_SAVE_DIR",
",",
")",
"options",
",",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"# positional arguments are ignored",
"return",
"options"
] |
https://github.com/rrwick/Porechop/blob/109e437280436d1ec27e5a5b7a34ffb752176390/ez_setup.py#L368-L394
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py
|
python
|
inputhook_wx1
|
(context)
|
return 0
|
Run the wx event loop by processing pending events only.
This approach seems to work, but its performance is not great as it
relies on having PyOS_InputHook called regularly.
|
Run the wx event loop by processing pending events only.
|
[
"Run",
"the",
"wx",
"event",
"loop",
"by",
"processing",
"pending",
"events",
"only",
"."
] |
def inputhook_wx1(context):
"""Run the wx event loop by processing pending events only.
This approach seems to work, but its performance is not great as it
relies on having PyOS_InputHook called regularly.
"""
app = wx.GetApp()
if app is not None:
assert wx.Thread_IsMain()
# Make a temporary event loop and process system events until
# there are no more waiting, then allow idle events (which
# will also deal with pending or posted wx events.)
evtloop = wx.EventLoop()
ea = wx.EventLoopActivator(evtloop)
while evtloop.Pending():
evtloop.Dispatch()
app.ProcessIdle()
del ea
return 0
|
[
"def",
"inputhook_wx1",
"(",
"context",
")",
":",
"app",
"=",
"wx",
".",
"GetApp",
"(",
")",
"if",
"app",
"is",
"not",
"None",
":",
"assert",
"wx",
".",
"Thread_IsMain",
"(",
")",
"# Make a temporary event loop and process system events until",
"# there are no more waiting, then allow idle events (which",
"# will also deal with pending or posted wx events.)",
"evtloop",
"=",
"wx",
".",
"EventLoop",
"(",
")",
"ea",
"=",
"wx",
".",
"EventLoopActivator",
"(",
"evtloop",
")",
"while",
"evtloop",
".",
"Pending",
"(",
")",
":",
"evtloop",
".",
"Dispatch",
"(",
")",
"app",
".",
"ProcessIdle",
"(",
")",
"del",
"ea",
"return",
"0"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py3/IPython/terminal/pt_inputhooks/wx.py#L28-L47
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python/src/Lib/fractions.py
|
python
|
Fraction.__new__
|
(cls, numerator=0, denominator=None)
|
return self
|
Constructs a Fraction.
Takes a string like '3/2' or '1.5', another Rational instance, a
numerator/denominator pair, or a float.
Examples
--------
>>> Fraction(10, -8)
Fraction(-5, 4)
>>> Fraction(Fraction(1, 7), 5)
Fraction(1, 35)
>>> Fraction(Fraction(1, 7), Fraction(2, 3))
Fraction(3, 14)
>>> Fraction('314')
Fraction(314, 1)
>>> Fraction('-35/4')
Fraction(-35, 4)
>>> Fraction('3.1415') # conversion from numeric string
Fraction(6283, 2000)
>>> Fraction('-47e-2') # string may include a decimal exponent
Fraction(-47, 100)
>>> Fraction(1.47) # direct construction from float (exact conversion)
Fraction(6620291452234629, 4503599627370496)
>>> Fraction(2.25)
Fraction(9, 4)
>>> Fraction(Decimal('1.47'))
Fraction(147, 100)
|
Constructs a Fraction.
|
[
"Constructs",
"a",
"Fraction",
"."
] |
def __new__(cls, numerator=0, denominator=None):
"""Constructs a Fraction.
Takes a string like '3/2' or '1.5', another Rational instance, a
numerator/denominator pair, or a float.
Examples
--------
>>> Fraction(10, -8)
Fraction(-5, 4)
>>> Fraction(Fraction(1, 7), 5)
Fraction(1, 35)
>>> Fraction(Fraction(1, 7), Fraction(2, 3))
Fraction(3, 14)
>>> Fraction('314')
Fraction(314, 1)
>>> Fraction('-35/4')
Fraction(-35, 4)
>>> Fraction('3.1415') # conversion from numeric string
Fraction(6283, 2000)
>>> Fraction('-47e-2') # string may include a decimal exponent
Fraction(-47, 100)
>>> Fraction(1.47) # direct construction from float (exact conversion)
Fraction(6620291452234629, 4503599627370496)
>>> Fraction(2.25)
Fraction(9, 4)
>>> Fraction(Decimal('1.47'))
Fraction(147, 100)
"""
self = super(Fraction, cls).__new__(cls)
if denominator is None:
if isinstance(numerator, Rational):
self._numerator = numerator.numerator
self._denominator = numerator.denominator
return self
elif isinstance(numerator, float):
# Exact conversion from float
value = Fraction.from_float(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, Decimal):
value = Fraction.from_decimal(numerator)
self._numerator = value._numerator
self._denominator = value._denominator
return self
elif isinstance(numerator, basestring):
# Handle construction from strings.
m = _RATIONAL_FORMAT.match(numerator)
if m is None:
raise ValueError('Invalid literal for Fraction: %r' %
numerator)
numerator = int(m.group('num') or '0')
denom = m.group('denom')
if denom:
denominator = int(denom)
else:
denominator = 1
decimal = m.group('decimal')
if decimal:
scale = 10**len(decimal)
numerator = numerator * scale + int(decimal)
denominator *= scale
exp = m.group('exp')
if exp:
exp = int(exp)
if exp >= 0:
numerator *= 10**exp
else:
denominator *= 10**-exp
if m.group('sign') == '-':
numerator = -numerator
else:
raise TypeError("argument should be a string "
"or a Rational instance")
elif (isinstance(numerator, Rational) and
isinstance(denominator, Rational)):
numerator, denominator = (
numerator.numerator * denominator.denominator,
denominator.numerator * numerator.denominator
)
else:
raise TypeError("both arguments should be "
"Rational instances")
if denominator == 0:
raise ZeroDivisionError('Fraction(%s, 0)' % numerator)
g = gcd(numerator, denominator)
self._numerator = numerator // g
self._denominator = denominator // g
return self
|
[
"def",
"__new__",
"(",
"cls",
",",
"numerator",
"=",
"0",
",",
"denominator",
"=",
"None",
")",
":",
"self",
"=",
"super",
"(",
"Fraction",
",",
"cls",
")",
".",
"__new__",
"(",
"cls",
")",
"if",
"denominator",
"is",
"None",
":",
"if",
"isinstance",
"(",
"numerator",
",",
"Rational",
")",
":",
"self",
".",
"_numerator",
"=",
"numerator",
".",
"numerator",
"self",
".",
"_denominator",
"=",
"numerator",
".",
"denominator",
"return",
"self",
"elif",
"isinstance",
"(",
"numerator",
",",
"float",
")",
":",
"# Exact conversion from float",
"value",
"=",
"Fraction",
".",
"from_float",
"(",
"numerator",
")",
"self",
".",
"_numerator",
"=",
"value",
".",
"_numerator",
"self",
".",
"_denominator",
"=",
"value",
".",
"_denominator",
"return",
"self",
"elif",
"isinstance",
"(",
"numerator",
",",
"Decimal",
")",
":",
"value",
"=",
"Fraction",
".",
"from_decimal",
"(",
"numerator",
")",
"self",
".",
"_numerator",
"=",
"value",
".",
"_numerator",
"self",
".",
"_denominator",
"=",
"value",
".",
"_denominator",
"return",
"self",
"elif",
"isinstance",
"(",
"numerator",
",",
"basestring",
")",
":",
"# Handle construction from strings.",
"m",
"=",
"_RATIONAL_FORMAT",
".",
"match",
"(",
"numerator",
")",
"if",
"m",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Invalid literal for Fraction: %r'",
"%",
"numerator",
")",
"numerator",
"=",
"int",
"(",
"m",
".",
"group",
"(",
"'num'",
")",
"or",
"'0'",
")",
"denom",
"=",
"m",
".",
"group",
"(",
"'denom'",
")",
"if",
"denom",
":",
"denominator",
"=",
"int",
"(",
"denom",
")",
"else",
":",
"denominator",
"=",
"1",
"decimal",
"=",
"m",
".",
"group",
"(",
"'decimal'",
")",
"if",
"decimal",
":",
"scale",
"=",
"10",
"**",
"len",
"(",
"decimal",
")",
"numerator",
"=",
"numerator",
"*",
"scale",
"+",
"int",
"(",
"decimal",
")",
"denominator",
"*=",
"scale",
"exp",
"=",
"m",
".",
"group",
"(",
"'exp'",
")",
"if",
"exp",
":",
"exp",
"=",
"int",
"(",
"exp",
")",
"if",
"exp",
">=",
"0",
":",
"numerator",
"*=",
"10",
"**",
"exp",
"else",
":",
"denominator",
"*=",
"10",
"**",
"-",
"exp",
"if",
"m",
".",
"group",
"(",
"'sign'",
")",
"==",
"'-'",
":",
"numerator",
"=",
"-",
"numerator",
"else",
":",
"raise",
"TypeError",
"(",
"\"argument should be a string \"",
"\"or a Rational instance\"",
")",
"elif",
"(",
"isinstance",
"(",
"numerator",
",",
"Rational",
")",
"and",
"isinstance",
"(",
"denominator",
",",
"Rational",
")",
")",
":",
"numerator",
",",
"denominator",
"=",
"(",
"numerator",
".",
"numerator",
"*",
"denominator",
".",
"denominator",
",",
"denominator",
".",
"numerator",
"*",
"numerator",
".",
"denominator",
")",
"else",
":",
"raise",
"TypeError",
"(",
"\"both arguments should be \"",
"\"Rational instances\"",
")",
"if",
"denominator",
"==",
"0",
":",
"raise",
"ZeroDivisionError",
"(",
"'Fraction(%s, 0)'",
"%",
"numerator",
")",
"g",
"=",
"gcd",
"(",
"numerator",
",",
"denominator",
")",
"self",
".",
"_numerator",
"=",
"numerator",
"//",
"g",
"self",
".",
"_denominator",
"=",
"denominator",
"//",
"g",
"return",
"self"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/fractions.py#L68-L166
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/x86/toolchain/lib/python2.7/chunk.py
|
python
|
Chunk.getsize
|
(self)
|
return self.chunksize
|
Return the size of the current chunk.
|
Return the size of the current chunk.
|
[
"Return",
"the",
"size",
"of",
"the",
"current",
"chunk",
"."
] |
def getsize(self):
"""Return the size of the current chunk."""
return self.chunksize
|
[
"def",
"getsize",
"(",
"self",
")",
":",
"return",
"self",
".",
"chunksize"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/chunk.py#L82-L84
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/arrays/numpy_.py
|
python
|
PandasDtype.itemsize
|
(self)
|
return self._dtype.itemsize
|
The element size of this data-type object.
|
The element size of this data-type object.
|
[
"The",
"element",
"size",
"of",
"this",
"data",
"-",
"type",
"object",
"."
] |
def itemsize(self):
"""The element size of this data-type object."""
return self._dtype.itemsize
|
[
"def",
"itemsize",
"(",
"self",
")",
":",
"return",
"self",
".",
"_dtype",
".",
"itemsize"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/arrays/numpy_.py#L97-L99
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/dictimpl.py
|
python
|
impl_dict
|
(context, builder, sig, args)
|
return context.compile_internal(builder, call_ctor, sig, args)
|
The `dict()` implementation simply forwards the work to `Dict.empty()`.
|
The `dict()` implementation simply forwards the work to `Dict.empty()`.
|
[
"The",
"dict",
"()",
"implementation",
"simply",
"forwards",
"the",
"work",
"to",
"Dict",
".",
"empty",
"()",
"."
] |
def impl_dict(context, builder, sig, args):
"""
The `dict()` implementation simply forwards the work to `Dict.empty()`.
"""
from numba.typed import Dict
dicttype = sig.return_type
kt, vt = dicttype.key_type, dicttype.value_type
def call_ctor():
return Dict.empty(kt, vt)
return context.compile_internal(builder, call_ctor, sig, args)
|
[
"def",
"impl_dict",
"(",
"context",
",",
"builder",
",",
"sig",
",",
"args",
")",
":",
"from",
"numba",
".",
"typed",
"import",
"Dict",
"dicttype",
"=",
"sig",
".",
"return_type",
"kt",
",",
"vt",
"=",
"dicttype",
".",
"key_type",
",",
"dicttype",
".",
"value_type",
"def",
"call_ctor",
"(",
")",
":",
"return",
"Dict",
".",
"empty",
"(",
"kt",
",",
"vt",
")",
"return",
"context",
".",
"compile_internal",
"(",
"builder",
",",
"call_ctor",
",",
"sig",
",",
"args",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/targets/dictimpl.py#L8-L20
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/saved_model/loader_impl.py
|
python
|
SavedModelLoader.load
|
(self, sess, tags, import_scope=None, **saver_kwargs)
|
return self.get_meta_graph_def_from_tags(tags)
|
Load the MetaGraphDef graph and restore variable values into the session.
Args:
sess: tf.compat.v1.Session to restore variable values.
tags: a set of string tags identifying a MetaGraphDef.
import_scope: Optional `string` -- if specified, prepend this string
followed by '/' to all loaded tensor names. This scope is applied to
tensor instances loaded into the passed session, but it is *not* written
through to the static `MetaGraphDef` protocol buffer that is returned.
**saver_kwargs: keyword arguments to pass to tf.train.import_meta_graph.
Returns:
`MetagraphDef` proto of the graph that was loaded.
|
Load the MetaGraphDef graph and restore variable values into the session.
|
[
"Load",
"the",
"MetaGraphDef",
"graph",
"and",
"restore",
"variable",
"values",
"into",
"the",
"session",
"."
] |
def load(self, sess, tags, import_scope=None, **saver_kwargs):
"""Load the MetaGraphDef graph and restore variable values into the session.
Args:
sess: tf.compat.v1.Session to restore variable values.
tags: a set of string tags identifying a MetaGraphDef.
import_scope: Optional `string` -- if specified, prepend this string
followed by '/' to all loaded tensor names. This scope is applied to
tensor instances loaded into the passed session, but it is *not* written
through to the static `MetaGraphDef` protocol buffer that is returned.
**saver_kwargs: keyword arguments to pass to tf.train.import_meta_graph.
Returns:
`MetagraphDef` proto of the graph that was loaded.
"""
with sess.graph.as_default():
saver, _ = self.load_graph(sess.graph, tags, import_scope,
**saver_kwargs)
self.restore_variables(sess, saver, import_scope)
self.run_init_ops(sess, tags, import_scope)
return self.get_meta_graph_def_from_tags(tags)
|
[
"def",
"load",
"(",
"self",
",",
"sess",
",",
"tags",
",",
"import_scope",
"=",
"None",
",",
"*",
"*",
"saver_kwargs",
")",
":",
"with",
"sess",
".",
"graph",
".",
"as_default",
"(",
")",
":",
"saver",
",",
"_",
"=",
"self",
".",
"load_graph",
"(",
"sess",
".",
"graph",
",",
"tags",
",",
"import_scope",
",",
"*",
"*",
"saver_kwargs",
")",
"self",
".",
"restore_variables",
"(",
"sess",
",",
"saver",
",",
"import_scope",
")",
"self",
".",
"run_init_ops",
"(",
"sess",
",",
"tags",
",",
"import_scope",
")",
"return",
"self",
".",
"get_meta_graph_def_from_tags",
"(",
"tags",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/saved_model/loader_impl.py#L405-L425
|
|
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2class.py
|
python
|
xmlDoc.newDocTextLen
|
(self, content, len)
|
return __tmp
|
Creation of a new text node with an extra content length
parameter. The text node pertain to a given document.
|
Creation of a new text node with an extra content length
parameter. The text node pertain to a given document.
|
[
"Creation",
"of",
"a",
"new",
"text",
"node",
"with",
"an",
"extra",
"content",
"length",
"parameter",
".",
"The",
"text",
"node",
"pertain",
"to",
"a",
"given",
"document",
"."
] |
def newDocTextLen(self, content, len):
"""Creation of a new text node with an extra content length
parameter. The text node pertain to a given document. """
ret = libxml2mod.xmlNewDocTextLen(self._o, content, len)
if ret is None:raise treeError('xmlNewDocTextLen() failed')
__tmp = xmlNode(_obj=ret)
return __tmp
|
[
"def",
"newDocTextLen",
"(",
"self",
",",
"content",
",",
"len",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlNewDocTextLen",
"(",
"self",
".",
"_o",
",",
"content",
",",
"len",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlNewDocTextLen() failed'",
")",
"__tmp",
"=",
"xmlNode",
"(",
"_obj",
"=",
"ret",
")",
"return",
"__tmp"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L3601-L3607
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/package_index.py
|
python
|
find_external_links
|
(url, page)
|
Find rel="homepage" and rel="download" links in `page`, yielding URLs
|
Find rel="homepage" and rel="download" links in `page`, yielding URLs
|
[
"Find",
"rel",
"=",
"homepage",
"and",
"rel",
"=",
"download",
"links",
"in",
"page",
"yielding",
"URLs"
] |
def find_external_links(url, page):
"""Find rel="homepage" and rel="download" links in `page`, yielding URLs"""
for match in REL.finditer(page):
tag, rel = match.groups()
rels = set(map(str.strip, rel.lower().split(',')))
if 'homepage' in rels or 'download' in rels:
for match in HREF.finditer(tag):
yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
for tag in ("<th>Home Page", "<th>Download URL"):
pos = page.find(tag)
if pos != -1:
match = HREF.search(page, pos)
if match:
yield urllib.parse.urljoin(url, htmldecode(match.group(1)))
|
[
"def",
"find_external_links",
"(",
"url",
",",
"page",
")",
":",
"for",
"match",
"in",
"REL",
".",
"finditer",
"(",
"page",
")",
":",
"tag",
",",
"rel",
"=",
"match",
".",
"groups",
"(",
")",
"rels",
"=",
"set",
"(",
"map",
"(",
"str",
".",
"strip",
",",
"rel",
".",
"lower",
"(",
")",
".",
"split",
"(",
"','",
")",
")",
")",
"if",
"'homepage'",
"in",
"rels",
"or",
"'download'",
"in",
"rels",
":",
"for",
"match",
"in",
"HREF",
".",
"finditer",
"(",
"tag",
")",
":",
"yield",
"urllib",
".",
"parse",
".",
"urljoin",
"(",
"url",
",",
"htmldecode",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
")",
"for",
"tag",
"in",
"(",
"\"<th>Home Page\"",
",",
"\"<th>Download URL\"",
")",
":",
"pos",
"=",
"page",
".",
"find",
"(",
"tag",
")",
"if",
"pos",
"!=",
"-",
"1",
":",
"match",
"=",
"HREF",
".",
"search",
"(",
"page",
",",
"pos",
")",
"if",
"match",
":",
"yield",
"urllib",
".",
"parse",
".",
"urljoin",
"(",
"url",
",",
"htmldecode",
"(",
"match",
".",
"group",
"(",
"1",
")",
")",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/setuptools/package_index.py#L223-L238
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/lib2to3/pytree.py
|
python
|
Node.clone
|
(self)
|
return Node(self.type, [ch.clone() for ch in self.children],
fixers_applied=self.fixers_applied)
|
Return a cloned (deep) copy of self.
|
Return a cloned (deep) copy of self.
|
[
"Return",
"a",
"cloned",
"(",
"deep",
")",
"copy",
"of",
"self",
"."
] |
def clone(self):
"""Return a cloned (deep) copy of self."""
return Node(self.type, [ch.clone() for ch in self.children],
fixers_applied=self.fixers_applied)
|
[
"def",
"clone",
"(",
"self",
")",
":",
"return",
"Node",
"(",
"self",
".",
"type",
",",
"[",
"ch",
".",
"clone",
"(",
")",
"for",
"ch",
"in",
"self",
".",
"children",
"]",
",",
"fixers_applied",
"=",
"self",
".",
"fixers_applied",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/lib2to3/pytree.py#L257-L260
|
|
mapsme/omim
|
1892903b63f2c85b16ed4966d21fe76aba06b9ba
|
tools/python/stylesheet/webcolors/webcolors.py
|
python
|
rgb_percent_to_rgb
|
(rgb_percent_triplet)
|
return tuple(map(_percent_to_integer, rgb_percent_triplet))
|
Convert a 3-tuple of percentages, suitable for use in an ``rgb()``
color triplet, to a 3-tuple of integers suitable for use in
representing that color.
Some precision may be lost in this conversion. See the note
regarding precision for ``rgb_to_rgb_percent()`` for details;
generally speaking, the following is true for any 3-tuple ``t`` of
integers in the range 0...255 inclusive::
t == rgb_percent_to_rgb(rgb_to_rgb_percent(t))
Examples:
>>> rgb_percent_to_rgb(('100%', '100%', '100%'))
(255, 255, 255)
>>> rgb_percent_to_rgb(('0%', '0%', '50%'))
(0, 0, 128)
>>> rgb_percent_to_rgb(('25%', '12.5%', '6.25%'))
(64, 32, 16)
>>> rgb_percent_to_rgb(('12.94%', '21.96%', '75.29%'))
(33, 56, 192)
|
Convert a 3-tuple of percentages, suitable for use in an ``rgb()``
color triplet, to a 3-tuple of integers suitable for use in
representing that color.
|
[
"Convert",
"a",
"3",
"-",
"tuple",
"of",
"percentages",
"suitable",
"for",
"use",
"in",
"an",
"rgb",
"()",
"color",
"triplet",
"to",
"a",
"3",
"-",
"tuple",
"of",
"integers",
"suitable",
"for",
"use",
"in",
"representing",
"that",
"color",
"."
] |
def rgb_percent_to_rgb(rgb_percent_triplet):
"""
Convert a 3-tuple of percentages, suitable for use in an ``rgb()``
color triplet, to a 3-tuple of integers suitable for use in
representing that color.
Some precision may be lost in this conversion. See the note
regarding precision for ``rgb_to_rgb_percent()`` for details;
generally speaking, the following is true for any 3-tuple ``t`` of
integers in the range 0...255 inclusive::
t == rgb_percent_to_rgb(rgb_to_rgb_percent(t))
Examples:
>>> rgb_percent_to_rgb(('100%', '100%', '100%'))
(255, 255, 255)
>>> rgb_percent_to_rgb(('0%', '0%', '50%'))
(0, 0, 128)
>>> rgb_percent_to_rgb(('25%', '12.5%', '6.25%'))
(64, 32, 16)
>>> rgb_percent_to_rgb(('12.94%', '21.96%', '75.29%'))
(33, 56, 192)
"""
return tuple(map(_percent_to_integer, rgb_percent_triplet))
|
[
"def",
"rgb_percent_to_rgb",
"(",
"rgb_percent_triplet",
")",
":",
"return",
"tuple",
"(",
"map",
"(",
"_percent_to_integer",
",",
"rgb_percent_triplet",
")",
")"
] |
https://github.com/mapsme/omim/blob/1892903b63f2c85b16ed4966d21fe76aba06b9ba/tools/python/stylesheet/webcolors/webcolors.py#L818-L843
|
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/plistlib.py
|
python
|
Plist.write
|
(self, pathOrFile)
|
Deprecated. Use the writePlist() function instead.
|
Deprecated. Use the writePlist() function instead.
|
[
"Deprecated",
".",
"Use",
"the",
"writePlist",
"()",
"function",
"instead",
"."
] |
def write(self, pathOrFile):
"""Deprecated. Use the writePlist() function instead."""
writePlist(self, pathOrFile)
|
[
"def",
"write",
"(",
"self",
",",
"pathOrFile",
")",
":",
"writePlist",
"(",
"self",
",",
"pathOrFile",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/plistlib.py#L351-L353
|
||
mingchen/protobuf-ios
|
0958df34558cd54cb7b6e6ca5c8855bf3d475046
|
compiler/python/mox.py
|
python
|
MockMethod._VerifyMethodCall
|
(self)
|
return expected
|
Verify the called method is expected.
This can be an ordered method, or part of an unordered set.
Returns:
The expected mock method.
Raises:
UnexpectedMethodCall if the method called was not expected.
|
Verify the called method is expected.
|
[
"Verify",
"the",
"called",
"method",
"is",
"expected",
"."
] |
def _VerifyMethodCall(self):
"""Verify the called method is expected.
This can be an ordered method, or part of an unordered set.
Returns:
The expected mock method.
Raises:
UnexpectedMethodCall if the method called was not expected.
"""
expected = self._PopNextMethod()
# Loop here, because we might have a MethodGroup followed by another
# group.
while isinstance(expected, MethodGroup):
expected, method = expected.MethodCalled(self)
if method is not None:
return method
# This is a mock method, so just check equality.
if expected != self:
raise UnexpectedMethodCallError(self, expected)
return expected
|
[
"def",
"_VerifyMethodCall",
"(",
"self",
")",
":",
"expected",
"=",
"self",
".",
"_PopNextMethod",
"(",
")",
"# Loop here, because we might have a MethodGroup followed by another",
"# group.",
"while",
"isinstance",
"(",
"expected",
",",
"MethodGroup",
")",
":",
"expected",
",",
"method",
"=",
"expected",
".",
"MethodCalled",
"(",
"self",
")",
"if",
"method",
"is",
"not",
"None",
":",
"return",
"method",
"# This is a mock method, so just check equality.",
"if",
"expected",
"!=",
"self",
":",
"raise",
"UnexpectedMethodCallError",
"(",
"self",
",",
"expected",
")",
"return",
"expected"
] |
https://github.com/mingchen/protobuf-ios/blob/0958df34558cd54cb7b6e6ca5c8855bf3d475046/compiler/python/mox.py#L588-L613
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_misc.py
|
python
|
Log_GetRepetitionCounting
|
(*args)
|
return _misc_.Log_GetRepetitionCounting(*args)
|
Log_GetRepetitionCounting() -> bool
|
Log_GetRepetitionCounting() -> bool
|
[
"Log_GetRepetitionCounting",
"()",
"-",
">",
"bool"
] |
def Log_GetRepetitionCounting(*args):
"""Log_GetRepetitionCounting() -> bool"""
return _misc_.Log_GetRepetitionCounting(*args)
|
[
"def",
"Log_GetRepetitionCounting",
"(",
"*",
"args",
")",
":",
"return",
"_misc_",
".",
"Log_GetRepetitionCounting",
"(",
"*",
"args",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_misc.py#L1692-L1694
|
|
apple/swift-lldb
|
d74be846ef3e62de946df343e8c234bde93a8912
|
scripts/Python/static-binding/lldb.py
|
python
|
SBThread.StepUsingScriptedThreadPlan
|
(self, *args)
|
return _lldb.SBThread_StepUsingScriptedThreadPlan(self, *args)
|
StepUsingScriptedThreadPlan(SBThread self, char const * script_class_name) -> SBError
StepUsingScriptedThreadPlan(SBThread self, char const * script_class_name, bool resume_immediately) -> SBError
|
StepUsingScriptedThreadPlan(SBThread self, char const * script_class_name) -> SBError
StepUsingScriptedThreadPlan(SBThread self, char const * script_class_name, bool resume_immediately) -> SBError
|
[
"StepUsingScriptedThreadPlan",
"(",
"SBThread",
"self",
"char",
"const",
"*",
"script_class_name",
")",
"-",
">",
"SBError",
"StepUsingScriptedThreadPlan",
"(",
"SBThread",
"self",
"char",
"const",
"*",
"script_class_name",
"bool",
"resume_immediately",
")",
"-",
">",
"SBError"
] |
def StepUsingScriptedThreadPlan(self, *args):
"""
StepUsingScriptedThreadPlan(SBThread self, char const * script_class_name) -> SBError
StepUsingScriptedThreadPlan(SBThread self, char const * script_class_name, bool resume_immediately) -> SBError
"""
return _lldb.SBThread_StepUsingScriptedThreadPlan(self, *args)
|
[
"def",
"StepUsingScriptedThreadPlan",
"(",
"self",
",",
"*",
"args",
")",
":",
"return",
"_lldb",
".",
"SBThread_StepUsingScriptedThreadPlan",
"(",
"self",
",",
"*",
"args",
")"
] |
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L11742-L11747
|
|
facebookresearch/ELF
|
1f790173095cd910976d9f651b80beb872ec5d12
|
vendor/pybind11/tools/clang/cindex.py
|
python
|
Cursor.enum_value
|
(self)
|
return self._enum_value
|
Return the value of an enum constant.
|
Return the value of an enum constant.
|
[
"Return",
"the",
"value",
"of",
"an",
"enum",
"constant",
"."
] |
def enum_value(self):
"""Return the value of an enum constant."""
if not hasattr(self, '_enum_value'):
assert self.kind == CursorKind.ENUM_CONSTANT_DECL
# Figure out the underlying type of the enum to know if it
# is a signed or unsigned quantity.
underlying_type = self.type
if underlying_type.kind == TypeKind.ENUM:
underlying_type = underlying_type.get_declaration().enum_type
if underlying_type.kind in (TypeKind.CHAR_U,
TypeKind.UCHAR,
TypeKind.CHAR16,
TypeKind.CHAR32,
TypeKind.USHORT,
TypeKind.UINT,
TypeKind.ULONG,
TypeKind.ULONGLONG,
TypeKind.UINT128):
self._enum_value = \
conf.lib.clang_getEnumConstantDeclUnsignedValue(self)
else:
self._enum_value = conf.lib.clang_getEnumConstantDeclValue(self)
return self._enum_value
|
[
"def",
"enum_value",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_enum_value'",
")",
":",
"assert",
"self",
".",
"kind",
"==",
"CursorKind",
".",
"ENUM_CONSTANT_DECL",
"# Figure out the underlying type of the enum to know if it",
"# is a signed or unsigned quantity.",
"underlying_type",
"=",
"self",
".",
"type",
"if",
"underlying_type",
".",
"kind",
"==",
"TypeKind",
".",
"ENUM",
":",
"underlying_type",
"=",
"underlying_type",
".",
"get_declaration",
"(",
")",
".",
"enum_type",
"if",
"underlying_type",
".",
"kind",
"in",
"(",
"TypeKind",
".",
"CHAR_U",
",",
"TypeKind",
".",
"UCHAR",
",",
"TypeKind",
".",
"CHAR16",
",",
"TypeKind",
".",
"CHAR32",
",",
"TypeKind",
".",
"USHORT",
",",
"TypeKind",
".",
"UINT",
",",
"TypeKind",
".",
"ULONG",
",",
"TypeKind",
".",
"ULONGLONG",
",",
"TypeKind",
".",
"UINT128",
")",
":",
"self",
".",
"_enum_value",
"=",
"conf",
".",
"lib",
".",
"clang_getEnumConstantDeclUnsignedValue",
"(",
"self",
")",
"else",
":",
"self",
".",
"_enum_value",
"=",
"conf",
".",
"lib",
".",
"clang_getEnumConstantDeclValue",
"(",
"self",
")",
"return",
"self",
".",
"_enum_value"
] |
https://github.com/facebookresearch/ELF/blob/1f790173095cd910976d9f651b80beb872ec5d12/vendor/pybind11/tools/clang/cindex.py#L1531-L1553
|
|
krishauser/Klampt
|
972cc83ea5befac3f653c1ba20f80155768ad519
|
Python/python2_version/klampt/src/robotsim.py
|
python
|
RobotModelLink.getAngularAcceleration
|
(self, ddq)
|
return _robotsim.RobotModelLink_getAngularAcceleration(self, ddq)
|
getAngularAcceleration(RobotModelLink self, doubleVector ddq)
Returns the angular acceleration of the link given the robot's current joint
configuration and velocities, and the joint accelerations ddq.
Returns:
(list of 3 floats): the angular acceleration of the link, in
world coordinates.
|
getAngularAcceleration(RobotModelLink self, doubleVector ddq)
|
[
"getAngularAcceleration",
"(",
"RobotModelLink",
"self",
"doubleVector",
"ddq",
")"
] |
def getAngularAcceleration(self, ddq):
"""
getAngularAcceleration(RobotModelLink self, doubleVector ddq)
Returns the angular acceleration of the link given the robot's current joint
configuration and velocities, and the joint accelerations ddq.
Returns:
(list of 3 floats): the angular acceleration of the link, in
world coordinates.
"""
return _robotsim.RobotModelLink_getAngularAcceleration(self, ddq)
|
[
"def",
"getAngularAcceleration",
"(",
"self",
",",
"ddq",
")",
":",
"return",
"_robotsim",
".",
"RobotModelLink_getAngularAcceleration",
"(",
"self",
",",
"ddq",
")"
] |
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/python2_version/klampt/src/robotsim.py#L4166-L4181
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/site-packages/urllib3/response.py
|
python
|
HTTPResponse._init_decoder
|
(self)
|
Set-up the _decoder attribute if necessary.
|
Set-up the _decoder attribute if necessary.
|
[
"Set",
"-",
"up",
"the",
"_decoder",
"attribute",
"if",
"necessary",
"."
] |
def _init_decoder(self):
"""
Set-up the _decoder attribute if necessary.
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get("content-encoding", "").lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
elif "," in content_encoding:
encodings = [
e.strip()
for e in content_encoding.split(",")
if e.strip() in self.CONTENT_DECODERS
]
if len(encodings):
self._decoder = _get_decoder(content_encoding)
|
[
"def",
"_init_decoder",
"(",
"self",
")",
":",
"# Note: content-encoding value should be case-insensitive, per RFC 7230",
"# Section 3.2",
"content_encoding",
"=",
"self",
".",
"headers",
".",
"get",
"(",
"\"content-encoding\"",
",",
"\"\"",
")",
".",
"lower",
"(",
")",
"if",
"self",
".",
"_decoder",
"is",
"None",
":",
"if",
"content_encoding",
"in",
"self",
".",
"CONTENT_DECODERS",
":",
"self",
".",
"_decoder",
"=",
"_get_decoder",
"(",
"content_encoding",
")",
"elif",
"\",\"",
"in",
"content_encoding",
":",
"encodings",
"=",
"[",
"e",
".",
"strip",
"(",
")",
"for",
"e",
"in",
"content_encoding",
".",
"split",
"(",
"\",\"",
")",
"if",
"e",
".",
"strip",
"(",
")",
"in",
"self",
".",
"CONTENT_DECODERS",
"]",
"if",
"len",
"(",
"encodings",
")",
":",
"self",
".",
"_decoder",
"=",
"_get_decoder",
"(",
"content_encoding",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/urllib3/response.py#L356-L373
|
||
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/email/encoders.py
|
python
|
encode_base64
|
(msg)
|
Encode the message's payload in Base64.
Also, add an appropriate Content-Transfer-Encoding header.
|
Encode the message's payload in Base64.
|
[
"Encode",
"the",
"message",
"s",
"payload",
"in",
"Base64",
"."
] |
def encode_base64(msg):
"""Encode the message's payload in Base64.
Also, add an appropriate Content-Transfer-Encoding header.
"""
orig = msg.get_payload()
encdata = _bencode(orig)
msg.set_payload(encdata)
msg['Content-Transfer-Encoding'] = 'base64'
|
[
"def",
"encode_base64",
"(",
"msg",
")",
":",
"orig",
"=",
"msg",
".",
"get_payload",
"(",
")",
"encdata",
"=",
"_bencode",
"(",
"orig",
")",
"msg",
".",
"set_payload",
"(",
"encdata",
")",
"msg",
"[",
"'Content-Transfer-Encoding'",
"]",
"=",
"'base64'"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/email/encoders.py#L39-L47
|
||
GJDuck/LowFat
|
ecf6a0f0fa1b73a27a626cf493cc39e477b6faea
|
llvm-4.0.0.src/projects/compiler-rt/lib/sanitizer_common/scripts/cpplint.py
|
python
|
_DropCommonSuffixes
|
(filename)
|
return os.path.splitext(filename)[0]
|
Drops common suffixes like _test.cc or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cc')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
'foo/foo_unusualinternal'
Args:
filename: The input filename.
Returns:
The filename with the common suffix removed.
|
Drops common suffixes like _test.cc or -inl.h from filename.
|
[
"Drops",
"common",
"suffixes",
"like",
"_test",
".",
"cc",
"or",
"-",
"inl",
".",
"h",
"from",
"filename",
"."
] |
def _DropCommonSuffixes(filename):
"""Drops common suffixes like _test.cc or -inl.h from filename.
For example:
>>> _DropCommonSuffixes('foo/foo-inl.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/bar/foo.cc')
'foo/bar/foo'
>>> _DropCommonSuffixes('foo/foo_internal.h')
'foo/foo'
>>> _DropCommonSuffixes('foo/foo_unusualinternal.h')
'foo/foo_unusualinternal'
Args:
filename: The input filename.
Returns:
The filename with the common suffix removed.
"""
for suffix in ('test.cc', 'regtest.cc', 'unittest.cc',
'inl.h', 'impl.h', 'internal.h'):
if (filename.endswith(suffix) and len(filename) > len(suffix) and
filename[-len(suffix) - 1] in ('-', '_')):
return filename[:-len(suffix) - 1]
return os.path.splitext(filename)[0]
|
[
"def",
"_DropCommonSuffixes",
"(",
"filename",
")",
":",
"for",
"suffix",
"in",
"(",
"'test.cc'",
",",
"'regtest.cc'",
",",
"'unittest.cc'",
",",
"'inl.h'",
",",
"'impl.h'",
",",
"'internal.h'",
")",
":",
"if",
"(",
"filename",
".",
"endswith",
"(",
"suffix",
")",
"and",
"len",
"(",
"filename",
")",
">",
"len",
"(",
"suffix",
")",
"and",
"filename",
"[",
"-",
"len",
"(",
"suffix",
")",
"-",
"1",
"]",
"in",
"(",
"'-'",
",",
"'_'",
")",
")",
":",
"return",
"filename",
"[",
":",
"-",
"len",
"(",
"suffix",
")",
"-",
"1",
"]",
"return",
"os",
".",
"path",
".",
"splitext",
"(",
"filename",
")",
"[",
"0",
"]"
] |
https://github.com/GJDuck/LowFat/blob/ecf6a0f0fa1b73a27a626cf493cc39e477b6faea/llvm-4.0.0.src/projects/compiler-rt/lib/sanitizer_common/scripts/cpplint.py#L2913-L2937
|
|
BitMEX/api-connectors
|
37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
|
auto-generated/python/swagger_client/models/position.py
|
python
|
Position.pos_init
|
(self)
|
return self._pos_init
|
Gets the pos_init of this Position. # noqa: E501
:return: The pos_init of this Position. # noqa: E501
:rtype: float
|
Gets the pos_init of this Position. # noqa: E501
|
[
"Gets",
"the",
"pos_init",
"of",
"this",
"Position",
".",
"#",
"noqa",
":",
"E501"
] |
def pos_init(self):
"""Gets the pos_init of this Position. # noqa: E501
:return: The pos_init of this Position. # noqa: E501
:rtype: float
"""
return self._pos_init
|
[
"def",
"pos_init",
"(",
"self",
")",
":",
"return",
"self",
".",
"_pos_init"
] |
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/models/position.py#L1596-L1603
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/lib/grpc_debug_server.py
|
python
|
EventListenerBaseStreamHandler.on_value_event
|
(self, event)
|
Callback for Event proto received through the gRPC stream.
This Event proto carries a Tensor in its summary.value[0] field.
Args:
event: The Event proto from the stream to be processed.
|
Callback for Event proto received through the gRPC stream.
|
[
"Callback",
"for",
"Event",
"proto",
"received",
"through",
"the",
"gRPC",
"stream",
"."
] |
def on_value_event(self, event):
"""Callback for Event proto received through the gRPC stream.
This Event proto carries a Tensor in its summary.value[0] field.
Args:
event: The Event proto from the stream to be processed.
"""
raise NotImplementedError(
"on_value_event() is not implemented in the base servicer class")
|
[
"def",
"on_value_event",
"(",
"self",
",",
"event",
")",
":",
"raise",
"NotImplementedError",
"(",
"\"on_value_event() is not implemented in the base servicer class\"",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/debug/lib/grpc_debug_server.py#L91-L100
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.