Each record in this dump describes one Python function. Field schema and value statistics, as reported by the dataset viewer:

| Field | Type | Length / distinct values |
|---|---|---|
| nwo | string | 5–86 chars |
| sha | string | 40 chars |
| path | string | 4–189 chars |
| language | string | 1 distinct value (`python`) |
| identifier | string | 1–94 chars |
| parameters | string | 2–4.03k chars |
| argument_list | string | 1 distinct value |
| return_statement | string | 0–11.5k chars |
| docstring | string | 1–33.2k chars |
| docstring_summary | string | 0–5.15k chars |
| docstring_tokens | list | n/a |
| function | string | 34–151k chars |
| function_tokens | list | n/a |
| url | string | 90–278 chars |
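For orientation, a minimal sketch of consuming rows with this schema; the JSONL filename is an assumption, not something this dump specifies:

```python
import json

# Hypothetical local dump of this dataset, one JSON object per line,
# keyed by the schema fields above (nwo, sha, path, identifier, ...).
with open("functions.jsonl") as f:
    for line in f:
        row = json.loads(line)
        print(row["nwo"], row["identifier"], row["url"])
```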
**gem5/gem5** · `Simulator.get_current_tick`

- sha: `141cc37c2d4b93959d4c249b8f7e6a8b2ef75338`
- path: `src/python/gem5/simulate/simulator.py`
- language: python
- parameters: `(self)`
- return statement: `return m5.curTick()`
- docstring: Returns the current tick.

```python
def get_current_tick(self) -> int:
    """
    Returns the current tick.
    """
    return m5.curTick()
```
Source: https://github.com/gem5/gem5/blob/141cc37c2d4b93959d4c249b8f7e6a8b2ef75338/src/python/gem5/simulate/simulator.py#L231-L235
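As a quick usage sketch, the tick counter is read off the `Simulator` object once simulation has run; `board` stands for a fully configured gem5 board whose construction is omitted here:

```python
from gem5.simulate.simulator import Simulator

simulator = Simulator(board=board)  # `board` is an assumed, pre-built gem5 board
simulator.run()
print("simulated ticks:", simulator.get_current_tick())
```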
**BlzFans/wke** · `IMAP4.shutdown`

- sha: `b0fa21158312e40c5fbd84682d643022b6c34a93`
- path: `cygwin/lib/python2.6/imaplib.py`
- language: python
- parameters: `(self)`
- docstring: Close I/O established in "open".

```python
def shutdown(self):
    """Close I/O established in "open"."""
    self.file.close()
    self.sock.close()
```
Source: https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/imaplib.py#L248-L251
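In practice this method is usually reached indirectly: `IMAP4.logout()` sends the LOGOUT command and then tears down the socket and file through `shutdown()`. A minimal sketch, with host and credentials hypothetical:

```python
import imaplib

conn = imaplib.IMAP4("imap.example.org")  # hypothetical host
try:
    conn.login("user", "secret")          # hypothetical credentials
    typ, data = conn.select("INBOX")
finally:
    conn.logout()  # sends LOGOUT, then closes I/O via shutdown()
```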
**aws/lumberyard** · `get`

- sha: `f85344403c1c2e77ec8c75deb2c116e97b713217`
- path: `dev/Tools/Python/3.7.10/windows/Lib/webbrowser.py`
- language: python
- parameters: `(using=None)`
- docstring: Return a browser launcher instance appropriate for the environment.

```python
def get(using=None):
    """Return a browser launcher instance appropriate for the environment."""
    if _tryorder is None:
        with _lock:
            if _tryorder is None:
                register_standard_browsers()
    if using is not None:
        alternatives = [using]
    else:
        alternatives = _tryorder
    for browser in alternatives:
        if '%s' in browser:
            # User gave us a command line, split it into name and args
            browser = shlex.split(browser)
            if browser[-1] == '&':
                return BackgroundBrowser(browser[:-1])
            else:
                return GenericBrowser(browser)
        else:
            # User gave us a browser name or path.
            try:
                command = _browsers[browser.lower()]
            except KeyError:
                command = _synthesize(browser)
            if command[1] is not None:
                return command[1]
            elif command[0] is not None:
                return command[0]()
    raise Error("could not locate runnable browser")
```
Source: https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/webbrowser.py#L37-L65
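A usage sketch for the lookup above; "firefox" stands in for any registered browser name, and the fallback branch exercises the `Error` raised when no launcher matches:

```python
import webbrowser

try:
    browser = webbrowser.get("firefox")  # any registered name, or a '%s' command line
except webbrowser.Error:
    browser = webbrowser.get()           # default launcher for this environment
browser.open("https://example.com")
```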
**google/or-tools** · `GoogleColorPalette.__init__`

- sha: `2cb85b4eead4c38e1c54b48044f92087cf165bce`
- path: `ortools/constraint_solver/doc/routing_svg.py`
- language: python
- parameters: `(self)`
- docstring: Initialize Google ColorPalette.

```python
def __init__(self):
    """Initialize Google ColorPalette."""
    self._colors = [('blue', r'#4285F4'), ('red', r'#EA4335'),
                    ('yellow', r'#FBBC05'), ('green', r'#34A853'),
                    ('black', r'#101010'), ('white', r'#FFFFFF')]
```
Source: https://github.com/google/or-tools/blob/2cb85b4eead4c38e1c54b48044f92087cf165bce/ortools/constraint_solver/doc/routing_svg.py#L257-L261
**wxWidgets/wxPython-Classic** · `GraphicsContext.DrawRotatedText`

- sha: `19571e1ae65f1ac445f5491474121998c97a1bf0`
- path: `src/gtk/_gdi.py`
- language: python
- parameters: `(*args, **kwargs)`
- return statement: `return _gdi_.GraphicsContext_DrawRotatedText(*args, **kwargs)`
- docstring summary: `DrawRotatedText(self, String str, Double x, Double y, Double angle, GraphicsBrush backgroundBrush=NullGraphicsBrush)`
- docstring: Draws a text string at the defined position, at the specified angle, which is given in radians.

```python
def DrawRotatedText(*args, **kwargs):
    """
    DrawRotatedText(self, String str, Double x, Double y, Double angle, GraphicsBrush backgroundBrush=NullGraphicsBrush)

    Draws a text string at the defined position, at the specified angle,
    which is given in radians.
    """
    return _gdi_.GraphicsContext_DrawRotatedText(*args, **kwargs)
```
Source: https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_gdi.py#L6369-L6376
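Because the angle argument is in radians, degree-based callers must convert explicitly. A minimal sketch, assuming `dc` is a `wx.PaintDC` obtained inside an `EVT_PAINT` handler:

```python
import math
import wx

gc = wx.GraphicsContext.Create(dc)  # `dc` assumed to come from an EVT_PAINT handler
# DrawRotatedText expects radians, so convert 45 degrees explicitly.
gc.DrawRotatedText("rotated label", 20, 20, math.radians(45))
```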
**wlanjie/AndroidFFmpeg** · `RawConfigParser.remove_section`

- sha: `7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf`
- path: `tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/ConfigParser.py`
- language: python
- parameters: `(self, section)`
- return statement: `return existed`
- docstring: Remove a file section.

```python
def remove_section(self, section):
    """Remove a file section."""
    existed = section in self._sections
    if existed:
        del self._sections[section]
    return existed
```
Source: https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/ConfigParser.py#L431-L436
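A usage sketch showing the boolean return value (this vendored copy is the Python 2 `ConfigParser` module; on Python 3 the same method lives in `configparser`):

```python
import ConfigParser  # Python 3: import configparser

parser = ConfigParser.RawConfigParser()
parser.add_section("server")
parser.set("server", "port", "8080")

print(parser.remove_section("server"))  # True: the section existed
print(parser.remove_section("server"))  # False: nothing left to remove
```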
**SoarGroup/Soar** · `Dir.sconsign`

- sha: `a1c5e249499137a27da60533c72969eef3b8ab6b`
- path: `scons/scons-local-4.1.0/SCons/Node/FS.py`
- language: python
- parameters: `(self)`
- return statement: `return _sconsign_map[self._func_sconsign](self)`
- docstring: Return the .sconsign file info for this directory.

```python
def sconsign(self):
    """Return the .sconsign file info for this directory."""
    return _sconsign_map[self._func_sconsign](self)
```
Source: https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Node/FS.py#L1895-L1897
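The one-liner above is a function-table dispatch: `_func_sconsign` indexes a module-level map of implementations chosen when the node is set up. A self-contained sketch of the same idiom, with illustrative names rather than the real SCons internals:

```python
def _sconsign_none(node):
    return None

def _sconsign_dir(node):
    return "sconsign info for %r" % node

# The map is built once; each node stores only a small integer selector.
_sconsign_map = {0: _sconsign_none, 1: _sconsign_dir}

class Dir(object):
    _func_sconsign = 1  # selects the implementation at call time

    def sconsign(self):
        return _sconsign_map[self._func_sconsign](self)
```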
**rethinkdb/rethinkdb_rebirth** · `readPlist`

- sha: `54a76551512bebfe1ab1071d9b19dec2cd9c40e6`
- path: `packaging/osx/biplist/__init__.py`
- language: python
- parameters: `(pathOrFile)`
- return statement: `return result`
- docstring: Raises NotBinaryPlistException, InvalidPlistException

```python
def readPlist(pathOrFile):
    """Raises NotBinaryPlistException, InvalidPlistException"""
    didOpen = False
    result = None
    if isinstance(pathOrFile, (bytes, unicode)):
        pathOrFile = open(pathOrFile, 'rb')
        didOpen = True
    try:
        reader = PlistReader(pathOrFile)
        result = reader.parse()
    except NotBinaryPlistException as e:
        try:
            pathOrFile.seek(0)
            result = None
            if hasattr(plistlib, 'loads'):
                contents = None
                if isinstance(pathOrFile, (bytes, unicode)):
                    with open(pathOrFile, 'rb') as f:
                        contents = f.read()
                else:
                    contents = pathOrFile.read()
                result = plistlib.loads(contents)
            else:
                result = plistlib.readPlist(pathOrFile)
            result = wrapDataObject(result, for_binary=True)
        except Exception as e:
            raise InvalidPlistException(e)
    finally:
        if didOpen:
            pathOrFile.close()
    return result
```
Source: https://github.com/rethinkdb/rethinkdb_rebirth/blob/54a76551512bebfe1ab1071d9b19dec2cd9c40e6/packaging/osx/biplist/__init__.py#L117-L147
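A usage sketch for the reader above, with a hypothetical plist path; `readPlist` accepts either a path or an open binary file object, and falls back to `plistlib` when the input is an XML rather than binary plist:

```python
from biplist import InvalidPlistException, readPlist

try:
    info = readPlist("/path/to/Info.plist")  # hypothetical path
    print(info.get("CFBundleIdentifier"))
except InvalidPlistException as exc:
    print("not a valid plist:", exc)
```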
**NVIDIA/MDL-SDK** · `getTestSuite`

- sha: `aa9642b2546ad7b6236b5627385d882c2ed83c5d`
- path: `src/mdl/jit/llvm/dist/utils/lit/lit/discovery.py`
- language: python
- parameters: `(item, litConfig, cache)`
- return statement: `return ts, tuple(relative + tuple(components))`
- docstring summary: `getTestSuite(item, litConfig, cache) -> (suite, relative_path)`
- docstring: Find the test suite containing @arg item. @retval (None, ...) - Indicates no test suite contains @arg item. @retval (suite, relative_path) - The suite that @arg item is in, and its relative path inside that suite.

```python
def getTestSuite(item, litConfig, cache):
    """getTestSuite(item, litConfig, cache) -> (suite, relative_path)

    Find the test suite containing @arg item.

    @retval (None, ...) - Indicates no test suite contains @arg item.
    @retval (suite, relative_path) - The suite that @arg item is in, and its
    relative path inside that suite.
    """
    def search1(path):
        # Check for a site config or a lit config.
        cfgpath = dirContainsTestSuite(path, litConfig)

        # If we didn't find a config file, keep looking.
        if not cfgpath:
            parent, base = os.path.split(path)
            if parent == path:
                return (None, ())

            ts, relative = search(parent)
            return (ts, relative + (base,))

        # This is a private builtin parameter which can be used to perform
        # translation of configuration paths. Specifically, this parameter
        # can be set to a dictionary that the discovery process will consult
        # when it finds a configuration it is about to load. If the given
        # path is in the map, the value of that key is a path to the
        # configuration to load instead.
        config_map = litConfig.params.get('config_map')
        if config_map:
            cfgpath = os.path.realpath(cfgpath)
            cfgpath = os.path.normcase(cfgpath)
            target = config_map.get(cfgpath)
            if target:
                cfgpath = target

        # We found a test suite, create a new config for it and load it.
        if litConfig.debug:
            litConfig.note('loading suite config %r' % cfgpath)

        cfg = TestingConfig.fromdefaults(litConfig)
        cfg.load_from_path(cfgpath, litConfig)
        source_root = os.path.realpath(cfg.test_source_root or path)
        exec_root = os.path.realpath(cfg.test_exec_root or path)
        return Test.TestSuite(cfg.name, source_root, exec_root, cfg), ()

    def search(path):
        # Check for an already instantiated test suite.
        real_path = os.path.realpath(path)
        res = cache.get(real_path)
        if res is None:
            cache[real_path] = res = search1(path)
        return res

    # Canonicalize the path.
    item = os.path.normpath(os.path.join(os.getcwd(), item))

    # Skip files and virtual components.
    components = []
    while not os.path.isdir(item):
        parent, base = os.path.split(item)
        if parent == item:
            return (None, ())
        components.append(base)
        item = parent
    components.reverse()

    ts, relative = search(item)
    return ts, tuple(relative + tuple(components))
```
Source: https://github.com/NVIDIA/MDL-SDK/blob/aa9642b2546ad7b6236b5627385d882c2ed83c5d/src/mdl/jit/llvm/dist/utils/lit/lit/discovery.py#L26-L94
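The `search`/`search1` pair is a memoized recursive walk up the directory tree: `search` consults `cache`, keyed by real path, and `search1` does the actual probing. A stripped-down sketch of the same caching pattern, independent of lit:

```python
import os

def find_marker(path, cache, marker="lit.cfg"):
    """Walk upward until a directory containing `marker` is found, or give up at the root."""
    real = os.path.realpath(path)
    if real not in cache:  # memoize so repeated lookups hit the dict
        if os.path.exists(os.path.join(path, marker)):
            cache[real] = path
        else:
            parent = os.path.dirname(path)
            cache[real] = None if parent == path else find_marker(parent, cache, marker)
    return cache[real]
```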
**PlatformLab/RAMCloud** · `makeReport`

- sha: `b1866af19124325a6dfd8cbc267e2e3ef1f965d1`
- path: `scripts/recoverymetrics.py`
- language: python
- parameters: `(data)`
- return statement: `return report`
- docstring: Generate ASCII report

```python
def makeReport(data):
"""Generate ASCII report"""
coord = data.coordinator
masters = data.masters
backups = data.backups
servers = data.servers
recoveryTime = data.client.recoveryNs / 1e9
failureDetectionTime = data.client.failureDetectionNs / 1e9
report = Report()
# TODO(ongaro): Size distributions of filtered segments
def make_fail_fun(fun, fail):
"""Wrap fun to return fail instead of throwing ZeroDivisionError."""
def fun2(x):
try:
return fun(x)
except ZeroDivisionError:
return fail
return fun2
def on_masters(fun, fail=0):
"""Call a function on each master,
replacing ZeroDivisionErrors with 'fail'."""
fun2 = make_fail_fun(fun, fail)
return [(master.serverId, fun2(master)) for master in masters]
def on_backups(fun, fail=0):
"""Call a function on each backup,
replacing ZeroDivisionErrors with 'fail'."""
fun2 = make_fail_fun(fun, fail)
return [(backup.serverId, fun2(backup)) for backup in backups]
summary = report.add(Section('Summary'))
summary.line('Recovery time', recoveryTime, 's')
summary.line('Failure detection time', failureDetectionTime, 's')
summary.line('Recovery + detection time',
recoveryTime + failureDetectionTime, 's')
summary.line('Masters', len(masters))
summary.line('Backups', len(backups))
summary.line('Total nodes', data.totalNodes)
summary.line('Replicas',
masters[0].master.replicas)
summary.line('Objects per master',
on_masters(lambda m: m.master.liveObjectCount))
summary.line('Object size',
on_masters(lambda m: m.master.liveObjectBytes /
m.master.liveObjectCount),
'bytes')
summary.line('Total recovery segment entries',
sum([master.master.recoverySegmentEntryCount
for master in masters]))
summary.line('Total live object space',
sum([master.master.liveObjectBytes
for master in masters]) / 1024.0 / 1024.0,
'MB')
summary.line('Total recovery segment space w/ overhead',
sum([master.master.segmentReadByteCount
for master in masters]) / 1024.0 / 1024.0,
'MB')
if backups:
storageTypes = set([backup.backup.storageType for backup in backups])
if len(storageTypes) > 1:
storageTypeStr = 'mixed'
else:
storageType = storageTypes.pop()
if storageType == 1:
storageTypeStr = 'memory'
elif storageType == 2:
storageTypeStr = 'disk'
else:
storageTypeStr = 'unknown (%s)' % storageType
summary.line('Storage type', storageTypeStr)
summary.line('Log directory', data.log_dir)
coordSection = report.add(Section('Coordinator Time'))
coordSection.ms('Total',
coord.coordinator.recoveryTicks /
coord.clockFrequency,
total=recoveryTime)
coordSection.ms('Starting recovery on backups',
coord.coordinator.recoveryBuildReplicaMapTicks / coord.clockFrequency,
total=recoveryTime)
coordSection.ms('Starting recovery on masters',
coord.coordinator.recoveryStartTicks / coord.clockFrequency,
total=recoveryTime)
coordSection.ms('Tablets recovered',
coord.rpc.recovery_master_finishedTicks / coord.clockFrequency,
total=recoveryTime)
coordSection.ms('Completing recovery on backups',
coord.coordinator.recoveryCompleteTicks / coord.clockFrequency,
total=recoveryTime)
coordSection.ms('Get table config',
coord.rpc.get_table_configTicks / coord.clockFrequency,
total=recoveryTime)
coordSection.ms('Other',
((coord.coordinator.recoveryTicks -
coord.coordinator.recoveryBuildReplicaMapTicks -
coord.coordinator.recoveryStartTicks -
coord.rpc.get_table_configTicks -
coord.rpc.recovery_master_finishedTicks) /
coord.clockFrequency),
total=recoveryTime)
coordSection.ms('Receiving in transport',
coord.transport.receive.ticks / coord.clockFrequency,
total=recoveryTime)
masterSection = report.add(Section('Recovery Master Time'))
recoveryMasterTime = sum([m.master.recoveryTicks / m.clockFrequency for m in masters]) / len(masters)
def master_ticks(label, field):
"""This is a shortcut for adding to the masterSection a recorded number
of ticks that are a fraction of the total recovery.
@type label: string
@param label: the key for the line
@type field: string
@param field: the field within a master's metrics that collected ticks
"""
masterSection.ms(label,
on_masters(lambda m: eval('m.' + field) /
m.clockFrequency),
total=recoveryMasterTime)
masterSection.ms('Total (versus end-to-end recovery time)',
on_masters(lambda m: m.master.recoveryTicks /
m.clockFrequency),
total=recoveryTime)
master_ticks('Total',
'master.recoveryTicks')
master_ticks('Waiting for incoming segments',
'master.segmentReadStallTicks')
master_ticks('Inside recoverSegment',
'master.recoverSegmentTicks')
master_ticks('Final log sync time',
'master.logSyncTicks')
master_ticks('Removing tombstones',
'master.removeTombstoneTicks')
masterSection.ms('Other',
on_masters(lambda m: (m.master.recoveryTicks -
m.master.segmentReadStallTicks -
m.master.recoverSegmentTicks -
m.master.logSyncTicks -
m.master.removeTombstoneTicks) /
m.clockFrequency),
total=recoveryTime)
recoverSegmentTime = sum([m.master.recoverSegmentTicks / m.clockFrequency for m in masters]) / len(masters)
recoverSegmentSection = report.add(Section('Recovery Master recoverSegment Time'))
def recoverSegment_ticks(label, field):
recoverSegmentSection.ms(label,
on_masters(lambda m: eval('m.' + field) /
m.clockFrequency),
total=recoverSegmentTime)
recoverSegmentSection.ms('Total (versus end-to-end recovery time)',
on_masters(lambda m: m.master.recoverSegmentTicks /
m.clockFrequency),
total=recoveryTime)
recoverSegment_ticks('Total',
'master.recoverSegmentTicks')
recoverSegment_ticks('Managing replication',
'master.backupInRecoverTicks')
recoverSegment_ticks('Verify checksum',
'master.verifyChecksumTicks')
recoverSegment_ticks('Segment append',
'master.segmentAppendTicks')
# No longer measured: could be useful in the future. Make sure to add it to Other if used again.
# recoverSegment_ticks('Segment append copy',
# 'master.segmentAppendCopyTicks')
recoverSegmentSection.ms('Other',
on_masters(lambda m: (m.master.recoverSegmentTicks -
m.master.backupInRecoverTicks -
m.master.verifyChecksumTicks -
m.master.segmentAppendTicks) /
m.clockFrequency),
total=recoverSegmentTime)
replicaManagerTime = sum([m.master.backupInRecoverTicks / m.clockFrequency for m in masters]) / len(masters)
replicaManagerSection = report.add(Section('Recovery Master ReplicaManager Time during recoverSegment'))
def replicaManager_ticks(label, field):
replicaManagerSection.ms(label,
on_masters(lambda m: eval('m.' + field) /
m.clockFrequency),
total=replicaManagerTime)
replicaManagerSection.ms('Total (versus end-to-end recovery time)',
on_masters(lambda m: m.master.backupInRecoverTicks /
m.clockFrequency),
total=recoveryTime)
replicaManager_ticks('Total',
'master.backupInRecoverTicks')
replicaManagerSection.ms('Posting write RPCs for TX to transport',
on_masters(lambda m: (m.master.recoverSegmentPostingWriteRpcTicks) /
m.clockFrequency),
total=replicaManagerTime)
replicaManagerSection.ms('Other',
on_masters(lambda m: (m.master.backupInRecoverTicks -
m.master.recoverSegmentPostingWriteRpcTicks) /
m.clockFrequency),
total=replicaManagerTime)
masterStatsSection = report.add(Section('Recovery Master Stats'))
def masterStats_ticks(label, field):
masterStatsSection.ms(label,
on_masters(lambda m: eval('m.' + field) /
m.clockFrequency),
total=recoveryTime)
masterStatsSection.line('Final log sync amount',
on_masters(lambda m: (m.master.logSyncBytes / 2**20)),
unit='MB',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Total replication amount',
on_masters(lambda m: (m.master.replicationBytes / 2**20)),
unit='MB',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Total replication during replay',
on_masters(lambda m: ((m.master.replicationBytes - m.master.logSyncBytes) / 2**20)),
unit='MB',
summaryFns=[AVG, MIN, SUM])
masterStats_ticks('Opening sessions',
'transport.sessionOpenTicks')
masterStats_ticks('Receiving in transport',
'transport.receive.ticks')
masterStats_ticks('Transmitting in transport',
'transport.transmit.ticks')
masterStats_ticks('Client RPCs Active',
'transport.clientRpcsActiveTicks')
masterStatsSection.ms('Average GRD completion time',
on_masters(lambda m: (m.master.segmentReadTicks /
m.master.segmentReadCount /
m.clockFrequency)))
# There used to be a bunch of code here for analyzing the variance in
# session open times. We don't open sessions during recovery anymore, so
# I've deleted this code. Look in the git repo for mid-2011 if you want it
# back. -Diego
masterStatsSection.line('Log replication rate',
on_masters(lambda m: (m.master.replicationBytes / m.master.replicas / 2**20 /
(m.master.replicationTicks / m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Log replication rate during replay',
on_masters(lambda m: ((m.master.replicationBytes - m.master.logSyncBytes)
/ m.master.replicas / 2**20 /
((m.master.replicationTicks - m.master.logSyncTicks)/ m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Log replication rate during log sync',
on_masters(lambda m: (m.master.logSyncBytes / m.master.replicas / 2**20 /
(m.master.logSyncTicks / m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStats_ticks('Replication',
'master.replicationTicks')
masterStatsSection.ms('TX active',
on_masters(lambda m: (m.transport.infiniband.transmitActiveTicks /
m.clockFrequency)),
total=recoveryTime)
replicationTime = sum([m.master.replicationTicks / m.clockFrequency for m in masters]) / float(len(masters))
logSyncTime = sum([m.master.logSyncTicks / m.clockFrequency for m in masters]) / float(len(masters))
replayTime = sum([(m.master.replicationTicks - m.master.logSyncTicks)/ m.clockFrequency for m in masters]) / float(len(masters))
masterStatsSection.ms('TX active during replication',
on_masters(lambda m: (m.master.replicationTransmitActiveTicks /
m.clockFrequency)),
total=replicationTime)
masterStatsSection.ms('TX active during replay',
on_masters(lambda m: ((m.master.replicationTransmitActiveTicks - m.master.logSyncTransmitActiveTicks) /
m.clockFrequency)),
total=replayTime)
masterStatsSection.ms('TX active during log sync',
on_masters(lambda m: (m.master.logSyncTransmitActiveTicks /
m.clockFrequency)),
total=logSyncTime)
masterStatsSection.line('TX active rate during replication',
on_masters(lambda m: m.master.replicationBytes / 2**20 / (m.master.replicationTransmitActiveTicks /
m.clockFrequency)),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('TX active rate during replay',
on_masters(lambda m: (m.master.replicationBytes - m.master.logSyncBytes) / 2**20 / ((m.master.replicationTransmitActiveTicks - m.master.logSyncTransmitActiveTicks) /
m.clockFrequency)),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('TX active rate during log sync',
on_masters(lambda m: m.master.logSyncBytes / 2**20 / (m.master.logSyncTransmitActiveTicks /
m.clockFrequency)),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStats_ticks('Copying for TX during replication',
'master.replicationTransmitCopyTicks')
masterStatsSection.ms('Copying for TX during replay',
on_masters(lambda m: (m.master.replicationTransmitCopyTicks -
m.master.logSyncTransmitCopyTicks) / m.clockFrequency),
total=recoveryTime)
masterStats_ticks('Copying for TX during log sync',
'master.logSyncTransmitCopyTicks')
masterStatsSection.line('Copying for tx during replication rate',
on_masters(lambda m: (m.master.replicationBytes / m.master.replicas / 2**20 /
(m.master.replicationTransmitCopyTicks / m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Copying for TX during replay rate',
on_masters(lambda m: ((m.master.replicationBytes - m.master.logSyncBytes) / m.master.replicas / 2**20 /
((m.master.replicationTransmitCopyTicks - m.master.logSyncTransmitCopyTicks) / m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Copying for TX during log sync rate',
on_masters(lambda m: (m.master.logSyncBytes / m.master.replicas / 2**20 /
(m.master.logSyncTransmitCopyTicks / m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Max active replication tasks',
on_masters(lambda m: m.master.replicationTasks),
unit='tasks',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Memory read bandwidth used during replay',
on_masters(lambda m: (m.master.replayMemoryReadBytes / 2**20 /
((m.master.recoveryTicks - m.master.logSyncTicks) / m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
masterStatsSection.line('Memory write bandwidth used during replay',
on_masters(lambda m: (m.master.replayMemoryWrittenBytes / 2**20 /
((m.master.recoveryTicks - m.master.logSyncTicks) / m.clockFrequency))),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
backupSection = report.add(Section('Backup Time'))
def backup_ticks(label, field):
"""This is a shortcut for adding to the backupSection a recorded number
of ticks that are a fraction of the total recovery.
@type label: string
@param label: the key for the line
@type field: string
@param field: the field within a backup's metrics that collected ticks
"""
backupSection.ms(label,
on_backups(lambda b: eval('b.' + field) /
b.clockFrequency),
total=recoveryTime)
backup_ticks('RPC service time',
'backup.serviceTicks')
backup_ticks('startReadingData RPC',
'rpc.backup_startreadingdataTicks')
backup_ticks('write RPC',
'rpc.backup_writeTicks')
backup_ticks('Write copy',
'backup.writeCopyTicks')
backupSection.ms('Other write RPC',
on_backups(lambda b: (b.rpc.backup_writeTicks -
b.backup.writeCopyTicks) /
b.clockFrequency),
total=recoveryTime)
backup_ticks('getRecoveryData RPC',
'rpc.backup_getrecoverydataTicks')
backupSection.ms('Other',
on_backups(lambda b: (b.backup.serviceTicks -
b.rpc.backup_startreadingdataTicks -
b.rpc.backup_writeTicks -
b.rpc.backup_getrecoverydataTicks) /
b.clockFrequency),
total=recoveryTime)
backup_ticks('Transmitting in transport',
'transport.transmit.ticks')
backup_ticks('Filtering segments',
'backup.filterTicks')
backup_ticks('Reading+filtering replicas',
'backup.readingDataTicks')
backup_ticks('Reading replicas from disk',
'backup.storageReadTicks')
backupSection.line('getRecoveryData completions',
on_backups(lambda b: b.backup.readCompletionCount))
backupSection.line('getRecoveryData retry fraction',
on_backups(lambda b: (b.rpc.backup_getrecoverydataCount -
b.backup.readCompletionCount) /
b.rpc.backup_getrecoverydataCount))
efficiencySection = report.add(Section('Efficiency'))
efficiencySection.line('recoverSegment CPU',
(sum([m.master.recoverSegmentTicks / m.clockFrequency
for m in masters]) * 1000 /
sum([m.master.segmentReadCount
for m in masters])),
unit='ms avg')
efficiencySection.line('Writing a segment',
(sum([b.rpc.backup_writeTicks / b.clockFrequency
for b in backups]) * 1000 /
# Divide count by 2 since each segment does two writes:
# one to open the segment and one to write the data.
sum([b.rpc.backup_writeCount / 2
for b in backups])),
unit='ms avg')
#efficiencySection.line('Filtering a segment',
# sum([b.backup.filterTicks / b.clockFrequency * 1000
# for b in backups]) /
# sum([b.backup.storageReadCount
# for b in backups]),
# unit='ms avg')
efficiencySection.line('Memory bandwidth (backup copies)',
on_backups(lambda b: (
(b.backup.writeCopyBytes / 2**30) /
(b.backup.writeCopyTicks / b.clockFrequency))),
unit='GB/s',
summaryFns=[AVG, MIN])
networkSection = report.add(Section('Network Utilization'))
networkSection.line('Aggregate',
(sum([host.transport.transmit.byteCount
for host in [coord] + masters + backups]) *
8 / 2**30 / recoveryTime),
unit='Gb/s',
summaryFns=[AVG, FRAC(data.totalNodes*25)])
networkSection.line('Master in',
on_masters(lambda m: (m.transport.receive.byteCount * 8 / 2**30) /
recoveryTime),
unit='Gb/s',
summaryFns=[AVG, MIN, SUM])
networkSection.line('Master out',
on_masters(lambda m: (m.transport.transmit.byteCount * 8 / 2**30) /
recoveryTime),
unit='Gb/s',
summaryFns=[AVG, MIN, SUM])
networkSection.line('Master out during replication',
on_masters(lambda m: (m.master.replicationBytes * 8 / 2**30) /
(m.master.replicationTicks / m.clockFrequency)),
unit='Gb/s',
summaryFns=[AVG, MIN, SUM])
networkSection.line('Master out during log sync',
on_masters(lambda m: (m.master.logSyncBytes * 8 / 2**30) /
(m.master.logSyncTicks / m.clockFrequency)),
unit='Gb/s',
summaryFns=[AVG, MIN, SUM])
networkSection.line('Backup in',
on_backups(lambda b: (b.transport.receive.byteCount * 8 / 2**30) /
recoveryTime),
unit='Gb/s',
summaryFns=[AVG, MIN, SUM])
networkSection.line('Backup out',
on_backups(lambda b: (b.transport.transmit.byteCount * 8 / 2**30) /
recoveryTime),
unit='Gb/s',
summaryFns=[AVG, MIN, SUM])
diskSection = report.add(Section('Disk Utilization'))
diskSection.line('Effective bandwidth',
on_backups(lambda b: (b.backup.storageReadBytes +
b.backup.storageWriteBytes) /
2**20 / recoveryTime),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
def active_bandwidth(b):
totalBytes = b.backup.storageReadBytes + b.backup.storageWriteBytes
totalTicks = b.backup.storageReadTicks + b.backup.storageWriteTicks
return ((totalBytes / 2**20) /
(totalTicks / b.clockFrequency))
diskSection.line('Active bandwidth',
on_backups(active_bandwidth),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
diskSection.line('Active bandwidth reading',
on_backups(lambda b: (b.backup.storageReadBytes / 2**20) /
(b.backup.storageReadTicks / b.clockFrequency)),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
diskSection.line('Active bandwidth writing',
on_backups(lambda b: (b.backup.storageWriteBytes / 2**20) /
(b.backup.storageWriteTicks / b.clockFrequency)),
unit='MB/s',
summaryFns=[AVG, MIN, SUM])
diskSection.line('Disk active time',
on_backups(lambda b: 100 * (b.backup.storageReadTicks +
b.backup.storageWriteTicks) /
b.clockFrequency /
recoveryTime),
unit='%')
diskSection.line('Disk reading time',
on_backups(lambda b: 100 * b.backup.storageReadTicks /
b.clockFrequency /
recoveryTime),
unit='%')
diskSection.line('Disk writing time',
on_backups(lambda b: 100 * b.backup.storageWriteTicks /
b.clockFrequency /
recoveryTime),
unit='%')
backupSection = report.add(Section('Backup Events'))
backupSection.line('Segments read',
on_backups(lambda b: b.backup.storageReadCount))
backupSection.line('Primary segments loaded',
on_backups(lambda b: b.backup.primaryLoadCount))
backupSection.line('Secondary segments loaded',
on_backups(lambda b: b.backup.secondaryLoadCount))
slowSection = report.add(Section('Slowest Servers'))
slowest = maxTuple([
[1e03 * (master.master.replicaManagerTicks -
master.master.logSyncTicks) / master.clockFrequency,
master.server] for master in masters])
if slowest:
slowSection.line('Backup opens, writes',
slowest[0],
summaryFns=[CUSTOM(slowest[1]),
CUSTOM('{0:.1f} ms'.format(slowest[0]))])
slowest = maxTuple([
[1e03 * master.master.segmentReadStallTicks /
master.clockFrequency, master.server]
for master in masters])
if slowest:
slowSection.line('Stalled reading segs from backups',
slowest[0],
summaryFns=[CUSTOM(slowest[1]),
CUSTOM('{0:.1f} ms'.format(slowest[0]))])
slowest = minTuple([
[(backup.backup.storageReadBytes / 2**20) /
(backup.backup.storageReadTicks / backup.clockFrequency),
backup.server] for backup in backups
if (backup.backup.storageReadTicks > 0)])
if slowest:
slowSection.line('Reading from disk',
slowest[0],
summaryFns=[CUSTOM(slowest[1]),
CUSTOM('{0:.1f} MB/s'.format(slowest[0]))])
slowest = minTuple([
[(backup.backup.storageWriteBytes / 2**20) /
(backup.backup.storageWriteTicks / backup.clockFrequency),
backup.server] for backup in backups
if backup.backup.storageWriteTicks])
if slowest:
slowSection.line('Writing to disk',
slowest[0],
summaryFns=[CUSTOM(slowest[1]),
CUSTOM('{0:.1f} MB/s'.format(slowest[0]))])
tempSection = report.add(Section('Temporary Metrics'))
for i in range(10):
field = 'ticks{0:}'.format(i)
points = [(host.serverId, host.temp[field] / host.clockFrequency)
for host in servers]
if any(values(points)):
tempSection.ms('temp.%s' % field,
points,
total=recoveryTime)
for i in range(10):
field = 'count{0:}'.format(i)
points = [(host.serverId, host.temp[field])
for host in servers]
if any(values(points)):
tempSection.line('temp.%s' % field,
points)
return report
```
",",
"on_backups",
"(",
"lambda",
"b",
":",
"(",
"b",
".",
"rpc",
".",
"backup_getrecoverydataCount",
"-",
"b",
".",
"backup",
".",
"readCompletionCount",
")",
"/",
"b",
".",
"rpc",
".",
"backup_getrecoverydataCount",
")",
")",
"efficiencySection",
"=",
"report",
".",
"add",
"(",
"Section",
"(",
"'Efficiency'",
")",
")",
"efficiencySection",
".",
"line",
"(",
"'recoverSegment CPU'",
",",
"(",
"sum",
"(",
"[",
"m",
".",
"master",
".",
"recoverSegmentTicks",
"/",
"m",
".",
"clockFrequency",
"for",
"m",
"in",
"masters",
"]",
")",
"*",
"1000",
"/",
"sum",
"(",
"[",
"m",
".",
"master",
".",
"segmentReadCount",
"for",
"m",
"in",
"masters",
"]",
")",
")",
",",
"unit",
"=",
"'ms avg'",
")",
"efficiencySection",
".",
"line",
"(",
"'Writing a segment'",
",",
"(",
"sum",
"(",
"[",
"b",
".",
"rpc",
".",
"backup_writeTicks",
"/",
"b",
".",
"clockFrequency",
"for",
"b",
"in",
"backups",
"]",
")",
"*",
"1000",
"/",
"# Divide count by 2 since each segment does two writes:",
"# one to open the segment and one to write the data.",
"sum",
"(",
"[",
"b",
".",
"rpc",
".",
"backup_writeCount",
"/",
"2",
"for",
"b",
"in",
"backups",
"]",
")",
")",
",",
"unit",
"=",
"'ms avg'",
")",
"#efficiencySection.line('Filtering a segment',",
"# sum([b.backup.filterTicks / b.clockFrequency * 1000",
"# for b in backups]) /",
"# sum([b.backup.storageReadCount",
"# for b in backups]),",
"# unit='ms avg')",
"efficiencySection",
".",
"line",
"(",
"'Memory bandwidth (backup copies)'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"(",
"(",
"b",
".",
"backup",
".",
"writeCopyBytes",
"/",
"2",
"**",
"30",
")",
"/",
"(",
"b",
".",
"backup",
".",
"writeCopyTicks",
"/",
"b",
".",
"clockFrequency",
")",
")",
")",
",",
"unit",
"=",
"'GB/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
"]",
")",
"networkSection",
"=",
"report",
".",
"add",
"(",
"Section",
"(",
"'Network Utilization'",
")",
")",
"networkSection",
".",
"line",
"(",
"'Aggregate'",
",",
"(",
"sum",
"(",
"[",
"host",
".",
"transport",
".",
"transmit",
".",
"byteCount",
"for",
"host",
"in",
"[",
"coord",
"]",
"+",
"masters",
"+",
"backups",
"]",
")",
"*",
"8",
"/",
"2",
"**",
"30",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'Gb/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"FRAC",
"(",
"data",
".",
"totalNodes",
"*",
"25",
")",
"]",
")",
"networkSection",
".",
"line",
"(",
"'Master in'",
",",
"on_masters",
"(",
"lambda",
"m",
":",
"(",
"m",
".",
"transport",
".",
"receive",
".",
"byteCount",
"*",
"8",
"/",
"2",
"**",
"30",
")",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'Gb/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"networkSection",
".",
"line",
"(",
"'Master out'",
",",
"on_masters",
"(",
"lambda",
"m",
":",
"(",
"m",
".",
"transport",
".",
"transmit",
".",
"byteCount",
"*",
"8",
"/",
"2",
"**",
"30",
")",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'Gb/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"networkSection",
".",
"line",
"(",
"'Master out during replication'",
",",
"on_masters",
"(",
"lambda",
"m",
":",
"(",
"m",
".",
"master",
".",
"replicationBytes",
"*",
"8",
"/",
"2",
"**",
"30",
")",
"/",
"(",
"m",
".",
"master",
".",
"replicationTicks",
"/",
"m",
".",
"clockFrequency",
")",
")",
",",
"unit",
"=",
"'Gb/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"networkSection",
".",
"line",
"(",
"'Master out during log sync'",
",",
"on_masters",
"(",
"lambda",
"m",
":",
"(",
"m",
".",
"master",
".",
"logSyncBytes",
"*",
"8",
"/",
"2",
"**",
"30",
")",
"/",
"(",
"m",
".",
"master",
".",
"logSyncTicks",
"/",
"m",
".",
"clockFrequency",
")",
")",
",",
"unit",
"=",
"'Gb/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"networkSection",
".",
"line",
"(",
"'Backup in'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"(",
"b",
".",
"transport",
".",
"receive",
".",
"byteCount",
"*",
"8",
"/",
"2",
"**",
"30",
")",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'Gb/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"networkSection",
".",
"line",
"(",
"'Backup out'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"(",
"b",
".",
"transport",
".",
"transmit",
".",
"byteCount",
"*",
"8",
"/",
"2",
"**",
"30",
")",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'Gb/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"diskSection",
"=",
"report",
".",
"add",
"(",
"Section",
"(",
"'Disk Utilization'",
")",
")",
"diskSection",
".",
"line",
"(",
"'Effective bandwidth'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"(",
"b",
".",
"backup",
".",
"storageReadBytes",
"+",
"b",
".",
"backup",
".",
"storageWriteBytes",
")",
"/",
"2",
"**",
"20",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'MB/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"def",
"active_bandwidth",
"(",
"b",
")",
":",
"totalBytes",
"=",
"b",
".",
"backup",
".",
"storageReadBytes",
"+",
"b",
".",
"backup",
".",
"storageWriteBytes",
"totalTicks",
"=",
"b",
".",
"backup",
".",
"storageReadTicks",
"+",
"b",
".",
"backup",
".",
"storageWriteTicks",
"return",
"(",
"(",
"totalBytes",
"/",
"2",
"**",
"20",
")",
"/",
"(",
"totalTicks",
"/",
"b",
".",
"clockFrequency",
")",
")",
"diskSection",
".",
"line",
"(",
"'Active bandwidth'",
",",
"on_backups",
"(",
"active_bandwidth",
")",
",",
"unit",
"=",
"'MB/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"diskSection",
".",
"line",
"(",
"'Active bandwidth reading'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"(",
"b",
".",
"backup",
".",
"storageReadBytes",
"/",
"2",
"**",
"20",
")",
"/",
"(",
"b",
".",
"backup",
".",
"storageReadTicks",
"/",
"b",
".",
"clockFrequency",
")",
")",
",",
"unit",
"=",
"'MB/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"diskSection",
".",
"line",
"(",
"'Active bandwidth writing'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"(",
"b",
".",
"backup",
".",
"storageWriteBytes",
"/",
"2",
"**",
"20",
")",
"/",
"(",
"b",
".",
"backup",
".",
"storageWriteTicks",
"/",
"b",
".",
"clockFrequency",
")",
")",
",",
"unit",
"=",
"'MB/s'",
",",
"summaryFns",
"=",
"[",
"AVG",
",",
"MIN",
",",
"SUM",
"]",
")",
"diskSection",
".",
"line",
"(",
"'Disk active time'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"100",
"*",
"(",
"b",
".",
"backup",
".",
"storageReadTicks",
"+",
"b",
".",
"backup",
".",
"storageWriteTicks",
")",
"/",
"b",
".",
"clockFrequency",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'%'",
")",
"diskSection",
".",
"line",
"(",
"'Disk reading time'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"100",
"*",
"b",
".",
"backup",
".",
"storageReadTicks",
"/",
"b",
".",
"clockFrequency",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'%'",
")",
"diskSection",
".",
"line",
"(",
"'Disk writing time'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"100",
"*",
"b",
".",
"backup",
".",
"storageWriteTicks",
"/",
"b",
".",
"clockFrequency",
"/",
"recoveryTime",
")",
",",
"unit",
"=",
"'%'",
")",
"backupSection",
"=",
"report",
".",
"add",
"(",
"Section",
"(",
"'Backup Events'",
")",
")",
"backupSection",
".",
"line",
"(",
"'Segments read'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"b",
".",
"backup",
".",
"storageReadCount",
")",
")",
"backupSection",
".",
"line",
"(",
"'Primary segments loaded'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"b",
".",
"backup",
".",
"primaryLoadCount",
")",
")",
"backupSection",
".",
"line",
"(",
"'Secondary segments loaded'",
",",
"on_backups",
"(",
"lambda",
"b",
":",
"b",
".",
"backup",
".",
"secondaryLoadCount",
")",
")",
"slowSection",
"=",
"report",
".",
"add",
"(",
"Section",
"(",
"'Slowest Servers'",
")",
")",
"slowest",
"=",
"maxTuple",
"(",
"[",
"[",
"1e03",
"*",
"(",
"master",
".",
"master",
".",
"replicaManagerTicks",
"-",
"master",
".",
"master",
".",
"logSyncTicks",
")",
"/",
"master",
".",
"clockFrequency",
",",
"master",
".",
"server",
"]",
"for",
"master",
"in",
"masters",
"]",
")",
"if",
"slowest",
":",
"slowSection",
".",
"line",
"(",
"'Backup opens, writes'",
",",
"slowest",
"[",
"0",
"]",
",",
"summaryFns",
"=",
"[",
"CUSTOM",
"(",
"slowest",
"[",
"1",
"]",
")",
",",
"CUSTOM",
"(",
"'{0:.1f} ms'",
".",
"format",
"(",
"slowest",
"[",
"0",
"]",
")",
")",
"]",
")",
"slowest",
"=",
"maxTuple",
"(",
"[",
"[",
"1e03",
"*",
"master",
".",
"master",
".",
"segmentReadStallTicks",
"/",
"master",
".",
"clockFrequency",
",",
"master",
".",
"server",
"]",
"for",
"master",
"in",
"masters",
"]",
")",
"if",
"slowest",
":",
"slowSection",
".",
"line",
"(",
"'Stalled reading segs from backups'",
",",
"slowest",
"[",
"0",
"]",
",",
"summaryFns",
"=",
"[",
"CUSTOM",
"(",
"slowest",
"[",
"1",
"]",
")",
",",
"CUSTOM",
"(",
"'{0:.1f} ms'",
".",
"format",
"(",
"slowest",
"[",
"0",
"]",
")",
")",
"]",
")",
"slowest",
"=",
"minTuple",
"(",
"[",
"[",
"(",
"backup",
".",
"backup",
".",
"storageReadBytes",
"/",
"2",
"**",
"20",
")",
"/",
"(",
"backup",
".",
"backup",
".",
"storageReadTicks",
"/",
"backup",
".",
"clockFrequency",
")",
",",
"backup",
".",
"server",
"]",
"for",
"backup",
"in",
"backups",
"if",
"(",
"backup",
".",
"backup",
".",
"storageReadTicks",
">",
"0",
")",
"]",
")",
"if",
"slowest",
":",
"slowSection",
".",
"line",
"(",
"'Reading from disk'",
",",
"slowest",
"[",
"0",
"]",
",",
"summaryFns",
"=",
"[",
"CUSTOM",
"(",
"slowest",
"[",
"1",
"]",
")",
",",
"CUSTOM",
"(",
"'{0:.1f} MB/s'",
".",
"format",
"(",
"slowest",
"[",
"0",
"]",
")",
")",
"]",
")",
"slowest",
"=",
"minTuple",
"(",
"[",
"[",
"(",
"backup",
".",
"backup",
".",
"storageWriteBytes",
"/",
"2",
"**",
"20",
")",
"/",
"(",
"backup",
".",
"backup",
".",
"storageWriteTicks",
"/",
"backup",
".",
"clockFrequency",
")",
",",
"backup",
".",
"server",
"]",
"for",
"backup",
"in",
"backups",
"if",
"backup",
".",
"backup",
".",
"storageWriteTicks",
"]",
")",
"if",
"slowest",
":",
"slowSection",
".",
"line",
"(",
"'Writing to disk'",
",",
"slowest",
"[",
"0",
"]",
",",
"summaryFns",
"=",
"[",
"CUSTOM",
"(",
"slowest",
"[",
"1",
"]",
")",
",",
"CUSTOM",
"(",
"'{0:.1f} MB/s'",
".",
"format",
"(",
"slowest",
"[",
"0",
"]",
")",
")",
"]",
")",
"tempSection",
"=",
"report",
".",
"add",
"(",
"Section",
"(",
"'Temporary Metrics'",
")",
")",
"for",
"i",
"in",
"range",
"(",
"10",
")",
":",
"field",
"=",
"'ticks{0:}'",
".",
"format",
"(",
"i",
")",
"points",
"=",
"[",
"(",
"host",
".",
"serverId",
",",
"host",
".",
"temp",
"[",
"field",
"]",
"/",
"host",
".",
"clockFrequency",
")",
"for",
"host",
"in",
"servers",
"]",
"if",
"any",
"(",
"values",
"(",
"points",
")",
")",
":",
"tempSection",
".",
"ms",
"(",
"'temp.%s'",
"%",
"field",
",",
"points",
",",
"total",
"=",
"recoveryTime",
")",
"for",
"i",
"in",
"range",
"(",
"10",
")",
":",
"field",
"=",
"'count{0:}'",
".",
"format",
"(",
"i",
")",
"points",
"=",
"[",
"(",
"host",
".",
"serverId",
",",
"host",
".",
"temp",
"[",
"field",
"]",
")",
"for",
"host",
"in",
"servers",
"]",
"if",
"any",
"(",
"values",
"(",
"points",
")",
")",
":",
"tempSection",
".",
"line",
"(",
"'temp.%s'",
"%",
"field",
",",
"points",
")",
"return",
"report"
] |
https://github.com/PlatformLab/RAMCloud/blob/b1866af19124325a6dfd8cbc267e2e3ef1f965d1/scripts/recoverymetrics.py#L406-L995
|
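A minimal, self-contained sketch of the summary-function pattern used throughout the report above (AVG/MIN/SUM applied to per-server (name, value) points); the Section, on_masters, and on_backups helpers are internal to recoverymetrics.py, and only their call shape is assumed here.

def AVG(points):
    # average of the values in a list of (server, value) points
    values = [v for _, v in points]
    return sum(values) / len(values)

def MIN(points):
    # smallest value in a list of (server, value) points
    return min(v for _, v in points)

def SUM(points):
    # total of the values in a list of (server, value) points
    return sum(v for _, v in points)

points = [('master1', 12.0), ('master2', 9.5), ('master3', 14.5)]
print(AVG(points), MIN(points), SUM(points))  # 12.0 9.5 36.0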
|
goldeneye-source/ges-code
|
2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d
|
thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py
|
python
|
RepeatedCompositeFieldContainer.__eq__
|
(self, other)
|
return self._values == other._values
|
Compares the current instance with another one.
|
Compares the current instance with another one.
|
[
"Compares",
"the",
"current",
"instance",
"with",
"another",
"one",
"."
] |
def __eq__(self, other):
"""Compares the current instance with another one."""
if self is other:
return True
if not isinstance(other, self.__class__):
raise TypeError('Can only compare repeated composite fields against '
'other repeated composite fields.')
return self._values == other._values
|
[
"def",
"__eq__",
"(",
"self",
",",
"other",
")",
":",
"if",
"self",
"is",
"other",
":",
"return",
"True",
"if",
"not",
"isinstance",
"(",
"other",
",",
"self",
".",
"__class__",
")",
":",
"raise",
"TypeError",
"(",
"'Can only compare repeated composite fields against '",
"'other repeated composite fields.'",
")",
"return",
"self",
".",
"_values",
"==",
"other",
".",
"_values"
] |
https://github.com/goldeneye-source/ges-code/blob/2630cd8ef3d015af53c72ec2e19fc1f7e7fe8d9d/thirdparty/protobuf-2.3.0/python/google/protobuf/internal/containers.py#L237-L244
|
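A hypothetical stand-in showing the same comparison contract (identity short-circuit, strict type check, then element-wise comparison); this toy class is not the real protobuf container.

class StubContainer:
    # toy container mimicking RepeatedCompositeFieldContainer.__eq__
    def __init__(self, values):
        self._values = list(values)

    def __eq__(self, other):
        if self is other:
            return True
        if not isinstance(other, self.__class__):
            raise TypeError('Can only compare repeated composite fields against '
                            'other repeated composite fields.')
        return self._values == other._values

assert StubContainer([1, 2]) == StubContainer([1, 2])
assert not (StubContainer([1]) == StubContainer([2]))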
|
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/x86/toolchain/lib/python2.7/stringold.py
|
python
|
join
|
(words, sep = ' ')
|
return sep.join(words)
|
join(list [,sep]) -> string
Return a string composed of the words in list, with
intervening occurrences of sep. The default separator is a
single space.
(joinfields and join are synonymous)
|
join(list [,sep]) -> string
|
[
"join",
"(",
"list",
"[",
"sep",
"]",
")",
"-",
">",
"string"
] |
def join(words, sep = ' '):
"""join(list [,sep]) -> string
Return a string composed of the words in list, with
intervening occurrences of sep. The default separator is a
single space.
(joinfields and join are synonymous)
"""
return sep.join(words)
|
[
"def",
"join",
"(",
"words",
",",
"sep",
"=",
"' '",
")",
":",
"return",
"sep",
".",
"join",
"(",
"words",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/stringold.py#L119-L129
|
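Usage matches str.join with the arguments swapped:

print(join(['one', 'two', 'three']))   # 'one two three'
print(join(['a', 'b', 'c'], sep='-'))  # 'a-b-c'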
|
trilinos/Trilinos
|
6168be6dd51e35e1cd681e9c4b24433e709df140
|
packages/seacas/scripts/exodus2.in.py
|
python
|
exodus.get_node_variable_values
|
(self, name, step)
|
return values
|
nvar_vals = exo.get_node_variable_values(nvar_name, time_step)
-> get list of nodal variable values for a nodal variable name
and time step
input value(s):
<string> nvar_name name of nodal variable
<int> time_step 1-based index of time step
return value(s):
if array_type == 'ctype':
<list<c_double>> nvar_vals
if array_type == 'numpy':
<np_array<double>> nvar_vals
|
nvar_vals = exo.get_node_variable_values(nvar_name, time_step)
|
[
"nvar_vals",
"=",
"exo",
".",
"get_node_variable_values",
"(",
"nvar_name",
"time_step",
")"
] |
def get_node_variable_values(self, name, step):
"""
nvar_vals = exo.get_node_variable_values(nvar_name, time_step)
-> get list of nodal variable values for a nodal variable name
and time step
input value(s):
<string> nvar_name name of nodal variable
<int> time_step 1-based index of time step
return value(s):
if array_type == 'ctype':
<list<c_double>> nvar_vals
if array_type == 'numpy':
<np_array<double>> nvar_vals
"""
names = self.get_node_variable_names()
var_id = names.index(name) + 1
ndType = ex_entity_type("EX_NODAL")
numVals = self.num_nodes()
values = self.__ex_get_var(step, ndType, var_id, 0, numVals)
if self.use_numpy:
values = ctype_to_numpy(self, values)
return values
|
[
"def",
"get_node_variable_values",
"(",
"self",
",",
"name",
",",
"step",
")",
":",
"names",
"=",
"self",
".",
"get_node_variable_names",
"(",
")",
"var_id",
"=",
"names",
".",
"index",
"(",
"name",
")",
"+",
"1",
"ndType",
"=",
"ex_entity_type",
"(",
"\"EX_NODAL\"",
")",
"numVals",
"=",
"self",
".",
"num_nodes",
"(",
")",
"values",
"=",
"self",
".",
"__ex_get_var",
"(",
"step",
",",
"ndType",
",",
"var_id",
",",
"0",
",",
"numVals",
")",
"if",
"self",
".",
"use_numpy",
":",
"values",
"=",
"ctype_to_numpy",
"(",
"self",
",",
"values",
")",
"return",
"values"
] |
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exodus2.in.py#L1003-L1029
|
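Hypothetical usage; `exo` (an open exodus database handle) and the variable name 'disp_x' are assumptions for illustration, not part of the record above.

nvar_vals = exo.get_node_variable_values('disp_x', 1)  # nodal values at time step 1
print(len(nvar_vals))  # one entry per node in the mesh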
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/Editra/src/extern/pkg_resources.py
|
python
|
safe_extra
|
(extra)
|
return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
|
Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
and the result is always lowercased.
|
Convert an arbitrary string to a standard 'extra' name
|
[
"Convert",
"an",
"arbitrary",
"string",
"to",
"a",
"standard",
"extra",
"name"
] |
def safe_extra(extra):
"""Convert an arbitrary string to a standard 'extra' name
Any runs of non-alphanumeric characters are replaced with a single '_',
and the result is always lowercased.
"""
return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
|
[
"def",
"safe_extra",
"(",
"extra",
")",
":",
"return",
"re",
".",
"sub",
"(",
"'[^A-Za-z0-9.]+'",
",",
"'_'",
",",
"extra",
")",
".",
"lower",
"(",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/extern/pkg_resources.py#L1009-L1015
|
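The behaviour is easy to verify directly; runs of non-alphanumeric characters collapse to one underscore and the result is lowercased:

import re

def safe_extra(extra):
    # same one-liner as above, repeated so the example is self-contained
    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()

print(safe_extra('Foo Bar!'))  # 'foo_bar_'
print(safe_extra('c++11'))     # 'c_11'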
|
ricardoquesada/Spidermonkey
|
4a75ea2543408bd1b2c515aa95901523eeef7858
|
python/mozbuild/mozbuild/preprocessor.py
|
python
|
Expression.__get_logical_or
|
(self)
|
return rv
|
Production: and_cond ( '||' expression ) ?
|
Production: and_cond ( '||' expression ) ?
|
[
"Production",
":",
"and_cond",
"(",
"||",
"expression",
")",
"?"
] |
def __get_logical_or(self):
"""
Production: and_cond ( '||' expression ) ?
"""
if not len(self.content):
return None
rv = Expression.__AST("logical_op")
# test
rv.append(self.__get_logical_and())
self.__ignore_whitespace()
if self.content[:2] != '||':
# no logical op needed, short cut to our prime element
return rv[0]
# append operator
rv.append(Expression.__ASTLeaf('op', self.content[:2]))
self.__strip(2)
self.__ignore_whitespace()
rv.append(self.__get_logical_or())
self.__ignore_whitespace()
return rv
|
[
"def",
"__get_logical_or",
"(",
"self",
")",
":",
"if",
"not",
"len",
"(",
"self",
".",
"content",
")",
":",
"return",
"None",
"rv",
"=",
"Expression",
".",
"__AST",
"(",
"\"logical_op\"",
")",
"# test",
"rv",
".",
"append",
"(",
"self",
".",
"__get_logical_and",
"(",
")",
")",
"self",
".",
"__ignore_whitespace",
"(",
")",
"if",
"self",
".",
"content",
"[",
":",
"2",
"]",
"!=",
"'||'",
":",
"# no logical op needed, short cut to our prime element",
"return",
"rv",
"[",
"0",
"]",
"# append operator",
"rv",
".",
"append",
"(",
"Expression",
".",
"__ASTLeaf",
"(",
"'op'",
",",
"self",
".",
"content",
"[",
":",
"2",
"]",
")",
")",
"self",
".",
"__strip",
"(",
"2",
")",
"self",
".",
"__ignore_whitespace",
"(",
")",
"rv",
".",
"append",
"(",
"self",
".",
"__get_logical_or",
"(",
")",
")",
"self",
".",
"__ignore_whitespace",
"(",
")",
"return",
"rv"
] |
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/python/mozbuild/mozbuild/preprocessor.py#L62-L81
|
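A stripped-down, standalone sketch of the same production shape, term ( '||' expr )?, including the shortcut when no operator follows; the names and the toy term parser are invented for illustration and are far simpler than the real Expression class.

def parse_term(text):
    # consume one identifier-like operand
    text = text.lstrip()
    i = 0
    while i < len(text) and (text[i].isalnum() or text[i] == '_'):
        i += 1
    return text[:i], text[i:]

def parse_or(text):
    term, rest = parse_term(text)
    rest = rest.lstrip()
    if rest[:2] != '||':
        return term, rest  # no logical op needed, shortcut to the operand itself
    right, rest = parse_or(rest[2:])
    return ('||', term, right), rest

print(parse_or('a || b || c')[0])  # ('||', 'a', ('||', 'b', 'c'))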
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_windows.py
|
python
|
PrintDialog.GetPrintDC
|
(*args, **kwargs)
|
return _windows_.PrintDialog_GetPrintDC(*args, **kwargs)
|
GetPrintDC(self) -> DC
|
GetPrintDC(self) -> DC
|
[
"GetPrintDC",
"(",
"self",
")",
"-",
">",
"DC"
] |
def GetPrintDC(*args, **kwargs):
"""GetPrintDC(self) -> DC"""
return _windows_.PrintDialog_GetPrintDC(*args, **kwargs)
|
[
"def",
"GetPrintDC",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_windows_",
".",
"PrintDialog_GetPrintDC",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_windows.py#L5193-L5195
|
|
hfinkel/llvm-project-cxxjit
|
91084ef018240bbb8e24235ff5cd8c355a9c1a1e
|
lldb/utils/lui/lldbutil.py
|
python
|
get_function_names
|
(thread)
|
return map(GetFuncName, range(thread.GetNumFrames()))
|
Returns a sequence of function names from the stack frames of this thread.
|
Returns a sequence of function names from the stack frames of this thread.
|
[
"Returns",
"a",
"sequence",
"of",
"function",
"names",
"from",
"the",
"stack",
"frames",
"of",
"this",
"thread",
"."
] |
def get_function_names(thread):
"""
Returns a sequence of function names from the stack frames of this thread.
"""
def GetFuncName(i):
return thread.GetFrameAtIndex(i).GetFunctionName()
return map(GetFuncName, range(thread.GetNumFrames()))
|
[
"def",
"get_function_names",
"(",
"thread",
")",
":",
"def",
"GetFuncName",
"(",
"i",
")",
":",
"return",
"thread",
".",
"GetFrameAtIndex",
"(",
"i",
")",
".",
"GetFunctionName",
"(",
")",
"return",
"map",
"(",
"GetFuncName",
",",
"range",
"(",
"thread",
".",
"GetNumFrames",
"(",
")",
")",
")"
] |
https://github.com/hfinkel/llvm-project-cxxjit/blob/91084ef018240bbb8e24235ff5cd8c355a9c1a1e/lldb/utils/lui/lldbutil.py#L702-L709
|
|
freeorion/freeorion
|
c266a40eccd3a99a17de8fe57c36ef6ba3771665
|
default/python/universe_generation/starsystems.py
|
python
|
pick_star_type
|
(galaxy_age)
|
return star_type
|
Picks and returns a star type based on universe tables distribution modifiers.
|
Picks and returns a star type based on universe tables distribution modifiers.
|
[
"Picks",
"and",
"returns",
"a",
"star",
"type",
"based",
"on",
"universe",
"tables",
"distribution",
"modifiers",
"."
] |
def pick_star_type(galaxy_age):
"""
Picks and returns a star type based on universe tables distribution modifiers.
"""
# try to pick a star type by making a series of "rolls" (1-100)
# for each star type, and take the highest modified roll
star_type = fo.starType.unknown
try:
max_roll = 0
for candidate in star_types:
roll = (
random.randint(1, 100)
+ universe_tables.UNIVERSE_AGE_MOD_TO_STAR_TYPE_DIST[galaxy_age][candidate]
+ universe_tables.BASE_STAR_TYPE_DIST[candidate]
)
if max_roll < roll:
max_roll = roll
star_type = candidate
except: # noqa: E722
# in case of an error play safe and set star type to invalid
star_type = fo.starType.unknown
util.report_error("Python pick_star_type: Pick star type failed\n" + sys.exc_info()[1])
# if we got an invalid star type (for whatever reason),
# just select one randomly from the global tuple
if star_type == fo.starType.unknown:
star_type = random.choice(star_types)
return star_type
|
[
"def",
"pick_star_type",
"(",
"galaxy_age",
")",
":",
"# try to pick a star type by making a series of \"rolls\" (1-100)",
"# for each star type, and take the highest modified roll",
"star_type",
"=",
"fo",
".",
"starType",
".",
"unknown",
"try",
":",
"max_roll",
"=",
"0",
"for",
"candidate",
"in",
"star_types",
":",
"roll",
"=",
"(",
"random",
".",
"randint",
"(",
"1",
",",
"100",
")",
"+",
"universe_tables",
".",
"UNIVERSE_AGE_MOD_TO_STAR_TYPE_DIST",
"[",
"galaxy_age",
"]",
"[",
"candidate",
"]",
"+",
"universe_tables",
".",
"BASE_STAR_TYPE_DIST",
"[",
"candidate",
"]",
")",
"if",
"max_roll",
"<",
"roll",
":",
"max_roll",
"=",
"roll",
"star_type",
"=",
"candidate",
"except",
":",
"# noqa: E722",
"# in case of an error play save and set star type to invalid",
"star_type",
"=",
"fo",
".",
"starType",
".",
"unknown",
"util",
".",
"report_error",
"(",
"\"Python pick_star_type: Pick star type failed\\n\"",
"+",
"sys",
".",
"exc_info",
"(",
")",
"[",
"1",
"]",
")",
"# if we got an invalid star type (for whatever reason),",
"# just select one randomly from the global tuple",
"if",
"star_type",
"==",
"fo",
".",
"starType",
".",
"unknown",
":",
"star_type",
"=",
"random",
".",
"choice",
"(",
"star_types",
")",
"return",
"star_type"
] |
https://github.com/freeorion/freeorion/blob/c266a40eccd3a99a17de8fe57c36ef6ba3771665/default/python/universe_generation/starsystems.py#L27-L55
|
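A standalone sketch of the same "highest modified roll" selection, with a made-up weight table standing in for the fo/universe_tables data:

import random

BASE_WEIGHT = {'blue': 10, 'yellow': 25, 'red': 15}  # invented bonuses

def pick_weighted(weights):
    # roll 1-100 for each candidate, add its bonus, keep the best roll
    best_roll, best_key = 0, None
    for key, bonus in weights.items():
        roll = random.randint(1, 100) + bonus
        if roll > best_roll:
            best_roll, best_key = roll, key
    return best_key

print(pick_weighted(BASE_WEIGHT))  # e.g. 'yellow', biased by its +25 bonus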
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
|
python
|
ExFileObject.readline
|
(self, size=-1)
|
return buf
|
Read one entire line from the file. If size is present
and non-negative, return a string with at most that
size, which may be an incomplete line.
|
Read one entire line from the file. If size is present
|
[
"Read",
"one",
"entire",
"line",
"from",
"the",
"file",
".",
"If",
"size",
"is",
"present"
] |
def readline(self, size=-1):
"""Read one entire line from the file. If size is present
and non-negative, return a string with at most that
size, which may be an incomplete line.
"""
if self.closed:
raise ValueError("I/O operation on closed file")
pos = self.buffer.find(b"\n") + 1
if pos == 0:
# no newline found.
while True:
buf = self.fileobj.read(self.blocksize)
self.buffer += buf
if not buf or b"\n" in buf:
pos = self.buffer.find(b"\n") + 1
if pos == 0:
# no newline found.
pos = len(self.buffer)
break
if size != -1:
pos = min(size, pos)
buf = self.buffer[:pos]
self.buffer = self.buffer[pos:]
self.position += len(buf)
return buf
|
[
"def",
"readline",
"(",
"self",
",",
"size",
"=",
"-",
"1",
")",
":",
"if",
"self",
".",
"closed",
":",
"raise",
"ValueError",
"(",
"\"I/O operation on closed file\"",
")",
"pos",
"=",
"self",
".",
"buffer",
".",
"find",
"(",
"b\"\\n\"",
")",
"+",
"1",
"if",
"pos",
"==",
"0",
":",
"# no newline found.",
"while",
"True",
":",
"buf",
"=",
"self",
".",
"fileobj",
".",
"read",
"(",
"self",
".",
"blocksize",
")",
"self",
".",
"buffer",
"+=",
"buf",
"if",
"not",
"buf",
"or",
"b\"\\n\"",
"in",
"buf",
":",
"pos",
"=",
"self",
".",
"buffer",
".",
"find",
"(",
"b\"\\n\"",
")",
"+",
"1",
"if",
"pos",
"==",
"0",
":",
"# no newline found.",
"pos",
"=",
"len",
"(",
"self",
".",
"buffer",
")",
"break",
"if",
"size",
"!=",
"-",
"1",
":",
"pos",
"=",
"min",
"(",
"size",
",",
"pos",
")",
"buf",
"=",
"self",
".",
"buffer",
"[",
":",
"pos",
"]",
"self",
".",
"buffer",
"=",
"self",
".",
"buffer",
"[",
"pos",
":",
"]",
"self",
".",
"position",
"+=",
"len",
"(",
"buf",
")",
"return",
"buf"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L1673-L1727
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/build/waf-1.7.13/waflib/Tools/ldc2.py
|
python
|
common_flags_ldc2
|
(conf)
|
Set the D flags required by *ldc2*
|
Set the D flags required by *ldc2*
|
[
"Set",
"the",
"D",
"flags",
"required",
"by",
"*",
"ldc2",
"*"
] |
def common_flags_ldc2(conf):
"""
Set the D flags required by *ldc2*
"""
v = conf.env
v['D_SRC_F'] = ['-c']
v['D_TGT_F'] = '-of%s'
v['D_LINKER'] = v['D']
v['DLNK_SRC_F'] = ''
v['DLNK_TGT_F'] = '-of%s'
v['DINC_ST'] = '-I%s'
v['DSHLIB_MARKER'] = v['DSTLIB_MARKER'] = ''
v['DSTLIB_ST'] = v['DSHLIB_ST'] = '-L-l%s'
v['DSTLIBPATH_ST'] = v['DLIBPATH_ST'] = '-L-L%s'
v['LINKFLAGS_dshlib'] = ['-L-shared']
v['DHEADER_ext'] = '.di'
v['DFLAGS_d_with_header'] = ['-H', '-Hf']
v['D_HDR_F'] = '%s'
v['LINKFLAGS'] = []
v['DFLAGS_dshlib'] = ['-relocation-model=pic']
|
[
"def",
"common_flags_ldc2",
"(",
"conf",
")",
":",
"v",
"=",
"conf",
".",
"env",
"v",
"[",
"'D_SRC_F'",
"]",
"=",
"[",
"'-c'",
"]",
"v",
"[",
"'D_TGT_F'",
"]",
"=",
"'-of%s'",
"v",
"[",
"'D_LINKER'",
"]",
"=",
"v",
"[",
"'D'",
"]",
"v",
"[",
"'DLNK_SRC_F'",
"]",
"=",
"''",
"v",
"[",
"'DLNK_TGT_F'",
"]",
"=",
"'-of%s'",
"v",
"[",
"'DINC_ST'",
"]",
"=",
"'-I%s'",
"v",
"[",
"'DSHLIB_MARKER'",
"]",
"=",
"v",
"[",
"'DSTLIB_MARKER'",
"]",
"=",
"''",
"v",
"[",
"'DSTLIB_ST'",
"]",
"=",
"v",
"[",
"'DSHLIB_ST'",
"]",
"=",
"'-L-l%s'",
"v",
"[",
"'DSTLIBPATH_ST'",
"]",
"=",
"v",
"[",
"'DLIBPATH_ST'",
"]",
"=",
"'-L-L%s'",
"v",
"[",
"'LINKFLAGS_dshlib'",
"]",
"=",
"[",
"'-L-shared'",
"]",
"v",
"[",
"'DHEADER_ext'",
"]",
"=",
"'.di'",
"v",
"[",
"'DFLAGS_d_with_header'",
"]",
"=",
"[",
"'-H'",
",",
"'-Hf'",
"]",
"v",
"[",
"'D_HDR_F'",
"]",
"=",
"'%s'",
"v",
"[",
"'LINKFLAGS'",
"]",
"=",
"[",
"]",
"v",
"[",
"'DFLAGS_dshlib'",
"]",
"=",
"[",
"'-relocation-model=pic'",
"]"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/waflib/Tools/ldc2.py#L22-L48
|
||
synfig/synfig
|
a5ec91db5b751dc12e4400ccfb5c063fd6d2d928
|
synfig-studio/plugins/lottie-exporter/sources/image.py
|
python
|
add_image_asset
|
(lottie, layer)
|
return st
|
Generates the dictionary corresponding to sources/image.json
Returns: st required in calling function
Args:
lottie (dict) : Lottie layer
layer (common.Layer.Layer) : Synfig layer
Returns:
(dict) : Stores address of parameters: "tl", "br", "filename"
|
Generates the dictionary corresponding to sources/image.json
Returns: st required in calling function
|
[
"Generates",
"the",
"dictionary",
"corresponding",
"to",
"sources",
"/",
"image",
".",
"json",
"Returns",
":",
"st",
"required",
"in",
"calling",
"function"
] |
def add_image_asset(lottie, layer):
"""
Generates the dictionary corresponding to sources/image.json
Returns: st required in calling function
Args:
lottie (dict) : Lottie layer
layer (common.Layer.Layer) : Synfig layer
Returns:
(dict) : Stores address of parameters: "tl", "br", "filename"
"""
lottie["id"] = "image_" + str(settings.num_images.inc())
st = {} # Store the address of children
st["tl"] = layer.get_param("tl")
st["br"] = layer.get_param("br")
st["filename"] = layer.get_param("filename")
file_path = os.path.join(settings.file_name["fd"], st["filename"][0].text)
file_path = os.path.abspath(file_path)
width, height = get_image_size(file_path)
lottie["w"] = width
lottie["h"] = height
images_dir = os.path.join(settings.file_name["fd"], "images")
images_dir = os.path.abspath(images_dir)
if not os.path.isdir(images_dir): # Create images directory if not present
try:
os.mkdir(images_dir)
except OSError:
print("Creation of the directory %s failed" % images_dir)
# copy original image to images directory
src = file_path
head, tail = os.path.split(file_path)
new_image_path = os.path.join(images_dir, tail)
# using shutil to make a copy of the image
shutil.copy(src, new_image_path)
# copy meta-data of the file
shutil.copystat(src, new_image_path)
lottie["u"] = "images/"
lottie["p"] = tail
return st
|
[
"def",
"add_image_asset",
"(",
"lottie",
",",
"layer",
")",
":",
"lottie",
"[",
"\"id\"",
"]",
"=",
"\"image_\"",
"+",
"str",
"(",
"settings",
".",
"num_images",
".",
"inc",
"(",
")",
")",
"st",
"=",
"{",
"}",
"# Store the address of children",
"st",
"[",
"\"tl\"",
"]",
"=",
"layer",
".",
"get_param",
"(",
"\"tl\"",
")",
"st",
"[",
"\"br\"",
"]",
"=",
"layer",
".",
"get_param",
"(",
"\"br\"",
")",
"st",
"[",
"\"filename\"",
"]",
"=",
"layer",
".",
"get_param",
"(",
"\"filename\"",
")",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"settings",
".",
"file_name",
"[",
"\"fd\"",
"]",
",",
"st",
"[",
"\"filename\"",
"]",
"[",
"0",
"]",
".",
"text",
")",
"file_path",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"file_path",
")",
"width",
",",
"height",
"=",
"get_image_size",
"(",
"file_path",
")",
"lottie",
"[",
"\"w\"",
"]",
"=",
"width",
"lottie",
"[",
"\"h\"",
"]",
"=",
"height",
"images_dir",
"=",
"os",
".",
"path",
".",
"join",
"(",
"settings",
".",
"file_name",
"[",
"\"fd\"",
"]",
",",
"\"images\"",
")",
"images_dir",
"=",
"os",
".",
"path",
".",
"abspath",
"(",
"images_dir",
")",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"images_dir",
")",
":",
"# Create images directory if not present",
"try",
":",
"os",
".",
"mkdir",
"(",
"images_dir",
")",
"except",
"OSError",
":",
"print",
"(",
"\"Creation of the directory %s failed\"",
"%",
"images_dir",
")",
"# copy original image to images directory",
"src",
"=",
"file_path",
"head",
",",
"tail",
"=",
"os",
".",
"path",
".",
"split",
"(",
"file_path",
")",
"new_image_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"images_dir",
",",
"tail",
")",
"# using shutil to make a copy of the image",
"shutil",
".",
"copy",
"(",
"src",
",",
"new_image_path",
")",
"# copy meta-data of the file",
"shutil",
".",
"copystat",
"(",
"src",
",",
"new_image_path",
")",
"lottie",
"[",
"\"u\"",
"]",
"=",
"\"images/\"",
"lottie",
"[",
"\"p\"",
"]",
"=",
"tail",
"return",
"st"
] |
https://github.com/synfig/synfig/blob/a5ec91db5b751dc12e4400ccfb5c063fd6d2d928/synfig-studio/plugins/lottie-exporter/sources/image.py#L60-L107
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/monitoring.py
|
python
|
BoolGaugeCell.set
|
(self, value)
|
Atomically set the value.
Args:
value: bool value.
|
Atomically set the value.
|
[
"Atomically",
"set",
"the",
"value",
"."
] |
def set(self, value):
"""Atomically set the value.
Args:
value: bool value.
"""
pywrap_tensorflow.TFE_MonitoringBoolGaugeCellSet(self._cell, value)
|
[
"def",
"set",
"(",
"self",
",",
"value",
")",
":",
"pywrap_tensorflow",
".",
"TFE_MonitoringBoolGaugeCellSet",
"(",
"self",
".",
"_cell",
",",
"value",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/monitoring.py#L300-L306
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/grid.py
|
python
|
Grid.DisableDragColMove
|
(*args, **kwargs)
|
return _grid.Grid_DisableDragColMove(*args, **kwargs)
|
DisableDragColMove(self)
|
DisableDragColMove(self)
|
[
"DisableDragColMove",
"(",
"self",
")"
] |
def DisableDragColMove(*args, **kwargs):
"""DisableDragColMove(self)"""
return _grid.Grid_DisableDragColMove(*args, **kwargs)
|
[
"def",
"DisableDragColMove",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"Grid_DisableDragColMove",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/grid.py#L1634-L1636
|
|
mhammond/pywin32
|
44afd86ba8485194df93234639243252deeb40d5
|
win32/scripts/regsetup.py
|
python
|
FindRegisterPythonExe
|
(exeAlias, searchPaths, actualFileNames=None)
|
return fname
|
Find and Register a Python exe (not necessarily *the* python.exe)
Assumes the core registry is set up correctly.
|
Find and Register a Python exe (not necessarily *the* python.exe)
|
[
"Find",
"and",
"Register",
"a",
"Python",
"exe",
"(",
"not",
"necessarily",
"*",
"the",
"*",
"python",
".",
"exe",
")"
] |
def FindRegisterPythonExe(exeAlias, searchPaths, actualFileNames=None):
"""Find and Register a Python exe (not necessarily *the* python.exe)
Assumes the core registry is set up correctly.
"""
import regutil, string
fname, ok = FindPythonExe(exeAlias, actualFileNames, searchPaths)
if not ok:
regutil.RegisterPythonExe(fname, exeAlias)
return fname
|
[
"def",
"FindRegisterPythonExe",
"(",
"exeAlias",
",",
"searchPaths",
",",
"actualFileNames",
"=",
"None",
")",
":",
"import",
"regutil",
",",
"string",
"fname",
",",
"ok",
"=",
"FindPythonExe",
"(",
"exeAlias",
",",
"actualFileNames",
",",
"searchPaths",
")",
"if",
"not",
"ok",
":",
"regutil",
".",
"RegisterPythonExe",
"(",
"fname",
",",
"exeAlias",
")",
"return",
"fname"
] |
https://github.com/mhammond/pywin32/blob/44afd86ba8485194df93234639243252deeb40d5/win32/scripts/regsetup.py#L327-L337
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/distutils/ccompiler.py
|
python
|
get_default_compiler
|
(osname=None, platform=None)
|
return 'unix'
|
Determine the default compiler to use for the given platform.
osname should be one of the standard Python OS names (i.e. the
ones returned by os.name) and platform the common value
returned by sys.platform for the platform in question.
The default values are os.name and sys.platform in case the
parameters are not given.
|
Determine the default compiler to use for the given platform.
|
[
"Determine",
"the",
"default",
"compiler",
"to",
"use",
"for",
"the",
"given",
"platform",
"."
] |
def get_default_compiler(osname=None, platform=None):
"""Determine the default compiler to use for the given platform.
osname should be one of the standard Python OS names (i.e. the
ones returned by os.name) and platform the common value
returned by sys.platform for the platform in question.
The default values are os.name and sys.platform in case the
parameters are not given.
"""
if osname is None:
osname = os.name
if platform is None:
platform = sys.platform
for pattern, compiler in _default_compilers:
if re.match(pattern, platform) is not None or \
re.match(pattern, osname) is not None:
return compiler
# Default to Unix compiler
return 'unix'
|
[
"def",
"get_default_compiler",
"(",
"osname",
"=",
"None",
",",
"platform",
"=",
"None",
")",
":",
"if",
"osname",
"is",
"None",
":",
"osname",
"=",
"os",
".",
"name",
"if",
"platform",
"is",
"None",
":",
"platform",
"=",
"sys",
".",
"platform",
"for",
"pattern",
",",
"compiler",
"in",
"_default_compilers",
":",
"if",
"re",
".",
"match",
"(",
"pattern",
",",
"platform",
")",
"is",
"not",
"None",
"or",
"re",
".",
"match",
"(",
"pattern",
",",
"osname",
")",
"is",
"not",
"None",
":",
"return",
"compiler",
"# Default to Unix compiler",
"return",
"'unix'"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/distutils/ccompiler.py#L937-L956
|
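For example (the patterns in _default_compilers map POSIX systems to 'unix' and Windows to 'msvc'):

from distutils.ccompiler import get_default_compiler

print(get_default_compiler('posix', 'linux'))  # 'unix'
print(get_default_compiler('nt', 'win32'))     # 'msvc'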
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_controls.py
|
python
|
PreGauge
|
(*args, **kwargs)
|
return val
|
PreGauge() -> Gauge
|
PreGauge() -> Gauge
|
[
"PreGauge",
"()",
"-",
">",
"Gauge"
] |
def PreGauge(*args, **kwargs):
"""PreGauge() -> Gauge"""
val = _controls_.new_PreGauge(*args, **kwargs)
return val
|
[
"def",
"PreGauge",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"val",
"=",
"_controls_",
".",
"new_PreGauge",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"val"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L812-L815
|
|
lyxok1/Tiny-DSOD
|
94d15450699bea0dd3720e75e2d273e476174fba
|
python/caffe/io.py
|
python
|
Transformer.set_transpose
|
(self, in_, order)
|
Set the input channel order for e.g. RGB to BGR conversion
as needed for the reference ImageNet model.
Parameters
----------
in_ : which input to assign this channel order
order : the order to transpose the dimensions
|
Set the input channel order for e.g. RGB to BGR conversion
as needed for the reference ImageNet model.
|
[
"Set",
"the",
"input",
"channel",
"order",
"for",
"e",
".",
"g",
".",
"RGB",
"to",
"BGR",
"conversion",
"as",
"needed",
"for",
"the",
"reference",
"ImageNet",
"model",
"."
] |
def set_transpose(self, in_, order):
"""
Set the input channel order for e.g. RGB to BGR conversion
as needed for the reference ImageNet model.
Parameters
----------
in_ : which input to assign this channel order
order : the order to transpose the dimensions
"""
self.__check_input(in_)
if len(order) != len(self.inputs[in_]) - 1:
raise Exception('Transpose order needs to have the same number of '
'dimensions as the input.')
self.transpose[in_] = order
|
[
"def",
"set_transpose",
"(",
"self",
",",
"in_",
",",
"order",
")",
":",
"self",
".",
"__check_input",
"(",
"in_",
")",
"if",
"len",
"(",
"order",
")",
"!=",
"len",
"(",
"self",
".",
"inputs",
"[",
"in_",
"]",
")",
"-",
"1",
":",
"raise",
"Exception",
"(",
"'Transpose order needs to have the same number of '",
"'dimensions as the input.'",
")",
"self",
".",
"transpose",
"[",
"in_",
"]",
"=",
"order"
] |
https://github.com/lyxok1/Tiny-DSOD/blob/94d15450699bea0dd3720e75e2d273e476174fba/python/caffe/io.py#L188-L202
|
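Typical usage for the reference ImageNet model, moving HxWxC image arrays into Caffe's CxHxW layout; `net` (a loaded caffe.Net with an input blob named 'data') is an assumption here:

import caffe

transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))  # HxWxC -> CxHxW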
||
Freenove/Freenove_Ultimate_Starter_Kit_for_Raspberry_Pi
|
fd7b266e1f294dce7196879f944f343edfe88e4b
|
Code/Python_Code/26.1.1_WebIO/WebIO.py
|
python
|
MyServer.do_HEAD
|
(self)
|
do_HEAD() can be tested using the curl command
'curl -I http://server-ip-address:port'
|
do_HEAD() can be tested using the curl command
'curl -I http://server-ip-address:port'
|
[
"do_HEAD",
"()",
"can",
"be",
"tested",
"use",
"curl",
"command",
"curl",
"-",
"I",
"http",
":",
"//",
"server",
"-",
"ip",
"-",
"address",
":",
"port"
] |
def do_HEAD(self):
""" do_HEAD() can be tested use curl command
'curl -I http://server-ip-address:port'
"""
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
|
[
"def",
"do_HEAD",
"(",
"self",
")",
":",
"self",
".",
"send_response",
"(",
"200",
")",
"self",
".",
"send_header",
"(",
"'Content-type'",
",",
"'text/html'",
")",
"self",
".",
"end_headers",
"(",
")"
] |
https://github.com/Freenove/Freenove_Ultimate_Starter_Kit_for_Raspberry_Pi/blob/fd7b266e1f294dce7196879f944f343edfe88e4b/Code/Python_Code/26.1.1_WebIO/WebIO.py#L12-L18
|
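A minimal runnable server for trying that curl check; the handler body is the same as above, and the port choice (8000) is arbitrary:

from http.server import BaseHTTPRequestHandler, HTTPServer

class HeadOnly(BaseHTTPRequestHandler):
    def do_HEAD(self):
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()

HTTPServer(('', 8000), HeadOnly).serve_forever()
# from another shell:  curl -I http://localhost:8000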
||
zhaoweicai/cascade-rcnn
|
2252f46158ea6555868ca6fa5c221ea71d9b5e6c
|
python/caffe/coord_map.py
|
python
|
crop
|
(top_from, top_to)
|
return L.Crop(top_from, top_to,
crop_param=dict(axis=ax + 1, # +1 for first cropping dim.
offset=list(-np.round(b).astype(int))))
|
Define a Crop layer to crop a top (from) to another top (to) by
determining the coordinate mapping between the two and net spec'ing
the axis and shift parameters of the crop.
|
Define a Crop layer to crop a top (from) to another top (to) by
determining the coordinate mapping between the two and net spec'ing
the axis and shift parameters of the crop.
|
[
"Define",
"a",
"Crop",
"layer",
"to",
"crop",
"a",
"top",
"(",
"from",
")",
"to",
"another",
"top",
"(",
"to",
")",
"by",
"determining",
"the",
"coordinate",
"mapping",
"between",
"the",
"two",
"and",
"net",
"spec",
"ing",
"the",
"axis",
"and",
"shift",
"parameters",
"of",
"the",
"crop",
"."
] |
def crop(top_from, top_to):
"""
Define a Crop layer to crop a top (from) to another top (to) by
determining the coordinate mapping between the two and net spec'ing
the axis and shift parameters of the crop.
"""
ax, a, b = coord_map_from_to(top_from, top_to)
assert (a == 1).all(), 'scale mismatch on crop (a = {})'.format(a)
assert (b <= 0).all(), 'cannot crop negative offset (b = {})'.format(b)
assert (np.round(b) == b).all(), 'cannot crop noninteger offset ' \
'(b = {})'.format(b)
return L.Crop(top_from, top_to,
crop_param=dict(axis=ax + 1, # +1 for first cropping dim.
offset=list(-np.round(b).astype(int))))
|
[
"def",
"crop",
"(",
"top_from",
",",
"top_to",
")",
":",
"ax",
",",
"a",
",",
"b",
"=",
"coord_map_from_to",
"(",
"top_from",
",",
"top_to",
")",
"assert",
"(",
"a",
"==",
"1",
")",
".",
"all",
"(",
")",
",",
"'scale mismatch on crop (a = {})'",
".",
"format",
"(",
"a",
")",
"assert",
"(",
"b",
"<=",
"0",
")",
".",
"all",
"(",
")",
",",
"'cannot crop negative offset (b = {})'",
".",
"format",
"(",
"b",
")",
"assert",
"(",
"np",
".",
"round",
"(",
"b",
")",
"==",
"b",
")",
".",
"all",
"(",
")",
",",
"'cannot crop noninteger offset '",
"'(b = {})'",
".",
"format",
"(",
"b",
")",
"return",
"L",
".",
"Crop",
"(",
"top_from",
",",
"top_to",
",",
"crop_param",
"=",
"dict",
"(",
"axis",
"=",
"ax",
"+",
"1",
",",
"# +1 for first cropping dim.",
"offset",
"=",
"list",
"(",
"-",
"np",
".",
"round",
"(",
"b",
")",
".",
"astype",
"(",
"int",
")",
")",
")",
")"
] |
https://github.com/zhaoweicai/cascade-rcnn/blob/2252f46158ea6555868ca6fa5c221ea71d9b5e6c/python/caffe/coord_map.py#L172-L185
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/distutils/fcompiler/__init__.py
|
python
|
load_all_fcompiler_classes
|
()
|
Cache all the FCompiler classes found in modules in the
numpy.distutils.fcompiler package.
|
Cache all the FCompiler classes found in modules in the
numpy.distutils.fcompiler package.
|
[
"Cache",
"all",
"the",
"FCompiler",
"classes",
"found",
"in",
"modules",
"in",
"the",
"numpy",
".",
"distutils",
".",
"fcompiler",
"package",
"."
] |
def load_all_fcompiler_classes():
"""Cache all the FCompiler classes found in modules in the
numpy.distutils.fcompiler package.
"""
from glob import glob
global fcompiler_class, fcompiler_aliases
if fcompiler_class is not None:
return
pys = os.path.join(os.path.dirname(__file__), '*.py')
fcompiler_class = {}
fcompiler_aliases = {}
for fname in glob(pys):
module_name, ext = os.path.splitext(os.path.basename(fname))
module_name = 'numpy.distutils.fcompiler.' + module_name
__import__ (module_name)
module = sys.modules[module_name]
if hasattr(module, 'compilers'):
for cname in module.compilers:
klass = getattr(module, cname)
desc = (klass.compiler_type, klass, klass.description)
fcompiler_class[klass.compiler_type] = desc
for alias in klass.compiler_aliases:
if alias in fcompiler_aliases:
raise ValueError("alias %r defined for both %s and %s"
% (alias, klass.__name__,
fcompiler_aliases[alias][1].__name__))
fcompiler_aliases[alias] = desc
|
[
"def",
"load_all_fcompiler_classes",
"(",
")",
":",
"from",
"glob",
"import",
"glob",
"global",
"fcompiler_class",
",",
"fcompiler_aliases",
"if",
"fcompiler_class",
"is",
"not",
"None",
":",
"return",
"pys",
"=",
"os",
".",
"path",
".",
"join",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"__file__",
")",
",",
"'*.py'",
")",
"fcompiler_class",
"=",
"{",
"}",
"fcompiler_aliases",
"=",
"{",
"}",
"for",
"fname",
"in",
"glob",
"(",
"pys",
")",
":",
"module_name",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"os",
".",
"path",
".",
"basename",
"(",
"fname",
")",
")",
"module_name",
"=",
"'numpy.distutils.fcompiler.'",
"+",
"module_name",
"__import__",
"(",
"module_name",
")",
"module",
"=",
"sys",
".",
"modules",
"[",
"module_name",
"]",
"if",
"hasattr",
"(",
"module",
",",
"'compilers'",
")",
":",
"for",
"cname",
"in",
"module",
".",
"compilers",
":",
"klass",
"=",
"getattr",
"(",
"module",
",",
"cname",
")",
"desc",
"=",
"(",
"klass",
".",
"compiler_type",
",",
"klass",
",",
"klass",
".",
"description",
")",
"fcompiler_class",
"[",
"klass",
".",
"compiler_type",
"]",
"=",
"desc",
"for",
"alias",
"in",
"klass",
".",
"compiler_aliases",
":",
"if",
"alias",
"in",
"fcompiler_aliases",
":",
"raise",
"ValueError",
"(",
"\"alias %r defined for both %s and %s\"",
"%",
"(",
"alias",
",",
"klass",
".",
"__name__",
",",
"fcompiler_aliases",
"[",
"alias",
"]",
"[",
"1",
"]",
".",
"__name__",
")",
")",
"fcompiler_aliases",
"[",
"alias",
"]",
"=",
"desc"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/distutils/fcompiler/__init__.py#L767-L793
|
||
cmu-db/bustub
|
fe1b9e984bd2967997b52df872c873d80f71cf7d
|
build_support/cpplint.py
|
python
|
_CppLintState.SetQuiet
|
(self, quiet)
|
return last_quiet
|
Sets the module's quiet settings, and returns the previous setting.
|
Sets the module's quiet settings, and returns the previous setting.
|
[
"Sets",
"the",
"module",
"s",
"quiet",
"settings",
"and",
"returns",
"the",
"previous",
"setting",
"."
] |
def SetQuiet(self, quiet):
"""Sets the module's quiet settings, and returns the previous setting."""
last_quiet = self.quiet
self.quiet = quiet
return last_quiet
|
[
"def",
"SetQuiet",
"(",
"self",
",",
"quiet",
")",
":",
"last_quiet",
"=",
"self",
".",
"quiet",
"self",
".",
"quiet",
"=",
"quiet",
"return",
"last_quiet"
] |
https://github.com/cmu-db/bustub/blob/fe1b9e984bd2967997b52df872c873d80f71cf7d/build_support/cpplint.py#L1034-L1038
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/_pyio.py
|
python
|
TextIOWrapper._rewind_decoded_chars
|
(self, n)
|
Rewind the _decoded_chars buffer.
|
Rewind the _decoded_chars buffer.
|
[
"Rewind",
"the",
"_decoded_chars",
"buffer",
"."
] |
def _rewind_decoded_chars(self, n):
"""Rewind the _decoded_chars buffer."""
if self._decoded_chars_used < n:
raise AssertionError("rewind decoded_chars out of bounds")
self._decoded_chars_used -= n
|
[
"def",
"_rewind_decoded_chars",
"(",
"self",
",",
"n",
")",
":",
"if",
"self",
".",
"_decoded_chars_used",
"<",
"n",
":",
"raise",
"AssertionError",
"(",
"\"rewind decoded_chars out of bounds\"",
")",
"self",
".",
"_decoded_chars_used",
"-=",
"n"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/_pyio.py#L2205-L2209
|
||
KratosMultiphysics/Kratos
|
0000833054ed0503424eb28205d6508d9ca6cbbc
|
applications/PfemFluidDynamicsApplication/python_scripts/pfem_fluid_dynamics_analysis.py
|
python
|
PfemFluidDynamicsAnalysis.GraphicalOutputPrintOutput
|
(self)
|
This function prints the output for this time step
|
This function prints the output for this time step
|
[
"This",
"function",
"prints",
"the",
"output",
"for",
"this",
"time",
"step"
] |
def GraphicalOutputPrintOutput(self):
"""This function prints the output for this time step
"""
if( self.project_parameters.Has("output_configuration") ):
self.post_process_model_part.ProcessInfo[KratosMultiphysics.TIME] = self.main_model_part.ProcessInfo[KratosMultiphysics.TIME]
if(self.graphical_output.IsOutputStep()):
time=self.main_model_part.ProcessInfo[KratosMultiphysics.TIME]
delta_time=self.main_model_part.ProcessInfo[KratosMultiphysics.DELTA_TIME]
step=self.main_model_part.ProcessInfo[KratosMultiphysics.STEP]
KratosMultiphysics.PfemFluidDynamicsApplication.PostProcessUtilities().RebuildPostProcessModelPart(self.post_process_model_part, self.main_model_part)
self.KratosPrintInfo("")
self.KratosPrintInfo("**********************************************************")
self.KratosPrintInfo("---> Print Output at [STEP:" + str(step) + " TIME:" + str(time) + " DT:" + str(delta_time) + "]")
self.KratosPrintInfo("**********************************************************")
self.KratosPrintInfo("")
self.graphical_output.PrintOutput()
|
[
"def",
"GraphicalOutputPrintOutput",
"(",
"self",
")",
":",
"if",
"(",
"self",
".",
"project_parameters",
".",
"Has",
"(",
"\"output_configuration\"",
")",
")",
":",
"self",
".",
"post_process_model_part",
".",
"ProcessInfo",
"[",
"KratosMultiphysics",
".",
"TIME",
"]",
"=",
"self",
".",
"main_model_part",
".",
"ProcessInfo",
"[",
"KratosMultiphysics",
".",
"TIME",
"]",
"if",
"(",
"self",
".",
"graphical_output",
".",
"IsOutputStep",
"(",
")",
")",
":",
"time",
"=",
"self",
".",
"main_model_part",
".",
"ProcessInfo",
"[",
"KratosMultiphysics",
".",
"TIME",
"]",
"delta_time",
"=",
"self",
".",
"main_model_part",
".",
"ProcessInfo",
"[",
"KratosMultiphysics",
".",
"DELTA_TIME",
"]",
"step",
"=",
"self",
".",
"main_model_part",
".",
"ProcessInfo",
"[",
"KratosMultiphysics",
".",
"STEP",
"]",
"KratosMultiphysics",
".",
"PfemFluidDynamicsApplication",
".",
"PostProcessUtilities",
"(",
")",
".",
"RebuildPostProcessModelPart",
"(",
"self",
".",
"post_process_model_part",
",",
"self",
".",
"main_model_part",
")",
"self",
".",
"KratosPrintInfo",
"(",
"\"\"",
")",
"self",
".",
"KratosPrintInfo",
"(",
"\"**********************************************************\"",
")",
"self",
".",
"KratosPrintInfo",
"(",
"\"---> Print Output at [STEP:\"",
"+",
"str",
"(",
"step",
")",
"+",
"\" TIME:\"",
"+",
"str",
"(",
"time",
")",
"+",
"\" DT:\"",
"+",
"str",
"(",
"delta_time",
")",
"+",
"\"]\"",
")",
"self",
".",
"KratosPrintInfo",
"(",
"\"**********************************************************\"",
")",
"self",
".",
"KratosPrintInfo",
"(",
"\"\"",
")",
"self",
".",
"graphical_output",
".",
"PrintOutput",
"(",
")"
] |
https://github.com/KratosMultiphysics/Kratos/blob/0000833054ed0503424eb28205d6508d9ca6cbbc/applications/PfemFluidDynamicsApplication/python_scripts/pfem_fluid_dynamics_analysis.py#L281-L296
|
||
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/eager/context.py
|
python
|
Context.set_optimizer_experimental_options
|
(self, options)
|
Set experimental options for the optimizer.
Args:
options: Dictionary of options to modify
|
Set experimental options for the optimizer.
|
[
"Set",
"experimental",
"options",
"for",
"the",
"optimizer",
"."
] |
def set_optimizer_experimental_options(self, options):
"""Set experimental options for the optimizer.
Args:
options: Dictionary of options to modify
"""
self._optimizer_experimental_options.update(options)
self._thread_local_data.function_call_options = None
|
[
"def",
"set_optimizer_experimental_options",
"(",
"self",
",",
"options",
")",
":",
"self",
".",
"_optimizer_experimental_options",
".",
"update",
"(",
"options",
")",
"self",
".",
"_thread_local_data",
".",
"function_call_options",
"=",
"None"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/eager/context.py#L1775-L1783
|
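The public TF 2.x wrapper around the same context state, assuming a standard TensorFlow install:

import tensorflow as tf

tf.config.optimizer.set_experimental_options({'constant_folding': False})
print(tf.config.optimizer.get_experimental_options())  # includes 'constant_folding': False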
||
Polidea/SiriusObfuscator
|
b0e590d8130e97856afe578869b83a209e2b19be
|
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
|
python
|
SBDebugger.GetSelectedPlatform
|
(self)
|
return _lldb.SBDebugger_GetSelectedPlatform(self)
|
GetSelectedPlatform(self) -> SBPlatform
|
GetSelectedPlatform(self) -> SBPlatform
|
[
"GetSelectedPlatform",
"(",
"self",
")",
"-",
">",
"SBPlatform"
] |
def GetSelectedPlatform(self):
"""GetSelectedPlatform(self) -> SBPlatform"""
return _lldb.SBDebugger_GetSelectedPlatform(self)
|
[
"def",
"GetSelectedPlatform",
"(",
"self",
")",
":",
"return",
"_lldb",
".",
"SBDebugger_GetSelectedPlatform",
"(",
"self",
")"
] |
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L3322-L3324
|
|
yujinrobot/yujin_ocs
|
17337e5a2d0a0f3711c55e272e656eb59174d657
|
yocs_joyop/scripts/magic_button_relay.py
|
python
|
MagicButtonRelay.joy_callback
|
(self, msg)
|
Processes the joy topic.
|
Processes the joy topic.
|
[
"Processes",
"the",
"joy",
"topic",
"."
] |
def joy_callback(self, msg):
"""
Processes the joy topic.
"""
timestamp = msg.header.stamp
for relay in self.relays:
relay.update(msg.buttons[relay.button_id], msg.header.stamp)
|
[
"def",
"joy_callback",
"(",
"self",
",",
"msg",
")",
":",
"timestamp",
"=",
"msg",
".",
"header",
".",
"stamp",
"for",
"relay",
"in",
"self",
".",
"relays",
":",
"relay",
".",
"update",
"(",
"msg",
".",
"buttons",
"[",
"relay",
".",
"button_id",
"]",
",",
"msg",
".",
"header",
".",
"stamp",
")"
] |
https://github.com/yujinrobot/yujin_ocs/blob/17337e5a2d0a0f3711c55e272e656eb59174d657/yocs_joyop/scripts/magic_button_relay.py#L90-L96
|
||
LLNL/blt
|
4eafa66ddb99ee5a4a0f75f3d7d790679add6e01
|
thirdparty_builtin/benchmark-1.5.0/mingw.py
|
python
|
unpack
|
(archive, location, log = EmptyLogger())
|
Unpacks a mingw-builds archive
|
Unpacks a mingw-builds archive
|
[
"Unpacks",
"a",
"mingw",
"-",
"builds",
"archive"
] |
def unpack(archive, location, log = EmptyLogger()):
'''
Unpacks a mingw-builds archive
'''
sevenzip = find_7zip(log)
log.info('unpacking %s', os.path.basename(archive))
cmd = [sevenzip, 'x', archive, '-o' + location, '-y']
log.debug(' - %r', cmd)
with open(os.devnull, 'w') as devnull:
subprocess.check_call(cmd, stdout = devnull)
|
[
"def",
"unpack",
"(",
"archive",
",",
"location",
",",
"log",
"=",
"EmptyLogger",
"(",
")",
")",
":",
"sevenzip",
"=",
"find_7zip",
"(",
"log",
")",
"log",
".",
"info",
"(",
"'unpacking %s'",
",",
"os",
".",
"path",
".",
"basename",
"(",
"archive",
")",
")",
"cmd",
"=",
"[",
"sevenzip",
",",
"'x'",
",",
"archive",
",",
"'-o'",
"+",
"location",
",",
"'-y'",
"]",
"log",
".",
"debug",
"(",
"' - %r'",
",",
"cmd",
")",
"with",
"open",
"(",
"os",
".",
"devnull",
",",
"'w'",
")",
"as",
"devnull",
":",
"subprocess",
".",
"check_call",
"(",
"cmd",
",",
"stdout",
"=",
"devnull",
")"
] |
https://github.com/LLNL/blt/blob/4eafa66ddb99ee5a4a0f75f3d7d790679add6e01/thirdparty_builtin/benchmark-1.5.0/mingw.py#L114-L123
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/joblib/joblib/parallel.py
|
python
|
Parallel._dispatch
|
(self, batch)
|
Queue the batch for computing, with or without multiprocessing
WARNING: this method is not thread-safe: it should be only called
indirectly via dispatch_one_batch.
|
Queue the batch for computing, with or without multiprocessing
|
[
"Queue",
"the",
"batch",
"for",
"computing",
"with",
"or",
"without",
"multiprocessing"
] |
def _dispatch(self, batch):
"""Queue the batch for computing, with or without multiprocessing
WARNING: this method is not thread-safe: it should be only called
indirectly via dispatch_one_batch.
"""
# If job.get() catches an exception, it closes the queue:
if self._aborting:
return
self.n_dispatched_tasks += len(batch)
self.n_dispatched_batches += 1
dispatch_timestamp = time.time()
cb = BatchCompletionCallBack(dispatch_timestamp, len(batch), self)
with self._lock:
job_idx = len(self._jobs)
job = self._backend.apply_async(batch, callback=cb)
# A job can complete so quickly that its callback is
# called before we get here, causing self._jobs to
# grow. To ensure correct results ordering, .insert is
# used (rather than .append) in the following line
self._jobs.insert(job_idx, job)
|
[
"def",
"_dispatch",
"(",
"self",
",",
"batch",
")",
":",
"# If job.get() catches an exception, it closes the queue:",
"if",
"self",
".",
"_aborting",
":",
"return",
"self",
".",
"n_dispatched_tasks",
"+=",
"len",
"(",
"batch",
")",
"self",
".",
"n_dispatched_batches",
"+=",
"1",
"dispatch_timestamp",
"=",
"time",
".",
"time",
"(",
")",
"cb",
"=",
"BatchCompletionCallBack",
"(",
"dispatch_timestamp",
",",
"len",
"(",
"batch",
")",
",",
"self",
")",
"with",
"self",
".",
"_lock",
":",
"job_idx",
"=",
"len",
"(",
"self",
".",
"_jobs",
")",
"job",
"=",
"self",
".",
"_backend",
".",
"apply_async",
"(",
"batch",
",",
"callback",
"=",
"cb",
")",
"# A job can complete so quickly than its callback is",
"# called before we get here, causing self._jobs to",
"# grow. To ensure correct results ordering, .insert is",
"# used (rather than .append) in the following line",
"self",
".",
"_jobs",
".",
"insert",
"(",
"job_idx",
",",
"job",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/joblib/joblib/parallel.py#L761-L784
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/model_pruning/python/pruning.py
|
python
|
get_weight_sparsity
|
()
|
return [nn_impl.zero_fraction(mask) for mask in masks]
|
Get sparsity of the weights.
Args:
None
Returns:
A list containing the sparsity of each of the weight tensors
|
Get sparsity of the weights.
|
[
"Get",
"sparsity",
"of",
"the",
"weights",
"."
] |
def get_weight_sparsity():
"""Get sparsity of the weights.
Args:
None
Returns:
A list containing the sparsity of each of the weight tensors
"""
masks = get_masks()
return [nn_impl.zero_fraction(mask) for mask in masks]
|
[
"def",
"get_weight_sparsity",
"(",
")",
":",
"masks",
"=",
"get_masks",
"(",
")",
"return",
"[",
"nn_impl",
".",
"zero_fraction",
"(",
"mask",
")",
"for",
"mask",
"in",
"masks",
"]"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/model_pruning/python/pruning.py#L133-L143
|
|
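In the record above, `nn_impl.zero_fraction(mask)` reduces each mask to the fraction of entries that are exactly zero, one scalar per weight tensor. A minimal NumPy sketch of the same reduction (plain arrays standing in for TF tensors; this `zero_fraction` is a hypothetical re-implementation, not the TF op):

```python
import numpy as np

def zero_fraction(mask):
    # Fraction of entries that are exactly zero.
    return np.mean(mask == 0)

masks = [np.array([1.0, 0.0, 0.0, 1.0]), np.zeros(3)]
print([zero_fraction(m) for m in masks])  # [0.5, 1.0]
```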
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/Editra/src/style_editor.py
|
python
|
StyleEditor.OnClose
|
(self, evt)
|
Handles the window closer event
@param evt: event that called this handler
|
Handles the window closer event
@param evt: event that called this handler
|
[
"Handles",
"the",
"window",
"closer",
"event",
"@param",
"evt",
":",
"event",
"that",
"called",
"this",
"handler"
] |
def OnClose(self, evt):
"""Handles the window closer event
@param evt: event that called this handler
"""
self.LOG("[style_editor][evt] Dialog closing...")
self.OnOk(evt)
|
[
"def",
"OnClose",
"(",
"self",
",",
"evt",
")",
":",
"self",
".",
"LOG",
"(",
"\"[style_editor][evt] Dialog closing...\"",
")",
"self",
".",
"OnOk",
"(",
"evt",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/style_editor.py#L122-L128
|
||
osrf/gazebo
|
f570338107862253229a0514ffea10deab4f4517
|
tools/cpplint.py
|
python
|
Error
|
(filename, linenum, category, confidence, message)
|
Logs the fact we've found a lint error.
We log where the error was found, and also our confidence in the error,
that is, how certain we are this is a legitimate style regression, and
not a misidentification or a use that's sometimes justified.
False positives can be suppressed by the use of
"cpplint(category)" comments on the offending line. These are
parsed into _error_suppressions.
Args:
filename: The name of the file containing the error.
linenum: The number of the line containing the error.
category: A string used to describe the "category" this bug
falls under: "whitespace", say, or "runtime". Categories
may have a hierarchy separated by slashes: "whitespace/indent".
confidence: A number from 1-5 representing a confidence score for
the error, with 5 meaning that we are certain of the problem,
and 1 meaning that it could be a legitimate construct.
message: The error message.
|
Logs the fact we've found a lint error.
|
[
"Logs",
"the",
"fact",
"we",
"ve",
"found",
"a",
"lint",
"error",
"."
] |
def Error(filename, linenum, category, confidence, message):
"""Logs the fact we've found a lint error.
We log where the error was found, and also our confidence in the error,
that is, how certain we are this is a legitimate style regression, and
not a misidentification or a use that's sometimes justified.
False positives can be suppressed by the use of
"cpplint(category)" comments on the offending line. These are
parsed into _error_suppressions.
Args:
filename: The name of the file containing the error.
linenum: The number of the line containing the error.
category: A string used to describe the "category" this bug
falls under: "whitespace", say, or "runtime". Categories
may have a hierarchy separated by slashes: "whitespace/indent".
confidence: A number from 1-5 representing a confidence score for
the error, with 5 meaning that we are certain of the problem,
and 1 meaning that it could be a legitimate construct.
message: The error message.
"""
if _ShouldPrintError(category, confidence, linenum):
_cpplint_state.IncrementErrorCount(category)
if _cpplint_state.output_format == 'vs7':
sys.stderr.write('%s(%s): %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
else:
sys.stderr.write('%s:%s: %s [%s] [%d]\n' % (
filename, linenum, message, category, confidence))
|
[
"def",
"Error",
"(",
"filename",
",",
"linenum",
",",
"category",
",",
"confidence",
",",
"message",
")",
":",
"if",
"_ShouldPrintError",
"(",
"category",
",",
"confidence",
",",
"linenum",
")",
":",
"_cpplint_state",
".",
"IncrementErrorCount",
"(",
"category",
")",
"if",
"_cpplint_state",
".",
"output_format",
"==",
"'vs7'",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'%s(%s): %s [%s] [%d]\\n'",
"%",
"(",
"filename",
",",
"linenum",
",",
"message",
",",
"category",
",",
"confidence",
")",
")",
"else",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'%s:%s: %s [%s] [%d]\\n'",
"%",
"(",
"filename",
",",
"linenum",
",",
"message",
",",
"category",
",",
"confidence",
")",
")"
] |
https://github.com/osrf/gazebo/blob/f570338107862253229a0514ffea10deab4f4517/tools/cpplint.py#L789-L818
|
||
Caffe-MPI/Caffe-MPI.github.io
|
df5992af571a2a19981b69635115c393f18d1c76
|
python/caffe/draw.py
|
python
|
choose_color_by_layertype
|
(layertype)
|
return color
|
Define colors for nodes based on the layer type.
|
Define colors for nodes based on the layer type.
|
[
"Define",
"colors",
"for",
"nodes",
"based",
"on",
"the",
"layer",
"type",
"."
] |
def choose_color_by_layertype(layertype):
"""Define colors for nodes based on the layer type.
"""
color = '#6495ED' # Default
if layertype == 'Convolution' or layertype == 'Deconvolution':
color = '#FF5050'
elif layertype == 'Pooling':
color = '#FF9900'
elif layertype == 'InnerProduct':
color = '#CC33FF'
return color
|
[
"def",
"choose_color_by_layertype",
"(",
"layertype",
")",
":",
"color",
"=",
"'#6495ED'",
"# Default",
"if",
"layertype",
"==",
"'Convolution'",
"or",
"layertype",
"==",
"'Deconvolution'",
":",
"color",
"=",
"'#FF5050'",
"elif",
"layertype",
"==",
"'Pooling'",
":",
"color",
"=",
"'#FF9900'",
"elif",
"layertype",
"==",
"'InnerProduct'",
":",
"color",
"=",
"'#CC33FF'",
"return",
"color"
] |
https://github.com/Caffe-MPI/Caffe-MPI.github.io/blob/df5992af571a2a19981b69635115c393f18d1c76/python/caffe/draw.py#L117-L127
|
|
nasa/fprime
|
595cf3682d8365943d86c1a6fe7c78f0a116acf0
|
Autocoders/Python/src/fprime_ac/models/Port.py
|
python
|
Port.set_return
|
(self, t, m)
|
Set the optional return type if one is specified here.
|
Set the optional return type if one is specified here.
|
[
"Set",
"the",
"optional",
"return",
"type",
"if",
"one",
"is",
"specified",
"here",
"."
] |
def set_return(self, t, m):
"""
Set the optional return type if one is specified here.
"""
self.__return_type = t
self.__return_modifier = m
|
[
"def",
"set_return",
"(",
"self",
",",
"t",
",",
"m",
")",
":",
"self",
".",
"__return_type",
"=",
"t",
"self",
".",
"__return_modifier",
"=",
"m"
] |
https://github.com/nasa/fprime/blob/595cf3682d8365943d86c1a6fe7c78f0a116acf0/Autocoders/Python/src/fprime_ac/models/Port.py#L113-L118
|
||
bulletphysics/bullet3
|
f0f2a952e146f016096db6f85cf0c44ed75b0b9a
|
examples/pybullet/gym/pybullet_envs/minitaur/agents/ppo/normalize.py
|
python
|
StreamingNormalize._std
|
(self)
|
return tf.sqrt(variance + 1e-4)
|
Computes the current estimate of the standard deviation.
Note that the standard deviation is not defined until at least two samples
were seen.
Returns:
Tensor of current variance.
|
Computes the current estimate of the standard deviation.
|
[
"Computes",
"the",
"current",
"estimate",
"of",
"the",
"standard",
"deviation",
"."
] |
def _std(self):
"""Computes the current estimate of the standard deviation.
Note that the standard deviation is not defined until at least two samples
were seen.
Returns:
Tensor of current variance.
"""
variance = tf.cond(self._count > 1, lambda: self._var_sum / tf.cast(
self._count - 1, tf.float32), lambda: tf.ones_like(self._var_sum) * float('nan'))
# The epsilon corrects for small negative variance values caused by
# the algorithm. It was empirically chosen to work with all environments
# tested.
return tf.sqrt(variance + 1e-4)
|
[
"def",
"_std",
"(",
"self",
")",
":",
"variance",
"=",
"tf",
".",
"cond",
"(",
"self",
".",
"_count",
">",
"1",
",",
"lambda",
":",
"self",
".",
"_var_sum",
"/",
"tf",
".",
"cast",
"(",
"self",
".",
"_count",
"-",
"1",
",",
"tf",
".",
"float32",
")",
",",
"lambda",
":",
"tf",
".",
"ones_like",
"(",
"self",
".",
"_var_sum",
")",
"*",
"float",
"(",
"'nan'",
")",
")",
"# The epsilon corrects for small negative variance values caused by",
"# the algorithm. It was empirically chosen to work with all environments",
"# tested.",
"return",
"tf",
".",
"sqrt",
"(",
"variance",
"+",
"1e-4",
")"
] |
https://github.com/bulletphysics/bullet3/blob/f0f2a952e146f016096db6f85cf0c44ed75b0b9a/examples/pybullet/gym/pybullet_envs/minitaur/agents/ppo/normalize.py#L129-L143
|
|
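The `tf.cond` in `_std` above encodes the unbiased sample-variance estimate `var_sum / (count - 1)`, which is undefined below two samples, and the `1e-4` epsilon keeps the square root finite if the running sum drifts slightly negative. A plain-Python sketch of the same guard, with hypothetical scalar `count`/`var_sum` in place of TF state:

```python
import math

def streaming_std(count, var_sum, eps=1e-4):
    # Sample variance is undefined until two observations are seen.
    variance = var_sum / (count - 1) if count > 1 else float('nan')
    return math.sqrt(variance + eps)

print(streaming_std(5, 8.0))  # sqrt(8/4 + 1e-4) ~= 1.41425
print(streaming_std(1, 0.0))  # nan: too few samples
```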
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/protobuf/python/google/protobuf/internal/python_message.py
|
python
|
_AddPropertiesForField
|
(field, cls)
|
Adds a public property for a protocol message field.
Clients can use this property to get and (in the case
of non-repeated scalar fields) directly set the value
of a protocol message field.
Args:
field: A FieldDescriptor for this field.
cls: The class we're constructing.
|
Adds a public property for a protocol message field.
Clients can use this property to get and (in the case
of non-repeated scalar fields) directly set the value
of a protocol message field.
|
[
"Adds",
"a",
"public",
"property",
"for",
"a",
"protocol",
"message",
"field",
".",
"Clients",
"can",
"use",
"this",
"property",
"to",
"get",
"and",
"(",
"in",
"the",
"case",
"of",
"non",
"-",
"repeated",
"scalar",
"fields",
")",
"directly",
"set",
"the",
"value",
"of",
"a",
"protocol",
"message",
"field",
"."
] |
def _AddPropertiesForField(field, cls):
"""Adds a public property for a protocol message field.
Clients can use this property to get and (in the case
of non-repeated scalar fields) directly set the value
of a protocol message field.
Args:
field: A FieldDescriptor for this field.
cls: The class we're constructing.
"""
# Catch it if we add other types that we should
# handle specially here.
assert _FieldDescriptor.MAX_CPPTYPE == 10
constant_name = field.name.upper() + "_FIELD_NUMBER"
setattr(cls, constant_name, field.number)
if field.label == _FieldDescriptor.LABEL_REPEATED:
_AddPropertiesForRepeatedField(field, cls)
elif field.cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
_AddPropertiesForNonRepeatedCompositeField(field, cls)
else:
_AddPropertiesForNonRepeatedScalarField(field, cls)
|
[
"def",
"_AddPropertiesForField",
"(",
"field",
",",
"cls",
")",
":",
"# Catch it if we add other types that we should",
"# handle specially here.",
"assert",
"_FieldDescriptor",
".",
"MAX_CPPTYPE",
"==",
"10",
"constant_name",
"=",
"field",
".",
"name",
".",
"upper",
"(",
")",
"+",
"\"_FIELD_NUMBER\"",
"setattr",
"(",
"cls",
",",
"constant_name",
",",
"field",
".",
"number",
")",
"if",
"field",
".",
"label",
"==",
"_FieldDescriptor",
".",
"LABEL_REPEATED",
":",
"_AddPropertiesForRepeatedField",
"(",
"field",
",",
"cls",
")",
"elif",
"field",
".",
"cpp_type",
"==",
"_FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
":",
"_AddPropertiesForNonRepeatedCompositeField",
"(",
"field",
",",
"cls",
")",
"else",
":",
"_AddPropertiesForNonRepeatedScalarField",
"(",
"field",
",",
"cls",
")"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/protobuf/python/google/protobuf/internal/python_message.py#L574-L596
|
||
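`_AddPropertiesForField` above mutates the class under construction: a `<NAME>_FIELD_NUMBER` constant via `setattr`, then a property whose flavor depends on the field's label and C++ type. A stripped-down sketch of that class-mutation pattern, with a hypothetical minimal descriptor instead of protobuf's `FieldDescriptor`:

```python
class FieldDescriptor:
    def __init__(self, name, number):
        self.name = name
        self.number = number

def add_properties_for_field(field, cls):
    # Expose NAME_FIELD_NUMBER on the class, protobuf-style.
    setattr(cls, field.name.upper() + "_FIELD_NUMBER", field.number)
    # Back a plain property with a private slot on the instance.
    attr = "_" + field.name
    def getter(self):
        return getattr(self, attr, None)
    def setter(self, value):
        setattr(self, attr, value)
    setattr(cls, field.name, property(getter, setter))

class Message:
    pass

add_properties_for_field(FieldDescriptor("user_id", 3), Message)
m = Message()
m.user_id = 42
print(m.user_id, Message.USER_ID_FIELD_NUMBER)  # 42 3
```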
mapnik/mapnik
|
f3da900c355e1d15059c4a91b00203dcc9d9f0ef
|
scons/scons-local-4.1.0/SCons/Node/FS.py
|
python
|
Base.get_abspath
|
(self)
|
return self.dir.entry_abspath(self.name)
|
Get the absolute path of the file.
|
Get the absolute path of the file.
|
[
"Get",
"the",
"absolute",
"path",
"of",
"the",
"file",
"."
] |
def get_abspath(self):
"""Get the absolute path of the file."""
return self.dir.entry_abspath(self.name)
|
[
"def",
"get_abspath",
"(",
"self",
")",
":",
"return",
"self",
".",
"dir",
".",
"entry_abspath",
"(",
"self",
".",
"name",
")"
] |
https://github.com/mapnik/mapnik/blob/f3da900c355e1d15059c4a91b00203dcc9d9f0ef/scons/scons-local-4.1.0/SCons/Node/FS.py#L826-L828
|
|
plumonito/dtslam
|
5994bb9cf7a11981b830370db206bceb654c085d
|
3rdparty/opencv-git/3rdparty/jinja2/filters.py
|
python
|
do_last
|
(environment, seq)
|
Return the last item of a sequence.
|
Return the last item of a sequence.
|
[
"Return",
"the",
"last",
"item",
"of",
"a",
"sequence",
"."
] |
def do_last(environment, seq):
"""Return the last item of a sequence."""
try:
return next(iter(reversed(seq)))
except StopIteration:
return environment.undefined('No last item, sequence was empty.')
|
[
"def",
"do_last",
"(",
"environment",
",",
"seq",
")",
":",
"try",
":",
"return",
"next",
"(",
"iter",
"(",
"reversed",
"(",
"seq",
")",
")",
")",
"except",
"StopIteration",
":",
"return",
"environment",
".",
"undefined",
"(",
"'No last item, sequence was empty.'",
")"
] |
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/3rdparty/jinja2/filters.py#L355-L360
|
||
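`do_last` above leans on `next(iter(reversed(seq)))`, which avoids copying but requires `seq` to support `reversed()` (lists and tuples qualify, bare generators do not). A quick check of both paths, with a plain string standing in for Jinja2's `environment.undefined`:

```python
def do_last(seq, undefined="<undefined>"):
    # Mirrors the filter: last item without copying the sequence.
    try:
        return next(iter(reversed(seq)))
    except StopIteration:
        return undefined

print(do_last([1, 2, 3]))  # 3
print(do_last([]))         # <undefined>
# do_last(x for x in range(3)) would raise TypeError:
# generators do not support reversed().
```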
lmnt-com/haste
|
5f704f6132c4aacf2310120b7a1c8d0eea441ab9
|
frameworks/pytorch/layer_norm_lstm.py
|
python
|
LayerNormLSTM.forward
|
(self, input, state=None, lengths=None)
|
return output, state
|
Runs a forward pass of the LSTM layer.
Arguments:
input: Tensor, a batch of input sequences to pass through the LSTM.
Dimensions (seq_len, batch_size, input_size) if `batch_first` is
`False`, otherwise (batch_size, seq_len, input_size).
lengths: (optional) Tensor, list of sequence lengths for each batch
element. Dimension (batch_size). This argument may be omitted if
all batch elements are unpadded and have the same sequence length.
Returns:
output: Tensor, the output of the LSTM layer. Dimensions
(seq_len, batch_size, hidden_size) if `batch_first` is `False` (default)
or (batch_size, seq_len, hidden_size) if `batch_first` is `True`. Note
that if `lengths` was specified, the `output` tensor will not be
masked. It's the caller's responsibility to either not use the invalid
entries or to mask them out before using them.
(h_n, c_n): the hidden and cell states, respectively, for the last
sequence item. Dimensions (1, batch_size, hidden_size).
|
Runs a forward pass of the LSTM layer.
|
[
"Runs",
"a",
"forward",
"pass",
"of",
"the",
"LSTM",
"layer",
"."
] |
def forward(self, input, state=None, lengths=None):
"""
Runs a forward pass of the LSTM layer.
Arguments:
input: Tensor, a batch of input sequences to pass through the LSTM.
Dimensions (seq_len, batch_size, input_size) if `batch_first` is
`False`, otherwise (batch_size, seq_len, input_size).
lengths: (optional) Tensor, list of sequence lengths for each batch
element. Dimension (batch_size). This argument may be omitted if
all batch elements are unpadded and have the same sequence length.
Returns:
output: Tensor, the output of the LSTM layer. Dimensions
(seq_len, batch_size, hidden_size) if `batch_first` is `False` (default)
or (batch_size, seq_len, hidden_size) if `batch_first` is `True`. Note
that if `lengths` was specified, the `output` tensor will not be
masked. It's the caller's responsibility to either not use the invalid
entries or to mask them out before using them.
(h_n, c_n): the hidden and cell states, respectively, for the last
sequence item. Dimensions (1, batch_size, hidden_size).
"""
input = self._permute(input)
state_shape = [1, input.shape[1], self.hidden_size]
state_shape = (state_shape, state_shape)
h0, c0 = self._get_state(input, state, state_shape)
h, c = self._impl(input, (h0[0], c0[0]), self._get_zoneout_mask(input))
state = self._get_final_state((h, c), lengths)
output = self._permute(h[1:])
return output, state
|
[
"def",
"forward",
"(",
"self",
",",
"input",
",",
"state",
"=",
"None",
",",
"lengths",
"=",
"None",
")",
":",
"input",
"=",
"self",
".",
"_permute",
"(",
"input",
")",
"state_shape",
"=",
"[",
"1",
",",
"input",
".",
"shape",
"[",
"1",
"]",
",",
"self",
".",
"hidden_size",
"]",
"state_shape",
"=",
"(",
"state_shape",
",",
"state_shape",
")",
"h0",
",",
"c0",
"=",
"self",
".",
"_get_state",
"(",
"input",
",",
"state",
",",
"state_shape",
")",
"h",
",",
"c",
"=",
"self",
".",
"_impl",
"(",
"input",
",",
"(",
"h0",
"[",
"0",
"]",
",",
"c0",
"[",
"0",
"]",
")",
",",
"self",
".",
"_get_zoneout_mask",
"(",
"input",
")",
")",
"state",
"=",
"self",
".",
"_get_final_state",
"(",
"(",
"h",
",",
"c",
")",
",",
"lengths",
")",
"output",
"=",
"self",
".",
"_permute",
"(",
"h",
"[",
"1",
":",
"]",
")",
"return",
"output",
",",
"state"
] |
https://github.com/lmnt-com/haste/blob/5f704f6132c4aacf2310120b7a1c8d0eea441ab9/frameworks/pytorch/layer_norm_lstm.py#L184-L213
|
|
Polidea/SiriusObfuscator
|
b0e590d8130e97856afe578869b83a209e2b19be
|
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
|
python
|
SBExecutionContext.GetProcess
|
(self)
|
return _lldb.SBExecutionContext_GetProcess(self)
|
GetProcess(self) -> SBProcess
|
GetProcess(self) -> SBProcess
|
[
"GetProcess",
"(",
"self",
")",
"-",
">",
"SBProcess"
] |
def GetProcess(self):
"""GetProcess(self) -> SBProcess"""
return _lldb.SBExecutionContext_GetProcess(self)
|
[
"def",
"GetProcess",
"(",
"self",
")",
":",
"return",
"_lldb",
".",
"SBExecutionContext_GetProcess",
"(",
"self",
")"
] |
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L4021-L4023
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/json/__init__.py
|
python
|
dumps
|
(obj, *, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
default=None, sort_keys=False, **kw)
|
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, default=default, sort_keys=sort_keys,
**kw).encode(obj)
|
Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped
instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value can contain non-ASCII
characters if they appear in strings contained in ``obj``. Otherwise, all
such characters are escaped in JSON strings.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If specified, ``separators`` should be an ``(item_separator, key_separator)``
tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
``(',', ': ')`` otherwise. To get the most compact JSON representation,
you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *sort_keys* is true (default: ``False``), then the output of
dictionaries will be sorted by key.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg; otherwise ``JSONEncoder`` is used.
|
Serialize ``obj`` to a JSON formatted ``str``.
|
[
"Serialize",
"obj",
"to",
"a",
"JSON",
"formatted",
"str",
"."
] |
def dumps(obj, *, skipkeys=False, ensure_ascii=True, check_circular=True,
allow_nan=True, cls=None, indent=None, separators=None,
default=None, sort_keys=False, **kw):
"""Serialize ``obj`` to a JSON formatted ``str``.
If ``skipkeys`` is true then ``dict`` keys that are not basic types
(``str``, ``int``, ``float``, ``bool``, ``None``) will be skipped
instead of raising a ``TypeError``.
If ``ensure_ascii`` is false, then the return value can contain non-ASCII
characters if they appear in strings contained in ``obj``. Otherwise, all
such characters are escaped in JSON strings.
If ``check_circular`` is false, then the circular reference check
for container types will be skipped and a circular reference will
result in an ``OverflowError`` (or worse).
If ``allow_nan`` is false, then it will be a ``ValueError`` to
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
strict compliance of the JSON specification, instead of using the
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
If ``indent`` is a non-negative integer, then JSON array elements and
object members will be pretty-printed with that indent level. An indent
level of 0 will only insert newlines. ``None`` is the most compact
representation.
If specified, ``separators`` should be an ``(item_separator, key_separator)``
tuple. The default is ``(', ', ': ')`` if *indent* is ``None`` and
``(',', ': ')`` otherwise. To get the most compact JSON representation,
you should specify ``(',', ':')`` to eliminate whitespace.
``default(obj)`` is a function that should return a serializable version
of obj or raise TypeError. The default simply raises TypeError.
If *sort_keys* is true (default: ``False``), then the output of
dictionaries will be sorted by key.
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
``.default()`` method to serialize additional types), specify it with
the ``cls`` kwarg; otherwise ``JSONEncoder`` is used.
"""
# cached encoder
if (not skipkeys and ensure_ascii and
check_circular and allow_nan and
cls is None and indent is None and separators is None and
default is None and not sort_keys and not kw):
return _default_encoder.encode(obj)
if cls is None:
cls = JSONEncoder
return cls(
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
separators=separators, default=default, sort_keys=sort_keys,
**kw).encode(obj)
|
[
"def",
"dumps",
"(",
"obj",
",",
"*",
",",
"skipkeys",
"=",
"False",
",",
"ensure_ascii",
"=",
"True",
",",
"check_circular",
"=",
"True",
",",
"allow_nan",
"=",
"True",
",",
"cls",
"=",
"None",
",",
"indent",
"=",
"None",
",",
"separators",
"=",
"None",
",",
"default",
"=",
"None",
",",
"sort_keys",
"=",
"False",
",",
"*",
"*",
"kw",
")",
":",
"# cached encoder",
"if",
"(",
"not",
"skipkeys",
"and",
"ensure_ascii",
"and",
"check_circular",
"and",
"allow_nan",
"and",
"cls",
"is",
"None",
"and",
"indent",
"is",
"None",
"and",
"separators",
"is",
"None",
"and",
"default",
"is",
"None",
"and",
"not",
"sort_keys",
"and",
"not",
"kw",
")",
":",
"return",
"_default_encoder",
".",
"encode",
"(",
"obj",
")",
"if",
"cls",
"is",
"None",
":",
"cls",
"=",
"JSONEncoder",
"return",
"cls",
"(",
"skipkeys",
"=",
"skipkeys",
",",
"ensure_ascii",
"=",
"ensure_ascii",
",",
"check_circular",
"=",
"check_circular",
",",
"allow_nan",
"=",
"allow_nan",
",",
"indent",
"=",
"indent",
",",
"separators",
"=",
"separators",
",",
"default",
"=",
"default",
",",
"sort_keys",
"=",
"sort_keys",
",",
"*",
"*",
"kw",
")",
".",
"encode",
"(",
"obj",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/json/__init__.py#L183-L238
|
|
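The all-defaults fast path in `dumps` above reuses the module-level `_default_encoder` rather than building a `JSONEncoder` per call; any non-default argument routes through a fresh encoder. The `separators` and `indent` knobs from the docstring in action:

```python
import json

data = {"a": [1, 2], "b": True}
print(json.dumps(data))                            # {"a": [1, 2], "b": true}
print(json.dumps(data, separators=(",", ":")))     # {"a":[1,2],"b":true}
print(json.dumps(data, indent=2, sort_keys=True))  # pretty-printed, sorted keys
```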
Z3Prover/z3
|
d745d03afdfdf638d66093e2bfbacaf87187f35b
|
src/api/python/z3/z3.py
|
python
|
Probe.__le__
|
(self, other)
|
return Probe(Z3_probe_le(self.ctx.ref(), self.probe, _to_probe(other, self.ctx).probe), self.ctx)
|
Return a probe that evaluates to "true" when the value returned by `self`
is less than or equal to the value returned by `other`.
>>> p = Probe('size') <= 2
>>> x = Int('x')
>>> g = Goal()
>>> g.add(x > 0)
>>> g.add(x < 10)
>>> p(g)
1.0
|
Return a probe that evaluates to "true" when the value returned by `self`
is less than or equal to the value returned by `other`.
|
[
"Return",
"a",
"probe",
"that",
"evaluates",
"to",
"true",
"when",
"the",
"value",
"returned",
"by",
"self",
"is",
"less",
"than",
"or",
"equal",
"to",
"the",
"value",
"returned",
"by",
"other",
"."
] |
def __le__(self, other):
"""Return a probe that evaluates to "true" when the value returned by `self`
is less than or equal to the value returned by `other`.
>>> p = Probe('size') <= 2
>>> x = Int('x')
>>> g = Goal()
>>> g.add(x > 0)
>>> g.add(x < 10)
>>> p(g)
1.0
"""
return Probe(Z3_probe_le(self.ctx.ref(), self.probe, _to_probe(other, self.ctx).probe), self.ctx)
|
[
"def",
"__le__",
"(",
"self",
",",
"other",
")",
":",
"return",
"Probe",
"(",
"Z3_probe_le",
"(",
"self",
".",
"ctx",
".",
"ref",
"(",
")",
",",
"self",
".",
"probe",
",",
"_to_probe",
"(",
"other",
",",
"self",
".",
"ctx",
")",
".",
"probe",
")",
",",
"self",
".",
"ctx",
")"
] |
https://github.com/Z3Prover/z3/blob/d745d03afdfdf638d66093e2bfbacaf87187f35b/src/api/python/z3/z3.py#L8465-L8477
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_gdi.py
|
python
|
DCClipper.__init__
|
(self, *args)
|
__init__(self, DC dc, Region r) -> DCClipper
__init__(self, DC dc, Rect r) -> DCClipper
__init__(self, DC dc, int x, int y, int w, int h) -> DCClipper
wx.wxDCClipper sets the DC's clipping region when it is constructed,
and then automatically destroys the clipping region when the clipper
goes out of scope.
|
__init__(self, DC dc, Region r) -> DCClipper
__init__(self, DC dc, Rect r) -> DCClipper
__init__(self, DC dc, int x, int y, int w, int h) -> DCClipper
|
[
"__init__",
"(",
"self",
"DC",
"dc",
"Region",
"r",
")",
"-",
">",
"DCClipper",
"__init__",
"(",
"self",
"DC",
"dc",
"Rect",
"r",
")",
"-",
">",
"DCClipper",
"__init__",
"(",
"self",
"DC",
"dc",
"int",
"x",
"int",
"y",
"int",
"w",
"int",
"h",
")",
"-",
">",
"DCClipper"
] |
def __init__(self, *args):
"""
__init__(self, DC dc, Region r) -> DCClipper
__init__(self, DC dc, Rect r) -> DCClipper
__init__(self, DC dc, int x, int y, int w, int h) -> DCClipper
wx.wxDCClipper sets the DC's clipping region when it is constructed,
and then automatically destroys the clipping region when the clipper
goes out of scope.
"""
_gdi_.DCClipper_swiginit(self,_gdi_.new_DCClipper(*args))
|
[
"def",
"__init__",
"(",
"self",
",",
"*",
"args",
")",
":",
"_gdi_",
".",
"DCClipper_swiginit",
"(",
"self",
",",
"_gdi_",
".",
"new_DCClipper",
"(",
"*",
"args",
")",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_gdi.py#L4941-L4951
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/s3transfer/utils.py
|
python
|
CountCallbackInvoker.increment
|
(self)
|
Increment the count by one
|
Increment the count by one
|
[
"Increment",
"the",
"count",
"by",
"one"
] |
def increment(self):
"""Increment the count by one"""
with self._lock:
if self._is_finalized:
raise RuntimeError(
'Counter has been finalized it can no longer be '
'incremented.'
)
self._count += 1
|
[
"def",
"increment",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"self",
".",
"_is_finalized",
":",
"raise",
"RuntimeError",
"(",
"'Counter has been finalized it can no longer be '",
"'incremented.'",
")",
"self",
".",
"_count",
"+=",
"1"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/site-packages/s3transfer/utils.py#L209-L217
|
||
benoitsteiner/tensorflow-opencl
|
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
|
tensorflow/contrib/specs/python/summaries.py
|
python
|
tf_print
|
(x, depth=0, finished=None, printer=print)
|
A simple print function for a TensorFlow graph.
Args:
x: a tf.Tensor or tf.Operation
depth: current printing depth
finished: set of nodes already output
printer: print function to use
Returns:
Total number of parameters found in the
subtree.
|
A simple print function for a TensorFlow graph.
|
[
"A",
"simple",
"print",
"function",
"for",
"a",
"TensorFlow",
"graph",
"."
] |
def tf_print(x, depth=0, finished=None, printer=print):
"""A simple print function for a TensorFlow graph.
Args:
x: a tf.Tensor or tf.Operation
depth: current printing depth
finished: set of nodes already output
printer: print function to use
Returns:
Total number of parameters found in the
subtree.
"""
if finished is None:
finished = set()
if isinstance(x, ops.Tensor):
shape = x.get_shape().as_list()
x = x.op
else:
shape = ""
if x.type == "Identity":
x = x.inputs[0].op
if x in finished:
printer("%s<%s> %s %s" % (" " * depth, x.name, x.type, shape))
return
finished |= {x}
printer("%s%s %s %s" % (" " * depth, x.name, x.type, shape))
if not _truncate_structure(x):
for y in x.inputs:
tf_print(y, depth + 1, finished, printer=printer)
|
[
"def",
"tf_print",
"(",
"x",
",",
"depth",
"=",
"0",
",",
"finished",
"=",
"None",
",",
"printer",
"=",
"print",
")",
":",
"if",
"finished",
"is",
"None",
":",
"finished",
"=",
"set",
"(",
")",
"if",
"isinstance",
"(",
"x",
",",
"ops",
".",
"Tensor",
")",
":",
"shape",
"=",
"x",
".",
"get_shape",
"(",
")",
".",
"as_list",
"(",
")",
"x",
"=",
"x",
".",
"op",
"else",
":",
"shape",
"=",
"\"\"",
"if",
"x",
".",
"type",
"==",
"\"Identity\"",
":",
"x",
"=",
"x",
".",
"inputs",
"[",
"0",
"]",
".",
"op",
"if",
"x",
"in",
"finished",
":",
"printer",
"(",
"\"%s<%s> %s %s\"",
"%",
"(",
"\" \"",
"*",
"depth",
",",
"x",
".",
"name",
",",
"x",
".",
"type",
",",
"shape",
")",
")",
"return",
"finished",
"|=",
"{",
"x",
"}",
"printer",
"(",
"\"%s%s %s %s\"",
"%",
"(",
"\" \"",
"*",
"depth",
",",
"x",
".",
"name",
",",
"x",
".",
"type",
",",
"shape",
")",
")",
"if",
"not",
"_truncate_structure",
"(",
"x",
")",
":",
"for",
"y",
"in",
"x",
".",
"inputs",
":",
"tf_print",
"(",
"y",
",",
"depth",
"+",
"1",
",",
"finished",
",",
"printer",
"=",
"printer",
")"
] |
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/specs/python/summaries.py#L114-L144
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_controls.py
|
python
|
CommandLinkButton.Create
|
(*args, **kwargs)
|
return _controls_.CommandLinkButton_Create(*args, **kwargs)
|
Create(self, Window parent, int id=-1, String mainLabel=wxEmptyString,
String note=wxEmptyString, Point pos=DefaultPosition,
Size size=DefaultSize, long style=0,
Validator validator=DefaultValidator,
String name=wxButtonNameStr) -> bool
|
Create(self, Window parent, int id=-1, String mainLabel=wxEmptyString,
String note=wxEmptyString, Point pos=DefaultPosition,
Size size=DefaultSize, long style=0,
Validator validator=DefaultValidator,
String name=wxButtonNameStr) -> bool
|
[
"Create",
"(",
"self",
"Window",
"parent",
"int",
"id",
"=",
"-",
"1",
"String",
"mainLabel",
"=",
"wxEmptyString",
"String",
"note",
"=",
"wxEmptyString",
"Point",
"pos",
"=",
"DefaultPosition",
"Size",
"size",
"=",
"DefaultSize",
"long",
"style",
"=",
"0",
"Validator",
"validator",
"=",
"DefaultValidator",
"String",
"name",
"=",
"wxButtonNameStr",
")",
"-",
">",
"bool"
] |
def Create(*args, **kwargs):
"""
Create(self, Window parent, int id=-1, String mainLabel=wxEmptyString,
String note=wxEmptyString, Point pos=DefaultPosition,
Size size=DefaultSize, long style=0,
Validator validator=DefaultValidator,
String name=wxButtonNameStr) -> bool
"""
return _controls_.CommandLinkButton_Create(*args, **kwargs)
|
[
"def",
"Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_controls_",
".",
"CommandLinkButton_Create",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L7827-L7835
|
|
priyankchheda/algorithms
|
c361aa9071573fa9966d5b02d05e524815abcf2b
|
fenwick_tree/fenwick_tree.py
|
python
|
FenwickTree.get_range_sum
|
(self, left, right)
|
return self.get_sum(right) - self.get_sum(left - 1)
|
calculates the sum from the given range
|
calculates the sum from the given range
|
[
"calculates",
"the",
"sum",
"from",
"the",
"given",
"range"
] |
def get_range_sum(self, left, right):
""" calculates the sum from the given range """
return self.get_sum(right) - self.get_sum(left - 1)
|
[
"def",
"get_range_sum",
"(",
"self",
",",
"left",
",",
"right",
")",
":",
"return",
"self",
".",
"get_sum",
"(",
"right",
")",
"-",
"self",
".",
"get_sum",
"(",
"left",
"-",
"1",
")"
] |
https://github.com/priyankchheda/algorithms/blob/c361aa9071573fa9966d5b02d05e524815abcf2b/fenwick_tree/fenwick_tree.py#L46-L48
|
|
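`get_range_sum` above is the prefix-sum identity `sum(left..right) = prefix(right) - prefix(left - 1)`, hence the two `get_sum` calls. A self-contained sketch; the `update`/`get_sum` bodies here are the textbook binary-indexed-tree versions, assumed rather than copied from the record's repository:

```python
class FenwickTree:
    def __init__(self, size):
        self.tree = [0] * (size + 1)  # 1-indexed internally

    def update(self, index, delta):
        # Add delta at 1-based position `index`.
        while index < len(self.tree):
            self.tree[index] += delta
            index += index & (-index)

    def get_sum(self, index):
        # Prefix sum of positions 1..index.
        total = 0
        while index > 0:
            total += self.tree[index]
            index -= index & (-index)
        return total

    def get_range_sum(self, left, right):
        return self.get_sum(right) - self.get_sum(left - 1)

ft = FenwickTree(8)
for i, v in enumerate([3, 2, -1, 6, 5, 4, -3, 3], start=1):
    ft.update(i, v)
print(ft.get_range_sum(2, 5))  # 2 + (-1) + 6 + 5 = 12
```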
root-project/root
|
fcd3583bb14852bf2e8cd2415717cbaac0e75896
|
interpreter/llvm/src/tools/clang/bindings/python/clang/cindex.py
|
python
|
FileInclusion.is_input_file
|
(self)
|
return self.depth == 0
|
True if the included file is the input file.
|
True if the included file is the input file.
|
[
"True",
"if",
"the",
"included",
"file",
"is",
"the",
"input",
"file",
"."
] |
def is_input_file(self):
"""True if the included file is the input file."""
return self.depth == 0
|
[
"def",
"is_input_file",
"(",
"self",
")",
":",
"return",
"self",
".",
"depth",
"==",
"0"
] |
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/interpreter/llvm/src/tools/clang/bindings/python/clang/cindex.py#L3140-L3142
|
|
openMSX/openMSX
|
c9cfbc0a2a2baaf2c4513c87543fe29bfe8cf806
|
build/extract.py
|
python
|
extract
|
(archivePath, destDir, rename = None)
|
Extract the given archive to the given directory.
If a rename function is given, it is called with the output path relative
to the destination directory; the value returned by the rename function is
used as the actual relative destination file path.
This function sets file ownership and permissions like is done in newly
created files and ignores the ownership and permissions from the archive,
since we are not restoring a backup.
|
Extract the given archive to the given directory.
If a rename function is given, it is called with the output path relative
to the destination directory; the value returned by the rename function is
used as the actual relative destination file path.
This function sets file ownership and permissions like is done in newly
created files and ignores the ownership and permissions from the archive,
since we are not restoring a backup.
|
[
"Extract",
"the",
"given",
"archive",
"to",
"the",
"given",
"directory",
".",
"If",
"a",
"rename",
"function",
"is",
"given",
"it",
"is",
"called",
"with",
"the",
"output",
"path",
"relative",
"to",
"the",
"destination",
"directory",
";",
"the",
"value",
"returned",
"by",
"the",
"rename",
"function",
"is",
"used",
"as",
"the",
"actual",
"relative",
"destination",
"file",
"path",
".",
"This",
"function",
"sets",
"file",
"ownership",
"and",
"permissions",
"like",
"is",
"done",
"in",
"newly",
"created",
"files",
"and",
"ignores",
"the",
"ownership",
"and",
"permissions",
"from",
"the",
"archive",
"since",
"we",
"are",
"not",
"restoring",
"a",
"backup",
"."
] |
def extract(archivePath, destDir, rename = None):
'''Extract the given archive to the given directory.
If a rename function is given, it is called with the output path relative
to the destination directory; the value returned by the rename function is
used as the actual relative destination file path.
This function sets file ownership and permissions like is done in newly
created files and ignores the ownership and permissions from the archive,
since we are not restoring a backup.
'''
absDestDir = abspath(destDir) + sep
if not isdir(absDestDir):
raise ValueError(
'Destination directory "%s" does not exist' % absDestDir
)
with TarFile.open(archivePath, errorlevel=2) as tar:
for member in tar.getmembers():
absMemberPath = abspath(joinpath(absDestDir, member.name))
if member.isdir():
absMemberPath += sep
if not absMemberPath.startswith(absDestDir):
raise ValueError(
'Refusing to extract tar entry "%s" '
'outside destination directory'
% member.name
)
if rename:
absMemberPath = absDestDir + rename(
absMemberPath[len(absDestDir) : ]
)
if member.isfile():
mode = S_IRWXU | S_IRWXG | S_IRWXO
if not (member.mode & S_IXUSR):
mode &= ~(S_IXUSR | S_IXGRP | S_IXOTH)
fd = osopen(absMemberPath, O_CREAT | O_WRONLY | O_BINARY, mode)
with fdopen(fd, 'wb') as out:
inp = tar.extractfile(member)
bytesLeft = member.size
while bytesLeft > 0:
buf = inp.read(bufSize)
out.write(buf)
bytesLeft -= len(buf)
elif member.isdir():
if not isdir(absMemberPath):
mkdir(absMemberPath)
elif member.issym():
try:
symlink(member.linkname, absMemberPath)
except OSError as ex:
print(
'WARNING: Skipping symlink creation: %s -> %s: %s'
% (absMemberPath, member.linkname, ex)
)
else:
raise ValueError(
'Cannot extract tar entry "%s": '
'not a regular file, symlink or directory'
% member.name
)
# Set file/directory modification time to match the archive.
# For example autotools track dependencies between archived files
# and will attempt to regenerate them if the time stamps indicate
# one is older than the other.
# Note: Apparently Python 2.5's utime() cannot set timestamps on
# directories in Windows.
if member.isfile() or (
member.isdir() and not hostOS.startswith('mingw')
):
utime(absMemberPath, (member.mtime, member.mtime))
|
[
"def",
"extract",
"(",
"archivePath",
",",
"destDir",
",",
"rename",
"=",
"None",
")",
":",
"absDestDir",
"=",
"abspath",
"(",
"destDir",
")",
"+",
"sep",
"if",
"not",
"isdir",
"(",
"absDestDir",
")",
":",
"raise",
"ValueError",
"(",
"'Destination directory \"%s\" does not exist'",
"%",
"absDestDir",
")",
"with",
"TarFile",
".",
"open",
"(",
"archivePath",
",",
"errorlevel",
"=",
"2",
")",
"as",
"tar",
":",
"for",
"member",
"in",
"tar",
".",
"getmembers",
"(",
")",
":",
"absMemberPath",
"=",
"abspath",
"(",
"joinpath",
"(",
"absDestDir",
",",
"member",
".",
"name",
")",
")",
"if",
"member",
".",
"isdir",
"(",
")",
":",
"absMemberPath",
"+=",
"sep",
"if",
"not",
"absMemberPath",
".",
"startswith",
"(",
"absDestDir",
")",
":",
"raise",
"ValueError",
"(",
"'Refusing to extract tar entry \"%s\" '",
"'outside destination directory'",
"%",
"member",
".",
"name",
")",
"if",
"rename",
":",
"absMemberPath",
"=",
"absDestDir",
"+",
"rename",
"(",
"absMemberPath",
"[",
"len",
"(",
"absDestDir",
")",
":",
"]",
")",
"if",
"member",
".",
"isfile",
"(",
")",
":",
"mode",
"=",
"S_IRWXU",
"|",
"S_IRWXG",
"|",
"S_IRWXO",
"if",
"not",
"(",
"member",
".",
"mode",
"&",
"S_IXUSR",
")",
":",
"mode",
"&=",
"~",
"(",
"S_IXUSR",
"|",
"S_IXGRP",
"|",
"S_IXOTH",
")",
"fd",
"=",
"osopen",
"(",
"absMemberPath",
",",
"O_CREAT",
"|",
"O_WRONLY",
"|",
"O_BINARY",
",",
"mode",
")",
"with",
"fdopen",
"(",
"fd",
",",
"'wb'",
")",
"as",
"out",
":",
"inp",
"=",
"tar",
".",
"extractfile",
"(",
"member",
")",
"bytesLeft",
"=",
"member",
".",
"size",
"while",
"bytesLeft",
">",
"0",
":",
"buf",
"=",
"inp",
".",
"read",
"(",
"bufSize",
")",
"out",
".",
"write",
"(",
"buf",
")",
"bytesLeft",
"-=",
"len",
"(",
"buf",
")",
"elif",
"member",
".",
"isdir",
"(",
")",
":",
"if",
"not",
"isdir",
"(",
"absMemberPath",
")",
":",
"mkdir",
"(",
"absMemberPath",
")",
"elif",
"member",
".",
"issym",
"(",
")",
":",
"try",
":",
"symlink",
"(",
"member",
".",
"linkname",
",",
"absMemberPath",
")",
"except",
"OSError",
"as",
"ex",
":",
"print",
"(",
"'WARNING: Skipping symlink creation: %s -> %s: %s'",
"%",
"(",
"absMemberPath",
",",
"member",
".",
"linkname",
",",
"ex",
")",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Cannot extract tar entry \"%s\": '",
"'not a regular file, symlink or directory'",
"%",
"member",
".",
"name",
")",
"# Set file/directory modification time to match the archive.",
"# For example autotools track dependencies between archived files",
"# and will attempt to regenerate them if the time stamps indicate",
"# one is older than the other.",
"# Note: Apparently Python 2.5's utime() cannot set timestamps on",
"# directories in Windows.",
"if",
"member",
".",
"isfile",
"(",
")",
"or",
"(",
"member",
".",
"isdir",
"(",
")",
"and",
"not",
"hostOS",
".",
"startswith",
"(",
"'mingw'",
")",
")",
":",
"utime",
"(",
"absMemberPath",
",",
"(",
"member",
".",
"mtime",
",",
"member",
".",
"mtime",
")",
")"
] |
https://github.com/openMSX/openMSX/blob/c9cfbc0a2a2baaf2c4513c87543fe29bfe8cf806/build/extract.py#L26-L95
|
||
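The `startswith(absDestDir)` test in `extract` above is the path-traversal guard: any member whose normalized absolute path escapes the destination (a `../../etc/passwd` style entry) is rejected before anything is written. The core of that check in isolation, assuming POSIX-style paths:

```python
from os import sep
from os.path import abspath, join

def is_safe_member(dest_dir, member_name):
    # Normalize, then require the result to stay inside dest_dir.
    abs_dest = abspath(dest_dir) + sep
    abs_member = abspath(join(abs_dest, member_name))
    return abs_member.startswith(abs_dest)

print(is_safe_member("/tmp/out", "docs/readme.txt"))   # True
print(is_safe_member("/tmp/out", "../../etc/passwd"))  # False
```

Appending `sep` before the comparison also rules out sibling-directory prefixes such as `/tmp/out2` matching `/tmp/out`.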
ValveSoftware/source-sdk-2013
|
0d8dceea4310fde5706b3ce1c70609d72a38efdf
|
sp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/service.py
|
python
|
RpcController.IsCanceled
|
(self)
|
Checks if the client cancelled the RPC.
If true, indicates that the client cancelled the RPC, so the server may
as well give up on replying to it. The server should still call the
final "done" callback.
|
Checks if the client cancelled the RPC.
|
[
"Checks",
"if",
"the",
"client",
"cancelled",
"the",
"RPC",
"."
] |
def IsCanceled(self):
"""Checks if the client cancelled the RPC.
If true, indicates that the client cancelled the RPC, so the server may
as well give up on replying to it. The server should still call the
final "done" callback.
"""
raise NotImplementedError
|
[
"def",
"IsCanceled",
"(",
"self",
")",
":",
"raise",
"NotImplementedError"
] |
https://github.com/ValveSoftware/source-sdk-2013/blob/0d8dceea4310fde5706b3ce1c70609d72a38efdf/sp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/service.py#L178-L185
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/datetime.py
|
python
|
timezone.__repr__
|
(self)
|
return "%s.%s(%r, %r)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._offset, self._name)
|
Convert to formal string, for repr().
>>> tz = timezone.utc
>>> repr(tz)
'datetime.timezone.utc'
>>> tz = timezone(timedelta(hours=-5), 'EST')
>>> repr(tz)
"datetime.timezone(datetime.timedelta(-1, 68400), 'EST')"
|
Convert to formal string, for repr().
|
[
"Convert",
"to",
"formal",
"string",
"for",
"repr",
"()",
"."
] |
def __repr__(self):
"""Convert to formal string, for repr().
>>> tz = timezone.utc
>>> repr(tz)
'datetime.timezone.utc'
>>> tz = timezone(timedelta(hours=-5), 'EST')
>>> repr(tz)
"datetime.timezone(datetime.timedelta(-1, 68400), 'EST')"
"""
if self is self.utc:
return 'datetime.timezone.utc'
if self._name is None:
return "%s.%s(%r)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._offset)
return "%s.%s(%r, %r)" % (self.__class__.__module__,
self.__class__.__qualname__,
self._offset, self._name)
|
[
"def",
"__repr__",
"(",
"self",
")",
":",
"if",
"self",
"is",
"self",
".",
"utc",
":",
"return",
"'datetime.timezone.utc'",
"if",
"self",
".",
"_name",
"is",
"None",
":",
"return",
"\"%s.%s(%r)\"",
"%",
"(",
"self",
".",
"__class__",
".",
"__module__",
",",
"self",
".",
"__class__",
".",
"__qualname__",
",",
"self",
".",
"_offset",
")",
"return",
"\"%s.%s(%r, %r)\"",
"%",
"(",
"self",
".",
"__class__",
".",
"__module__",
",",
"self",
".",
"__class__",
".",
"__qualname__",
",",
"self",
".",
"_offset",
",",
"self",
".",
"_name",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/datetime.py#L2177-L2195
|
|
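The three branches of `__repr__` above keep the representation round-trippable: `eval(repr(tz))` rebuilds an equal timezone for the utc singleton, the offset-only form, and the offset-plus-name form alike. A quick check (note the doctest's `(-1, 68400)` rendering is the older `timedelta` repr; recent Pythons print keyword arguments):

```python
import datetime
from datetime import timezone, timedelta

for tz in (timezone.utc,
           timezone(timedelta(hours=5, minutes=30)),
           timezone(timedelta(hours=-5), 'EST')):
    text = repr(tz)
    clone = eval(text)  # the repr references the datetime module by name
    print(text, clone == tz)  # each line ends with True
```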
hughperkins/tf-coriander
|
970d3df6c11400ad68405f22b0c42a52374e94ca
|
tensorflow/python/ops/math_ops.py
|
python
|
saturate_cast
|
(value, dtype, name=None)
|
Performs a safe saturating cast of `value` to `dtype`.
This function casts the input to `dtype` without applying any scaling. If
there is a danger that values would over or underflow in the cast, this op
applies the appropriate clamping before the cast.
Args:
value: A `Tensor`.
dtype: The desired output `DType`.
name: A name for the operation (optional).
Returns:
`value` safely cast to `dtype`.
|
Performs a safe saturating cast of `value` to `dtype`.
|
[
"Performs",
"a",
"safe",
"saturating",
"cast",
"of",
"value",
"to",
"dtype",
"."
] |
def saturate_cast(value, dtype, name=None):
"""Performs a safe saturating cast of `value` to `dtype`.
This function casts the input to `dtype` without applying any scaling. If
there is a danger that values would over or underflow in the cast, this op
applies the appropriate clamping before the cast.
Args:
value: A `Tensor`.
dtype: The desired output `DType`.
name: A name for the operation (optional).
Returns:
`value` safely cast to `dtype`.
"""
# When casting to a type with smaller representable range, clamp.
# Note that this covers casting to unsigned types as well.
with ops.name_scope(name, "saturate_cast", [value]) as name:
value = ops.convert_to_tensor(value, name="value")
dtype = dtypes.as_dtype(dtype).base_dtype
if value.dtype.min < dtype.min:
value = gen_math_ops.maximum(value, ops.convert_to_tensor(
dtype.min, dtype=value.dtype, name="min"))
if value.dtype.max > dtype.max:
value = gen_math_ops.minimum(value, ops.convert_to_tensor(
dtype.max, dtype=value.dtype, name="max"))
return cast(value, dtype, name=name)
|
[
"def",
"saturate_cast",
"(",
"value",
",",
"dtype",
",",
"name",
"=",
"None",
")",
":",
"# When casting to a type with smaller representable range, clamp.",
"# Note that this covers casting to unsigned types as well.",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"saturate_cast\"",
",",
"[",
"value",
"]",
")",
"as",
"name",
":",
"value",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"value",
",",
"name",
"=",
"\"value\"",
")",
"dtype",
"=",
"dtypes",
".",
"as_dtype",
"(",
"dtype",
")",
".",
"base_dtype",
"if",
"value",
".",
"dtype",
".",
"min",
"<",
"dtype",
".",
"min",
":",
"value",
"=",
"gen_math_ops",
".",
"maximum",
"(",
"value",
",",
"ops",
".",
"convert_to_tensor",
"(",
"dtype",
".",
"min",
",",
"dtype",
"=",
"value",
".",
"dtype",
",",
"name",
"=",
"\"min\"",
")",
")",
"if",
"value",
".",
"dtype",
".",
"max",
">",
"dtype",
".",
"max",
":",
"value",
"=",
"gen_math_ops",
".",
"minimum",
"(",
"value",
",",
"ops",
".",
"convert_to_tensor",
"(",
"dtype",
".",
"max",
",",
"dtype",
"=",
"value",
".",
"dtype",
",",
"name",
"=",
"\"max\"",
")",
")",
"return",
"cast",
"(",
"value",
",",
"dtype",
",",
"name",
"=",
"name",
")"
] |
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/ops/math_ops.py#L619-L645
|
||
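`saturate_cast` above clamps only on the sides where the target dtype's range is narrower, so widening casts degenerate to a plain `cast`. The same idea sketched in NumPy for integer dtypes, using `np.iinfo` for the bounds (an illustration, not the TF implementation):

```python
import numpy as np

def saturate_cast(values, dtype):
    # Clamp into the target range before casting, mirroring the TF op.
    info = np.iinfo(dtype)
    return np.clip(values, info.min, info.max).astype(dtype)

x = np.array([-300, -5, 130, 300], dtype=np.int32)
print(saturate_cast(x, np.int8))  # [-128   -5  127  127]
# A plain astype wraps around (C-cast behavior) instead of saturating:
print(x.astype(np.int8))          # [-44  -5 -126  44]
```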
windystrife/UnrealEngine_NVIDIAGameWorks
|
b50e6338a7c5b26374d66306ebc7807541ff815e
|
Engine/Source/ThirdParty/CEF3/pristine/cef_source/tools/cefbuilds/cef_html_builder.py
|
python
|
cef_html_builder.__init__
|
(self, branding='')
|
Create a new cef_html_builder object.
|
Create a new cef_html_builder object.
|
[
"Create",
"a",
"new",
"cef_html_builder",
"object",
"."
] |
def __init__(self, branding=''):
""" Create a new cef_html_builder object. """
self.clear()
self._branding = branding
|
[
"def",
"__init__",
"(",
"self",
",",
"branding",
"=",
"''",
")",
":",
"self",
".",
"clear",
"(",
")",
"self",
".",
"_branding",
"=",
"branding"
] |
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Source/ThirdParty/CEF3/pristine/cef_source/tools/cefbuilds/cef_html_builder.py#L42-L45
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/polynomial/legendre.py
|
python
|
legmul
|
(c1, c2)
|
return legadd(c0, legmulx(c1))
|
Multiply one Legendre series by another.
Returns the product of two Legendre series `c1` * `c2`. The arguments
are sequences of coefficients, from lowest order "term" to highest,
e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Legendre series coefficients representing their product.
See Also
--------
legadd, legsub, legmulx, legdiv, legpow
Notes
-----
In general, the (polynomial) product of two C-series results in terms
that are not in the Legendre polynomial basis set. Thus, to express
the product as a Legendre series, it is necessary to "reproject" the
product onto said basis set, which may produce "unintuitive" (but
correct) results; see Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2)
>>> L.legmul(c1,c2) # multiplication requires "reprojection"
array([ 4.33333333, 10.4 , 11.66666667, 3.6 ]) # may vary
|
Multiply one Legendre series by another.
|
[
"Multiply",
"one",
"Legendre",
"series",
"by",
"another",
"."
] |
def legmul(c1, c2):
"""
Multiply one Legendre series by another.
Returns the product of two Legendre series `c1` * `c2`. The arguments
are sequences of coefficients, from lowest order "term" to highest,
e.g., [1,2,3] represents the series ``P_0 + 2*P_1 + 3*P_2``.
Parameters
----------
c1, c2 : array_like
1-D arrays of Legendre series coefficients ordered from low to
high.
Returns
-------
out : ndarray
Of Legendre series coefficients representing their product.
See Also
--------
legadd, legsub, legmulx, legdiv, legpow
Notes
-----
In general, the (polynomial) product of two C-series results in terms
that are not in the Legendre polynomial basis set. Thus, to express
the product as a Legendre series, it is necessary to "reproject" the
product onto said basis set, which may produce "unintuitive" (but
correct) results; see Examples section below.
Examples
--------
>>> from numpy.polynomial import legendre as L
>>> c1 = (1,2,3)
>>> c2 = (3,2)
>>> L.legmul(c1,c2) # multiplication requires "reprojection"
array([ 4.33333333, 10.4 , 11.66666667, 3.6 ]) # may vary
"""
# s1, s2 are trimmed copies
[c1, c2] = pu.as_series([c1, c2])
if len(c1) > len(c2):
c = c2
xs = c1
else:
c = c1
xs = c2
if len(c) == 1:
c0 = c[0]*xs
c1 = 0
elif len(c) == 2:
c0 = c[0]*xs
c1 = c[1]*xs
else:
nd = len(c)
c0 = c[-2]*xs
c1 = c[-1]*xs
for i in range(3, len(c) + 1):
tmp = c0
nd = nd - 1
c0 = legsub(c[-i]*xs, (c1*(nd - 1))/nd)
c1 = legadd(tmp, (legmulx(c1)*(2*nd - 1))/nd)
return legadd(c0, legmulx(c1))
|
[
"def",
"legmul",
"(",
"c1",
",",
"c2",
")",
":",
"# s1, s2 are trimmed copies",
"[",
"c1",
",",
"c2",
"]",
"=",
"pu",
".",
"as_series",
"(",
"[",
"c1",
",",
"c2",
"]",
")",
"if",
"len",
"(",
"c1",
")",
">",
"len",
"(",
"c2",
")",
":",
"c",
"=",
"c2",
"xs",
"=",
"c1",
"else",
":",
"c",
"=",
"c1",
"xs",
"=",
"c2",
"if",
"len",
"(",
"c",
")",
"==",
"1",
":",
"c0",
"=",
"c",
"[",
"0",
"]",
"*",
"xs",
"c1",
"=",
"0",
"elif",
"len",
"(",
"c",
")",
"==",
"2",
":",
"c0",
"=",
"c",
"[",
"0",
"]",
"*",
"xs",
"c1",
"=",
"c",
"[",
"1",
"]",
"*",
"xs",
"else",
":",
"nd",
"=",
"len",
"(",
"c",
")",
"c0",
"=",
"c",
"[",
"-",
"2",
"]",
"*",
"xs",
"c1",
"=",
"c",
"[",
"-",
"1",
"]",
"*",
"xs",
"for",
"i",
"in",
"range",
"(",
"3",
",",
"len",
"(",
"c",
")",
"+",
"1",
")",
":",
"tmp",
"=",
"c0",
"nd",
"=",
"nd",
"-",
"1",
"c0",
"=",
"legsub",
"(",
"c",
"[",
"-",
"i",
"]",
"*",
"xs",
",",
"(",
"c1",
"*",
"(",
"nd",
"-",
"1",
")",
")",
"/",
"nd",
")",
"c1",
"=",
"legadd",
"(",
"tmp",
",",
"(",
"legmulx",
"(",
"c1",
")",
"*",
"(",
"2",
"*",
"nd",
"-",
"1",
")",
")",
"/",
"nd",
")",
"return",
"legadd",
"(",
"c0",
",",
"legmulx",
"(",
"c1",
")",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/polynomial/legendre.py#L461-L526
|
|
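The loop in `legmul` above runs a Clenshaw-style recurrence built on Bonnet's recursion, `n P_n(x) = (2n-1) x P_{n-1}(x) - (n-1) P_{n-2}(x)`, which is where the `(2*nd - 1)/nd` weights and `legmulx` come from. The docstring's reprojection claim is easy to cross-check against a monomial-basis multiply:

```python
import numpy as np
from numpy.polynomial import legendre as L

c1, c2 = (1, 2, 3), (3, 2)
prod = L.legmul(c1, c2)
print(prod)  # [ 4.3333 10.4 11.6667  3.6 ] up to float noise

# Cross-check: convert to the monomial basis, multiply, convert back.
p = np.polynomial.polynomial.polymul(L.leg2poly(c1), L.leg2poly(c2))
print(np.allclose(L.poly2leg(p), prod))  # True
```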
MegEngine/MegEngine
|
ce9ad07a27ec909fb8db4dd67943d24ba98fb93a
|
imperative/python/megengine/distributed/group.py
|
python
|
override_backend
|
(new_backend: str)
|
r"""Override distributed backend
Args:
new_backend: communicator backend set in this context.
|
r"""Override distributed backend
|
[
"r",
"Override",
"distributed",
"backend"
] |
def override_backend(new_backend: str):
r"""Override distributed backend
Args:
new_backend: communicator backend set in this context.
"""
global _sd
assert _sd, "please call init_process_group first"
old_backend = _sd.backend
_sd.backend = new_backend
try:
yield
finally:
_sd.backend = old_backend
|
[
"def",
"override_backend",
"(",
"new_backend",
":",
"str",
")",
":",
"global",
"_sd",
"assert",
"_sd",
",",
"\"please call init_process_group first\"",
"old_backend",
"=",
"_sd",
".",
"backend",
"_sd",
".",
"backend",
"=",
"new_backend",
"try",
":",
"yield",
"finally",
":",
"_sd",
".",
"backend",
"=",
"old_backend"
] |
https://github.com/MegEngine/MegEngine/blob/ce9ad07a27ec909fb8db4dd67943d24ba98fb93a/imperative/python/megengine/distributed/group.py#L179-L192
|
||
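`override_backend` above has a bare `yield` inside `try/finally`, the shape of a `contextlib.contextmanager` body; the decorator is presumably applied in the source but falls outside the captured snippet. The pattern in isolation, with a hypothetical module-level state object:

```python
from contextlib import contextmanager

class _State:
    backend = "auto"

_sd = _State()

@contextmanager
def override_backend(new_backend: str):
    # Swap the backend, guaranteeing restoration even if the body raises.
    old_backend = _sd.backend
    _sd.backend = new_backend
    try:
        yield
    finally:
        _sd.backend = old_backend

with override_backend("nccl"):
    print(_sd.backend)  # nccl
print(_sd.backend)      # auto
```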
ChromiumWebApps/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
tools/telemetry/third_party/png/png.py
|
python
|
Writer.write_packed
|
(self, outfile, rows)
|
return self.write_passes(outfile, rows, packed=True)
|
Write PNG file to `outfile`. The pixel data comes from `rows`
which should be in boxed row packed format. Each row should be
a sequence of packed bytes.
Technically, this method does work for interlaced images but it
is best avoided. For interlaced images, the rows should be
presented in the order that they appear in the file.
This method should not be used when the source image bit depth
is not one naturally supported by PNG; the bit depth should be
1, 2, 4, 8, or 16.
|
Write PNG file to `outfile`. The pixel data comes from `rows`
which should be in boxed row packed format. Each row should be
a sequence of packed bytes.
|
[
"Write",
"PNG",
"file",
"to",
"outfile",
".",
"The",
"pixel",
"data",
"comes",
"from",
"rows",
"which",
"should",
"be",
"in",
"boxed",
"row",
"packed",
"format",
".",
"Each",
"row",
"should",
"be",
"a",
"sequence",
"of",
"packed",
"bytes",
"."
] |
def write_packed(self, outfile, rows):
"""
Write PNG file to `outfile`. The pixel data comes from `rows`
which should be in boxed row packed format. Each row should be
a sequence of packed bytes.
Technically, this method does work for interlaced images but it
is best avoided. For interlaced images, the rows should be
presented in the order that they appear in the file.
This method should not be used when the source image bit depth
is not one naturally supported by PNG; the bit depth should be
1, 2, 4, 8, or 16.
"""
if self.rescale:
raise Error("write_packed method not suitable for bit depth %d" %
self.rescale[0])
return self.write_passes(outfile, rows, packed=True)
|
[
"def",
"write_packed",
"(",
"self",
",",
"outfile",
",",
"rows",
")",
":",
"if",
"self",
".",
"rescale",
":",
"raise",
"Error",
"(",
"\"write_packed method not suitable for bit depth %d\"",
"%",
"self",
".",
"rescale",
"[",
"0",
"]",
")",
"return",
"self",
".",
"write_passes",
"(",
"outfile",
",",
"rows",
",",
"packed",
"=",
"True",
")"
] |
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/telemetry/third_party/png/png.py#L833-L851
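A minimal sketch of packed writing, assuming the pypng-style API shown above: a 2x2 one-bit checkerboard, with each row handed over as already-packed bytes (pixels fill each byte MSB-first).
import png  # the pure-Python module this record is extracted from

w = png.Writer(width=2, height=2, greyscale=True, bitdepth=1)
rows = [bytearray([0b10000000]),   # pixels 1, 0 packed into the top bits
        bytearray([0b01000000])]   # pixels 0, 1
with open('checker.png', 'wb') as f:
    w.write_packed(f, rows)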
|
|
FreeCAD/FreeCAD
|
ba42231b9c6889b89e064d6d563448ed81e376ec
|
src/Mod/Draft/draftguitools/gui_snaps.py
|
python
|
Draft_Snap_Lock.Activated
|
(self)
|
Execute when the command is called.
|
Execute when the command is called.
|
[
"Execute",
"when",
"the",
"command",
"is",
"called",
"."
] |
def Activated(self):
"""Execute when the command is called."""
super(Draft_Snap_Lock, self).Activated()
if hasattr(Gui, "Snapper"):
status = Gui.Snapper.toggle_snap('Lock')
# change interface consistently
sync_snap_toolbar_button("Draft_Snap_Lock"+"_Button", status)
sync_snap_statusbar_button("Draft_Snap_Lock"+"_Statusbutton", status)
|
[
"def",
"Activated",
"(",
"self",
")",
":",
"super",
"(",
"Draft_Snap_Lock",
",",
"self",
")",
".",
"Activated",
"(",
")",
"if",
"hasattr",
"(",
"Gui",
",",
"\"Snapper\"",
")",
":",
"status",
"=",
"Gui",
".",
"Snapper",
".",
"toggle_snap",
"(",
"'Lock'",
")",
"# change interface consistently",
"sync_snap_toolbar_button",
"(",
"\"Draft_Snap_Lock\"",
"+",
"\"_Button\"",
",",
"status",
")",
"sync_snap_statusbar_button",
"(",
"\"Draft_Snap_Lock\"",
"+",
"\"_Statusbutton\"",
",",
"status",
")"
] |
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftguitools/gui_snaps.py#L136-L144
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_misc.py
|
python
|
GenericFindWindowAtPoint
|
(*args, **kwargs)
|
return _misc_.GenericFindWindowAtPoint(*args, **kwargs)
|
GenericFindWindowAtPoint(Point pt) -> Window
|
GenericFindWindowAtPoint(Point pt) -> Window
|
[
"GenericFindWindowAtPoint",
"(",
"Point",
"pt",
")",
"-",
">",
"Window"
] |
def GenericFindWindowAtPoint(*args, **kwargs):
"""GenericFindWindowAtPoint(Point pt) -> Window"""
return _misc_.GenericFindWindowAtPoint(*args, **kwargs)
|
[
"def",
"GenericFindWindowAtPoint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_misc_",
".",
"GenericFindWindowAtPoint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L583-L585
|
|
ApolloAuto/apollo-platform
|
86d9dc6743b496ead18d597748ebabd34a513289
|
ros/ros/rosunit/src/rosunit/pmon.py
|
python
|
shutdown_process_monitor
|
(process_monitor)
|
@param process_monitor: process monitor to kill
@type process_monitor: L{ProcessMonitor}
@return: True if process_monitor was successfully
shutdown. False if it could not be shutdown cleanly or if there is
a problem with process_monitor
parameter. shutdown_process_monitor() does not throw any exceptions
as this is shutdown-critical code.
@rtype: bool
|
[] |
def shutdown_process_monitor(process_monitor):
"""
@param process_monitor: process monitor to kill
@type process_monitor: L{ProcessMonitor}
@return: True if process_monitor was successfully
shutdown. False if it could not be shutdown cleanly or if there is
a problem with process_monitor
parameter. shutdown_process_monitor() does not throw any exceptions
as this is shutdown-critical code.
@rtype: bool
"""
try:
if process_monitor is None or process_monitor.is_shutdown:
return False
process_monitor.shutdown()
process_monitor.join(20.0)
if process_monitor.isAlive():
return False
else:
return True
except Exception as e:
return False
|
[
"def",
"shutdown_process_monitor",
"(",
"process_monitor",
")",
":",
"try",
":",
"if",
"process_monitor",
"is",
"None",
"or",
"process_monitor",
".",
"is_shutdown",
":",
"return",
"False",
"process_monitor",
".",
"shutdown",
"(",
")",
"process_monitor",
".",
"join",
"(",
"20.0",
")",
"if",
"process_monitor",
".",
"isAlive",
"(",
")",
":",
"return",
"False",
"else",
":",
"return",
"True",
"except",
"Exception",
"as",
"e",
":",
"return",
"False"
] |
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros/rosunit/src/rosunit/pmon.py#L83-L105
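One portability aside (not from the source): Thread.isAlive() was removed in Python 3.9, so code like the above needs is_alive() on modern interpreters; a version-tolerant probe is a one-liner.
# Sketch: prefer is_alive() and fall back to the legacy spelling.
def thread_is_alive(t):
    return t.is_alive() if hasattr(t, "is_alive") else t.isAlive()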
|
|||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py
|
python
|
Menu.insert_command
|
(self, index, cnf={}, **kw)
|
Add command menu item at INDEX.
|
Add command menu item at INDEX.
|
[
"Add",
"command",
"menu",
"item",
"at",
"INDEX",
"."
] |
def insert_command(self, index, cnf={}, **kw):
"""Add command menu item at INDEX."""
self.insert(index, 'command', cnf or kw)
|
[
"def",
"insert_command",
"(",
"self",
",",
"index",
",",
"cnf",
"=",
"{",
"}",
",",
"*",
"*",
"kw",
")",
":",
"self",
".",
"insert",
"(",
"index",
",",
"'command'",
",",
"cnf",
"or",
"kw",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/tkinter/__init__.py#L2904-L2906
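A self-contained stdlib sketch of the call: insert a command item above an existing one in a Tk menu (a display is needed to actually run it).
import tkinter as tk

root = tk.Tk()
menubar = tk.Menu(root)
filemenu = tk.Menu(menubar, tearoff=0)
filemenu.add_command(label="Quit", command=root.destroy)
# put "Open" above "Quit": index 0 is the top of the menu
filemenu.insert_command(0, label="Open", command=lambda: print("open"))
menubar.add_cascade(label="File", menu=filemenu)
root.config(menu=menubar)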
|
||
oracle/graaljs
|
36a56e8e993d45fc40939a3a4d9c0c24990720f1
|
graal-nodejs/tools/gyp/pylib/gyp/generator/msvs.py
|
python
|
_AddConfigurationToMSVSProject
|
(p, spec, config_type, config_name, config)
|
Adds a configuration to the MSVS project.
Many settings in a vcproj file are specific to a configuration. This
function generates the main part of the vcproj file that's configuration specific.
Arguments:
p: The target project being generated.
spec: The target dictionary containing the properties of the target.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
config: The dictionary that defines the special processing to be done
for this configuration.
|
Adds a configuration to the MSVS project.
|
[
"Adds",
"a",
"configuration",
"to",
"the",
"MSVS",
"project",
"."
] |
def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
"""Adds a configuration to the MSVS project.
Many settings in a vcproj file are specific to a configuration. This
function generates the main part of the vcproj file that's configuration specific.
Arguments:
p: The target project being generated.
spec: The target dictionary containing the properties of the target.
config_type: The configuration type, a number as defined by Microsoft.
config_name: The name of the configuration.
config: The dictionary that defines the special processing to be done
for this configuration.
"""
# Get the information for this configuration
include_dirs, midl_include_dirs, resource_include_dirs = _GetIncludeDirs(config)
libraries = _GetLibraries(spec)
library_dirs = _GetLibraryDirs(config)
out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
defines = _GetDefines(config)
defines = [_EscapeCppDefineForMSVS(d) for d in defines]
disabled_warnings = _GetDisabledWarnings(config)
prebuild = config.get("msvs_prebuild")
postbuild = config.get("msvs_postbuild")
def_file = _GetModuleDefinition(spec)
precompiled_header = config.get("msvs_precompiled_header")
# Prepare the list of tools as a dictionary.
tools = dict()
# Add in user specified msvs_settings.
msvs_settings = config.get("msvs_settings", {})
MSVSSettings.ValidateMSVSSettings(msvs_settings)
# Prevent default library inheritance from the environment.
_ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", ["$(NOINHERIT)"])
for tool in msvs_settings:
settings = config["msvs_settings"][tool]
for setting in settings:
_ToolAppend(tools, tool, setting, settings[setting])
# Add the information to the appropriate tool
_ToolAppend(tools, "VCCLCompilerTool", "AdditionalIncludeDirectories", include_dirs)
_ToolAppend(tools, "VCMIDLTool", "AdditionalIncludeDirectories", midl_include_dirs)
_ToolAppend(
tools,
"VCResourceCompilerTool",
"AdditionalIncludeDirectories",
resource_include_dirs,
)
# Add in libraries.
_ToolAppend(tools, "VCLinkerTool", "AdditionalDependencies", libraries)
_ToolAppend(tools, "VCLinkerTool", "AdditionalLibraryDirectories", library_dirs)
if out_file:
_ToolAppend(tools, vc_tool, "OutputFile", out_file, only_if_unset=True)
# Add defines.
_ToolAppend(tools, "VCCLCompilerTool", "PreprocessorDefinitions", defines)
_ToolAppend(tools, "VCResourceCompilerTool", "PreprocessorDefinitions", defines)
# Change program database directory to prevent collisions.
_ToolAppend(
tools,
"VCCLCompilerTool",
"ProgramDataBaseFileName",
"$(IntDir)$(ProjectName)\\vc80.pdb",
only_if_unset=True,
)
# Add disabled warnings.
_ToolAppend(tools, "VCCLCompilerTool", "DisableSpecificWarnings", disabled_warnings)
# Add Pre-build.
_ToolAppend(tools, "VCPreBuildEventTool", "CommandLine", prebuild)
# Add Post-build.
_ToolAppend(tools, "VCPostBuildEventTool", "CommandLine", postbuild)
# Turn on precompiled headers if appropriate.
if precompiled_header:
precompiled_header = os.path.split(precompiled_header)[1]
_ToolAppend(tools, "VCCLCompilerTool", "UsePrecompiledHeader", "2")
_ToolAppend(
tools, "VCCLCompilerTool", "PrecompiledHeaderThrough", precompiled_header
)
_ToolAppend(tools, "VCCLCompilerTool", "ForcedIncludeFiles", precompiled_header)
# Loadable modules don't generate import libraries;
# tell dependent projects to not expect one.
if spec["type"] == "loadable_module":
_ToolAppend(tools, "VCLinkerTool", "IgnoreImportLibrary", "true")
# Set the module definition file if any.
if def_file:
_ToolAppend(tools, "VCLinkerTool", "ModuleDefinitionFile", def_file)
_AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
|
[
"def",
"_AddConfigurationToMSVSProject",
"(",
"p",
",",
"spec",
",",
"config_type",
",",
"config_name",
",",
"config",
")",
":",
"# Get the information for this configuration",
"include_dirs",
",",
"midl_include_dirs",
",",
"resource_include_dirs",
"=",
"_GetIncludeDirs",
"(",
"config",
")",
"libraries",
"=",
"_GetLibraries",
"(",
"spec",
")",
"library_dirs",
"=",
"_GetLibraryDirs",
"(",
"config",
")",
"out_file",
",",
"vc_tool",
",",
"_",
"=",
"_GetOutputFilePathAndTool",
"(",
"spec",
",",
"msbuild",
"=",
"False",
")",
"defines",
"=",
"_GetDefines",
"(",
"config",
")",
"defines",
"=",
"[",
"_EscapeCppDefineForMSVS",
"(",
"d",
")",
"for",
"d",
"in",
"defines",
"]",
"disabled_warnings",
"=",
"_GetDisabledWarnings",
"(",
"config",
")",
"prebuild",
"=",
"config",
".",
"get",
"(",
"\"msvs_prebuild\"",
")",
"postbuild",
"=",
"config",
".",
"get",
"(",
"\"msvs_postbuild\"",
")",
"def_file",
"=",
"_GetModuleDefinition",
"(",
"spec",
")",
"precompiled_header",
"=",
"config",
".",
"get",
"(",
"\"msvs_precompiled_header\"",
")",
"# Prepare the list of tools as a dictionary.",
"tools",
"=",
"dict",
"(",
")",
"# Add in user specified msvs_settings.",
"msvs_settings",
"=",
"config",
".",
"get",
"(",
"\"msvs_settings\"",
",",
"{",
"}",
")",
"MSVSSettings",
".",
"ValidateMSVSSettings",
"(",
"msvs_settings",
")",
"# Prevent default library inheritance from the environment.",
"_ToolAppend",
"(",
"tools",
",",
"\"VCLinkerTool\"",
",",
"\"AdditionalDependencies\"",
",",
"[",
"\"$(NOINHERIT)\"",
"]",
")",
"for",
"tool",
"in",
"msvs_settings",
":",
"settings",
"=",
"config",
"[",
"\"msvs_settings\"",
"]",
"[",
"tool",
"]",
"for",
"setting",
"in",
"settings",
":",
"_ToolAppend",
"(",
"tools",
",",
"tool",
",",
"setting",
",",
"settings",
"[",
"setting",
"]",
")",
"# Add the information to the appropriate tool",
"_ToolAppend",
"(",
"tools",
",",
"\"VCCLCompilerTool\"",
",",
"\"AdditionalIncludeDirectories\"",
",",
"include_dirs",
")",
"_ToolAppend",
"(",
"tools",
",",
"\"VCMIDLTool\"",
",",
"\"AdditionalIncludeDirectories\"",
",",
"midl_include_dirs",
")",
"_ToolAppend",
"(",
"tools",
",",
"\"VCResourceCompilerTool\"",
",",
"\"AdditionalIncludeDirectories\"",
",",
"resource_include_dirs",
",",
")",
"# Add in libraries.",
"_ToolAppend",
"(",
"tools",
",",
"\"VCLinkerTool\"",
",",
"\"AdditionalDependencies\"",
",",
"libraries",
")",
"_ToolAppend",
"(",
"tools",
",",
"\"VCLinkerTool\"",
",",
"\"AdditionalLibraryDirectories\"",
",",
"library_dirs",
")",
"if",
"out_file",
":",
"_ToolAppend",
"(",
"tools",
",",
"vc_tool",
",",
"\"OutputFile\"",
",",
"out_file",
",",
"only_if_unset",
"=",
"True",
")",
"# Add defines.",
"_ToolAppend",
"(",
"tools",
",",
"\"VCCLCompilerTool\"",
",",
"\"PreprocessorDefinitions\"",
",",
"defines",
")",
"_ToolAppend",
"(",
"tools",
",",
"\"VCResourceCompilerTool\"",
",",
"\"PreprocessorDefinitions\"",
",",
"defines",
")",
"# Change program database directory to prevent collisions.",
"_ToolAppend",
"(",
"tools",
",",
"\"VCCLCompilerTool\"",
",",
"\"ProgramDataBaseFileName\"",
",",
"\"$(IntDir)$(ProjectName)\\\\vc80.pdb\"",
",",
"only_if_unset",
"=",
"True",
",",
")",
"# Add disabled warnings.",
"_ToolAppend",
"(",
"tools",
",",
"\"VCCLCompilerTool\"",
",",
"\"DisableSpecificWarnings\"",
",",
"disabled_warnings",
")",
"# Add Pre-build.",
"_ToolAppend",
"(",
"tools",
",",
"\"VCPreBuildEventTool\"",
",",
"\"CommandLine\"",
",",
"prebuild",
")",
"# Add Post-build.",
"_ToolAppend",
"(",
"tools",
",",
"\"VCPostBuildEventTool\"",
",",
"\"CommandLine\"",
",",
"postbuild",
")",
"# Turn on precompiled headers if appropriate.",
"if",
"precompiled_header",
":",
"precompiled_header",
"=",
"os",
".",
"path",
".",
"split",
"(",
"precompiled_header",
")",
"[",
"1",
"]",
"_ToolAppend",
"(",
"tools",
",",
"\"VCCLCompilerTool\"",
",",
"\"UsePrecompiledHeader\"",
",",
"\"2\"",
")",
"_ToolAppend",
"(",
"tools",
",",
"\"VCCLCompilerTool\"",
",",
"\"PrecompiledHeaderThrough\"",
",",
"precompiled_header",
")",
"_ToolAppend",
"(",
"tools",
",",
"\"VCCLCompilerTool\"",
",",
"\"ForcedIncludeFiles\"",
",",
"precompiled_header",
")",
"# Loadable modules don't generate import libraries;",
"# tell dependent projects to not expect one.",
"if",
"spec",
"[",
"\"type\"",
"]",
"==",
"\"loadable_module\"",
":",
"_ToolAppend",
"(",
"tools",
",",
"\"VCLinkerTool\"",
",",
"\"IgnoreImportLibrary\"",
",",
"\"true\"",
")",
"# Set the module definition file if any.",
"if",
"def_file",
":",
"_ToolAppend",
"(",
"tools",
",",
"\"VCLinkerTool\"",
",",
"\"ModuleDefinitionFile\"",
",",
"def_file",
")",
"_AddConfigurationToMSVS",
"(",
"p",
",",
"spec",
",",
"tools",
",",
"config",
",",
"config_type",
",",
"config_name",
")"
] |
https://github.com/oracle/graaljs/blob/36a56e8e993d45fc40939a3a4d9c0c24990720f1/graal-nodejs/tools/gyp/pylib/gyp/generator/msvs.py#L1161-L1248
|
||
apple/turicreate
|
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
|
deps/src/libxml2-2.9.1/python/libxml2.py
|
python
|
xmlNode.copyNodeList
|
(self)
|
return __tmp
|
Do a recursive copy of the node list. Use
xmlDocCopyNodeList() if possible to ensure string interning.
|
Do a recursive copy of the node list. Use
xmlDocCopyNodeList() if possible to ensure string interning.
|
[
"Do",
"a",
"recursive",
"copy",
"of",
"the",
"node",
"list",
".",
"Use",
"xmlDocCopyNodeList",
"()",
"if",
"possible",
"to",
"ensure",
"string",
"interning",
"."
] |
def copyNodeList(self):
"""Do a recursive copy of the node list. Use
xmlDocCopyNodeList() if possible to ensure string interning. """
ret = libxml2mod.xmlCopyNodeList(self._o)
if ret is None:raise treeError('xmlCopyNodeList() failed')
__tmp = xmlNode(_obj=ret)
return __tmp
|
[
"def",
"copyNodeList",
"(",
"self",
")",
":",
"ret",
"=",
"libxml2mod",
".",
"xmlCopyNodeList",
"(",
"self",
".",
"_o",
")",
"if",
"ret",
"is",
"None",
":",
"raise",
"treeError",
"(",
"'xmlCopyNodeList() failed'",
")",
"__tmp",
"=",
"xmlNode",
"(",
"_obj",
"=",
"ret",
")",
"return",
"__tmp"
] |
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L3160-L3166
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/telemetry/third_party/mox3/mox3/mox.py
|
python
|
UnexpectedMockCreationError.__init__
|
(self, instance, *params, **named_params)
|
Init exception.
Args:
# instance: the type of object that was created
# params: parameters given during instantiation
# named_params: named parameters given during instantiation
|
Init exception.
|
[
"Init",
"exception",
"."
] |
def __init__(self, instance, *params, **named_params):
"""Init exception.
Args:
# instance: the type of object that was created
# params: parameters given during instantiation
# named_params: named parameters given during instantiation
"""
Error.__init__(self)
self._instance = instance
self._params = params
self._named_params = named_params
|
[
"def",
"__init__",
"(",
"self",
",",
"instance",
",",
"*",
"params",
",",
"*",
"*",
"named_params",
")",
":",
"Error",
".",
"__init__",
"(",
"self",
")",
"self",
".",
"_instance",
"=",
"instance",
"self",
".",
"_params",
"=",
"params",
"self",
".",
"_named_params",
"=",
"named_params"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/third_party/mox3/mox3/mox.py#L205-L217
|
||
MythTV/mythtv
|
d282a209cb8be85d036f85a62a8ec971b67d45f4
|
mythtv/programs/scripts/metadata/Music/musicbrainzngs/musicbrainz.py
|
python
|
browse_urls
|
(resource=None, includes=[], limit=None, offset=None)
|
return _browse_impl("url", includes, valid_includes,
limit, offset, params)
|
Get urls by actual URL string.
You need to give a URL string as 'resource'
*Available includes*: {includes}
|
Get urls by actual URL string.
You need to give a URL string as 'resource'
|
[
"Get",
"urls",
"by",
"actual",
"URL",
"string",
".",
"You",
"need",
"to",
"give",
"a",
"URL",
"string",
"as",
"resource"
] |
def browse_urls(resource=None, includes=[], limit=None, offset=None):
"""Get urls by actual URL string.
You need to give a URL string as 'resource'
*Available includes*: {includes}"""
# optional parameter work?
valid_includes = VALID_BROWSE_INCLUDES['urls']
params = {"resource": resource}
return _browse_impl("url", includes, valid_includes,
limit, offset, params)
|
[
"def",
"browse_urls",
"(",
"resource",
"=",
"None",
",",
"includes",
"=",
"[",
"]",
",",
"limit",
"=",
"None",
",",
"offset",
"=",
"None",
")",
":",
"# optional parameter work?",
"valid_includes",
"=",
"VALID_BROWSE_INCLUDES",
"[",
"'urls'",
"]",
"params",
"=",
"{",
"\"resource\"",
":",
"resource",
"}",
"return",
"_browse_impl",
"(",
"\"url\"",
",",
"includes",
",",
"valid_includes",
",",
"limit",
",",
"offset",
",",
"params",
")"
] |
https://github.com/MythTV/mythtv/blob/d282a209cb8be85d036f85a62a8ec971b67d45f4/mythtv/programs/scripts/metadata/Music/musicbrainzngs/musicbrainz.py#L1165-L1174
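A hedged usage sketch (network access required; the URL is illustrative): MusicBrainz insists on a meaningful user agent before any request is made.
import musicbrainzngs

musicbrainzngs.set_useragent("example-app", "0.1", "you@example.com")
# 'resource' is the exact URL string to look up in the URL entity index.
result = musicbrainzngs.browse_urls("https://www.discogs.com/artist/1234")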
|
|
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/compiler/pyassem.py
|
python
|
Block.getContainedGraphs
|
(self)
|
return contained
|
Return all graphs contained within this block.
For example, a MAKE_FUNCTION block will contain a reference to
the graph for the function body.
|
Return all graphs contained within this block.
|
[
"Return",
"all",
"graphs",
"contained",
"within",
"this",
"block",
"."
] |
def getContainedGraphs(self):
"""Return all graphs contained within this block.
For example, a MAKE_FUNCTION block will contain a reference to
the graph for the function body.
"""
contained = []
for inst in self.insts:
if len(inst) == 1:
continue
op = inst[1]
if hasattr(op, 'graph'):
contained.append(op.graph)
return contained
|
[
"def",
"getContainedGraphs",
"(",
"self",
")",
":",
"contained",
"=",
"[",
"]",
"for",
"inst",
"in",
"self",
".",
"insts",
":",
"if",
"len",
"(",
"inst",
")",
"==",
"1",
":",
"continue",
"op",
"=",
"inst",
"[",
"1",
"]",
"if",
"hasattr",
"(",
"op",
",",
"'graph'",
")",
":",
"contained",
".",
"append",
"(",
"op",
".",
"graph",
")",
"return",
"contained"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/compiler/pyassem.py#L290-L303
|
|
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/closure_compiler/compile.py
|
python
|
Checker._format_errors
|
(self, errors)
|
return "## %s" % contents if contents else ""
|
Formats Closure compiler errors to easily spot compiler output.
Args:
errors: A list of strings extracted from the Closure compiler's output.
Returns:
A formatted output string.
|
Formats Closure compiler errors to easily spot compiler output.
|
[
"Formats",
"Closure",
"compiler",
"errors",
"to",
"easily",
"spot",
"compiler",
"output",
"."
] |
def _format_errors(self, errors):
"""Formats Closure compiler errors to easily spot compiler output.
Args:
errors: A list of strings extracted from the Closure compiler's output.
Returns:
A formatted output string.
"""
contents = "\n## ".join("\n\n".join(errors).splitlines())
return "## %s" % contents if contents else ""
|
[
"def",
"_format_errors",
"(",
"self",
",",
"errors",
")",
":",
"contents",
"=",
"\"\\n## \"",
".",
"join",
"(",
"\"\\n\\n\"",
".",
"join",
"(",
"errors",
")",
".",
"splitlines",
"(",
")",
")",
"return",
"\"## %s\"",
"%",
"contents",
"if",
"contents",
"else",
"\"\""
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/closure_compiler/compile.py#L162-L172
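The join/splitlines dance above is easy to misread; a standalone replication shows that every line of every error gets a "## " prefix, with a bare "## " line (trailing space included) separating consecutive errors.
errors = ["a.js:1 bad thing", "b.js:2 worse thing"]
contents = "\n## ".join("\n\n".join(errors).splitlines())
print("## %s" % contents if contents else "")
# prints:
# ## a.js:1 bad thing
# ##
# ## b.js:2 worse thing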
|
|
ChromiumWebApps/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
chrome/common/extensions/docs/server2/object_store.py
|
python
|
ObjectStore.Del
|
(self, key)
|
Deletes a key from the object store.
|
Deletes a key from the object store.
|
[
"Deletes",
"a",
"key",
"from",
"the",
"object",
"store",
"."
] |
def Del(self, key):
'''Deletes a key from the object store.
'''
self.DelMulti([key])
|
[
"def",
"Del",
"(",
"self",
",",
"key",
")",
":",
"self",
".",
"DelMulti",
"(",
"[",
"key",
"]",
")"
] |
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/chrome/common/extensions/docs/server2/object_store.py#L40-L43
|
||
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/ops/_grad/grad_math_ops.py
|
python
|
bprop_matmul
|
(self)
|
return bprop
|
Grad definition for `MatMul` operation.
|
Grad definition for `MatMul` operation.
|
[
"Grad",
"definition",
"for",
"MatMul",
"operation",
"."
] |
def bprop_matmul(self):
"""Grad definition for `MatMul` operation."""
ta = self.transpose_a
tb = self.transpose_b
mul1 = P.MatMul(transpose_a=(ta and tb),
transpose_b=(ta or (not tb)))
mul2 = P.MatMul(transpose_a=((not ta) or tb),
transpose_b=(ta and tb))
def bprop(x, w, out, dout):
if ta:
dx = mul1(w, dout)
else:
dx = mul1(dout, w)
if tb:
dw = mul2(dout, x)
else:
dw = mul2(x, dout)
return dx, dw
return bprop
|
[
"def",
"bprop_matmul",
"(",
"self",
")",
":",
"ta",
"=",
"self",
".",
"transpose_a",
"tb",
"=",
"self",
".",
"transpose_b",
"mul1",
"=",
"P",
".",
"MatMul",
"(",
"transpose_a",
"=",
"(",
"ta",
"and",
"tb",
")",
",",
"transpose_b",
"=",
"(",
"ta",
"or",
"(",
"not",
"tb",
")",
")",
")",
"mul2",
"=",
"P",
".",
"MatMul",
"(",
"transpose_a",
"=",
"(",
"(",
"not",
"ta",
")",
"or",
"tb",
")",
",",
"transpose_b",
"=",
"(",
"ta",
"and",
"tb",
")",
")",
"def",
"bprop",
"(",
"x",
",",
"w",
",",
"out",
",",
"dout",
")",
":",
"if",
"ta",
":",
"dx",
"=",
"mul1",
"(",
"w",
",",
"dout",
")",
"else",
":",
"dx",
"=",
"mul1",
"(",
"dout",
",",
"w",
")",
"if",
"tb",
":",
"dw",
"=",
"mul2",
"(",
"dout",
",",
"x",
")",
"else",
":",
"dw",
"=",
"mul2",
"(",
"x",
",",
"dout",
")",
"return",
"dx",
",",
"dw",
"return",
"bprop"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/_grad/grad_math_ops.py#L170-L190
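A numpy sanity check of the non-transposed case (ta = tb = False), stated as a sketch: for Y = X @ W with upstream gradient dY, the rule above reduces to dX = dY @ W.T and dW = X.T @ dY, which is exactly mul1 = MatMul(transpose_b=True) and mul2 = MatMul(transpose_a=True).
import numpy as np

rng = np.random.default_rng(0)
X, W = rng.normal(size=(4, 3)), rng.normal(size=(3, 2))
dY = np.ones((4, 2))              # gradient of sum(X @ W)
dX, dW = dY @ W.T, X.T @ dY
# sum(X @ W) is linear in X, so a finite difference is exact:
eps = 1e-6
Xp = X.copy(); Xp[0, 0] += eps
num = ((Xp @ W).sum() - (X @ W).sum()) / eps
assert abs(num - dX[0, 0]) < 1e-4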
|
|
livecode/livecode
|
4606a10ea10b16d5071d0f9f263ccdd7ede8b31d
|
gyp/pylib/gyp/xcode_emulation.py
|
python
|
XcodeSettings.GetCflagsCC
|
(self, configname)
|
return cflags_cc
|
Returns flags that need to be added to .cc, and .mm compilations.
|
Returns flags that need to be added to .cc, and .mm compilations.
|
[
"Returns",
"flags",
"that",
"need",
"to",
"be",
"added",
"to",
".",
"cc",
"and",
".",
"mm",
"compilations",
"."
] |
def GetCflagsCC(self, configname):
"""Returns flags that need to be added to .cc, and .mm compilations."""
self.configname = configname
cflags_cc = []
clang_cxx_language_standard = self._Settings().get(
'CLANG_CXX_LANGUAGE_STANDARD')
# Note: Don't make c++0x to c++11 so that c++0x can be used with older
# clangs that don't understand c++11 yet (like Xcode 4.2's).
if clang_cxx_language_standard:
cflags_cc.append('-std=%s' % clang_cxx_language_standard)
self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
cflags_cc.append('-fno-rtti')
if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
cflags_cc.append('-fno-exceptions')
if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
cflags_cc.append('-fvisibility-inlines-hidden')
if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
cflags_cc.append('-fno-threadsafe-statics')
# Note: This flag is a no-op for clang, it only has an effect for gcc.
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
cflags_cc.append('-Wno-invalid-offsetof')
other_ccflags = []
for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
# TODO: More general variable expansion. Missing in many other places too.
if flag in ('$inherited', '$(inherited)', '${inherited}'):
flag = '$OTHER_CFLAGS'
if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
else:
other_ccflags.append(flag)
cflags_cc += other_ccflags
self.configname = None
return cflags_cc
|
[
"def",
"GetCflagsCC",
"(",
"self",
",",
"configname",
")",
":",
"self",
".",
"configname",
"=",
"configname",
"cflags_cc",
"=",
"[",
"]",
"clang_cxx_language_standard",
"=",
"self",
".",
"_Settings",
"(",
")",
".",
"get",
"(",
"'CLANG_CXX_LANGUAGE_STANDARD'",
")",
"# Note: Don't make c++0x to c++11 so that c++0x can be used with older",
"# clangs that don't understand c++11 yet (like Xcode 4.2's).",
"if",
"clang_cxx_language_standard",
":",
"cflags_cc",
".",
"append",
"(",
"'-std=%s'",
"%",
"clang_cxx_language_standard",
")",
"self",
".",
"_Appendf",
"(",
"cflags_cc",
",",
"'CLANG_CXX_LIBRARY'",
",",
"'-stdlib=%s'",
")",
"if",
"self",
".",
"_Test",
"(",
"'GCC_ENABLE_CPP_RTTI'",
",",
"'NO'",
",",
"default",
"=",
"'YES'",
")",
":",
"cflags_cc",
".",
"append",
"(",
"'-fno-rtti'",
")",
"if",
"self",
".",
"_Test",
"(",
"'GCC_ENABLE_CPP_EXCEPTIONS'",
",",
"'NO'",
",",
"default",
"=",
"'YES'",
")",
":",
"cflags_cc",
".",
"append",
"(",
"'-fno-exceptions'",
")",
"if",
"self",
".",
"_Test",
"(",
"'GCC_INLINES_ARE_PRIVATE_EXTERN'",
",",
"'YES'",
",",
"default",
"=",
"'NO'",
")",
":",
"cflags_cc",
".",
"append",
"(",
"'-fvisibility-inlines-hidden'",
")",
"if",
"self",
".",
"_Test",
"(",
"'GCC_THREADSAFE_STATICS'",
",",
"'NO'",
",",
"default",
"=",
"'YES'",
")",
":",
"cflags_cc",
".",
"append",
"(",
"'-fno-threadsafe-statics'",
")",
"# Note: This flag is a no-op for clang, it only has an effect for gcc.",
"if",
"self",
".",
"_Test",
"(",
"'GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO'",
",",
"'NO'",
",",
"default",
"=",
"'YES'",
")",
":",
"cflags_cc",
".",
"append",
"(",
"'-Wno-invalid-offsetof'",
")",
"other_ccflags",
"=",
"[",
"]",
"for",
"flag",
"in",
"self",
".",
"_Settings",
"(",
")",
".",
"get",
"(",
"'OTHER_CPLUSPLUSFLAGS'",
",",
"[",
"'$(inherited)'",
"]",
")",
":",
"# TODO: More general variable expansion. Missing in many other places too.",
"if",
"flag",
"in",
"(",
"'$inherited'",
",",
"'$(inherited)'",
",",
"'${inherited}'",
")",
":",
"flag",
"=",
"'$OTHER_CFLAGS'",
"if",
"flag",
"in",
"(",
"'$OTHER_CFLAGS'",
",",
"'$(OTHER_CFLAGS)'",
",",
"'${OTHER_CFLAGS}'",
")",
":",
"other_ccflags",
"+=",
"self",
".",
"_Settings",
"(",
")",
".",
"get",
"(",
"'OTHER_CFLAGS'",
",",
"[",
"]",
")",
"else",
":",
"other_ccflags",
".",
"append",
"(",
"flag",
")",
"cflags_cc",
"+=",
"other_ccflags",
"self",
".",
"configname",
"=",
"None",
"return",
"cflags_cc"
] |
https://github.com/livecode/livecode/blob/4606a10ea10b16d5071d0f9f263ccdd7ede8b31d/gyp/pylib/gyp/xcode_emulation.py#L588-L627
|
|
echronos/echronos
|
c996f1d2c8af6c6536205eb319c1bf1d4d84569c
|
external_tools/ply_info/example/unicalc/calc.py
|
python
|
p_expression_binop
|
(p)
|
expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression
|
expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression
|
[
"expression",
":",
"expression",
"PLUS",
"expression",
"|",
"expression",
"MINUS",
"expression",
"|",
"expression",
"TIMES",
"expression",
"|",
"expression",
"DIVIDE",
"expression"
] |
def p_expression_binop(p):
'''expression : expression PLUS expression
| expression MINUS expression
| expression TIMES expression
| expression DIVIDE expression'''
if p[2] == u'+' : p[0] = p[1] + p[3]
elif p[2] == u'-': p[0] = p[1] - p[3]
elif p[2] == u'*': p[0] = p[1] * p[3]
elif p[2] == u'/': p[0] = p[1] / p[3]
|
[
"def",
"p_expression_binop",
"(",
"p",
")",
":",
"if",
"p",
"[",
"2",
"]",
"==",
"u'+'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"+",
"p",
"[",
"3",
"]",
"elif",
"p",
"[",
"2",
"]",
"==",
"u'-'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"-",
"p",
"[",
"3",
"]",
"elif",
"p",
"[",
"2",
"]",
"==",
"u'*'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"*",
"p",
"[",
"3",
"]",
"elif",
"p",
"[",
"2",
"]",
"==",
"u'/'",
":",
"p",
"[",
"0",
"]",
"=",
"p",
"[",
"1",
"]",
"/",
"p",
"[",
"3",
"]"
] |
https://github.com/echronos/echronos/blob/c996f1d2c8af6c6536205eb319c1bf1d4d84569c/external_tools/ply_info/example/unicalc/calc.py#L72-L80
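PLY reads the grammar from the docstring and calls the rule with a writable, list-like YaccProduction; a plain list is enough to see the p[1] OP p[3] -> p[0] convention in action (a toy stand-in, not how PLY is normally driven).
p = [None, 3, u'*', 4]   # [result slot, left operand, operator, right operand]
p_expression_binop(p)
assert p[0] == 12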
|
||
cms-sw/cmssw
|
fd9de012d503d3405420bcbeec0ec879baa57cf2
|
Alignment/OfflineValidation/python/TkAlAllInOneTool/alignment.py
|
python
|
Alignment.__shorthandExists
|
(self, theRcdName, theShorthand)
|
Method which checks if `theShorthand` is a valid shorthand for the
given `theRcdName`.
Arguments:
- `theRcdName`: String which specifies the database record.
- `theShorthand`: String which specifies the shorthand to check.
|
Method which checks if `theShorthand` is a valid shorthand for the
given `theRcdName`.
|
[
"Method",
"which",
"checks",
"if",
"theShorthand",
"is",
"a",
"valid",
"shorthand",
"for",
"the",
"given",
"theRcdName",
"."
] |
def __shorthandExists(self, theRcdName, theShorthand):
"""Method which checks, if `theShorthand` is a valid shorthand for the
given `theRcdName`.
Arguments:
- `theRcdName`: String which specifies the database record.
- `theShorthand`: String which specifies the shorthand to check.
"""
if (theRcdName in self.condShorts) and \
(theShorthand in self.condShorts[theRcdName]):
return True
else:
return False
|
[
"def",
"__shorthandExists",
"(",
"self",
",",
"theRcdName",
",",
"theShorthand",
")",
":",
"if",
"(",
"theRcdName",
"in",
"self",
".",
"condShorts",
")",
"and",
"(",
"theShorthand",
"in",
"self",
".",
"condShorts",
"[",
"theRcdName",
"]",
")",
":",
"return",
"True",
"else",
":",
"return",
"False"
] |
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/OfflineValidation/python/TkAlAllInOneTool/alignment.py#L64-L77
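A side note on the predicate above: the if/else collapses to a single membership test; an equivalent standalone form, behaviour unchanged.
def shorthand_exists(cond_shorts, rcd_name, shorthand):
    # dict.get with a default avoids the two-step key check
    return shorthand in cond_shorts.get(rcd_name, {})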
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/bdb.py
|
python
|
Bdb.clear_break
|
(self, filename, lineno)
|
return None
|
Delete breakpoints for filename:lineno.
If no breakpoints were set, return an error message.
|
Delete breakpoints for filename:lineno.
|
[
"Delete",
"breakpoints",
"for",
"filename",
":",
"lineno",
"."
] |
def clear_break(self, filename, lineno):
"""Delete breakpoints for filename:lineno.
If no breakpoints were set, return an error message.
"""
filename = self.canonic(filename)
if filename not in self.breaks:
return 'There are no breakpoints in %s' % filename
if lineno not in self.breaks[filename]:
return 'There is no breakpoint at %s:%d' % (filename, lineno)
# If there's only one bp in the list for that file,line
# pair, then remove the breaks entry
for bp in Breakpoint.bplist[filename, lineno][:]:
bp.deleteMe()
self._prune_breaks(filename, lineno)
return None
|
[
"def",
"clear_break",
"(",
"self",
",",
"filename",
",",
"lineno",
")",
":",
"filename",
"=",
"self",
".",
"canonic",
"(",
"filename",
")",
"if",
"filename",
"not",
"in",
"self",
".",
"breaks",
":",
"return",
"'There are no breakpoints in %s'",
"%",
"filename",
"if",
"lineno",
"not",
"in",
"self",
".",
"breaks",
"[",
"filename",
"]",
":",
"return",
"'There is no breakpoint at %s:%d'",
"%",
"(",
"filename",
",",
"lineno",
")",
"# If there's only one bp in the list for that file,line",
"# pair, then remove the breaks entry",
"for",
"bp",
"in",
"Breakpoint",
".",
"bplist",
"[",
"filename",
",",
"lineno",
"]",
"[",
":",
"]",
":",
"bp",
".",
"deleteMe",
"(",
")",
"self",
".",
"_prune_breaks",
"(",
"filename",
",",
"lineno",
")",
"return",
"None"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/bdb.py#L397-L412
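A runnable stdlib sketch of the success/error contract: clear_break returns None when a breakpoint was removed and an explanatory string otherwise. Using bdb's own source file guarantees set_break finds a real line.
import bdb

dbg = bdb.Bdb()
target = bdb.__file__                      # a real, importable file
assert dbg.set_break(target, 1) is None    # line 1 exists, so this succeeds
assert dbg.clear_break(target, 1) is None  # None signals successful removal
print(dbg.clear_break(target, 1))          # 'There are no breakpoints in ...'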
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/_core.py
|
python
|
GBPosition.GetRow
|
(*args, **kwargs)
|
return _core_.GBPosition_GetRow(*args, **kwargs)
|
GetRow(self) -> int
|
GetRow(self) -> int
|
[
"GetRow",
"(",
"self",
")",
"-",
">",
"int"
] |
def GetRow(*args, **kwargs):
"""GetRow(self) -> int"""
return _core_.GBPosition_GetRow(*args, **kwargs)
|
[
"def",
"GetRow",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"GBPosition_GetRow",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L15576-L15578
|
|
kamyu104/LeetCode-Solutions
|
77605708a927ea3b85aee5a479db733938c7c211
|
Python/count-binary-substrings.py
|
python
|
Solution.countBinarySubstrings
|
(self, s)
|
return result
|
:type s: str
:rtype: int
|
:type s: str
:rtype: int
|
[
":",
"type",
"s",
":",
"str",
":",
"rtype",
":",
"int"
] |
def countBinarySubstrings(self, s):
"""
:type s: str
:rtype: int
"""
result, prev, curr = 0, 0, 1
for i in xrange(1, len(s)):
if s[i-1] != s[i]:
result += min(prev, curr)
prev, curr = curr, 1
else:
curr += 1
result += min(prev, curr)
return result
|
[
"def",
"countBinarySubstrings",
"(",
"self",
",",
"s",
")",
":",
"result",
",",
"prev",
",",
"curr",
"=",
"0",
",",
"0",
",",
"1",
"for",
"i",
"in",
"xrange",
"(",
"1",
",",
"len",
"(",
"s",
")",
")",
":",
"if",
"s",
"[",
"i",
"-",
"1",
"]",
"!=",
"s",
"[",
"i",
"]",
":",
"result",
"+=",
"min",
"(",
"prev",
",",
"curr",
")",
"prev",
",",
"curr",
"=",
"curr",
",",
"1",
"else",
":",
"curr",
"+=",
"1",
"result",
"+=",
"min",
"(",
"prev",
",",
"curr",
")",
"return",
"result"
] |
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/count-binary-substrings.py#L5-L18
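A worked check of the run-length idea (ported to Python 3 by swapping xrange for range): "00110011" has run lengths [2, 2, 2, 2], each adjacent pair of runs contributes min of the two lengths, so the answer is 6.
def count_binary_substrings(s):
    result, prev, curr = 0, 0, 1
    for i in range(1, len(s)):
        if s[i-1] != s[i]:
            result += min(prev, curr)   # close off the previous run pair
            prev, curr = curr, 1
        else:
            curr += 1
    return result + min(prev, curr)     # account for the final run pair

assert count_binary_substrings("00110011") == 6
assert count_binary_substrings("10101") == 4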
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/lib/rpcMixin.py
|
python
|
rpcMixin.OnExternal
|
(self,event)
|
this is the callback used to handle RPCs
**Arguments**
- event: an _ExternalEvent_ sent by the rpc server
Exceptions are caught and returned in the global _rpcStatus
structure. This allows the xmlrpc server to report the
exception to the client without mucking up any of the delicate
thread stuff.
|
this is the callback used to handle RPCs
|
[
"this",
"is",
"the",
"callback",
"used",
"to",
"handle",
"RPCs"
] |
def OnExternal(self,event):
""" this is the callback used to handle RPCs
**Arguments**
- event: an _ExternalEvent_ sent by the rpc server
Exceptions are caught and returned in the global _rpcStatus
structure. This allows the xmlrpc server to report the
exception to the client without mucking up any of the delicate
thread stuff.
"""
event.rpcStatusLock.acquire()
doQuit = 0
try:
methsplit = event.method.split('.')
meth = self
for piece in methsplit:
meth = getattr(meth,piece)
except AttributeError,msg:
event.rpcStatus.result = 'No Such Method',msg
event.rpcStatus.status = rpcEXCEPT
else:
try:
res = apply(meth,event.args)
except:
import traceback
if self.verbose: traceback.print_exc()
event.rpcStatus.result = sys.exc_info()[:2]
event.rpcStatus.status = rpcEXCEPT
else:
if res is None:
# returning None across the xmlrpc interface is problematic
event.rpcStatus.result = []
else:
event.rpcStatus.result = res
event.rpcStatus.status = rpcDONE
event.rpcStatusLock.release()
# broadcast (using the condition var) that we're done with the event
event.rpcCondVar.acquire()
event.rpcCondVar.notify()
event.rpcCondVar.release()
|
[
"def",
"OnExternal",
"(",
"self",
",",
"event",
")",
":",
"event",
".",
"rpcStatusLock",
".",
"acquire",
"(",
")",
"doQuit",
"=",
"0",
"try",
":",
"methsplit",
"=",
"event",
".",
"method",
".",
"split",
"(",
"'.'",
")",
"meth",
"=",
"self",
"for",
"piece",
"in",
"methsplit",
":",
"meth",
"=",
"getattr",
"(",
"meth",
",",
"piece",
")",
"except",
"AttributeError",
",",
"msg",
":",
"event",
".",
"rpcStatus",
".",
"result",
"=",
"'No Such Method'",
",",
"msg",
"event",
".",
"rpcStatus",
".",
"status",
"=",
"rpcEXCEPT",
"else",
":",
"try",
":",
"res",
"=",
"apply",
"(",
"meth",
",",
"event",
".",
"args",
")",
"except",
":",
"import",
"traceback",
"if",
"self",
".",
"verbose",
":",
"traceback",
".",
"print_exc",
"(",
")",
"event",
".",
"rpcStatus",
".",
"result",
"=",
"sys",
".",
"exc_info",
"(",
")",
"[",
":",
"2",
"]",
"event",
".",
"rpcStatus",
".",
"status",
"=",
"rpcEXCEPT",
"else",
":",
"if",
"res",
"is",
"None",
":",
"# returning None across the xmlrpc interface is problematic",
"event",
".",
"rpcStatus",
".",
"result",
"=",
"[",
"]",
"else",
":",
"event",
".",
"rpcStatus",
".",
"result",
"=",
"res",
"event",
".",
"rpcStatus",
".",
"status",
"=",
"rpcDONE",
"event",
".",
"rpcStatusLock",
".",
"release",
"(",
")",
"# broadcast (using the condition var) that we're done with the event",
"event",
".",
"rpcCondVar",
".",
"acquire",
"(",
")",
"event",
".",
"rpcCondVar",
".",
"notify",
"(",
")",
"event",
".",
"rpcCondVar",
".",
"release",
"(",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/rpcMixin.py#L297-L341
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_carbon/_gdi.py
|
python
|
DC.DrawLinePoint
|
(*args, **kwargs)
|
return _gdi_.DC_DrawLinePoint(*args, **kwargs)
|
DrawLinePoint(self, Point pt1, Point pt2)
Draws a line from the first point to the second. The current pen is
used for drawing the line. Note that the second point is *not* part of
the line and is not drawn by this function (this is consistent with
the behaviour of many other toolkits).
|
DrawLinePoint(self, Point pt1, Point pt2)
|
[
"DrawLinePoint",
"(",
"self",
"Point",
"pt1",
"Point",
"pt2",
")"
] |
def DrawLinePoint(*args, **kwargs):
"""
DrawLinePoint(self, Point pt1, Point pt2)
Draws a line from the first point to the second. The current pen is
used for drawing the line. Note that the second point is *not* part of
the line and is not drawn by this function (this is consistent with
the behaviour of many other toolkits).
"""
return _gdi_.DC_DrawLinePoint(*args, **kwargs)
|
[
"def",
"DrawLinePoint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"DC_DrawLinePoint",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_gdi.py#L3417-L3426
|
|
devpack/android-python27
|
d42dd67565e104cf7b0b50eb473f615db3e69901
|
python-build-with-qt/sip-4.11.2/configure.py
|
python
|
create_config
|
(module, template, macros)
|
Create the SIP configuration module so that it can be imported by build
scripts.
module is the module file name.
template is the template file name.
macros is the dictionary of build macros.
|
Create the SIP configuration module so that it can be imported by build
scripts.
|
[
"Create",
"the",
"SIP",
"configuration",
"module",
"so",
"that",
"it",
"can",
"be",
"imported",
"by",
"build",
"scripts",
"."
] |
def create_config(module, template, macros):
"""Create the SIP configuration module so that it can be imported by build
scripts.
module is the module file name.
template is the template file name.
macros is the dictionary of build macros.
"""
siputils.inform("Creating %s..." % module)
content = {
"sip_config_args": sys.argv[1:],
"sip_version": sip_version,
"sip_version_str": sip_version_str,
"platform": opts.platform,
"sip_bin": os.path.join(opts.sipbindir, "sip"),
"sip_inc_dir": opts.sipincdir,
"sip_mod_dir": opts.sipmoddir,
"default_bin_dir": plat_bin_dir,
"default_mod_dir": plat_py_site_dir,
"default_sip_dir": opts.sipsipdir,
"py_version": py_version,
"py_inc_dir": plat_py_inc_dir,
"py_conf_inc_dir": plat_py_conf_inc_dir,
"py_lib_dir": plat_py_lib_dir,
"universal": opts.universal,
"arch": opts.arch
}
siputils.create_config_module(module, template, content, macros)
|
[
"def",
"create_config",
"(",
"module",
",",
"template",
",",
"macros",
")",
":",
"siputils",
".",
"inform",
"(",
"\"Creating %s...\"",
"%",
"module",
")",
"content",
"=",
"{",
"\"sip_config_args\"",
":",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
",",
"\"sip_version\"",
":",
"sip_version",
",",
"\"sip_version_str\"",
":",
"sip_version_str",
",",
"\"platform\"",
":",
"opts",
".",
"platform",
",",
"\"sip_bin\"",
":",
"os",
".",
"path",
".",
"join",
"(",
"opts",
".",
"sipbindir",
",",
"\"sip\"",
")",
",",
"\"sip_inc_dir\"",
":",
"opts",
".",
"sipincdir",
",",
"\"sip_mod_dir\"",
":",
"opts",
".",
"sipmoddir",
",",
"\"default_bin_dir\"",
":",
"plat_bin_dir",
",",
"\"default_mod_dir\"",
":",
"plat_py_site_dir",
",",
"\"default_sip_dir\"",
":",
"opts",
".",
"sipsipdir",
",",
"\"py_version\"",
":",
"py_version",
",",
"\"py_inc_dir\"",
":",
"plat_py_inc_dir",
",",
"\"py_conf_inc_dir\"",
":",
"plat_py_conf_inc_dir",
",",
"\"py_lib_dir\"",
":",
"plat_py_lib_dir",
",",
"\"universal\"",
":",
"opts",
".",
"universal",
",",
"\"arch\"",
":",
"opts",
".",
"arch",
"}",
"siputils",
".",
"create_config_module",
"(",
"module",
",",
"template",
",",
"content",
",",
"macros",
")"
] |
https://github.com/devpack/android-python27/blob/d42dd67565e104cf7b0b50eb473f615db3e69901/python-build-with-qt/sip-4.11.2/configure.py#L213-L242
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/tools/python3/src/Lib/codecs.py
|
python
|
StreamReader.seek
|
(self, offset, whence=0)
|
Set the input stream's current position.
Resets the codec buffers used for keeping state.
|
Set the input stream's current position.
|
[
"Set",
"the",
"input",
"stream",
"s",
"current",
"position",
"."
] |
def seek(self, offset, whence=0):
""" Set the input stream's current position.
Resets the codec buffers used for keeping state.
"""
self.stream.seek(offset, whence)
self.reset()
|
[
"def",
"seek",
"(",
"self",
",",
"offset",
",",
"whence",
"=",
"0",
")",
":",
"self",
".",
"stream",
".",
"seek",
"(",
"offset",
",",
"whence",
")",
"self",
".",
"reset",
"(",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/codecs.py#L634-L640
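A small stdlib sketch of why the reset matters: after a rewind, any bytes buffered by the incremental decoder must be discarded so the stream re-decodes cleanly from the new position.
import codecs, io

raw = io.BytesIO("héllo".encode("utf-8"))
reader = codecs.getreader("utf-8")(raw)
first = reader.read()
reader.seek(0)                 # rewinds the byte stream and clears codec state
assert reader.read() == first == "héllo"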
|
||
metashell/metashell
|
f4177e4854ea00c8dbc722cadab26ef413d798ea
|
3rd/templight/llvm/utils/lit/lit/ShUtil.py
|
python
|
ShLexer.lex_one_token
|
(self)
|
return self.lex_arg(c)
|
lex_one_token - Lex a single 'sh' token.
|
lex_one_token - Lex a single 'sh' token.
|
[
"lex_one_token",
"-",
"Lex",
"a",
"single",
"sh",
"token",
"."
] |
def lex_one_token(self):
"""
lex_one_token - Lex a single 'sh' token. """
c = self.eat()
if c == ';':
return (c,)
if c == '|':
if self.maybe_eat('|'):
return ('||',)
return (c,)
if c == '&':
if self.maybe_eat('&'):
return ('&&',)
if self.maybe_eat('>'):
return ('&>',)
return (c,)
if c == '>':
if self.maybe_eat('&'):
return ('>&',)
if self.maybe_eat('>'):
return ('>>',)
return (c,)
if c == '<':
if self.maybe_eat('&'):
return ('<&',)
if self.maybe_eat('>'):
return ('<<',)
return (c,)
return self.lex_arg(c)
|
[
"def",
"lex_one_token",
"(",
"self",
")",
":",
"c",
"=",
"self",
".",
"eat",
"(",
")",
"if",
"c",
"==",
"';'",
":",
"return",
"(",
"c",
",",
")",
"if",
"c",
"==",
"'|'",
":",
"if",
"self",
".",
"maybe_eat",
"(",
"'|'",
")",
":",
"return",
"(",
"'||'",
",",
")",
"return",
"(",
"c",
",",
")",
"if",
"c",
"==",
"'&'",
":",
"if",
"self",
".",
"maybe_eat",
"(",
"'&'",
")",
":",
"return",
"(",
"'&&'",
",",
")",
"if",
"self",
".",
"maybe_eat",
"(",
"'>'",
")",
":",
"return",
"(",
"'&>'",
",",
")",
"return",
"(",
"c",
",",
")",
"if",
"c",
"==",
"'>'",
":",
"if",
"self",
".",
"maybe_eat",
"(",
"'&'",
")",
":",
"return",
"(",
"'>&'",
",",
")",
"if",
"self",
".",
"maybe_eat",
"(",
"'>'",
")",
":",
"return",
"(",
"'>>'",
",",
")",
"return",
"(",
"c",
",",
")",
"if",
"c",
"==",
"'<'",
":",
"if",
"self",
".",
"maybe_eat",
"(",
"'&'",
")",
":",
"return",
"(",
"'<&'",
",",
")",
"if",
"self",
".",
"maybe_eat",
"(",
"'>'",
")",
":",
"return",
"(",
"'<<'",
",",
")",
"return",
"(",
"c",
",",
")",
"return",
"self",
".",
"lex_arg",
"(",
"c",
")"
] |
https://github.com/metashell/metashell/blob/f4177e4854ea00c8dbc722cadab26ef413d798ea/3rd/templight/llvm/utils/lit/lit/ShUtil.py#L148-L178
|
|
Atarity/Lightpack
|
4dee73a443cba4c4073291febe450e6c1941f3af
|
Software/apiexamples/liOSC/OSC.py
|
python
|
OSCMultiClient._setTarget
|
(self, address, prefix=None, filters=None)
|
Add (i.e. subscribe) a new OSCTarget, or change the prefix for an existing OSCTarget.
- address ((host, port) tuple): IP-address & UDP-port
- prefix (string): The OSC-address prefix prepended to the address of each OSCMessage
sent to this OSCTarget (optional)
|
Add (i.e. subscribe) a new OSCTarget, or change the prefix for an existing OSCTarget.
- address ((host, port) tuple): IP-address & UDP-port
- prefix (string): The OSC-address prefix prepended to the address of each OSCMessage
sent to this OSCTarget (optional)
|
[
"Add",
"(",
"i",
".",
"e",
".",
"subscribe",
")",
"a",
"new",
"OSCTarget",
"or",
"change",
"the",
"prefix",
"for",
"an",
"existing",
"OSCTarget",
".",
"-",
"address",
"((",
"host",
"port",
")",
"tuple",
")",
":",
"IP",
"-",
"address",
"&",
"UDP",
"-",
"port",
"-",
"prefix",
"(",
"string",
")",
":",
"The",
"OSC",
"-",
"address",
"prefix",
"prepended",
"to",
"the",
"address",
"of",
"each",
"OSCMessage",
"sent",
"to",
"this",
"OSCTarget",
"(",
"optional",
")"
] |
def _setTarget(self, address, prefix=None, filters=None):
"""Add (i.e. subscribe) a new OSCTarget, or change the prefix for an existing OSCTarget.
- address ((host, port) tuple): IP-address & UDP-port
- prefix (string): The OSC-address prefix prepended to the address of each OSCMessage
sent to this OSCTarget (optional)
"""
if address not in self.targets.keys():
self.targets[address] = ["",{}]
if prefix != None:
if len(prefix):
# make sure prefix starts with ONE '/', and does not end with '/'
prefix = '/' + prefix.strip('/')
self.targets[address][0] = prefix
if filters != None:
if type(filters) in types.StringTypes:
(_, filters) = parseFilterStr(filters)
elif type(filters) != types.DictType:
raise TypeError("'filters' argument must be a dict with {addr:bool} entries")
self._updateFilters(self.targets[address][1], filters)
|
[
"def",
"_setTarget",
"(",
"self",
",",
"address",
",",
"prefix",
"=",
"None",
",",
"filters",
"=",
"None",
")",
":",
"if",
"address",
"not",
"in",
"self",
".",
"targets",
".",
"keys",
"(",
")",
":",
"self",
".",
"targets",
"[",
"address",
"]",
"=",
"[",
"\"\"",
",",
"{",
"}",
"]",
"if",
"prefix",
"!=",
"None",
":",
"if",
"len",
"(",
"prefix",
")",
":",
"# make sure prefix starts with ONE '/', and does not end with '/'",
"prefix",
"=",
"'/'",
"+",
"prefix",
".",
"strip",
"(",
"'/'",
")",
"self",
".",
"targets",
"[",
"address",
"]",
"[",
"0",
"]",
"=",
"prefix",
"if",
"filters",
"!=",
"None",
":",
"if",
"type",
"(",
"filters",
")",
"in",
"types",
".",
"StringTypes",
":",
"(",
"_",
",",
"filters",
")",
"=",
"parseFilterStr",
"(",
"filters",
")",
"elif",
"type",
"(",
"filters",
")",
"!=",
"types",
".",
"DictType",
":",
"raise",
"TypeError",
"(",
"\"'filters' argument must be a dict with {addr:bool} entries\"",
")",
"self",
".",
"_updateFilters",
"(",
"self",
".",
"targets",
"[",
"address",
"]",
"[",
"1",
"]",
",",
"filters",
")"
] |
https://github.com/Atarity/Lightpack/blob/4dee73a443cba4c4073291febe450e6c1941f3af/Software/apiexamples/liOSC/OSC.py#L1287-L1309
|
||
CanalTP/navitia
|
cb84ce9859070187e708818b058e6a7e0b7f891b
|
source/jormungandr/jormungandr/scenarios/distributed.py
|
python
|
Scenario.call_kraken
|
(self, request_type, request, instance, krakens_call, request_id, context)
|
All spawned futures must be started (if they're not yet started) when leaving the scope.
We do this to prevent the programme from being blocked in cases where some un-started futures may hold
threading locks. If we leave the scope without cleaning these futures, they may hold locks forever.
Note that the cleaning process depends on the implementation of futures.
|
All spawned futures must be started (if they're not yet started) when leaving the scope.
|
[
"All",
"spawned",
"futures",
"must",
"be",
"started",
"(",
"if",
"they",
"re",
"not",
"yet",
"started",
")",
"when",
"leaving",
"the",
"scope",
"."
] |
def call_kraken(self, request_type, request, instance, krakens_call, request_id, context):
record_custom_parameter('scenario', 'distributed')
logger = logging.getLogger(__name__)
logger.warning("using experimental scenario!!")
"""
All spawned futures must be started (if they're not yet started) when leaving the scope.
We do this to prevent the programme from being blocked in cases where some un-started futures may hold
threading locks. If we leave the scope without cleaning these futures, they may hold locks forever.
Note that the cleaning process depends on the implementation of futures.
"""
try:
with FutureManager(self.greenlet_pool_size) as future_manager, timed_logger(
logger, 'call_kraken', request_id
):
if request_type == type_pb2.ISOCHRONE:
return self._scenario._compute_isochrone_common(
future_manager, request, instance, krakens_call, type_pb2.ISOCHRONE
)
elif request_type == type_pb2.PLANNER:
return self._scenario._compute_journeys(
future_manager, request, instance, krakens_call, context, type_pb2.PLANNER
)
else:
abort(400, message="This type of request is not supported with distributed")
except PtException as e:
logger.exception('')
return [e.get()]
except EntryPointException as e:
logger.exception('')
return [e.get()]
|
[
"def",
"call_kraken",
"(",
"self",
",",
"request_type",
",",
"request",
",",
"instance",
",",
"krakens_call",
",",
"request_id",
",",
"context",
")",
":",
"record_custom_parameter",
"(",
"'scenario'",
",",
"'distributed'",
")",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"__name__",
")",
"logger",
".",
"warning",
"(",
"\"using experimental scenario!!\"",
")",
"try",
":",
"with",
"FutureManager",
"(",
"self",
".",
"greenlet_pool_size",
")",
"as",
"future_manager",
",",
"timed_logger",
"(",
"logger",
",",
"'call_kraken'",
",",
"request_id",
")",
":",
"if",
"request_type",
"==",
"type_pb2",
".",
"ISOCHRONE",
":",
"return",
"self",
".",
"_scenario",
".",
"_compute_isochrone_common",
"(",
"future_manager",
",",
"request",
",",
"instance",
",",
"krakens_call",
",",
"type_pb2",
".",
"ISOCHRONE",
")",
"elif",
"request_type",
"==",
"type_pb2",
".",
"PLANNER",
":",
"return",
"self",
".",
"_scenario",
".",
"_compute_journeys",
"(",
"future_manager",
",",
"request",
",",
"instance",
",",
"krakens_call",
",",
"context",
",",
"type_pb2",
".",
"PLANNER",
")",
"else",
":",
"abort",
"(",
"400",
",",
"message",
"=",
"\"This type of request is not supported with distributed\"",
")",
"except",
"PtException",
"as",
"e",
":",
"logger",
".",
"exception",
"(",
"''",
")",
"return",
"[",
"e",
".",
"get",
"(",
")",
"]",
"except",
"EntryPointException",
"as",
"e",
":",
"logger",
".",
"exception",
"(",
"''",
")",
"return",
"[",
"e",
".",
"get",
"(",
")",
"]"
] |
https://github.com/CanalTP/navitia/blob/cb84ce9859070187e708818b058e6a7e0b7f891b/source/jormungandr/jormungandr/scenarios/distributed.py#L427-L458
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_pydecimal.py
|
python
|
_iexp
|
(x, M, L=8)
|
return M+y
|
Given integers x and M, M > 0, such that x/M is small in absolute
value, compute an integer approximation to M*exp(x/M). For 0 <=
x/M <= 2.4, the absolute error in the result is bounded by 60 (and
is usually much smaller).
|
Given integers x and M, M > 0, such that x/M is small in absolute
value, compute an integer approximation to M*exp(x/M). For 0 <=
x/M <= 2.4, the absolute error in the result is bounded by 60 (and
is usually much smaller).
|
[
"Given",
"integers",
"x",
"and",
"M",
"M",
">",
"0",
"such",
"that",
"x",
"/",
"M",
"is",
"small",
"in",
"absolute",
"value",
"compute",
"an",
"integer",
"approximation",
"to",
"M",
"*",
"exp",
"(",
"x",
"/",
"M",
")",
".",
"For",
"0",
"<",
"=",
"x",
"/",
"M",
"<",
"=",
"2",
".",
"4",
"the",
"absolute",
"error",
"in",
"the",
"result",
"is",
"bounded",
"by",
"60",
"(",
"and",
"is",
"usually",
"much",
"smaller",
")",
"."
] |
def _iexp(x, M, L=8):
"""Given integers x and M, M > 0, such that x/M is small in absolute
value, compute an integer approximation to M*exp(x/M). For 0 <=
x/M <= 2.4, the absolute error in the result is bounded by 60 (and
is usually much smaller)."""
# Algorithm: to compute exp(z) for a real number z, first divide z
# by a suitable power R of 2 so that |z/2**R| < 2**-L. Then
# compute expm1(z/2**R) = exp(z/2**R) - 1 using the usual Taylor
# series
#
# expm1(x) = x + x**2/2! + x**3/3! + ...
#
# Now use the identity
#
# expm1(2x) = expm1(x)*(expm1(x)+2)
#
# R times to compute the sequence expm1(z/2**R),
# expm1(z/2**(R-1)), ... , exp(z/2), exp(z).
# Find R such that x/2**R/M <= 2**-L
R = _nbits((x<<L)//M)
# Taylor series. (2**L)**T > M
T = -int(-10*len(str(M))//(3*L))
y = _div_nearest(x, T)
Mshift = M<<R
for i in range(T-1, 0, -1):
y = _div_nearest(x*(Mshift + y), Mshift * i)
# Expansion
for k in range(R-1, -1, -1):
Mshift = M<<(k+2)
y = _div_nearest(y*(y+Mshift), Mshift)
return M+y
|
[
"def",
"_iexp",
"(",
"x",
",",
"M",
",",
"L",
"=",
"8",
")",
":",
"# Algorithm: to compute exp(z) for a real number z, first divide z",
"# by a suitable power R of 2 so that |z/2**R| < 2**-L. Then",
"# compute expm1(z/2**R) = exp(z/2**R) - 1 using the usual Taylor",
"# series",
"#",
"# expm1(x) = x + x**2/2! + x**3/3! + ...",
"#",
"# Now use the identity",
"#",
"# expm1(2x) = expm1(x)*(expm1(x)+2)",
"#",
"# R times to compute the sequence expm1(z/2**R),",
"# expm1(z/2**(R-1)), ... , exp(z/2), exp(z).",
"# Find R such that x/2**R/M <= 2**-L",
"R",
"=",
"_nbits",
"(",
"(",
"x",
"<<",
"L",
")",
"//",
"M",
")",
"# Taylor series. (2**L)**T > M",
"T",
"=",
"-",
"int",
"(",
"-",
"10",
"*",
"len",
"(",
"str",
"(",
"M",
")",
")",
"//",
"(",
"3",
"*",
"L",
")",
")",
"y",
"=",
"_div_nearest",
"(",
"x",
",",
"T",
")",
"Mshift",
"=",
"M",
"<<",
"R",
"for",
"i",
"in",
"range",
"(",
"T",
"-",
"1",
",",
"0",
",",
"-",
"1",
")",
":",
"y",
"=",
"_div_nearest",
"(",
"x",
"*",
"(",
"Mshift",
"+",
"y",
")",
",",
"Mshift",
"*",
"i",
")",
"# Expansion",
"for",
"k",
"in",
"range",
"(",
"R",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"Mshift",
"=",
"M",
"<<",
"(",
"k",
"+",
"2",
")",
"y",
"=",
"_div_nearest",
"(",
"y",
"*",
"(",
"y",
"+",
"Mshift",
")",
",",
"Mshift",
")",
"return",
"M",
"+",
"y"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/_pydecimal.py#L5889-L5924
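A numeric sanity check of the stated error bound, as a sketch: CPython ships this module as _pydecimal, so the private helper can be imported directly (treat that import as an assumption about your interpreter).
import math
from _pydecimal import _iexp

M = 10**6
for x in (0, 123456, 1000000, 2400000):   # spans 0 <= x/M <= 2.4
    approx = _iexp(x, M)
    exact = M * math.exp(x / M)
    assert abs(approx - exact) < 60, (x, approx, exact)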
|
|
SFTtech/openage
|
d6a08c53c48dc1e157807471df92197f6ca9e04d
|
openage/util/fslike/path.py
|
python
|
Path.resolve_native_path
|
(self, mode="r")
|
Minimize the path and possibly return a native one.
Returns None if there was no native path.
|
Minimize the path and possibly return a native one.
Returns None if there was no native path.
|
[
"Minimize",
"the",
"path",
"and",
"possibly",
"return",
"a",
"native",
"one",
".",
"Returns",
"None",
"if",
"there",
"was",
"no",
"native",
"path",
"."
] |
def resolve_native_path(self, mode="r"):
"""
Minimize the path and possibly return a native one.
Returns None if there was no native path.
"""
if mode == "r":
return self.resolve_native_path_r()
if mode == "w":
return self.resolve_native_path_w()
raise UnsupportedOperation("unsupported resolve mode: " + mode)
|
[
"def",
"resolve_native_path",
"(",
"self",
",",
"mode",
"=",
"\"r\"",
")",
":",
"if",
"mode",
"==",
"\"r\"",
":",
"return",
"self",
".",
"resolve_native_path_r",
"(",
")",
"if",
"mode",
"==",
"\"w\"",
":",
"return",
"self",
".",
"resolve_native_path_w",
"(",
")",
"raise",
"UnsupportedOperation",
"(",
"\"unsupported resolve mode: \"",
"+",
"mode",
")"
] |
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/util/fslike/path.py#L165-L176
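
Usage note: a hedged sketch, assuming `path` is an openage `Path` over some fslike backend (names illustrative, not taken from the openage docs):

native = path.resolve_native_path(mode="r")
if native is None:
    # the entry exists only in a virtual backend (e.g. inside an archive),
    # so fall back to reading through the fslike object itself
    data = path.open("rb").read()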
|
||
apple/swift-lldb
|
d74be846ef3e62de946df343e8c234bde93a8912
|
scripts/Python/static-binding/lldb.py
|
python
|
SBBreakpointList.AppendIfUnique
|
(self, sb_bkpt)
|
return _lldb.SBBreakpointList_AppendIfUnique(self, sb_bkpt)
|
AppendIfUnique(SBBreakpointList self, SBBreakpoint sb_bkpt) -> bool
|
AppendIfUnique(SBBreakpointList self, SBBreakpoint sb_bkpt) -> bool
|
[
"AppendIfUnique",
"(",
"SBBreakpointList",
"self",
"SBBreakpoint",
"sb_bkpt",
")",
"-",
">",
"bool"
] |
def AppendIfUnique(self, sb_bkpt):
"""AppendIfUnique(SBBreakpointList self, SBBreakpoint sb_bkpt) -> bool"""
return _lldb.SBBreakpointList_AppendIfUnique(self, sb_bkpt)
|
[
"def",
"AppendIfUnique",
"(",
"self",
",",
"sb_bkpt",
")",
":",
"return",
"_lldb",
".",
"SBBreakpointList_AppendIfUnique",
"(",
"self",
",",
"sb_bkpt",
")"
] |
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L1916-L1918
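
Usage note: a typical call from the scripting side might look like the following; a live `target` (an `SBTarget`) is assumed:

import lldb

bkpt_list = lldb.SBBreakpointList(target)
bp = target.BreakpointCreateByName("main")
if bkpt_list.AppendIfUnique(bp):         # True only on first insertion
    print("added breakpoint", bp.GetID())
assert not bkpt_list.AppendIfUnique(bp)  # duplicate append returns False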
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/pandas/py2/pandas/io/formats/style.py
|
python
|
Styler._background_gradient
|
(s, cmap='PuBu', low=0, high=0,
text_color_threshold=0.408)
|
Color background in a range according to the data.
|
Color background in a range according to the data.
|
[
"Color",
"background",
"in",
"a",
"range",
"according",
"to",
"the",
"data",
"."
] |
def _background_gradient(s, cmap='PuBu', low=0, high=0,
text_color_threshold=0.408):
"""
Color background in a range according to the data.
"""
if (not isinstance(text_color_threshold, (float, int)) or
not 0 <= text_color_threshold <= 1):
msg = "`text_color_threshold` must be a value from 0 to 1."
raise ValueError(msg)
with _mpl(Styler.background_gradient) as (plt, colors):
smin = s.values.min()
smax = s.values.max()
rng = smax - smin
# extend lower / upper bounds, compresses color range
norm = colors.Normalize(smin - (rng * low), smax + (rng * high))
# matplotlib colors.Normalize modifies inplace?
# https://github.com/matplotlib/matplotlib/issues/5427
rgbas = plt.cm.get_cmap(cmap)(norm(s.values))
def relative_luminance(rgba):
"""
Calculate relative luminance of a color.
The calculation adheres to the W3C standards
(https://www.w3.org/WAI/GL/wiki/Relative_luminance)
Parameters
----------
color : rgb or rgba tuple
Returns
-------
float
The relative luminance as a value from 0 to 1
"""
r, g, b = (
                    x / 12.92 if x <= 0.03928 else ((x + 0.055) / 1.055) ** 2.4
for x in rgba[:3]
)
return 0.2126 * r + 0.7152 * g + 0.0722 * b
def css(rgba):
dark = relative_luminance(rgba) < text_color_threshold
text_color = '#f1f1f1' if dark else '#000000'
return 'background-color: {b};color: {c};'.format(
b=colors.rgb2hex(rgba), c=text_color
)
if s.ndim == 1:
return [css(rgba) for rgba in rgbas]
else:
return pd.DataFrame(
[[css(rgba) for rgba in row] for row in rgbas],
index=s.index, columns=s.columns
)
|
[
"def",
"_background_gradient",
"(",
"s",
",",
"cmap",
"=",
"'PuBu'",
",",
"low",
"=",
"0",
",",
"high",
"=",
"0",
",",
"text_color_threshold",
"=",
"0.408",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"text_color_threshold",
",",
"(",
"float",
",",
"int",
")",
")",
"or",
"not",
"0",
"<=",
"text_color_threshold",
"<=",
"1",
")",
":",
"msg",
"=",
"\"`text_color_threshold` must be a value from 0 to 1.\"",
"raise",
"ValueError",
"(",
"msg",
")",
"with",
"_mpl",
"(",
"Styler",
".",
"background_gradient",
")",
"as",
"(",
"plt",
",",
"colors",
")",
":",
"smin",
"=",
"s",
".",
"values",
".",
"min",
"(",
")",
"smax",
"=",
"s",
".",
"values",
".",
"max",
"(",
")",
"rng",
"=",
"smax",
"-",
"smin",
"# extend lower / upper bounds, compresses color range",
"norm",
"=",
"colors",
".",
"Normalize",
"(",
"smin",
"-",
"(",
"rng",
"*",
"low",
")",
",",
"smax",
"+",
"(",
"rng",
"*",
"high",
")",
")",
"# matplotlib colors.Normalize modifies inplace?",
"# https://github.com/matplotlib/matplotlib/issues/5427",
"rgbas",
"=",
"plt",
".",
"cm",
".",
"get_cmap",
"(",
"cmap",
")",
"(",
"norm",
"(",
"s",
".",
"values",
")",
")",
"def",
"relative_luminance",
"(",
"rgba",
")",
":",
"\"\"\"\n Calculate relative luminance of a color.\n\n The calculation adheres to the W3C standards\n (https://www.w3.org/WAI/GL/wiki/Relative_luminance)\n\n Parameters\n ----------\n color : rgb or rgba tuple\n\n Returns\n -------\n float\n The relative luminance as a value from 0 to 1\n \"\"\"",
"r",
",",
"g",
",",
"b",
"=",
"(",
"x",
"/",
"12.92",
"if",
"x",
"<=",
"0.03928",
"else",
"(",
"(",
"x",
"+",
"0.055",
")",
"/",
"1.055",
"**",
"2.4",
")",
"for",
"x",
"in",
"rgba",
"[",
":",
"3",
"]",
")",
"return",
"0.2126",
"*",
"r",
"+",
"0.7152",
"*",
"g",
"+",
"0.0722",
"*",
"b",
"def",
"css",
"(",
"rgba",
")",
":",
"dark",
"=",
"relative_luminance",
"(",
"rgba",
")",
"<",
"text_color_threshold",
"text_color",
"=",
"'#f1f1f1'",
"if",
"dark",
"else",
"'#000000'",
"return",
"'background-color: {b};color: {c};'",
".",
"format",
"(",
"b",
"=",
"colors",
".",
"rgb2hex",
"(",
"rgba",
")",
",",
"c",
"=",
"text_color",
")",
"if",
"s",
".",
"ndim",
"==",
"1",
":",
"return",
"[",
"css",
"(",
"rgba",
")",
"for",
"rgba",
"in",
"rgbas",
"]",
"else",
":",
"return",
"pd",
".",
"DataFrame",
"(",
"[",
"[",
"css",
"(",
"rgba",
")",
"for",
"rgba",
"in",
"row",
"]",
"for",
"row",
"in",
"rgbas",
"]",
",",
"index",
"=",
"s",
".",
"index",
",",
"columns",
"=",
"s",
".",
"columns",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py2/pandas/io/formats/style.py#L930-L985
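
Usage note: `_background_gradient` is a private helper; the supported entry point is `DataFrame.style.background_gradient`, which dispatches to it column-wise. A minimal sketch (matplotlib must be installed):

import pandas as pd

df = pd.DataFrame({"a": [1, 2, 3], "b": [40, 50, 60]})
styled = df.style.background_gradient(cmap="PuBu", low=0, high=0)
html = styled.render()  # emits per-cell "background-color: ...;color: ...;" rules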
|
||
AMReX-Astro/Castro
|
5bf85dc1fe41909206d80ff71463f2baad22dab5
|
Exec/science/flame/analysis/flame_speed.py
|
python
|
Profile.find_flame_width
|
(self)
|
return dT/gradT
|
given a profile T(x), find the flame width
|
given a profile T(x), find the flame width
|
[
"given",
"a",
"profile",
"T",
"(",
"x",
")",
"find",
"the",
"flame",
"width"
] |
def find_flame_width(self):
""" given a profile T(x), find the flame width """
gradT = np.max(np.abs( (self.T[1:] - self.T[:-1])/(self.x[1:] - self.x[:-1]) ))
dT = self.T.max() - self.T.min()
return dT/gradT
|
[
"def",
"find_flame_width",
"(",
"self",
")",
":",
"gradT",
"=",
"np",
".",
"max",
"(",
"np",
".",
"abs",
"(",
"(",
"self",
".",
"T",
"[",
"1",
":",
"]",
"-",
"self",
".",
"T",
"[",
":",
"-",
"1",
"]",
")",
"/",
"(",
"self",
".",
"x",
"[",
"1",
":",
"]",
"-",
"self",
".",
"x",
"[",
":",
"-",
"1",
"]",
")",
")",
")",
"dT",
"=",
"self",
".",
"T",
".",
"max",
"(",
")",
"-",
"self",
".",
"T",
".",
"min",
"(",
")",
"return",
"dT",
"/",
"gradT"
] |
https://github.com/AMReX-Astro/Castro/blob/5bf85dc1fe41909206d80ff71463f2baad22dab5/Exec/science/flame/analysis/flame_speed.py#L77-L83
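
Usage note: a self-contained check against a synthetic tanh front; the minimal `Profile` stand-in below is an assumption (only the `x` and `T` attributes are needed):

import numpy as np

class Profile:
    def __init__(self, x, T):
        self.x = np.asarray(x)
        self.T = np.asarray(T)

    def find_flame_width(self):
        gradT = np.max(np.abs((self.T[1:] - self.T[:-1]) / (self.x[1:] - self.x[:-1])))
        dT = self.T.max() - self.T.min()
        return dT / gradT

x = np.linspace(0.0, 1.0, 2001)
T = 0.5 * (1.0 + np.tanh((x - 0.5) / 0.01))  # front of characteristic width 0.01
print(Profile(x, T).find_flame_width())       # ~0.02 = dT / max|dT/dx|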
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/common.py
|
python
|
get_handle
|
(
path_or_buf,
mode: str,
encoding=None,
compression: Optional[Union[str, Mapping[str, Any]]] = None,
memory_map: bool = False,
is_text: bool = True,
)
|
return f, handles
|
Get file handle for given path/buffer and mode.
Parameters
----------
path_or_buf : str or file handle
File path or object.
mode : str
Mode to open path_or_buf with.
encoding : str or None
Encoding to use.
compression : str or dict, default None
If string, specifies compression mode. If dict, value at key 'method'
specifies compression mode. Compression mode must be one of {'infer',
'gzip', 'bz2', 'zip', 'xz', None}. If compression mode is 'infer'
and `filepath_or_buffer` is path-like, then detect compression from
the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise
no compression). If dict and compression mode is 'zip' or inferred as
'zip', other entries passed as additional compression options.
.. versionchanged:: 1.0.0
May now be a dict with key 'method' as compression mode
and other keys as compression options if compression
mode is 'zip'.
memory_map : boolean, default False
See parsers._parser_params for more information.
is_text : boolean, default True
whether file/buffer is in text format (csv, json, etc.), or in binary
mode (pickle, etc.).
Returns
-------
f : file-like
A file-like object.
handles : list of file-like objects
A list of file-like object that were opened in this function.
|
Get file handle for given path/buffer and mode.
|
[
"Get",
"file",
"handle",
"for",
"given",
"path",
"/",
"buffer",
"and",
"mode",
"."
] |
def get_handle(
path_or_buf,
mode: str,
encoding=None,
compression: Optional[Union[str, Mapping[str, Any]]] = None,
memory_map: bool = False,
is_text: bool = True,
):
"""
Get file handle for given path/buffer and mode.
Parameters
----------
path_or_buf : str or file handle
File path or object.
mode : str
Mode to open path_or_buf with.
encoding : str or None
Encoding to use.
compression : str or dict, default None
If string, specifies compression mode. If dict, value at key 'method'
specifies compression mode. Compression mode must be one of {'infer',
'gzip', 'bz2', 'zip', 'xz', None}. If compression mode is 'infer'
and `filepath_or_buffer` is path-like, then detect compression from
the following extensions: '.gz', '.bz2', '.zip', or '.xz' (otherwise
no compression). If dict and compression mode is 'zip' or inferred as
'zip', other entries passed as additional compression options.
.. versionchanged:: 1.0.0
May now be a dict with key 'method' as compression mode
and other keys as compression options if compression
mode is 'zip'.
memory_map : boolean, default False
See parsers._parser_params for more information.
is_text : boolean, default True
whether file/buffer is in text format (csv, json, etc.), or in binary
mode (pickle, etc.).
Returns
-------
f : file-like
A file-like object.
handles : list of file-like objects
A list of file-like object that were opened in this function.
"""
try:
from s3fs import S3File
need_text_wrapping = (BufferedIOBase, RawIOBase, S3File)
except ImportError:
need_text_wrapping = (BufferedIOBase, RawIOBase) # type: ignore
handles: List[IO] = list()
f = path_or_buf
# Convert pathlib.Path/py.path.local or string
path_or_buf = stringify_path(path_or_buf)
is_path = isinstance(path_or_buf, str)
compression, compression_args = get_compression_method(compression)
if is_path:
compression = infer_compression(path_or_buf, compression)
if compression:
# GZ Compression
if compression == "gzip":
if is_path:
f = gzip.open(path_or_buf, mode)
else:
f = gzip.GzipFile(fileobj=path_or_buf)
# BZ Compression
elif compression == "bz2":
if is_path:
f = bz2.BZ2File(path_or_buf, mode)
else:
f = bz2.BZ2File(path_or_buf)
# ZIP Compression
elif compression == "zip":
zf = _BytesZipFile(path_or_buf, mode, **compression_args)
# Ensure the container is closed as well.
handles.append(zf)
if zf.mode == "w":
f = zf
elif zf.mode == "r":
zip_names = zf.namelist()
if len(zip_names) == 1:
f = zf.open(zip_names.pop())
elif len(zip_names) == 0:
raise ValueError(f"Zero files found in ZIP file {path_or_buf}")
else:
raise ValueError(
"Multiple files found in ZIP file."
f" Only one file per ZIP: {zip_names}"
)
# XZ Compression
elif compression == "xz":
f = _get_lzma_file(lzma)(path_or_buf, mode)
# Unrecognized Compression
else:
msg = f"Unrecognized compression type: {compression}"
raise ValueError(msg)
handles.append(f)
elif is_path:
if encoding:
# Encoding
f = open(path_or_buf, mode, encoding=encoding, newline="")
elif is_text:
# No explicit encoding
f = open(path_or_buf, mode, errors="replace", newline="")
else:
# Binary mode
f = open(path_or_buf, mode)
handles.append(f)
# Convert BytesIO or file objects passed with an encoding
if is_text and (compression or isinstance(f, need_text_wrapping)):
from io import TextIOWrapper
g = TextIOWrapper(f, encoding=encoding, newline="")
if not isinstance(f, (BufferedIOBase, RawIOBase)):
handles.append(g)
f = g
if memory_map and hasattr(f, "fileno"):
try:
wrapped = _MMapWrapper(f)
f.close()
f = wrapped
except Exception:
# we catch any errors that may have occurred
# because that is consistent with the lower-level
# functionality of the C engine (pd.read_csv), so
# leave the file handler as is then
pass
return f, handles
|
[
"def",
"get_handle",
"(",
"path_or_buf",
",",
"mode",
":",
"str",
",",
"encoding",
"=",
"None",
",",
"compression",
":",
"Optional",
"[",
"Union",
"[",
"str",
",",
"Mapping",
"[",
"str",
",",
"Any",
"]",
"]",
"]",
"=",
"None",
",",
"memory_map",
":",
"bool",
"=",
"False",
",",
"is_text",
":",
"bool",
"=",
"True",
",",
")",
":",
"try",
":",
"from",
"s3fs",
"import",
"S3File",
"need_text_wrapping",
"=",
"(",
"BufferedIOBase",
",",
"RawIOBase",
",",
"S3File",
")",
"except",
"ImportError",
":",
"need_text_wrapping",
"=",
"(",
"BufferedIOBase",
",",
"RawIOBase",
")",
"# type: ignore",
"handles",
":",
"List",
"[",
"IO",
"]",
"=",
"list",
"(",
")",
"f",
"=",
"path_or_buf",
"# Convert pathlib.Path/py.path.local or string",
"path_or_buf",
"=",
"stringify_path",
"(",
"path_or_buf",
")",
"is_path",
"=",
"isinstance",
"(",
"path_or_buf",
",",
"str",
")",
"compression",
",",
"compression_args",
"=",
"get_compression_method",
"(",
"compression",
")",
"if",
"is_path",
":",
"compression",
"=",
"infer_compression",
"(",
"path_or_buf",
",",
"compression",
")",
"if",
"compression",
":",
"# GZ Compression",
"if",
"compression",
"==",
"\"gzip\"",
":",
"if",
"is_path",
":",
"f",
"=",
"gzip",
".",
"open",
"(",
"path_or_buf",
",",
"mode",
")",
"else",
":",
"f",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"path_or_buf",
")",
"# BZ Compression",
"elif",
"compression",
"==",
"\"bz2\"",
":",
"if",
"is_path",
":",
"f",
"=",
"bz2",
".",
"BZ2File",
"(",
"path_or_buf",
",",
"mode",
")",
"else",
":",
"f",
"=",
"bz2",
".",
"BZ2File",
"(",
"path_or_buf",
")",
"# ZIP Compression",
"elif",
"compression",
"==",
"\"zip\"",
":",
"zf",
"=",
"_BytesZipFile",
"(",
"path_or_buf",
",",
"mode",
",",
"*",
"*",
"compression_args",
")",
"# Ensure the container is closed as well.",
"handles",
".",
"append",
"(",
"zf",
")",
"if",
"zf",
".",
"mode",
"==",
"\"w\"",
":",
"f",
"=",
"zf",
"elif",
"zf",
".",
"mode",
"==",
"\"r\"",
":",
"zip_names",
"=",
"zf",
".",
"namelist",
"(",
")",
"if",
"len",
"(",
"zip_names",
")",
"==",
"1",
":",
"f",
"=",
"zf",
".",
"open",
"(",
"zip_names",
".",
"pop",
"(",
")",
")",
"elif",
"len",
"(",
"zip_names",
")",
"==",
"0",
":",
"raise",
"ValueError",
"(",
"f\"Zero files found in ZIP file {path_or_buf}\"",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Multiple files found in ZIP file.\"",
"f\" Only one file per ZIP: {zip_names}\"",
")",
"# XZ Compression",
"elif",
"compression",
"==",
"\"xz\"",
":",
"f",
"=",
"_get_lzma_file",
"(",
"lzma",
")",
"(",
"path_or_buf",
",",
"mode",
")",
"# Unrecognized Compression",
"else",
":",
"msg",
"=",
"f\"Unrecognized compression type: {compression}\"",
"raise",
"ValueError",
"(",
"msg",
")",
"handles",
".",
"append",
"(",
"f",
")",
"elif",
"is_path",
":",
"if",
"encoding",
":",
"# Encoding",
"f",
"=",
"open",
"(",
"path_or_buf",
",",
"mode",
",",
"encoding",
"=",
"encoding",
",",
"newline",
"=",
"\"\"",
")",
"elif",
"is_text",
":",
"# No explicit encoding",
"f",
"=",
"open",
"(",
"path_or_buf",
",",
"mode",
",",
"errors",
"=",
"\"replace\"",
",",
"newline",
"=",
"\"\"",
")",
"else",
":",
"# Binary mode",
"f",
"=",
"open",
"(",
"path_or_buf",
",",
"mode",
")",
"handles",
".",
"append",
"(",
"f",
")",
"# Convert BytesIO or file objects passed with an encoding",
"if",
"is_text",
"and",
"(",
"compression",
"or",
"isinstance",
"(",
"f",
",",
"need_text_wrapping",
")",
")",
":",
"from",
"io",
"import",
"TextIOWrapper",
"g",
"=",
"TextIOWrapper",
"(",
"f",
",",
"encoding",
"=",
"encoding",
",",
"newline",
"=",
"\"\"",
")",
"if",
"not",
"isinstance",
"(",
"f",
",",
"(",
"BufferedIOBase",
",",
"RawIOBase",
")",
")",
":",
"handles",
".",
"append",
"(",
"g",
")",
"f",
"=",
"g",
"if",
"memory_map",
"and",
"hasattr",
"(",
"f",
",",
"\"fileno\"",
")",
":",
"try",
":",
"wrapped",
"=",
"_MMapWrapper",
"(",
"f",
")",
"f",
".",
"close",
"(",
")",
"f",
"=",
"wrapped",
"except",
"Exception",
":",
"# we catch any errors that may have occurred",
"# because that is consistent with the lower-level",
"# functionality of the C engine (pd.read_csv), so",
"# leave the file handler as is then",
"pass",
"return",
"f",
",",
"handles"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/pandas/io/common.py#L314-L458
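
Usage note: a hedged sketch of typical use; `get_handle` is a pandas-internal helper, so the exact signature can differ across versions, and the file name here is hypothetical:

f, handles = get_handle("events.csv.gz", "r", encoding="utf-8",
                        compression="infer")  # compression inferred from ".gz"
try:
    header = f.readline()
finally:
    for h in handles:  # close everything get_handle opened on our behalf
        h.close()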
|
|
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/mailbox.py
|
python
|
MaildirMessage.get_flags
|
(self)
|
Return as a string the flags that are set.
|
Return as a string the flags that are set.
|
[
"Return",
"as",
"a",
"string",
"the",
"flags",
"that",
"are",
"set",
"."
] |
def get_flags(self):
"""Return as a string the flags that are set."""
if self._info.startswith('2,'):
return self._info[2:]
else:
return ''
|
[
"def",
"get_flags",
"(",
"self",
")",
":",
"if",
"self",
".",
"_info",
".",
"startswith",
"(",
"'2,'",
")",
":",
"return",
"self",
".",
"_info",
"[",
"2",
":",
"]",
"else",
":",
"return",
"''"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/mailbox.py#L1390-L1395
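
Usage note: round-tripping flags through the Maildir info string:

import mailbox

msg = mailbox.MaildirMessage()
msg.set_flags("SF")        # stored as info "2,FS" (set_flags sorts the letters)
print(msg.get_flags())     # -> "FS"
print(mailbox.MaildirMessage().get_flags())  # -> "" (no "2," prefix yet)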
|
||
eclipse/sumo
|
7132a9b8b6eea734bdec38479026b4d8c4336d03
|
tools/contributed/sumopy/agilepy/lib_wx/objpanel.py
|
python
|
WidgetContainer.get_widgetvalue
|
(self)
|
return None
|
Returns current value from valuewidget.
Depends on attribute type and hence widgettype.
To be overridden.
|
Returns current value from valuewidget.
Depends on attribute type and hence widgettype.
To be overridden.
|
[
"Returnes",
"current",
"value",
"from",
"valuewidget",
".",
"Depends",
"on",
"attribute",
"type",
"and",
"hence",
"widgettype",
".",
"To",
"be",
"overwritten",
"."
] |
def get_widgetvalue(self):
"""
        Returns current value from valuewidget.
        Depends on attribute type and hence widgettype.
        To be overridden.
"""
return None
|
[
"def",
"get_widgetvalue",
"(",
"self",
")",
":",
"return",
"None"
] |
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/contributed/sumopy/agilepy/lib_wx/objpanel.py#L307-L313
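
Usage note: a hypothetical subclass sketch showing the intended override; the widget type and attribute names are assumptions, not taken from sumopy:

class TextWidgetContainer(WidgetContainer):
    def get_widgetvalue(self):
        # pull the current value straight out of the wx control
        return self.valuewidget.GetValue()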
|
|
lammps/lammps
|
b75c3065430a75b1b5543a10e10f46d9b4c91913
|
tools/i-pi/ipi/inputs/outputs.py
|
python
|
InputOutputs.make_default
|
(cls)
|
return [ ipi.engine.outputs.PropertyOutput(filename="i-pi.md", stride=10, outlist=[ "time", "step", "conserved", "temperature", "potential", "kinetic_cv" ] ),
ipi.engine.outputs.TrajectoryOutput(filename="i-pi.pos", stride=100, what="positions", format="xyz"),
ipi.engine.outputs.CheckpointOutput(filename="i-pi.checkpoint", stride=1000, overwrite=True)]
|
Used to make the default value of the outputs class for use when no
output is specified.
Needed since this is a fairly complicated default, with many mutable
objects, and the default has to be generated by a function that does not
use any mutable objects as arguments.
|
Used to make the default value of the outputs class for use when no
output is specified.
|
[
"Used",
"to",
"make",
"the",
"default",
"value",
"of",
"the",
"outputs",
"class",
"for",
"use",
"when",
"no",
"output",
"is",
"specified",
"."
] |
def make_default(cls):
"""Used to make the default value of the outputs class for use when no
output is specified.
Needed since this is a fairly complicated default, with many mutable
objects, and the default has to be generated by a function that does not
use any mutable objects as arguments.
"""
return [ ipi.engine.outputs.PropertyOutput(filename="i-pi.md", stride=10, outlist=[ "time", "step", "conserved", "temperature", "potential", "kinetic_cv" ] ),
ipi.engine.outputs.TrajectoryOutput(filename="i-pi.pos", stride=100, what="positions", format="xyz"),
ipi.engine.outputs.CheckpointOutput(filename="i-pi.checkpoint", stride=1000, overwrite=True)]
|
[
"def",
"make_default",
"(",
"cls",
")",
":",
"return",
"[",
"ipi",
".",
"engine",
".",
"outputs",
".",
"PropertyOutput",
"(",
"filename",
"=",
"\"i-pi.md\"",
",",
"stride",
"=",
"10",
",",
"outlist",
"=",
"[",
"\"time\"",
",",
"\"step\"",
",",
"\"conserved\"",
",",
"\"temperature\"",
",",
"\"potential\"",
",",
"\"kinetic_cv\"",
"]",
")",
",",
"ipi",
".",
"engine",
".",
"outputs",
".",
"TrajectoryOutput",
"(",
"filename",
"=",
"\"i-pi.pos\"",
",",
"stride",
"=",
"100",
",",
"what",
"=",
"\"positions\"",
",",
"format",
"=",
"\"xyz\"",
")",
",",
"ipi",
".",
"engine",
".",
"outputs",
".",
"CheckpointOutput",
"(",
"filename",
"=",
"\"i-pi.checkpoint\"",
",",
"stride",
"=",
"1000",
",",
"overwrite",
"=",
"True",
")",
"]"
] |
https://github.com/lammps/lammps/blob/b75c3065430a75b1b5543a10e10f46d9b4c91913/tools/i-pi/ipi/inputs/outputs.py#L265-L276
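
Usage note: assuming `make_default` is bound as a classmethod (its `cls` parameter suggests so) and that the constructor keywords are stored as attributes, each call yields fresh mutable objects, which is the point of building the default in a function:

defaults = InputOutputs.make_default()
assert defaults is not InputOutputs.make_default()  # no shared mutable default
for out in defaults:
    print(type(out).__name__, out.filename, out.stride)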
|
|
pmq20/node-packer
|
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
|
current/deps/v8/tools/grokdump.py
|
python
|
InspectionShell.do_dsa
|
(self, address)
|
return self.do_display_stack_ascii(address)
|
see display_stack_ascii
|
see display_stack_ascii
|
[
"see",
"display_stack_ascii"
] |
def do_dsa(self, address):
""" see display_stack_ascii"""
return self.do_display_stack_ascii(address)
|
[
"def",
"do_dsa",
"(",
"self",
",",
"address",
")",
":",
"return",
"self",
".",
"do_display_stack_ascii",
"(",
"address",
")"
] |
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/deps/v8/tools/grokdump.py#L3510-L3512
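
Usage note: `InspectionShell` extends `cmd.Cmd`, so the `do_` prefix makes this the handler for a `dsa` shell command, a pure alias; the prompt and address below are illustrative:

(grokdump) dsa 0x7fff5fbff000    # identical to: display_stack_ascii 0x7fff5fbff000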
|