Columns: nwo (string, 5-86 chars) | sha (string, 40 chars) | path (string, 4-189 chars) | language (1 class: python) | identifier (string, 1-94 chars) | parameters (string, 2-4.03k chars) | argument_list (1 class) | return_statement (string, 0-11.5k chars) | docstring (string, 1-33.2k chars) | docstring_summary (string, 0-5.15k chars) | docstring_tokens (list) | function (string, 34-151k chars) | function_tokens (list) | url (string, 90-278 chars)

nwo: hanpfei/chromium-net | sha: 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
path: third_party/catapult/telemetry/third_party/pyfakefs/pyfakefs/fake_filesystem.py | language: python
identifier: FakeOsModule.mkdir | parameters: (self, dir_name, mode=PERM_DEF)
docstring:
Create a leaf Fake directory.
Args:
dir_name: (str) Name of directory to create. Relative paths are assumed
to be relative to '/'.
mode: (int) Mode to create directory with. This argument defaults to
0o777. The umask is applied to this mode.
Raises:
OSError: if the directory name is invalid or parent directory is read only
or as per FakeFilesystem.AddObject.
function:
def mkdir(self, dir_name, mode=PERM_DEF):
    """Create a leaf Fake directory.
    Args:
      dir_name: (str) Name of directory to create. Relative paths are assumed
        to be relative to '/'.
      mode: (int) Mode to create directory with. This argument defaults to
        0o777. The umask is applied to this mode.
    Raises:
      OSError: if the directory name is invalid or parent directory is read only
        or as per FakeFilesystem.AddObject.
    """
    if dir_name.endswith(self.sep):
        dir_name = dir_name[:-1]
    parent_dir, _ = self.path.split(dir_name)
    if parent_dir:
        base_dir = self.path.normpath(parent_dir)
        if parent_dir.endswith(self.sep + '..'):
            base_dir, unused_dotdot, _ = parent_dir.partition(self.sep + '..')
        if not self.filesystem.Exists(base_dir):
            raise OSError(errno.ENOENT, 'No such fake directory', base_dir)
    dir_name = self.filesystem.NormalizePath(dir_name)
    if self.filesystem.Exists(dir_name):
        raise OSError(errno.EEXIST, 'Fake object already exists', dir_name)
    head, tail = self.path.split(dir_name)
    directory_object = self.filesystem.GetObject(head)
    if not directory_object.st_mode & PERM_WRITE:
        raise OSError(errno.EACCES, 'Permission Denied', dir_name)
    self.filesystem.AddObject(
        head, FakeDirectory(tail, mode & ~self.filesystem.umask))
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/telemetry/third_party/pyfakefs/pyfakefs/fake_filesystem.py#L1630-L1663
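As a quick illustration of the excerpt above, here is a minimal sketch of driving this fake `mkdir` through pyfakefs's classic module-level API (the `FakeFilesystem`/`FakeOsModule` setup is standard for this vintage of pyfakefs; the path is illustrative):

```python
from pyfakefs import fake_filesystem

# An in-memory filesystem plus an os-like facade over it.
fs = fake_filesystem.FakeFilesystem()
fake_os = fake_filesystem.FakeOsModule(fs)

fake_os.mkdir('/data')               # creates a leaf fake directory
print(fake_os.path.isdir('/data'))   # True
# Calling fake_os.mkdir('/data') again raises OSError(errno.EEXIST, ...).
```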

nwo: catboost/catboost | sha: 167f64f237114a4d10b2b4ee42adb4569137debe
path: contrib/python/Jinja2/py2/jinja2/filters.py | language: python
identifier: do_reject | parameters: (*args, **kwargs)
return_statement: return select_or_reject(args, kwargs, lambda x: not x, False)
docstring:
Filters a sequence of objects by applying a test to each object,
and rejecting the objects with the test succeeding.
If no test is specified, each object will be evaluated as a boolean.
Example usage:
.. sourcecode:: jinja
{{ numbers|reject("odd") }}
Similar to a generator comprehension such as:
.. code-block:: python
(n for n in numbers if not test_odd(n))
.. versionadded:: 2.7
function:
def do_reject(*args, **kwargs):
    """Filters a sequence of objects by applying a test to each object,
    and rejecting the objects with the test succeeding.
    If no test is specified, each object will be evaluated as a boolean.
    Example usage:
    .. sourcecode:: jinja
        {{ numbers|reject("odd") }}
    Similar to a generator comprehension such as:
    .. code-block:: python
        (n for n in numbers if not test_odd(n))
    .. versionadded:: 2.7
    """
    return select_or_reject(args, kwargs, lambda x: not x, False)
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/Jinja2/py2/jinja2/filters.py#L1151-L1171
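The docstring's template example can be exercised from plain Python with the public Jinja2 API (the list and test name are illustrative):

```python
from jinja2 import Environment

env = Environment()
template = env.from_string('{{ numbers | reject("odd") | list }}')
print(template.render(numbers=[1, 2, 3, 4]))  # [2, 4]
```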

nwo: rprichard/CxxCodeBrowser | sha: a2fa83d2fe06119f0a7a1827b8167fab88b53561
path: third_party/libre2/lib/codereview/codereview.py | language: python
identifier: VersionControlSystem.GetUnknownFiles | parameters: (self)
docstring:
Return a list of files unknown to the VCS.
function:
def GetUnknownFiles(self):
    """Return a list of files unknown to the VCS."""
    raise NotImplementedError(
        "abstract method -- subclass %s must override" % self.__class__)
https://github.com/rprichard/CxxCodeBrowser/blob/a2fa83d2fe06119f0a7a1827b8167fab88b53561/third_party/libre2/lib/codereview/codereview.py#L3178-L3181

nwo: weolar/miniblink49 | sha: 1c4678db0594a4abde23d3ebbcc7cd13c3170777
path: third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/coverage/parser.py | language: python
identifier: ByteParser._all_chunks | parameters: (self)
return_statement: return chunks
docstring:
Returns a list of `Chunk` objects for this code and its children.
See `_split_into_chunks` for details.
function:
def _all_chunks(self):
    """Returns a list of `Chunk` objects for this code and its children.
    See `_split_into_chunks` for details.
    """
    chunks = []
    for bp in self.child_parsers():
        chunks.extend(bp._split_into_chunks())
    return chunks
https://github.com/weolar/miniblink49/blob/1c4678db0594a4abde23d3ebbcc7cd13c3170777/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/coverage/parser.py#L599-L609

nwo: wxWidgets/wxPython-Classic | sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/osx_cocoa/grid.py | language: python
identifier: Grid.CellToRect | parameters: (*args, **kwargs)
return_statement: return _grid.Grid_CellToRect(*args, **kwargs)
docstring:
CellToRect(self, int row, int col) -> Rect
function:
def CellToRect(*args, **kwargs):
    """CellToRect(self, int row, int col) -> Rect"""
    return _grid.Grid_CellToRect(*args, **kwargs)
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/grid.py#L1402-L1404

nwo: aws/lumberyard | sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/ftplib.py | language: python
identifier: FTP.dir | parameters: (self, *args)
docstring:
List a directory in long form.
By default list current directory to stdout.
Optional last argument is callback function; all
non-empty arguments before it are concatenated to the
LIST command. (This *should* only be used for a pathname.)
function:
def dir(self, *args):
    '''List a directory in long form.
    By default list current directory to stdout.
    Optional last argument is callback function; all
    non-empty arguments before it are concatenated to the
    LIST command. (This *should* only be used for a pathname.)'''
    cmd = 'LIST'
    func = None
    if args[-1:] and type(args[-1]) != type(''):
        args, func = args[:-1], args[-1]
    for arg in args:
        if arg:
            cmd = cmd + (' ' + arg)
    self.retrlines(cmd, func)
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/ftplib.py#L562-L575
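A minimal sketch of the call patterns this docstring describes (the host is illustrative; any FTP server that allows anonymous login works):

```python
from ftplib import FTP

ftp = FTP('ftp.example.com')  # illustrative host
ftp.login()                   # anonymous login
ftp.dir()                     # LIST of the current directory, printed to stdout
ftp.dir('pub', print)         # path args are appended to LIST; a trailing callable becomes the callback
ftp.quit()
```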

nwo: martinmoene/variant-lite | sha: f1af3518e4c28f12b09839b9d2ee37984cbf137a
path: script/update-version.py | language: python
identifier: editFilesToVersionFromCommandLine | parameters: ()
docstring:
Update version number given on command line in paths from configuration table.
function:
def editFilesToVersionFromCommandLine():
    """Update version number given on command line in paths from configuration table."""
    parser = argparse.ArgumentParser(
        description='Update version number in files.',
        epilog="""""",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        'version',
        metavar='version',
        type=str,
        nargs=1,
        help='new version number, like 1.2.3')
    parser.add_argument(
        '-v', '--verbose',
        action='store_true',
        help='report the name of the file being processed')
    parser.add_argument(
        '--max-types',
        metavar='types',
        type=int,
        default=def_max_types,
        help='number of variant types')
    parser.add_argument(
        '--max-args',
        metavar='args',
        type=int,
        default=def_max_args,
        help='number of arguments for \'visit\' methods')
    args = parser.parse_args()
    editFilesToVersion( args.version[0], table, args.verbose )
    makeVariantHeader( 'template/variant.hpp', 'include/nonstd/variant.hpp', args.max_types, args.max_args, args.verbose )
https://github.com/martinmoene/variant-lite/blob/f1af3518e4c28f12b09839b9d2ee37984cbf137a/script/update-version.py#L108-L145

nwo: ApolloAuto/apollo | sha: 463fb82f9e979d02dcb25044e60931293ab2dba0
path: cyber/python/cyber_py3/cyber_time.py | language: python
identifier: Time.now | parameters: ()
return_statement: return time_now
docstring:
return current time.
function:
def now():
    """
    return current time.
    """
    # print _CYBER_TIME.PyTime_now()
    # print type(_CYBER_TIME.PyTime_now())
    time_now = Time(_CYBER_TIME.PyTime_now())
    return time_now
https://github.com/ApolloAuto/apollo/blob/463fb82f9e979d02dcb25044e60931293ab2dba0/cyber/python/cyber_py3/cyber_time.py#L137-L144

nwo: aws/lumberyard | sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/queue.py | language: python
identifier: _PySimpleQueue.put_nowait | parameters: (self, item)
return_statement: return self.put(item, block=False)
docstring:
Put an item into the queue without blocking.
This is exactly equivalent to `put(item)` and is only provided
for compatibility with the Queue class.
function:
def put_nowait(self, item):
    '''Put an item into the queue without blocking.
    This is exactly equivalent to `put(item)` and is only provided
    for compatibility with the Queue class.
    '''
    return self.put(item, block=False)
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/queue.py#L295-L301
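The public counterpart of this private class is exposed as `queue.SimpleQueue` in Python 3.7+; a minimal usage sketch:

```python
from queue import SimpleQueue

q = SimpleQueue()
q.put_nowait('job-1')  # exactly equivalent to q.put('job-1'); kept for Queue compatibility
print(q.get())         # 'job-1'
```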

nwo: kushview/Element | sha: 1cc16380caa2ab79461246ba758b9de1f46db2a5
path: waflib/extras/msvs.py | language: python
identifier: vsnode_target.collect_properties | parameters: (self)
docstring:
Visual studio projects are associated with platforms and configurations (for building especially)
function:
def collect_properties(self):
    """
    Visual studio projects are associated with platforms and configurations (for building especially)
    """
    super(vsnode_target, self).collect_properties()
    for x in self.build_properties:
        x.outdir = self.path.parent.win32path()
        x.preprocessor_definitions = ''
        x.includes_search_path = ''
        try:
            tsk = self.tg.link_task
        except AttributeError:
            pass
        else:
            x.output_file = tsk.outputs[0].win32path()
            x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
            x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
https://github.com/kushview/Element/blob/1cc16380caa2ab79461246ba758b9de1f46db2a5/waflib/extras/msvs.py#L705-L722

nwo: AMReX-Astro/Castro | sha: 5bf85dc1fe41909206d80ff71463f2baad22dab5
path: Exec/science/flame_wave/analysis/profiles.py | language: python
identifier: _Abar | parameters: (field, data)
return_statement: return 1 / sum * amu
docstring:
Mean atomic mass.
function:
def _Abar(field, data):
    """ Mean atomic mass. """
    sum = None
    for i, f in enumerate(mfrac_fields):
        mfracs = data[f]
        A = atomic_masses[i]
        if sum is None: sum = mfracs / A
        else: sum += mfracs / A
    return 1 / sum * amu
https://github.com/AMReX-Astro/Castro/blob/5bf85dc1fe41909206d80ff71463f2baad22dab5/Exec/science/flame_wave/analysis/profiles.py#L21-L34
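The function computes the mean atomic mass as a mass-fraction-weighted harmonic mean, Abar = amu / sum_i(X_i / A_i). A standalone numeric sketch of the same arithmetic (the species mix, mass fractions, and masses are illustrative):

```python
amu = 1.66053906660e-24          # grams per atomic mass unit
mass_fractions = [0.75, 0.25]    # illustrative: 75% H (A=1), 25% He-4 (A=4) by mass
atomic_masses = [1.0, 4.0]

inv_abar = sum(x / a for x, a in zip(mass_fractions, atomic_masses))
abar = 1.0 / inv_abar * amu      # same form as `return 1 / sum * amu` above
print(abar / amu)                # ~1.23, the dimensionless mean atomic mass
```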

nwo: Xilinx/Vitis-AI | sha: fc74d404563d9951b57245443c73bef389f3657f
path: tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/monitoring.py | language: python
identifier: StringGauge.get_cell | parameters: (self, *labels)
return_statement: return StringGaugeCell(super(StringGauge, self).get_cell(*labels))
docstring:
Retrieves the cell.
function:
def get_cell(self, *labels):
    """Retrieves the cell."""
    return StringGaugeCell(super(StringGauge, self).get_cell(*labels))
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/eager/monitoring.py#L284-L286
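This is TensorFlow's internal eager-metrics API; a minimal sketch of how such a gauge is typically driven (the metric name and value are illustrative):

```python
from tensorflow.python.eager import monitoring

# A gauge with no labels: get_cell() with no arguments returns its single cell.
gauge = monitoring.StringGauge('/example/build_info', 'Illustrative string gauge.')
gauge.get_cell().set('v1.0')
print(gauge.get_cell().value())  # 'v1.0'
```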

nwo: hanpfei/chromium-net | sha: 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
path: third_party/catapult/third_party/Paste/paste/util/multidict.py | language: python
identifier: UnicodeMultiDict._decode_value | parameters: (self, value)
return_statement: return value
docstring:
Decode the specified value to unicode. Assumes value is a ``str`` or
``FieldStorage`` object.
``FieldStorage`` objects are specially handled.
function:
def _decode_value(self, value):
    """
    Decode the specified value to unicode. Assumes value is a ``str`` or
    ``FieldStorage`` object.
    ``FieldStorage`` objects are specially handled.
    """
    if isinstance(value, cgi.FieldStorage):
        # decode FieldStorage's field name and filename
        value = copy.copy(value)
        if self.decode_keys and isinstance(value.name, six.binary_type):
            value.name = value.name.decode(self.encoding, self.errors)
        if six.PY2:
            value.filename = value.filename.decode(self.encoding, self.errors)
    else:
        try:
            value = value.decode(self.encoding, self.errors)
        except AttributeError:
            pass
    return value
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/Paste/paste/util/multidict.py#L259-L278

nwo: CaoWGG/TensorRT-YOLOv4 | sha: 4d7c2edce99e8794a4cb4ea3540d51ce91158a36
path: onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py | language: python
identifier: TokenKind.__init__ | parameters: (self, value, name)
docstring:
Create a new TokenKind instance from a numeric value and a name.
function:
def __init__(self, value, name):
    """Create a new TokenKind instance from a numeric value and a name."""
    self.value = value
    self.name = name
https://github.com/CaoWGG/TensorRT-YOLOv4/blob/4d7c2edce99e8794a4cb4ea3540d51ce91158a36/onnx-tensorrt/third_party/onnx/third_party/pybind11/tools/clang/cindex.py#L483-L486

nwo: hughperkins/tf-coriander | sha: 970d3df6c11400ad68405f22b0c42a52374e94ca
path: tensorflow/tensorboard/backend/server.py | language: python
identifier: BuildServer | parameters: (multiplexer, host, port, logdir)
return_statement: return ThreadedHTTPServer((host, port), factory)
docstring:
Sets up an HTTP server for running TensorBoard.
Args:
multiplexer: An `EventMultiplexer` that the server will query for
information about events.
host: The host name.
port: The port number to bind to, or 0 to pick one automatically.
logdir: The logdir argument string that tensorboard started up with.
Returns:
A `BaseHTTPServer.HTTPServer`.
function:
def BuildServer(multiplexer, host, port, logdir):
    """Sets up an HTTP server for running TensorBoard.
    Args:
      multiplexer: An `EventMultiplexer` that the server will query for
        information about events.
      host: The host name.
      port: The port number to bind to, or 0 to pick one automatically.
      logdir: The logdir argument string that tensorboard started up with.
    Returns:
      A `BaseHTTPServer.HTTPServer`.
    """
    factory = functools.partial(handler.TensorboardHandler, multiplexer, logdir)
    return ThreadedHTTPServer((host, port), factory)
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/tensorboard/backend/server.py#L148-L162
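A hedged sketch of starting the returned server; `multiplexer` and `logdir` come from this module's surrounding setup and are placeholders here:

```python
# Assumes `multiplexer` is an EventMultiplexer and `logdir` a log-directory string,
# both prepared elsewhere in this TensorBoard backend.
server = BuildServer(multiplexer, 'localhost', 0, logdir)  # port 0 picks a free port
print('Serving on port', server.server_address[1])
server.serve_forever()  # ThreadedHTTPServer inherits serve_forever from HTTPServer
```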

nwo: catboost/catboost | sha: 167f64f237114a4d10b2b4ee42adb4569137debe
path: contrib/python/scipy/scipy/ndimage/filters.py | language: python
identifier: maximum_filter | parameters: (input, size=None, footprint=None, output=None, mode="reflect", cval=0.0, origin=0)
return_statement: return _min_or_max_filter(input, size, footprint, None, output, mode, cval, origin, 0)
docstring:
Calculates a multi-dimensional maximum filter.
Parameters
----------
%(input)s
%(size_foot)s
%(output)s
%(mode)s
%(cval)s
%(origin)s
function:
def maximum_filter(input, size=None, footprint=None, output=None,
                   mode="reflect", cval=0.0, origin=0):
    """Calculates a multi-dimensional maximum filter.
    Parameters
    ----------
    %(input)s
    %(size_foot)s
    %(output)s
    %(mode)s
    %(cval)s
    %(origin)s
    """
    return _min_or_max_filter(input, size, footprint, None, output, mode,
                              cval, origin, 0)
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/ndimage/filters.py#L1014-L1028
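Standard SciPy usage for reference (array values are illustrative):

```python
import numpy as np
from scipy import ndimage

a = np.array([[1, 2, 0],
              [5, 3, 1],
              [0, 1, 4]])
# Each output element is the maximum over a 3x3 neighborhood, reflected at the borders.
print(ndimage.maximum_filter(a, size=3))
```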

nwo: facebookarchive/LogDevice | sha: ce7726050edc49a1e15d9160e81c890736b779e2
path: build/fbcode_builder/getdeps/dyndeps.py | language: python
identifier: WinDeps.emit_dev_run_script | parameters: (self, script_path, dep_dirs)
docstring:
Emit a script that can be used to run build artifacts directly from the
build directory, without installing them.
The dep_dirs parameter should be a list of paths that need to be added to $PATH.
This can be computed by calling compute_dependency_paths() or
compute_dependency_paths_fast().
This is only necessary on Windows, which does not have RPATH, and instead
requires the $PATH environment variable be updated in order to find the proper
library dependencies.
function:
def emit_dev_run_script(self, script_path, dep_dirs):
    """Emit a script that can be used to run build artifacts directly from the
    build directory, without installing them.
    The dep_dirs parameter should be a list of paths that need to be added to $PATH.
    This can be computed by calling compute_dependency_paths() or
    compute_dependency_paths_fast().
    This is only necessary on Windows, which does not have RPATH, and instead
    requires the $PATH environment variable be updated in order to find the proper
    library dependencies.
    """
    contents = self._get_dev_run_script_contents(dep_dirs)
    with open(script_path, "w") as f:
        f.write(contents)
https://github.com/facebookarchive/LogDevice/blob/ce7726050edc49a1e15d9160e81c890736b779e2/build/fbcode_builder/getdeps/dyndeps.py#L235-L249

nwo: facebook/redex | sha: fac189a289bca2647061f9e364016afc1096500d
path: tools/python/dex.py | language: python
identifier: DexInstruction.get_AA | parameters: (self)
return_statement: return self.get_uint8_hi(0)
docstring:
Get the 8 bit value of AA from the byte next to the Op
function:
def get_AA(self):
    """Get the 8 bit value of AA from the byte next to the Op"""
    return self.get_uint8_hi(0)
https://github.com/facebook/redex/blob/fac189a289bca2647061f9e364016afc1096500d/tools/python/dex.py#L4349-L4351
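In Dalvik's `AA|op` instruction formats the first 16-bit code unit packs the opcode into its low byte and AA into its high byte; `get_uint8_hi(0)` reads that high byte. A standalone sketch of the same bit manipulation (the code-unit value is illustrative):

```python
code_unit = 0x1312             # illustrative first code unit: op = 0x12, AA = 0x13
op = code_unit & 0xFF          # low byte: the opcode
aa = (code_unit >> 8) & 0xFF   # high byte: AA, what get_uint8_hi(0) returns
print(hex(op), hex(aa))        # 0x12 0x13
```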

nwo: aws/lumberyard | sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py | language: python
identifier: resolve_egg_link | parameters: (path)
return_statement: return next(dist_groups, ())
docstring:
Given a path to an .egg-link, resolve distributions
present in the referenced path.
function:
def resolve_egg_link(path):
    """
    Given a path to an .egg-link, resolve distributions
    present in the referenced path.
    """
    referenced_paths = non_empty_lines(path)
    resolved_paths = (
        os.path.join(os.path.dirname(path), ref)
        for ref in referenced_paths
    )
    dist_groups = map(find_distributions, resolved_paths)
    return next(dist_groups, ())
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py#L2164-L2175

nwo: natanielruiz/android-yolo | sha: 1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f
path: jni-build/jni/include/tensorflow/contrib/learn/python/learn/learn_io/data_feeder.py | language: python
identifier: DataFeeder.input_builder | parameters: (self)
return_statement: return self._input_placeholder, self._output_placeholder
docstring:
Builds inputs in the graph.
Returns:
Two placeholders for inputs and outputs.
function:
def input_builder(self):
    """Builds inputs in the graph.
    Returns:
      Two placeholders for inputs and outputs.
    """
    input_shape = [None] + self.input_shape[1:]
    self._input_placeholder = array_ops.placeholder(
        dtypes.as_dtype(self._input_dtype),
        input_shape,
        name='input')
    if self.output_shape is None:
        self._output_placeholder = None
    else:
        output_shape = [None] + self.output_shape[1:]
        self._output_placeholder = array_ops.placeholder(
            dtypes.as_dtype(self._output_dtype),
            output_shape,
            name='output')
    return self._input_placeholder, self._output_placeholder
https://github.com/natanielruiz/android-yolo/blob/1ebb54f96a67a20ff83ddfc823ed83a13dc3a47f/jni-build/jni/include/tensorflow/contrib/learn/python/learn/learn_io/data_feeder.py#L316-L335

nwo: microsoft/onnxruntime | sha: f92e47e95b13a240e37caf7b36577983544f98fc
path: orttraining/orttraining/python/training/ortmodule/_torch_module_ort.py | language: python
identifier: TorchModuleORT.get_buffer | parameters: (self, target: str)
return_statement: return self._original_module.get_buffer(target)
docstring:
Override original method to delegate execution to the original PyTorch user module
function:
def get_buffer(self, target: str) -> torch.Tensor:
    """Override original method to delegate execution to the original PyTorch user module"""
    return self._original_module.get_buffer(target)
https://github.com/microsoft/onnxruntime/blob/f92e47e95b13a240e37caf7b36577983544f98fc/orttraining/orttraining/python/training/ortmodule/_torch_module_ort.py#L86-L89

nwo: tfwu/FaceDetection-ConvNet-3D | sha: f9251c48eb40c5aec8fba7455115c355466555be
path: python/build/lib.linux-x86_64-2.7/mxnet/model.py | language: python
identifier: FeedForward._init_eval_iter | parameters: (self, eval_data)
return_statement: return eval_data
docstring:
Initialize the iterator given eval_data.
function:
def _init_eval_iter(self, eval_data):
    """Initialize the iterator given eval_data."""
    if eval_data is None:
        return eval_data
    if isinstance(eval_data, (tuple, list)) and len(eval_data) == 2:
        if eval_data[0] is not None:
            if eval_data[1] is None and isinstance(eval_data[0], io.DataIter):
                return eval_data[0]
            input_data = (np.array(eval_data[0]) if isinstance(eval_data[0], list)
                          else eval_data[0])
            input_label = (np.array(eval_data[1]) if isinstance(eval_data[1], list)
                           else eval_data[1])
            return self._init_iter(input_data, input_label, is_train=True)
        else:
            raise ValueError("Eval data is NONE")
    if not isinstance(eval_data, io.DataIter):
        raise TypeError('Eval data must be DataIter, or ' \
                        'NDArray/numpy.ndarray/list pair (i.e. tuple/list of length 2)')
    return eval_data
https://github.com/tfwu/FaceDetection-ConvNet-3D/blob/f9251c48eb40c5aec8fba7455115c355466555be/python/build/lib.linux-x86_64-2.7/mxnet/model.py#L560-L578

nwo: aws/lumberyard | sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py | language: python
identifier: Environment.obtain | parameters: (self, requirement, installer=None)
docstring:
Obtain a distribution matching `requirement` (e.g. via download)
Obtain a distro that matches requirement (e.g. via download). In the
base ``Environment`` class, this routine just returns
``installer(requirement)``, unless `installer` is None, in which case
None is returned instead. This method is a hook that allows subclasses
to attempt other ways of obtaining a distribution before falling back
to the `installer` argument.
function:
def obtain(self, requirement, installer=None):
    """Obtain a distribution matching `requirement` (e.g. via download)
    Obtain a distro that matches requirement (e.g. via download). In the
    base ``Environment`` class, this routine just returns
    ``installer(requirement)``, unless `installer` is None, in which case
    None is returned instead. This method is a hook that allows subclasses
    to attempt other ways of obtaining a distribution before falling back
    to the `installer` argument."""
    if installer is not None:
        return installer(requirement)
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pkg_resources/__init__.py#L1068-L1078

nwo: wxWidgets/wxPython-Classic | sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/gtk/aui.py | language: python
identifier: AuiManager.RestoreMaximizedPane | parameters: (*args, **kwargs)
return_statement: return _aui.AuiManager_RestoreMaximizedPane(*args, **kwargs)
docstring:
RestoreMaximizedPane(self)
function:
def RestoreMaximizedPane(*args, **kwargs):
    """RestoreMaximizedPane(self)"""
    return _aui.AuiManager_RestoreMaximizedPane(*args, **kwargs)
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/aui.py#L699-L701

nwo: PaddlePaddle/Paddle | sha: 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
path: python/paddle/fluid/dygraph/layers.py | language: python
identifier: Layer.named_parameters | parameters: (self, prefix='', include_sublayers=True)
docstring:
Returns an iterator over all parameters in the Layer, yielding tuple of name and parameter.
Parameters:
prefix(str, optional): Prefix to prepend to all parameter names. Default: ''.
include_sublayers(bool, optional): Whether include the parameters of sublayers.
If True, also include the named parameters from sublayers. Default: True.
Yields:
(string, Parameter): Tuple of name and Parameter
Examples:
.. code-block:: python
import paddle
fc1 = paddle.nn.Linear(10, 3)
fc2 = paddle.nn.Linear(3, 10, bias_attr=False)
model = paddle.nn.Sequential(fc1, fc2)
for name, param in model.named_parameters():
print(name, param)
function:
def named_parameters(self, prefix='', include_sublayers=True):
    """
    Returns an iterator over all parameters in the Layer, yielding tuple of name and parameter.
    Parameters:
        prefix(str, optional): Prefix to prepend to all parameter names. Default: ''.
        include_sublayers(bool, optional): Whether include the parameters of sublayers.
            If True, also include the named parameters from sublayers. Default: True.
    Yields:
        (string, Parameter): Tuple of name and Parameter
    Examples:
        .. code-block:: python

            import paddle

            fc1 = paddle.nn.Linear(10, 3)
            fc2 = paddle.nn.Linear(3, 10, bias_attr=False)
            model = paddle.nn.Sequential(fc1, fc2)
            for name, param in model.named_parameters():
                print(name, param)
    """
    params_set = set()
    named_sublayers = self.named_sublayers(
        prefix=prefix,
        include_self=True) if include_sublayers else zip([prefix], [self])
    for layer_prefix, sublayer in named_sublayers:
        params = sublayer._parameters.items()
        for key, param in params:
            if param is None or param in params_set:
                continue
            params_set.add(param)
            name = layer_prefix + ('.' if layer_prefix else '') + key
            yield name, param
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/fluid/dygraph/layers.py#L633-L668

nwo: aws/lumberyard | sha: f85344403c1c2e77ec8c75deb2c116e97b713217
path: dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/archive_util.py | language: python
identifier: unpack_archive | parameters: (filename, extract_dir, progress_filter=default_filter, drivers=None)
docstring:
Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
`progress_filter` is a function taking two arguments: a source path
internal to the archive ('/'-separated), and a filesystem path where it
will be extracted. The callback must return the desired extract path
(which may be the same as the one passed in), or else ``None`` to skip
that file or directory. The callback can thus be used to report on the
progress of the extraction, as well as to filter the items extracted or
alter their extraction paths.
`drivers`, if supplied, must be a non-empty sequence of functions with the
same signature as this function (minus the `drivers` argument), that raise
``UnrecognizedFormat`` if they do not support extracting the designated
archive type. The `drivers` are tried in sequence until one is found that
does not raise an error, or until all are exhausted (in which case
``UnrecognizedFormat`` is raised). If you do not supply a sequence of
drivers, the module's ``extraction_drivers`` constant will be used, which
means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
order.
function:
def unpack_archive(
        filename, extract_dir, progress_filter=default_filter,
        drivers=None):
    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
    `progress_filter` is a function taking two arguments: a source path
    internal to the archive ('/'-separated), and a filesystem path where it
    will be extracted. The callback must return the desired extract path
    (which may be the same as the one passed in), or else ``None`` to skip
    that file or directory. The callback can thus be used to report on the
    progress of the extraction, as well as to filter the items extracted or
    alter their extraction paths.
    `drivers`, if supplied, must be a non-empty sequence of functions with the
    same signature as this function (minus the `drivers` argument), that raise
    ``UnrecognizedFormat`` if they do not support extracting the designated
    archive type. The `drivers` are tried in sequence until one is found that
    does not raise an error, or until all are exhausted (in which case
    ``UnrecognizedFormat`` is raised). If you do not supply a sequence of
    drivers, the module's ``extraction_drivers`` constant will be used, which
    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
    order.
    """
    for driver in drivers or extraction_drivers:
        try:
            driver(filename, extract_dir, progress_filter)
        except UnrecognizedFormat:
            continue
        else:
            return
    else:
        raise UnrecognizedFormat(
            "Not a recognized archive type: %s" % filename
        )
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/archive_util.py#L28-L61
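A minimal usage sketch for this setuptools helper, with a progress filter that extracts everything while logging each entry (the paths are illustrative):

```python
from setuptools.archive_util import unpack_archive

def log_filter(src, dst):
    # Return the destination path to extract this entry, or None to skip it.
    print('extracting', src, '->', dst)
    return dst

# Tries unpack_zipfile then unpack_tarfile; raises UnrecognizedFormat if neither fits.
unpack_archive('dist/example-1.0.tar.gz', 'build/example', progress_filter=log_filter)
```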

nwo: apple/turicreate | sha: cce55aa5311300e3ce6af93cb45ba791fd1bdf49
path: src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/text_format.py | language: python
identifier: Tokenizer.ConsumeBool | parameters: (self)
return_statement: return result
docstring:
Consumes a boolean value.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
function:
def ConsumeBool(self):
    """Consumes a boolean value.
    Returns:
      The bool parsed.
    Raises:
      ParseError: If a boolean value couldn't be consumed.
    """
    try:
        result = ParseBool(self.token)
    except ValueError as e:
        raise self.ParseError(str(e))
    self.NextToken()
    return result
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/external/coremltools_wrap/coremltools/deps/protobuf/python/google/protobuf/text_format.py#L1144-L1158

nwo: hpi-xnor/BMXNet | sha: ed0b201da6667887222b8e4b5f997c4f6b61943d
path: example/image-classification/symbols/lenet.py | language: python
identifier: get_loc | parameters: (data, attr={'lr_mult':'0.01'})
return_statement: return loc
docstring:
the localisation network in lenet-stn; it improves accuracy by roughly 1% or more
when num-epoch >= 15
function:
def get_loc(data, attr={'lr_mult':'0.01'}):
    """
    the localisation network in lenet-stn; it improves accuracy by roughly 1% or more
    when num-epoch >= 15
    """
    loc = mx.symbol.Convolution(data=data, num_filter=30, kernel=(5, 5), stride=(2,2))
    loc = mx.symbol.Activation(data = loc, act_type='relu')
    loc = mx.symbol.Pooling(data=loc, kernel=(2, 2), stride=(2, 2), pool_type='max')
    loc = mx.symbol.Convolution(data=loc, num_filter=60, kernel=(3, 3), stride=(1,1), pad=(1, 1))
    loc = mx.symbol.Activation(data = loc, act_type='relu')
    loc = mx.symbol.Pooling(data=loc, global_pool=True, kernel=(2, 2), pool_type='avg')
    loc = mx.symbol.Flatten(data=loc)
    loc = mx.symbol.FullyConnected(data=loc, num_hidden=6, name="stn_loc", attr=attr)
    return loc
https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/example/image-classification/symbols/lenet.py#L25-L38

nwo: bristolcrypto/SPDZ-2 | sha: 721abfae849625a02ea49aabc534f9cf41ca643f
path: Compiler/types.py | language: python
identifier: sfloat.value | parameters: (self)
return_statement: return (1 - 2*self.s.value)*(1 - self.z.value)*self.v.value/float(2**self.p.value)
docstring:
Gets actual floating point value, if emulation is enabled.
function:
def value(self):
    """ Gets actual floating point value, if emulation is enabled. """
    return (1 - 2*self.s.value)*(1 - self.z.value)*self.v.value/float(2**self.p.value)
https://github.com/bristolcrypto/SPDZ-2/blob/721abfae849625a02ea49aabc534f9cf41ca643f/Compiler/types.py#L2232-L2234
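The emulated value decodes the (sign `s`, zero flag `z`, significand `v`, exponent `p`) representation as (1 - 2s)(1 - z) * v / 2^p. A plain-Python sketch with illustrative field values:

```python
s, z, v, p = 0, 0, 3, 1  # s=0: positive; z=0: nonzero; v=3: significand; p=1: exponent
value = (1 - 2 * s) * (1 - z) * v / float(2 ** p)
print(value)  # 1.5
```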

nwo: wxWidgets/wxPython-Classic | sha: 19571e1ae65f1ac445f5491474121998c97a1bf0
path: src/gtk/grid.py | language: python
identifier: GridCellTextEditor.__init__ | parameters: (self, *args, **kwargs)
docstring:
__init__(self) -> GridCellTextEditor
function:
def __init__(self, *args, **kwargs):
    """__init__(self) -> GridCellTextEditor"""
    _grid.GridCellTextEditor_swiginit(self,_grid.new_GridCellTextEditor(*args, **kwargs))
    self._setOORInfo(self)
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/grid.py#L417-L420

nwo: klzgrad/naiveproxy | sha: ed2c513637c77b18721fe428d7ed395b4d284c83
path: src/tools/grit/grit/node/base.py | language: python
identifier: Node.__str__ | parameters: (self)
return_statement: return header + self.FormatXml()
docstring:
Returns this node and all nodes below it as an XML document in a Unicode
string.
function:
def __str__(self):
    '''Returns this node and all nodes below it as an XML document in a Unicode
    string.'''
    header = u'<?xml version="1.0" encoding="UTF-8"?>\n'
    return header + self.FormatXml()
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/tools/grit/grit/node/base.py#L257-L261

nwo: LiquidPlayer/LiquidCore | sha: 9405979363f2353ac9a71ad8ab59685dd7f919c9
path: deps/node-10.15.3/tools/gyp/pylib/gyp/win_tool.py | language: python
identifier: WinTool.ExecClCompile | parameters: (self, project_dir, selected_files)
return_statement: return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
docstring:
Executed by msvs-ninja projects when the 'ClCompile' target is used to
build selected C/C++ files.
function:
def ExecClCompile(self, project_dir, selected_files):
    """Executed by msvs-ninja projects when the 'ClCompile' target is used to
    build selected C/C++ files."""
    project_dir = os.path.relpath(project_dir, BASE_DIR)
    selected_files = selected_files.split(';')
    ninja_targets = [os.path.join(project_dir, filename) + '^^'
                     for filename in selected_files]
    cmd = ['ninja.exe']
    cmd.extend(ninja_targets)
    return subprocess.call(cmd, shell=True, cwd=BASE_DIR)
https://github.com/LiquidPlayer/LiquidCore/blob/9405979363f2353ac9a71ad8ab59685dd7f919c9/deps/node-10.15.3/tools/gyp/pylib/gyp/win_tool.py#L310-L319

nwo: ceph/ceph | sha: 959663007321a369c83218414a29bd9dbc8bda3a
path: src/pybind/rbd/setup.py | language: python
identifier: check_sanity | parameters: ()
docstring:
Test if the development headers and library for rbd are available by compiling a dummy C program.
function:
def check_sanity():
    """
    Test if the development headers and library for rbd are available by compiling a dummy C program.
    """
    CEPH_SRC_DIR = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        '..',
        '..'
    )
    tmp_dir = tempfile.mkdtemp(dir=os.environ.get('TMPDIR', os.path.dirname(__file__)))
    tmp_file = os.path.join(tmp_dir, 'rbd_dummy.c')
    with open(tmp_file, 'w') as fp:
        dummy_prog = textwrap.dedent("""
        #include <stddef.h>
        #include <rbd/librbd.h>
        int main(void) {
            rados_t cluster;
            rados_create(&cluster, NULL);
            return 0;
        }
        """)
        fp.write(dummy_prog)
    compiler = new_compiler()
    distutils.sysconfig.customize_compiler(compiler)
    if 'CEPH_LIBDIR' in os.environ:
        # The setup.py has been invoked by a top-level Ceph make.
        # Set the appropriate CFLAGS and LDFLAGS
        compiler.set_include_dirs([os.path.join(CEPH_SRC_DIR, 'include')])
        compiler.set_library_dirs([os.environ.get('CEPH_LIBDIR')])
    try:
        compiler.define_macro('_FILE_OFFSET_BITS', '64')
        link_objects = compiler.compile(
            sources=[tmp_file],
            output_dir=tmp_dir
        )
        compiler.link_executable(
            objects=link_objects,
            output_progname=os.path.join(tmp_dir, 'rbd_dummy'),
            libraries=['rbd', 'rados'],
            output_dir=tmp_dir,
        )
    except CompileError:
        print('\nCompile Error: RBD development headers not found', file=sys.stderr)
        return False
    except LinkError:
        print('\nLink Error: RBD library not found', file=sys.stderr)
        return False
    else:
        return True
    finally:
        shutil.rmtree(tmp_dir)
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/rbd/setup.py#L77-L134
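A minimal sketch of how a build script might gate on this check; the guard condition and error message are assumptions, not taken from the Ceph setup.py:

import sys

if not check_sanity():
    sys.exit('error: rbd development headers/library not found; cannot build the binding')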
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/dashboard/dashboard/graph_revisions.py
|
python
|
_UpdateCache
|
(test_key)
|
return rows
|
Queries Rows for a test then updates the cache.
Args:
test_key: ndb.Key for a TestMetadata entity.
Returns:
The list of triplets that was just fetched and set in the cache.
|
Queries Rows for a test then updates the cache.
|
[
"Queries",
"Rows",
"for",
"a",
"test",
"then",
"updates",
"the",
"cache",
"."
] |
def _UpdateCache(test_key):
"""Queries Rows for a test then updates the cache.
Args:
test_key: ndb.Key for a TestMetadata entity.
Returns:
The list of triplets that was just fetched and set in the cache.
"""
test = test_key.get()
if not test:
return []
assert utils.IsInternalUser() or not test.internal_only
datastore_hooks.SetSinglePrivilegedRequest()
# A projection query queries just for the values of particular properties;
# this is faster than querying for whole entities.
query = graph_data.Row.query(projection=['revision', 'value', 'timestamp'])
query = query.filter(
graph_data.Row.parent_test == utils.OldStyleTestKey(test_key))
# Using a large batch_size speeds up queries with > 1000 Rows.
rows = map(_MakeTriplet, query.iter(batch_size=1000))
# Note: Unit tests do not call datastore_hooks with the above query, but
# it is called in production and with more recent SDK.
datastore_hooks.CancelSinglePrivilegedRequest()
SetCache(utils.TestPath(test_key), rows)
return rows
|
[
"def",
"_UpdateCache",
"(",
"test_key",
")",
":",
"test",
"=",
"test_key",
".",
"get",
"(",
")",
"if",
"not",
"test",
":",
"return",
"[",
"]",
"assert",
"utils",
".",
"IsInternalUser",
"(",
")",
"or",
"not",
"test",
".",
"internal_only",
"datastore_hooks",
".",
"SetSinglePrivilegedRequest",
"(",
")",
"# A projection query queries just for the values of particular properties;",
"# this is faster than querying for whole entities.",
"query",
"=",
"graph_data",
".",
"Row",
".",
"query",
"(",
"projection",
"=",
"[",
"'revision'",
",",
"'value'",
",",
"'timestamp'",
"]",
")",
"query",
"=",
"query",
".",
"filter",
"(",
"graph_data",
".",
"Row",
".",
"parent_test",
"==",
"utils",
".",
"OldStyleTestKey",
"(",
"test_key",
")",
")",
"# Using a large batch_size speeds up queries with > 1000 Rows.",
"rows",
"=",
"map",
"(",
"_MakeTriplet",
",",
"query",
".",
"iter",
"(",
"batch_size",
"=",
"1000",
")",
")",
"# Note: Unit tests do not call datastore_hooks with the above query, but",
"# it is called in production and with more recent SDK.",
"datastore_hooks",
".",
"CancelSinglePrivilegedRequest",
"(",
")",
"SetCache",
"(",
"utils",
".",
"TestPath",
"(",
"test_key",
")",
",",
"rows",
")",
"return",
"rows"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/dashboard/dashboard/graph_revisions.py#L71-L98
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/llvmlite/ir/types.py
|
python
|
Type.get_abi_alignment
|
(self, target_data, context=None)
|
return target_data.get_pointee_abi_alignment(llty)
|
Get the minimum ABI alignment of this type according to data layout
*target_data*.
|
Get the minimum ABI alignment of this type according to data layout
*target_data*.
|
[
"Get",
"the",
"minimum",
"ABI",
"alignment",
"of",
"this",
"type",
"according",
"to",
"data",
"layout",
"*",
"target_data",
"*",
"."
] |
def get_abi_alignment(self, target_data, context=None):
"""
Get the minimum ABI alignment of this type according to data layout
*target_data*.
"""
llty = self._get_ll_pointer_type(target_data, context)
return target_data.get_pointee_abi_alignment(llty)
|
[
"def",
"get_abi_alignment",
"(",
"self",
",",
"target_data",
",",
"context",
"=",
"None",
")",
":",
"llty",
"=",
"self",
".",
"_get_ll_pointer_type",
"(",
"target_data",
",",
"context",
")",
"return",
"target_data",
".",
"get_pointee_abi_alignment",
"(",
"llty",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/llvmlite/ir/types.py#L57-L63
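A small usage sketch with the public llvmlite API; the x86-64 data-layout string is an arbitrary example (real code would usually take it from a TargetMachine):

import llvmlite.ir as ir
import llvmlite.binding as llvm

llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()

# Example x86-64 data layout; i64 is 64-bit aligned here.
td = llvm.create_target_data("e-m:e-i64:64-f80:128-n8:16:32:64-S128")
print(ir.IntType(64).get_abi_alignment(td))  # 8 on this layout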
|
|
google/gemmlowp
|
e844ffd17118c1e17d94e1ba4354c075a4577b88
|
meta/generators/transform_kernels_common.py
|
python
|
MinMaxTransformation.Transform
|
(self, emitter, registers, input_address, elements,
output_address)
|
Generate the MinMax transform inner loop code.
|
Generate the MinMax transform inner loop code.
|
[
"Generate",
"the",
"MinMax",
"transform",
"inner",
"loop",
"code",
"."
] |
def Transform(self, emitter, registers, input_address, elements,
output_address):
"""Generate the MinMax transform inner loop code."""
emitter.EmitNewline()
emitter.EmitComment('MinMax::Transform')
register_count = (elements + 15) / 16
load = [registers.QuadRegister() for unused_i in range(register_count)]
emitter.EmitVLoadAE(8, elements, load, input_address, None)
emitter.EmitPldOffset(input_address, emitter.ImmediateConstant(16))
for register in load:
emitter.EmitVMax('u8', register, register, self.min)
for register in load:
emitter.EmitVMin('u8', register, register, self.max)
emitter.EmitNewline()
emitter.EmitVStoreAE(8, elements, load, output_address, None)
emitter.EmitPld(output_address)
registers.FreeRegisters(load)
|
[
"def",
"Transform",
"(",
"self",
",",
"emitter",
",",
"registers",
",",
"input_address",
",",
"elements",
",",
"output_address",
")",
":",
"emitter",
".",
"EmitNewline",
"(",
")",
"emitter",
".",
"EmitComment",
"(",
"'MinMax::Transform'",
")",
"register_count",
"=",
"(",
"elements",
"+",
"15",
")",
"/",
"16",
"load",
"=",
"[",
"registers",
".",
"QuadRegister",
"(",
")",
"for",
"unused_i",
"in",
"range",
"(",
"register_count",
")",
"]",
"emitter",
".",
"EmitVLoadAE",
"(",
"8",
",",
"elements",
",",
"load",
",",
"input_address",
",",
"None",
")",
"emitter",
".",
"EmitPldOffset",
"(",
"input_address",
",",
"emitter",
".",
"ImmediateConstant",
"(",
"16",
")",
")",
"for",
"register",
"in",
"load",
":",
"emitter",
".",
"EmitVMax",
"(",
"'u8'",
",",
"register",
",",
"register",
",",
"self",
".",
"min",
")",
"for",
"register",
"in",
"load",
":",
"emitter",
".",
"EmitVMin",
"(",
"'u8'",
",",
"register",
",",
"register",
",",
"self",
".",
"max",
")",
"emitter",
".",
"EmitNewline",
"(",
")",
"emitter",
".",
"EmitVStoreAE",
"(",
"8",
",",
"elements",
",",
"load",
",",
"output_address",
",",
"None",
")",
"emitter",
".",
"EmitPld",
"(",
"output_address",
")",
"registers",
".",
"FreeRegisters",
"(",
"load",
")"
] |
https://github.com/google/gemmlowp/blob/e844ffd17118c1e17d94e1ba4354c075a4577b88/meta/generators/transform_kernels_common.py#L57-L76
|
||
lammps/lammps
|
b75c3065430a75b1b5543a10e10f46d9b4c91913
|
python/lammps/pylammps.py
|
python
|
PyLammps.version
|
(self)
|
return self.lmp.version()
|
Return a numerical representation of the LAMMPS version in use.
This is a wrapper around the :py:meth:`lammps.version` function of the Python interface.
:return: version number
:rtype: int
|
Return a numerical representation of the LAMMPS version in use.
|
[
"Return",
"a",
"numerical",
"representation",
"of",
"the",
"LAMMPS",
"version",
"in",
"use",
"."
] |
def version(self):
"""Return a numerical representation of the LAMMPS version in use.
This is a wrapper around the :py:meth:`lammps.version` function of the Python interface.
:return: version number
:rtype: int
"""
return self.lmp.version()
|
[
"def",
"version",
"(",
"self",
")",
":",
"return",
"self",
".",
"lmp",
".",
"version",
"(",
")"
] |
https://github.com/lammps/lammps/blob/b75c3065430a75b1b5543a10e10f46d9b4c91913/python/lammps/pylammps.py#L501-L509
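A usage sketch, assuming a working LAMMPS installation with the Python module and shared library on the path:

from lammps import PyLammps

L = PyLammps()
print(L.version())  # an integer date code, e.g. 20210310 for the 10 Mar 2021 release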
|
|
llvm/llvm-project
|
ffa6262cb4e2a335d26416fad39a581b4f98c5f4
|
lldb/scripts/verify_api.py
|
python
|
verify_api
|
(all_args)
|
Verify the API in the specified library is valid given one or more binaries.
|
Verify the API in the specified library is valid given one or more binaries.
|
[
"Verify",
"the",
"API",
"in",
"the",
"specified",
"library",
"is",
"valid",
"given",
"one",
"or",
"more",
"binaries",
"."
] |
def verify_api(all_args):
'''Verify the API in the specified library is valid given one or more binaries.'''
usage = "usage: verify_api --library <path> [ --library <path> ...] executable1 [executable2 ...]"
description = '''Verify the API in the specified library is valid given one or more binaries.
Example:
verify_api.py --library ~/Documents/src/lldb/build/Debug/LLDB.framework/LLDB --arch x86_64 /Applications/Xcode.app/Contents/PlugIns/DebuggerLLDB.ideplugin/Contents/MacOS/DebuggerLLDB --api-regex lldb
'''
parser = optparse.OptionParser(
description=description,
prog='verify_api',
usage=usage)
parser.add_option(
'-v',
'--verbose',
action='store_true',
dest='verbose',
help='display verbose debug info',
default=False)
parser.add_option(
'-a',
'--arch',
type='string',
action='append',
dest='archs',
help='architecture to use when checking the api')
parser.add_option(
'-r',
'--api-regex',
type='string',
dest='api_regex_str',
help='Exclude any undefined symbols that do not match this regular expression when searching for missing APIs.')
parser.add_option(
'-l',
'--library',
type='string',
action='append',
dest='libraries',
help='Specify one or more libraries that will contain all needed APIs for the executables.')
(options, args) = parser.parse_args(all_args)
api_external_symbols = list()
if options.archs:
for arch in options.archs:
for library in options.libraries:
external_symbols = extract_exe_symbol_names(
arch, library, "( SECT EXT)")
if external_symbols:
for external_symbol in external_symbols:
api_external_symbols.append(external_symbol)
else:
sys.exit(1)
else:
print('error: must specify one or more architectures with the --arch option')
sys.exit(4)
if options.verbose:
print("API symbols:")
for (i, external_symbol) in enumerate(api_external_symbols):
print("[%u] %s" % (i, external_symbol))
api_regex = None
if options.api_regex_str:
api_regex = re.compile(options.api_regex_str)
for arch in options.archs:
for exe_path in args:
print('Verifying (%s) "%s"...' % (arch, exe_path))
exe_errors = 0
undefined_symbols = extract_exe_symbol_names(
arch, exe_path, "( UNDF EXT)")
for undefined_symbol in undefined_symbols:
if api_regex:
match = api_regex.search(undefined_symbol)
if not match:
if options.verbose:
print('ignoring symbol: %s' % (undefined_symbol))
continue
if undefined_symbol in api_external_symbols:
if options.verbose:
print('verified symbol: %s' % (undefined_symbol))
else:
print('missing symbol: %s' % (undefined_symbol))
exe_errors += 1
if exe_errors:
print('error: missing %u API symbols from %s' % (exe_errors, options.libraries))
else:
print('success')
|
[
"def",
"verify_api",
"(",
"all_args",
")",
":",
"usage",
"=",
"\"usage: verify_api --library <path> [ --library <path> ...] executable1 [executable2 ...]\"",
"description",
"=",
"'''Verify the API in the specified library is valid given one or more binaries.\n\n Example:\n\n verify_api.py --library ~/Documents/src/lldb/build/Debug/LLDB.framework/LLDB --arch x86_64 /Applications/Xcode.app/Contents/PlugIns/DebuggerLLDB.ideplugin/Contents/MacOS/DebuggerLLDB --api-regex lldb\n '''",
"parser",
"=",
"optparse",
".",
"OptionParser",
"(",
"description",
"=",
"description",
",",
"prog",
"=",
"'verify_api'",
",",
"usage",
"=",
"usage",
")",
"parser",
".",
"add_option",
"(",
"'-v'",
",",
"'--verbose'",
",",
"action",
"=",
"'store_true'",
",",
"dest",
"=",
"'verbose'",
",",
"help",
"=",
"'display verbose debug info'",
",",
"default",
"=",
"False",
")",
"parser",
".",
"add_option",
"(",
"'-a'",
",",
"'--arch'",
",",
"type",
"=",
"'string'",
",",
"action",
"=",
"'append'",
",",
"dest",
"=",
"'archs'",
",",
"help",
"=",
"'architecture to use when checking the api'",
")",
"parser",
".",
"add_option",
"(",
"'-r'",
",",
"'--api-regex'",
",",
"type",
"=",
"'string'",
",",
"dest",
"=",
"'api_regex_str'",
",",
"help",
"=",
"'Exclude any undefined symbols that do not match this regular expression when searching for missing APIs.'",
")",
"parser",
".",
"add_option",
"(",
"'-l'",
",",
"'--library'",
",",
"type",
"=",
"'string'",
",",
"action",
"=",
"'append'",
",",
"dest",
"=",
"'libraries'",
",",
"help",
"=",
"'Specify one or more libraries that will contain all needed APIs for the executables.'",
")",
"(",
"options",
",",
"args",
")",
"=",
"parser",
".",
"parse_args",
"(",
"all_args",
")",
"api_external_symbols",
"=",
"list",
"(",
")",
"if",
"options",
".",
"archs",
":",
"for",
"arch",
"in",
"options",
".",
"archs",
":",
"for",
"library",
"in",
"options",
".",
"libraries",
":",
"external_symbols",
"=",
"extract_exe_symbol_names",
"(",
"arch",
",",
"library",
",",
"\"( SECT EXT)\"",
")",
"if",
"external_symbols",
":",
"for",
"external_symbol",
"in",
"external_symbols",
":",
"api_external_symbols",
".",
"append",
"(",
"external_symbol",
")",
"else",
":",
"sys",
".",
"exit",
"(",
"1",
")",
"else",
":",
"print",
"(",
"'error: must specify one or more architectures with the --arch option'",
")",
"sys",
".",
"exit",
"(",
"4",
")",
"if",
"options",
".",
"verbose",
":",
"print",
"(",
"\"API symbols:\"",
")",
"for",
"(",
"i",
",",
"external_symbol",
")",
"in",
"enumerate",
"(",
"api_external_symbols",
")",
":",
"print",
"(",
"\"[%u] %s\"",
"%",
"(",
"i",
",",
"external_symbol",
")",
")",
"api_regex",
"=",
"None",
"if",
"options",
".",
"api_regex_str",
":",
"api_regex",
"=",
"re",
".",
"compile",
"(",
"options",
".",
"api_regex_str",
")",
"for",
"arch",
"in",
"options",
".",
"archs",
":",
"for",
"exe_path",
"in",
"args",
":",
"print",
"(",
"'Verifying (%s) \"%s\"...'",
"%",
"(",
"arch",
",",
"exe_path",
")",
")",
"exe_errors",
"=",
"0",
"undefined_symbols",
"=",
"extract_exe_symbol_names",
"(",
"arch",
",",
"exe_path",
",",
"\"( UNDF EXT)\"",
")",
"for",
"undefined_symbol",
"in",
"undefined_symbols",
":",
"if",
"api_regex",
":",
"match",
"=",
"api_regex",
".",
"search",
"(",
"undefined_symbol",
")",
"if",
"not",
"match",
":",
"if",
"options",
".",
"verbose",
":",
"print",
"(",
"'ignoring symbol: %s'",
"%",
"(",
"undefined_symbol",
")",
")",
"continue",
"if",
"undefined_symbol",
"in",
"api_external_symbols",
":",
"if",
"options",
".",
"verbose",
":",
"print",
"(",
"'verified symbol: %s'",
"%",
"(",
"undefined_symbol",
")",
")",
"else",
":",
"print",
"(",
"'missing symbol: %s'",
"%",
"(",
"undefined_symbol",
")",
")",
"exe_errors",
"+=",
"1",
"if",
"exe_errors",
":",
"print",
"(",
"'error: missing %u API symbols from %s'",
"%",
"(",
"exe_errors",
",",
"options",
".",
"libraries",
")",
")",
"else",
":",
"print",
"(",
"'success'",
")"
] |
https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/lldb/scripts/verify_api.py#L25-L112
|
||
DGA-MI-SSI/YaCo
|
9b85e6ca1809114c4df1382c11255f7e38408912
|
deps/libxml2-2.7.8/python/libxml.py
|
python
|
SAXCallback.notationDecl
|
(self, name, externalID, systemID)
|
called when a NOTATION declaration has been found, name is the
notation name and externalID, systemID are the notation public and
system identifiers for that notation, if available
|
called when a NOTATION declaration has been found, name is the
notation name and externalID, systemID are the notation public and
system identifiers for that notation, if available
|
[
"called",
"when",
"an",
"NOTATION",
"declaration",
"has",
"been",
"found",
"name",
"is",
"the",
"notation",
"name",
"and",
"externalID",
"systemID",
"are",
"the",
"notation",
"public",
"and",
"system",
"identifier",
"for",
"that",
"notation",
"if",
"available"
] |
def notationDecl(self, name, externalID, systemID):
"""called when an NOTATION declaration has been found, name is the
notation name and externalID, systemID are the notation public and
system identifier for that notation if available"""
pass
|
[
"def",
"notationDecl",
"(",
"self",
",",
"name",
",",
"externalID",
",",
"systemID",
")",
":",
"pass"
] |
https://github.com/DGA-MI-SSI/YaCo/blob/9b85e6ca1809114c4df1382c11255f7e38408912/deps/libxml2-2.7.8/python/libxml.py#L202-L206
|
||
wlanjie/AndroidFFmpeg
|
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
|
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/pickletools.py
|
python
|
read_stringnl_noescape_pair
|
(f)
|
return "%s %s" % (read_stringnl_noescape(f), read_stringnl_noescape(f))
|
r"""
>>> import StringIO
>>> read_stringnl_noescape_pair(StringIO.StringIO("Queue\nEmpty\njunk"))
'Queue Empty'
|
r"""
>>> import StringIO
>>> read_stringnl_noescape_pair(StringIO.StringIO("Queue\nEmpty\njunk"))
'Queue Empty'
|
[
"r",
">>>",
"import",
"StringIO",
">>>",
"read_stringnl_noescape_pair",
"(",
"StringIO",
".",
"StringIO",
"(",
"Queue",
"\\",
"nEmpty",
"\\",
"njunk",
"))",
"Queue",
"Empty"
] |
def read_stringnl_noescape_pair(f):
r"""
>>> import StringIO
>>> read_stringnl_noescape_pair(StringIO.StringIO("Queue\nEmpty\njunk"))
'Queue Empty'
"""
return "%s %s" % (read_stringnl_noescape(f), read_stringnl_noescape(f))
|
[
"def",
"read_stringnl_noescape_pair",
"(",
"f",
")",
":",
"return",
"\"%s %s\"",
"%",
"(",
"read_stringnl_noescape",
"(",
"f",
")",
",",
"read_stringnl_noescape",
"(",
"f",
")",
")"
] |
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/pickletools.py#L335-L342
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/msw/grid.py
|
python
|
Grid.SetRowAttr
|
(*args, **kwargs)
|
return _grid.Grid_SetRowAttr(*args, **kwargs)
|
SetRowAttr(self, int row, GridCellAttr attr)
|
SetRowAttr(self, int row, GridCellAttr attr)
|
[
"SetRowAttr",
"(",
"self",
"int",
"row",
"GridCellAttr",
"attr",
")"
] |
def SetRowAttr(*args, **kwargs):
"""SetRowAttr(self, int row, GridCellAttr attr)"""
return _grid.Grid_SetRowAttr(*args, **kwargs)
|
[
"def",
"SetRowAttr",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_grid",
".",
"Grid_SetRowAttr",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/grid.py#L1714-L1716
|
|
epam/Indigo
|
30e40b4b1eb9bae0207435a26cfcb81ddcc42be1
|
api/python/indigo/__init__.py
|
python
|
IndigoObject.radical
|
(self)
|
return value.value
|
Atom method returns the radical value
Returns:
int: radical value
|
Atom method returns the radical value
|
[
"Atom",
"method",
"returns",
"the",
"radical",
"value"
] |
def radical(self):
"""Atom method returns the radical value
Returns:
int: radical value
"""
value = c_int()
self.dispatcher._setSessionId()
res = self.dispatcher._checkResult(
Indigo._lib.indigoGetRadical(self.id, pointer(value))
)
if res == 0:
return None
return value.value
|
[
"def",
"radical",
"(",
"self",
")",
":",
"value",
"=",
"c_int",
"(",
")",
"self",
".",
"dispatcher",
".",
"_setSessionId",
"(",
")",
"res",
"=",
"self",
".",
"dispatcher",
".",
"_checkResult",
"(",
"Indigo",
".",
"_lib",
".",
"indigoGetRadical",
"(",
"self",
".",
"id",
",",
"pointer",
"(",
"value",
")",
")",
")",
"if",
"res",
"==",
"0",
":",
"return",
"None",
"return",
"value",
".",
"value"
] |
https://github.com/epam/Indigo/blob/30e40b4b1eb9bae0207435a26cfcb81ddcc42be1/api/python/indigo/__init__.py#L1123-L1136
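A usage sketch assuming the Indigo Python package is installed; the SMILES string is just an example of a molecule with a radical atom:

from indigo import Indigo

indigo = Indigo()
mol = indigo.loadMolecule("[CH3]")  # methyl radical
atom = mol.getAtom(0)
print(atom.radical())  # a radical code such as Indigo.DOUBLET, or None if unset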
|
|
perilouswithadollarsign/cstrike15_src
|
f82112a2388b841d72cb62ca48ab1846dfcc11c8
|
thirdparty/protobuf-2.5.0/python/google/protobuf/internal/python_message.py
|
python
|
_IsPresent
|
(item)
|
Given a (FieldDescriptor, value) tuple from _fields, return true if the
value should be included in the list returned by ListFields().
|
Given a (FieldDescriptor, value) tuple from _fields, return true if the
value should be included in the list returned by ListFields().
|
[
"Given",
"a",
"(",
"FieldDescriptor",
"value",
")",
"tuple",
"from",
"_fields",
"return",
"true",
"if",
"the",
"value",
"should",
"be",
"included",
"in",
"the",
"list",
"returned",
"by",
"ListFields",
"()",
"."
] |
def _IsPresent(item):
"""Given a (FieldDescriptor, value) tuple from _fields, return true if the
value should be included in the list returned by ListFields()."""
if item[0].label == _FieldDescriptor.LABEL_REPEATED:
return bool(item[1])
elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE:
return item[1]._is_present_in_parent
else:
return True
|
[
"def",
"_IsPresent",
"(",
"item",
")",
":",
"if",
"item",
"[",
"0",
"]",
".",
"label",
"==",
"_FieldDescriptor",
".",
"LABEL_REPEATED",
":",
"return",
"bool",
"(",
"item",
"[",
"1",
"]",
")",
"elif",
"item",
"[",
"0",
"]",
".",
"cpp_type",
"==",
"_FieldDescriptor",
".",
"CPPTYPE_MESSAGE",
":",
"return",
"item",
"[",
"1",
"]",
".",
"_is_present_in_parent",
"else",
":",
"return",
"True"
] |
https://github.com/perilouswithadollarsign/cstrike15_src/blob/f82112a2388b841d72cb62ca48ab1846dfcc11c8/thirdparty/protobuf-2.5.0/python/google/protobuf/internal/python_message.py#L562-L571
|
||
yun-liu/RCF
|
91bfb054ad04187dbbe21e539e165ad9bd3ff00b
|
scripts/cpp_lint.py
|
python
|
_CppLintState.PrintErrorCounts
|
(self)
|
Print a summary of errors by category, and the total.
|
Print a summary of errors by category, and the total.
|
[
"Print",
"a",
"summary",
"of",
"errors",
"by",
"category",
"and",
"the",
"total",
"."
] |
def PrintErrorCounts(self):
"""Print a summary of errors by category, and the total."""
for category, count in self.errors_by_category.iteritems():
sys.stderr.write('Category \'%s\' errors found: %d\n' %
(category, count))
sys.stderr.write('Total errors found: %d\n' % self.error_count)
|
[
"def",
"PrintErrorCounts",
"(",
"self",
")",
":",
"for",
"category",
",",
"count",
"in",
"self",
".",
"errors_by_category",
".",
"iteritems",
"(",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Category \\'%s\\' errors found: %d\\n'",
"%",
"(",
"category",
",",
"count",
")",
")",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Total errors found: %d\\n'",
"%",
"self",
".",
"error_count",
")"
] |
https://github.com/yun-liu/RCF/blob/91bfb054ad04187dbbe21e539e165ad9bd3ff00b/scripts/cpp_lint.py#L757-L762
|
||
wujixiu/helmet-detection
|
8eff5c59ddfba5a29e0b76aeb48babcb49246178
|
hardhat-wearing-detection/SSD-RPA/python/caffe/pycaffe.py
|
python
|
_Net_backward
|
(self, diffs=None, start=None, end=None, **kwargs)
|
return {out: self.blobs[out].diff for out in outputs}
|
Backward pass: prepare diffs and run the net backward.
Parameters
----------
diffs : list of diffs to return in addition to bottom diffs.
kwargs : Keys are output blob names and values are diff ndarrays.
If None, top diffs are taken from forward loss.
start : optional name of layer at which to begin the backward pass
end : optional name of layer at which to finish the backward pass
(inclusive)
Returns
-------
outs: {blob name: diff ndarray} dict.
|
Backward pass: prepare diffs and run the net backward.
|
[
"Backward",
"pass",
":",
"prepare",
"diffs",
"and",
"run",
"the",
"net",
"backward",
"."
] |
def _Net_backward(self, diffs=None, start=None, end=None, **kwargs):
"""
Backward pass: prepare diffs and run the net backward.
Parameters
----------
diffs : list of diffs to return in addition to bottom diffs.
kwargs : Keys are output blob names and values are diff ndarrays.
If None, top diffs are taken from forward loss.
start : optional name of layer at which to begin the backward pass
end : optional name of layer at which to finish the backward pass
(inclusive)
Returns
-------
outs: {blob name: diff ndarray} dict.
"""
if diffs is None:
diffs = []
if start is not None:
start_ind = list(self._layer_names).index(start)
else:
start_ind = len(self.layers) - 1
if end is not None:
end_ind = list(self._layer_names).index(end)
outputs = set([end] + diffs)
else:
end_ind = 0
outputs = set(self.inputs + diffs)
if kwargs:
if set(kwargs.keys()) != set(self.outputs):
raise Exception('Top diff arguments do not match net outputs.')
# Set top diffs according to defined shapes and make arrays single and
# C-contiguous as Caffe expects.
for top, diff in six.iteritems(kwargs):
if diff.shape[0] != self.blobs[top].shape[0]:
raise Exception('Diff is not batch sized')
self.blobs[top].diff[...] = diff
self._backward(start_ind, end_ind)
# Unpack diffs to extract
return {out: self.blobs[out].diff for out in outputs}
|
[
"def",
"_Net_backward",
"(",
"self",
",",
"diffs",
"=",
"None",
",",
"start",
"=",
"None",
",",
"end",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"diffs",
"is",
"None",
":",
"diffs",
"=",
"[",
"]",
"if",
"start",
"is",
"not",
"None",
":",
"start_ind",
"=",
"list",
"(",
"self",
".",
"_layer_names",
")",
".",
"index",
"(",
"start",
")",
"else",
":",
"start_ind",
"=",
"len",
"(",
"self",
".",
"layers",
")",
"-",
"1",
"if",
"end",
"is",
"not",
"None",
":",
"end_ind",
"=",
"list",
"(",
"self",
".",
"_layer_names",
")",
".",
"index",
"(",
"end",
")",
"outputs",
"=",
"set",
"(",
"[",
"end",
"]",
"+",
"diffs",
")",
"else",
":",
"end_ind",
"=",
"0",
"outputs",
"=",
"set",
"(",
"self",
".",
"inputs",
"+",
"diffs",
")",
"if",
"kwargs",
":",
"if",
"set",
"(",
"kwargs",
".",
"keys",
"(",
")",
")",
"!=",
"set",
"(",
"self",
".",
"outputs",
")",
":",
"raise",
"Exception",
"(",
"'Top diff arguments do not match net outputs.'",
")",
"# Set top diffs according to defined shapes and make arrays single and",
"# C-contiguous as Caffe expects.",
"for",
"top",
",",
"diff",
"in",
"six",
".",
"iteritems",
"(",
"kwargs",
")",
":",
"if",
"diff",
".",
"shape",
"[",
"0",
"]",
"!=",
"self",
".",
"blobs",
"[",
"top",
"]",
".",
"shape",
"[",
"0",
"]",
":",
"raise",
"Exception",
"(",
"'Diff is not batch sized'",
")",
"self",
".",
"blobs",
"[",
"top",
"]",
".",
"diff",
"[",
"...",
"]",
"=",
"diff",
"self",
".",
"_backward",
"(",
"start_ind",
",",
"end_ind",
")",
"# Unpack diffs to extract",
"return",
"{",
"out",
":",
"self",
".",
"blobs",
"[",
"out",
"]",
".",
"diff",
"for",
"out",
"in",
"outputs",
"}"
] |
https://github.com/wujixiu/helmet-detection/blob/8eff5c59ddfba5a29e0b76aeb48babcb49246178/hardhat-wearing-detection/SSD-RPA/python/caffe/pycaffe.py#L127-L172
|
|
zerollzeng/tiny-tensorrt
|
e7bdb8f82934342a0f22ce68dfefdb8e15eb72b2
|
third_party/pybind11/tools/clang/cindex.py
|
python
|
Type.get_pointee
|
(self)
|
return conf.lib.clang_getPointeeType(self)
|
For pointer types, returns the type of the pointee.
|
For pointer types, returns the type of the pointee.
|
[
"For",
"pointer",
"types",
"returns",
"the",
"type",
"of",
"the",
"pointee",
"."
] |
def get_pointee(self):
"""
For pointer types, returns the type of the pointee.
"""
return conf.lib.clang_getPointeeType(self)
|
[
"def",
"get_pointee",
"(",
"self",
")",
":",
"return",
"conf",
".",
"lib",
".",
"clang_getPointeeType",
"(",
"self",
")"
] |
https://github.com/zerollzeng/tiny-tensorrt/blob/e7bdb8f82934342a0f22ce68dfefdb8e15eb72b2/third_party/pybind11/tools/clang/cindex.py#L2042-L2046
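A usage sketch; it assumes libclang is installed and discoverable by the bindings:

import clang.cindex

index = clang.cindex.Index.create()
tu = index.parse('t.c', unsaved_files=[('t.c', 'int *p;')])
cursor = next(c for c in tu.cursor.get_children() if c.spelling == 'p')
print(cursor.type.get_pointee().spelling)  # 'int'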
|
|
idaholab/moose
|
9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
|
python/mooseutils/gitutils.py
|
python
|
git_fetch_remote
|
(name, branch, working_dir=None)
|
Fetch *branch* from the remote *name*, optionally in *working_dir*.
|
Fetch *branch* from the remote *name*, optionally in *working_dir*.
|
[
"Add",
"then",
"fetch",
"a",
"remote",
"with",
"*",
"name",
"*",
"and",
"remote",
"location",
"*",
"url",
"*",
"."
] |
def git_fetch_remote(name, branch, working_dir=None):
"""
Fetch *branch* from the remote *name*, optionally in *working_dir*.
"""
mooseutils.check_output(['git', 'fetch', name, branch], cwd=working_dir, check=True)
|
[
"def",
"git_fetch_remote",
"(",
"name",
",",
"branch",
",",
"working_dir",
"=",
"None",
")",
":",
"mooseutils",
".",
"check_output",
"(",
"[",
"'git'",
",",
"'fetch'",
",",
"name",
",",
"branch",
"]",
",",
"cwd",
"=",
"working_dir",
",",
"check",
"=",
"True",
")"
] |
https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/mooseutils/gitutils.py#L229-L233
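A hypothetical call; 'origin', 'main', and the path are placeholders, and the remote is assumed to be already configured in the repository:

git_fetch_remote('origin', 'main', working_dir='/path/to/repo')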
|
||
bundy-dns/bundy
|
3d41934996b82b0cd2fe22dd74d2abc1daba835d
|
src/lib/python/bundy/config/module_spec.py
|
python
|
ModuleSpec.get_full_spec
|
(self)
|
return self._module_spec
|
Returns a dict representation of the full module specification
|
Returns a dict representation of the full module specification
|
[
"Returns",
"a",
"dict",
"representation",
"of",
"the",
"full",
"module",
"specification"
] |
def get_full_spec(self):
"""Returns a dict representation of the full module specification"""
return self._module_spec
|
[
"def",
"get_full_spec",
"(",
"self",
")",
":",
"return",
"self",
".",
"_module_spec"
] |
https://github.com/bundy-dns/bundy/blob/3d41934996b82b0cd2fe22dd74d2abc1daba835d/src/lib/python/bundy/config/module_spec.py#L156-L158
|
|
BlzFans/wke
|
b0fa21158312e40c5fbd84682d643022b6c34a93
|
cygwin/lib/python2.6/urllib.py
|
python
|
URLopener.open_file
|
(self, url)
|
Use local file or FTP depending on form of URL.
|
Use local file or FTP depending on form of URL.
|
[
"Use",
"local",
"file",
"or",
"FTP",
"depending",
"on",
"form",
"of",
"URL",
"."
] |
def open_file(self, url):
"""Use local file or FTP depending on form of URL."""
if not isinstance(url, str):
raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
return self.open_ftp(url)
else:
return self.open_local_file(url)
|
[
"def",
"open_file",
"(",
"self",
",",
"url",
")",
":",
"if",
"not",
"isinstance",
"(",
"url",
",",
"str",
")",
":",
"raise",
"IOError",
",",
"(",
"'file error'",
",",
"'proxy support for file protocol currently not implemented'",
")",
"if",
"url",
"[",
":",
"2",
"]",
"==",
"'//'",
"and",
"url",
"[",
"2",
":",
"3",
"]",
"!=",
"'/'",
"and",
"url",
"[",
"2",
":",
"12",
"]",
".",
"lower",
"(",
")",
"!=",
"'localhost/'",
":",
"return",
"self",
".",
"open_ftp",
"(",
"url",
")",
"else",
":",
"return",
"self",
".",
"open_local_file",
"(",
"url",
")"
] |
https://github.com/BlzFans/wke/blob/b0fa21158312e40c5fbd84682d643022b6c34a93/cygwin/lib/python2.6/urllib.py#L460-L467
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/msvc.py
|
python
|
EnvironmentInfo.VCTools
|
(self)
|
return tools
|
Microsoft Visual C++ Tools.
Return
------
list of str
paths
|
Microsoft Visual C++ Tools.
|
[
"Microsoft",
"Visual",
"C",
"++",
"Tools",
"."
] |
def VCTools(self):
"""
Microsoft Visual C++ Tools.
Return
------
list of str
paths
"""
si = self.si
tools = [join(si.VCInstallDir, 'VCPackages')]
forcex86 = True if self.vs_ver <= 10.0 else False
arch_subdir = self.pi.cross_dir(forcex86)
if arch_subdir:
tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
if self.vs_ver == 14.0:
path = 'Bin%s' % self.pi.current_dir(hidex86=True)
tools += [join(si.VCInstallDir, path)]
elif self.vs_ver >= 15.0:
host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else
r'bin\HostX64%s')
tools += [join(
si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
if self.pi.current_cpu != self.pi.target_cpu:
tools += [join(
si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]
else:
tools += [join(si.VCInstallDir, 'Bin')]
return tools
|
[
"def",
"VCTools",
"(",
"self",
")",
":",
"si",
"=",
"self",
".",
"si",
"tools",
"=",
"[",
"join",
"(",
"si",
".",
"VCInstallDir",
",",
"'VCPackages'",
")",
"]",
"forcex86",
"=",
"True",
"if",
"self",
".",
"vs_ver",
"<=",
"10.0",
"else",
"False",
"arch_subdir",
"=",
"self",
".",
"pi",
".",
"cross_dir",
"(",
"forcex86",
")",
"if",
"arch_subdir",
":",
"tools",
"+=",
"[",
"join",
"(",
"si",
".",
"VCInstallDir",
",",
"'Bin%s'",
"%",
"arch_subdir",
")",
"]",
"if",
"self",
".",
"vs_ver",
"==",
"14.0",
":",
"path",
"=",
"'Bin%s'",
"%",
"self",
".",
"pi",
".",
"current_dir",
"(",
"hidex86",
"=",
"True",
")",
"tools",
"+=",
"[",
"join",
"(",
"si",
".",
"VCInstallDir",
",",
"path",
")",
"]",
"elif",
"self",
".",
"vs_ver",
">=",
"15.0",
":",
"host_dir",
"=",
"(",
"r'bin\\HostX86%s'",
"if",
"self",
".",
"pi",
".",
"current_is_x86",
"(",
")",
"else",
"r'bin\\HostX64%s'",
")",
"tools",
"+=",
"[",
"join",
"(",
"si",
".",
"VCInstallDir",
",",
"host_dir",
"%",
"self",
".",
"pi",
".",
"target_dir",
"(",
"x64",
"=",
"True",
")",
")",
"]",
"if",
"self",
".",
"pi",
".",
"current_cpu",
"!=",
"self",
".",
"pi",
".",
"target_cpu",
":",
"tools",
"+=",
"[",
"join",
"(",
"si",
".",
"VCInstallDir",
",",
"host_dir",
"%",
"self",
".",
"pi",
".",
"current_dir",
"(",
"x64",
"=",
"True",
")",
")",
"]",
"else",
":",
"tools",
"+=",
"[",
"join",
"(",
"si",
".",
"VCInstallDir",
",",
"'Bin'",
")",
"]",
"return",
"tools"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/msvc.py#L1317-L1351
|
|
mongodb/mongo
|
d8ff665343ad29cf286ee2cf4a1960d29371937b
|
buildscripts/resmokelib/logging/loggers.py
|
python
|
new_test_thread_logger
|
(parent, test_kind, thread_id)
|
return logger
|
Create a new test thread logger that will be the child of the given parent.
|
Create a new test thread logger that will be the child of the given parent.
|
[
"Create",
"a",
"new",
"test",
"thread",
"logger",
"that",
"will",
"be",
"the",
"child",
"of",
"the",
"given",
"parent",
"."
] |
def new_test_thread_logger(parent, test_kind, thread_id):
"""Create a new test thread logger that will be the child of the given parent."""
name = "%s:%s" % (test_kind, thread_id)
logger = logging.Logger(name)
logger.parent = parent
return logger
|
[
"def",
"new_test_thread_logger",
"(",
"parent",
",",
"test_kind",
",",
"thread_id",
")",
":",
"name",
"=",
"\"%s:%s\"",
"%",
"(",
"test_kind",
",",
"thread_id",
")",
"logger",
"=",
"logging",
".",
"Logger",
"(",
"name",
")",
"logger",
".",
"parent",
"=",
"parent",
"return",
"logger"
] |
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/resmokelib/logging/loggers.py#L236-L241
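A minimal sketch of how such a child logger behaves; the parent logger name is an assumption:

import logging

parent = logging.getLogger('executor')
logger = new_test_thread_logger(parent, 'js_test', 3)
logger.info('starting')  # records propagate to the handlers attached to 'executor'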
|
|
raspberrypi/tools
|
13474ee775d0c5ec8a7da4fb0a9fa84187abfc87
|
arm-bcm2708/arm-rpi-4.9.3-linux-gnueabihf/share/gdb/python/gdb/command/pretty_printers.py
|
python
|
do_enable_pretty_printer_1
|
(pretty_printers, name_re, subname_re, flag)
|
return total
|
Worker for enabling/disabling pretty-printers.
Arguments:
pretty_printers: list of pretty-printers
name_re: regular-expression object to select printers
subname_re: regular expression object to select subprinters or None
if all are affected
flag: True for Enable, False for Disable
Returns:
The number of printers affected.
This is just for informational purposes for the user.
|
Worker for enabling/disabling pretty-printers.
|
[
"Worker",
"for",
"enabling",
"/",
"disabling",
"pretty",
"-",
"printers",
"."
] |
def do_enable_pretty_printer_1 (pretty_printers, name_re, subname_re, flag):
"""Worker for enabling/disabling pretty-printers.
Arguments:
pretty_printers: list of pretty-printers
name_re: regular-expression object to select printers
subname_re: regular expression object to select subprinters or None
if all are affected
flag: True for Enable, False for Disable
Returns:
The number of printers affected.
This is just for informational purposes for the user.
"""
total = 0
for printer in pretty_printers:
if (hasattr(printer, "name") and name_re.match(printer.name) or
hasattr(printer, "__name__") and name_re.match(printer.__name__)):
if (hasattr(printer, "subprinters") and
printer.subprinters is not None):
if not subname_re:
# Only record printers that change state.
if printer_enabled_p(printer) != flag:
for subprinter in printer.subprinters:
if printer_enabled_p(subprinter):
total += 1
# NOTE: We preserve individual subprinter settings.
printer.enabled = flag
else:
# NOTE: Whether this actually disables the subprinter
# depends on whether the printer's lookup function supports
# the "enable" API. We can only assume it does.
for subprinter in printer.subprinters:
if subname_re.match(subprinter.name):
# Only record printers that change state.
if (printer_enabled_p(printer) and
printer_enabled_p(subprinter) != flag):
total += 1
subprinter.enabled = flag
else:
# This printer has no subprinters.
# If the user does "disable pretty-printer .* .* foo"
# should we disable printers that don't have subprinters?
# How do we apply "foo" in this context? Since there is no
# "foo" subprinter it feels like we should skip this printer.
# There's still the issue of how to handle
# "disable pretty-printer .* .* .*", and every other variation
# that can match everything. For now punt and only support
# "disable pretty-printer .* .*" (i.e. subname is elided)
# to disable everything.
if not subname_re:
# Only record printers that change state.
if printer_enabled_p(printer) != flag:
total += 1
printer.enabled = flag
return total
|
[
"def",
"do_enable_pretty_printer_1",
"(",
"pretty_printers",
",",
"name_re",
",",
"subname_re",
",",
"flag",
")",
":",
"total",
"=",
"0",
"for",
"printer",
"in",
"pretty_printers",
":",
"if",
"(",
"hasattr",
"(",
"printer",
",",
"\"name\"",
")",
"and",
"name_re",
".",
"match",
"(",
"printer",
".",
"name",
")",
"or",
"hasattr",
"(",
"printer",
",",
"\"__name__\"",
")",
"and",
"name_re",
".",
"match",
"(",
"printer",
".",
"__name__",
")",
")",
":",
"if",
"(",
"hasattr",
"(",
"printer",
",",
"\"subprinters\"",
")",
"and",
"printer",
".",
"subprinters",
"is",
"not",
"None",
")",
":",
"if",
"not",
"subname_re",
":",
"# Only record printers that change state.",
"if",
"printer_enabled_p",
"(",
"printer",
")",
"!=",
"flag",
":",
"for",
"subprinter",
"in",
"printer",
".",
"subprinters",
":",
"if",
"printer_enabled_p",
"(",
"subprinter",
")",
":",
"total",
"+=",
"1",
"# NOTE: We preserve individual subprinter settings.",
"printer",
".",
"enabled",
"=",
"flag",
"else",
":",
"# NOTE: Whether this actually disables the subprinter",
"# depends on whether the printer's lookup function supports",
"# the \"enable\" API. We can only assume it does.",
"for",
"subprinter",
"in",
"printer",
".",
"subprinters",
":",
"if",
"subname_re",
".",
"match",
"(",
"subprinter",
".",
"name",
")",
":",
"# Only record printers that change state.",
"if",
"(",
"printer_enabled_p",
"(",
"printer",
")",
"and",
"printer_enabled_p",
"(",
"subprinter",
")",
"!=",
"flag",
")",
":",
"total",
"+=",
"1",
"subprinter",
".",
"enabled",
"=",
"flag",
"else",
":",
"# This printer has no subprinters.",
"# If the user does \"disable pretty-printer .* .* foo\"",
"# should we disable printers that don't have subprinters?",
"# How do we apply \"foo\" in this context? Since there is no",
"# \"foo\" subprinter it feels like we should skip this printer.",
"# There's still the issue of how to handle",
"# \"disable pretty-printer .* .* .*\", and every other variation",
"# that can match everything. For now punt and only support",
"# \"disable pretty-printer .* .*\" (i.e. subname is elided)",
"# to disable everything.",
"if",
"not",
"subname_re",
":",
"# Only record printers that change state.",
"if",
"printer_enabled_p",
"(",
"printer",
")",
"!=",
"flag",
":",
"total",
"+=",
"1",
"printer",
".",
"enabled",
"=",
"flag",
"return",
"total"
] |
https://github.com/raspberrypi/tools/blob/13474ee775d0c5ec8a7da4fb0a9fa84187abfc87/arm-bcm2708/arm-rpi-4.9.3-linux-gnueabihf/share/gdb/python/gdb/command/pretty_printers.py#L221-L276
|
|
CaoWGG/TensorRT-CenterNet
|
f949252e37b51e60f873808f46d3683f15735e79
|
onnx-tensorrt/third_party/onnx/onnx/helper.py
|
python
|
make_node
|
(
op_type, # type: Text
inputs, # type: Sequence[Text]
outputs, # type: Sequence[Text]
name=None, # type: Optional[Text]
doc_string=None, # type: Optional[Text]
domain=None, # type: Optional[Text]
**kwargs # type: Any
)
|
return node
|
Construct a NodeProto.
Arguments:
op_type (string): The name of the operator to construct
inputs (list of string): list of input names
outputs (list of string): list of output names
name (string, default None): optional unique identifier for NodeProto
doc_string (string, default None): optional documentation string for NodeProto
domain (string, default None): optional domain for NodeProto.
If it's None, we will just use default domain (which is empty)
**kwargs (dict): the attributes of the node. The acceptable values
are documented in :func:`make_attribute`.
|
Construct a NodeProto.
|
[
"Construct",
"a",
"NodeProto",
"."
] |
def make_node(
op_type, # type: Text
inputs, # type: Sequence[Text]
outputs, # type: Sequence[Text]
name=None, # type: Optional[Text]
doc_string=None, # type: Optional[Text]
domain=None, # type: Optional[Text]
**kwargs # type: Any
): # type: (...) -> NodeProto
"""Construct a NodeProto.
Arguments:
op_type (string): The name of the operator to construct
inputs (list of string): list of input names
outputs (list of string): list of output names
name (string, default None): optional unique identifier for NodeProto
doc_string (string, default None): optional documentation string for NodeProto
domain (string, default None): optional domain for NodeProto.
If it's None, we will just use default domain (which is empty)
**kwargs (dict): the attributes of the node. The acceptable values
are documented in :func:`make_attribute`.
"""
node = NodeProto()
node.op_type = op_type
node.input.extend(inputs)
node.output.extend(outputs)
if name:
node.name = name
if doc_string:
node.doc_string = doc_string
if domain is not None:
node.domain = domain
if kwargs:
node.attribute.extend(
make_attribute(key, value)
for key, value in sorted(kwargs.items()))
return node
|
[
"def",
"make_node",
"(",
"op_type",
",",
"# type: Text",
"inputs",
",",
"# type: Sequence[Text]",
"outputs",
",",
"# type: Sequence[Text]",
"name",
"=",
"None",
",",
"# type: Optional[Text]",
"doc_string",
"=",
"None",
",",
"# type: Optional[Text]",
"domain",
"=",
"None",
",",
"# type: Optional[Text]",
"*",
"*",
"kwargs",
"# type: Any",
")",
":",
"# type: (...) -> NodeProto",
"node",
"=",
"NodeProto",
"(",
")",
"node",
".",
"op_type",
"=",
"op_type",
"node",
".",
"input",
".",
"extend",
"(",
"inputs",
")",
"node",
".",
"output",
".",
"extend",
"(",
"outputs",
")",
"if",
"name",
":",
"node",
".",
"name",
"=",
"name",
"if",
"doc_string",
":",
"node",
".",
"doc_string",
"=",
"doc_string",
"if",
"domain",
"is",
"not",
"None",
":",
"node",
".",
"domain",
"=",
"domain",
"if",
"kwargs",
":",
"node",
".",
"attribute",
".",
"extend",
"(",
"make_attribute",
"(",
"key",
",",
"value",
")",
"for",
"key",
",",
"value",
"in",
"sorted",
"(",
"kwargs",
".",
"items",
"(",
")",
")",
")",
"return",
"node"
] |
https://github.com/CaoWGG/TensorRT-CenterNet/blob/f949252e37b51e60f873808f46d3683f15735e79/onnx-tensorrt/third_party/onnx/onnx/helper.py#L20-L57
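A usage sketch mirroring the public onnx.helper API; extra keyword arguments become node attributes via make_attribute:

from onnx import helper

node = helper.make_node(
    'Pad',
    inputs=['x'],
    outputs=['y'],
    mode='constant',  # stored as a NodeProto attribute
    value=1.5,
)
print(node)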
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/scikit-learn/py2/sklearn/cluster/mean_shift_.py
|
python
|
estimate_bandwidth
|
(X, quantile=0.3, n_samples=None, random_state=0,
n_jobs=1)
|
return bandwidth / X.shape[0]
|
Estimate the bandwidth to use with the mean-shift algorithm.
Note that this function takes time at least quadratic in n_samples. For large
datasets, it's wise to set that parameter to a small value.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input points.
quantile : float, default 0.3
should be between [0, 1]
0.5 means that the median of all pairwise distances is used.
n_samples : int, optional
The number of samples to use. If not given, all samples are used.
random_state : int or RandomState
Pseudo-random number generator state used for random sampling.
n_jobs : int, optional (default = 1)
The number of parallel jobs to run for neighbors search.
If ``-1``, then the number of jobs is set to the number of CPU cores.
Returns
-------
bandwidth : float
The bandwidth parameter.
|
Estimate the bandwidth to use with the mean-shift algorithm.
|
[
"Estimate",
"the",
"bandwidth",
"to",
"use",
"with",
"the",
"mean",
"-",
"shift",
"algorithm",
"."
] |
def estimate_bandwidth(X, quantile=0.3, n_samples=None, random_state=0,
n_jobs=1):
"""Estimate the bandwidth to use with the mean-shift algorithm.
Note that this function takes time at least quadratic in n_samples. For large
datasets, it's wise to set that parameter to a small value.
Parameters
----------
X : array-like, shape=[n_samples, n_features]
Input points.
quantile : float, default 0.3
should be between [0, 1]
0.5 means that the median of all pairwise distances is used.
n_samples : int, optional
The number of samples to use. If not given, all samples are used.
random_state : int or RandomState
Pseudo-random number generator state used for random sampling.
n_jobs : int, optional (default = 1)
The number of parallel jobs to run for neighbors search.
If ``-1``, then the number of jobs is set to the number of CPU cores.
Returns
-------
bandwidth : float
The bandwidth parameter.
"""
random_state = check_random_state(random_state)
if n_samples is not None:
idx = random_state.permutation(X.shape[0])[:n_samples]
X = X[idx]
nbrs = NearestNeighbors(n_neighbors=int(X.shape[0] * quantile),
n_jobs=n_jobs)
nbrs.fit(X)
bandwidth = 0.
for batch in gen_batches(len(X), 500):
d, _ = nbrs.kneighbors(X[batch, :], return_distance=True)
bandwidth += np.max(d, axis=1).sum()
return bandwidth / X.shape[0]
|
[
"def",
"estimate_bandwidth",
"(",
"X",
",",
"quantile",
"=",
"0.3",
",",
"n_samples",
"=",
"None",
",",
"random_state",
"=",
"0",
",",
"n_jobs",
"=",
"1",
")",
":",
"random_state",
"=",
"check_random_state",
"(",
"random_state",
")",
"if",
"n_samples",
"is",
"not",
"None",
":",
"idx",
"=",
"random_state",
".",
"permutation",
"(",
"X",
".",
"shape",
"[",
"0",
"]",
")",
"[",
":",
"n_samples",
"]",
"X",
"=",
"X",
"[",
"idx",
"]",
"nbrs",
"=",
"NearestNeighbors",
"(",
"n_neighbors",
"=",
"int",
"(",
"X",
".",
"shape",
"[",
"0",
"]",
"*",
"quantile",
")",
",",
"n_jobs",
"=",
"n_jobs",
")",
"nbrs",
".",
"fit",
"(",
"X",
")",
"bandwidth",
"=",
"0.",
"for",
"batch",
"in",
"gen_batches",
"(",
"len",
"(",
"X",
")",
",",
"500",
")",
":",
"d",
",",
"_",
"=",
"nbrs",
".",
"kneighbors",
"(",
"X",
"[",
"batch",
",",
":",
"]",
",",
"return_distance",
"=",
"True",
")",
"bandwidth",
"+=",
"np",
".",
"max",
"(",
"d",
",",
"axis",
"=",
"1",
")",
".",
"sum",
"(",
")",
"return",
"bandwidth",
"/",
"X",
".",
"shape",
"[",
"0",
"]"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scikit-learn/py2/sklearn/cluster/mean_shift_.py#L31-L75
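A usage sketch with the public scikit-learn API; the random data is illustrative only:

import numpy as np
from sklearn.cluster import estimate_bandwidth

X = np.random.RandomState(0).randn(200, 2)
bw = estimate_bandwidth(X, quantile=0.3, n_samples=100, random_state=0)
print(bw)  # a positive float, usable as MeanShift(bandwidth=bw)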
|
|
yyzybb537/libgo
|
4af17b7c67643c4d54aa354dcc77963ea07847d0
|
third_party/boost.context/tools/build/src/build/virtual_target.py
|
python
|
VirtualTarget.project
|
(self)
|
return self.project_
|
Project of this target.
|
Project of this target.
|
[
"Project",
"of",
"this",
"target",
"."
] |
def project (self):
""" Project of this target.
"""
return self.project_
|
[
"def",
"project",
"(",
"self",
")",
":",
"return",
"self",
".",
"project_"
] |
https://github.com/yyzybb537/libgo/blob/4af17b7c67643c4d54aa354dcc77963ea07847d0/third_party/boost.context/tools/build/src/build/virtual_target.py#L291-L294
|
|
ricardoquesada/Spidermonkey
|
4a75ea2543408bd1b2c515aa95901523eeef7858
|
build/checksums.py
|
python
|
main
|
()
|
This is a main function that parses arguments, sets up logging
and generates a checksum file
|
This is a main function that parses arguments, sets up logging
and generates a checksum file
|
[
"This",
"is",
"a",
"main",
"function",
"that",
"parses",
"arguments",
"sets",
"up",
"logging",
"and",
"generates",
"a",
"checksum",
"file"
] |
def main():
'''This is a main function that parses arguments, sets up logging
and generates a checksum file'''
# Parse command line arguments
parser = OptionParser()
parser.add_option('-d', '--digest', help='checksum algorithm to use',
action='append', dest='digests')
parser.add_option('-o', '--output', help='output file to use',
action='store', dest='outfile', default='checksums')
parser.add_option('-v', '--verbose',
help='Be noisy (takes precedence over quiet)',
action='store_true', dest='verbose', default=False)
parser.add_option('-q', '--quiet', help='Be quiet', action='store_true',
dest='quiet', default=False)
parser.add_option('-s', '--strip',
help='strip this path from the filenames',
dest='strip', default=os.getcwd())
options, args = parser.parse_args()
#Figure out which logging level to use
if options.verbose:
loglevel = logging.DEBUG
elif options.quiet:
loglevel = logging.ERROR
else:
loglevel = logging.INFO
#Set up logging
setup_logging(loglevel)
logger = logging.getLogger('checksums.py')
# Validate the digest type to use
if not options.digests:
options.digests = ['sha1']
try:
for digest in options.digests:
hashlib.new(digest)
except ValueError, ve:
logger.error('Could not create a "%s" hash object (%s)' %
(digest, ve.args[0]))
exit(1)
# Validate the files to checksum
files = []
for i in args:
if os.path.exists(i):
files.append(i)
else:
logger.info('File "%s" was not found on the filesystem' % i)
process_files(files, options.outfile, options.digests, options.strip)
|
[
"def",
"main",
"(",
")",
":",
"# Parse command line arguments",
"parser",
"=",
"OptionParser",
"(",
")",
"parser",
".",
"add_option",
"(",
"'-d'",
",",
"'--digest'",
",",
"help",
"=",
"'checksum algorithm to use'",
",",
"action",
"=",
"'append'",
",",
"dest",
"=",
"'digests'",
")",
"parser",
".",
"add_option",
"(",
"'-o'",
",",
"'--output'",
",",
"help",
"=",
"'output file to use'",
",",
"action",
"=",
"'store'",
",",
"dest",
"=",
"'outfile'",
",",
"default",
"=",
"'checksums'",
")",
"parser",
".",
"add_option",
"(",
"'-v'",
",",
"'--verbose'",
",",
"help",
"=",
"'Be noisy (takes precedence over quiet)'",
",",
"action",
"=",
"'store_true'",
",",
"dest",
"=",
"'verbose'",
",",
"default",
"=",
"False",
")",
"parser",
".",
"add_option",
"(",
"'-q'",
",",
"'--quiet'",
",",
"help",
"=",
"'Be quiet'",
",",
"action",
"=",
"'store_true'",
",",
"dest",
"=",
"'quiet'",
",",
"default",
"=",
"False",
")",
"parser",
".",
"add_option",
"(",
"'-s'",
",",
"'--strip'",
",",
"help",
"=",
"'strip this path from the filenames'",
",",
"dest",
"=",
"'strip'",
",",
"default",
"=",
"os",
".",
"getcwd",
"(",
")",
")",
"options",
",",
"args",
"=",
"parser",
".",
"parse_args",
"(",
")",
"#Figure out which logging level to use",
"if",
"options",
".",
"verbose",
":",
"loglevel",
"=",
"logging",
".",
"DEBUG",
"elif",
"options",
".",
"quiet",
":",
"loglevel",
"=",
"logging",
".",
"ERROR",
"else",
":",
"loglevel",
"=",
"logging",
".",
"INFO",
"#Set up logging",
"setup_logging",
"(",
"loglevel",
")",
"logger",
"=",
"logging",
".",
"getLogger",
"(",
"'checksums.py'",
")",
"# Validate the digest type to use",
"if",
"not",
"options",
".",
"digests",
":",
"options",
".",
"digests",
"=",
"[",
"'sha1'",
"]",
"try",
":",
"for",
"digest",
"in",
"options",
".",
"digests",
":",
"hashlib",
".",
"new",
"(",
"digest",
")",
"except",
"ValueError",
",",
"ve",
":",
"logger",
".",
"error",
"(",
"'Could not create a \"%s\" hash object (%s)'",
"%",
"(",
"digest",
",",
"ve",
".",
"args",
"[",
"0",
"]",
")",
")",
"exit",
"(",
"1",
")",
"# Validate the files to checksum",
"files",
"=",
"[",
"]",
"for",
"i",
"in",
"args",
":",
"if",
"os",
".",
"path",
".",
"exists",
"(",
"i",
")",
":",
"files",
".",
"append",
"(",
"i",
")",
"else",
":",
"logger",
".",
"info",
"(",
"'File \"%s\" was not found on the filesystem'",
"%",
"i",
")",
"process_files",
"(",
"files",
",",
"options",
".",
"outfile",
",",
"options",
".",
"digests",
",",
"options",
".",
"strip",
")"
] |
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/build/checksums.py#L106-L155
|
||
cartographer-project/cartographer
|
b8228ee6564f5a7ad0d6d0b9a30516521cff2ee9
|
scripts/update_configuration_doc.py
|
python
|
ParseProtoFilesRecursively
|
(root)
|
return message_list
|
Recursively parses all proto files into a list of Message objects.
|
Recursively parses all proto files into a list of Message objects.
|
[
"Recursively",
"parses",
"all",
"proto",
"files",
"into",
"a",
"list",
"of",
"Message",
"objects",
"."
] |
def ParseProtoFilesRecursively(root):
"""Recursively parses all proto files into a list of Message objects."""
message_list = []
for dirpath, dirnames, filenames in os.walk(root):
for name in filenames:
if name.endswith('.proto'):
path = os.path.join(dirpath, name)
print("Found '%s'..." % path)
assert not os.path.islink(path)
message_list.extend(ParseProtoFile(io.open(path, encoding='UTF-8')))
return message_list
|
[
"def",
"ParseProtoFilesRecursively",
"(",
"root",
")",
":",
"message_list",
"=",
"[",
"]",
"for",
"dirpath",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"root",
")",
":",
"for",
"name",
"in",
"filenames",
":",
"if",
"name",
".",
"endswith",
"(",
"'.proto'",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dirpath",
",",
"name",
")",
"print",
"(",
"\"Found '%s'...\"",
"%",
"path",
")",
"assert",
"not",
"os",
".",
"path",
".",
"islink",
"(",
"path",
")",
"message_list",
".",
"extend",
"(",
"ParseProtoFile",
"(",
"io",
".",
"open",
"(",
"path",
",",
"encoding",
"=",
"'UTF-8'",
")",
")",
")",
"return",
"message_list"
] |
https://github.com/cartographer-project/cartographer/blob/b8228ee6564f5a7ad0d6d0b9a30516521cff2ee9/scripts/update_configuration_doc.py#L136-L146
|
|
grpc/grpc
|
27bc6fe7797e43298dc931b96dc57322d0852a9f
|
src/python/grpcio/grpc/experimental/__init__.py
|
python
|
wrap_server_method_handler
|
(wrapper, handler)
|
Wraps the server method handler function.
The server implementation requires that all server handlers be wrapped as
RpcMethodHandler objects. This helper function eases the pain of writing
server handler wrappers.
Args:
wrapper: A wrapper function that takes in a method handler behavior
(the actual function) and returns a wrapped function.
handler: A RpcMethodHandler object to be wrapped.
Returns:
A newly created RpcMethodHandler.
|
Wraps the server method handler function.
|
[
"Wraps",
"the",
"server",
"method",
"handler",
"function",
"."
] |
def wrap_server_method_handler(wrapper, handler):
"""Wraps the server method handler function.
The server implementation requires that all server handlers be wrapped as
RpcMethodHandler objects. This helper function eases the pain of writing
server handler wrappers.
Args:
wrapper: A wrapper function that takes in a method handler behavior
(the actual function) and returns a wrapped function.
handler: A RpcMethodHandler object to be wrapped.
Returns:
A newly created RpcMethodHandler.
"""
if not handler:
return None
if not handler.request_streaming:
if not handler.response_streaming:
# NOTE(lidiz) _replace is a public API:
# https://docs.python.org/dev/library/collections.html
return handler._replace(unary_unary=wrapper(handler.unary_unary))
else:
return handler._replace(unary_stream=wrapper(handler.unary_stream))
else:
if not handler.response_streaming:
return handler._replace(stream_unary=wrapper(handler.stream_unary))
else:
return handler._replace(
stream_stream=wrapper(handler.stream_stream))
|
[
"def",
"wrap_server_method_handler",
"(",
"wrapper",
",",
"handler",
")",
":",
"if",
"not",
"handler",
":",
"return",
"None",
"if",
"not",
"handler",
".",
"request_streaming",
":",
"if",
"not",
"handler",
".",
"response_streaming",
":",
"# NOTE(lidiz) _replace is a public API:",
"# https://docs.python.org/dev/library/collections.html",
"return",
"handler",
".",
"_replace",
"(",
"unary_unary",
"=",
"wrapper",
"(",
"handler",
".",
"unary_unary",
")",
")",
"else",
":",
"return",
"handler",
".",
"_replace",
"(",
"unary_stream",
"=",
"wrapper",
"(",
"handler",
".",
"unary_stream",
")",
")",
"else",
":",
"if",
"not",
"handler",
".",
"response_streaming",
":",
"return",
"handler",
".",
"_replace",
"(",
"stream_unary",
"=",
"wrapper",
"(",
"handler",
".",
"stream_unary",
")",
")",
"else",
":",
"return",
"handler",
".",
"_replace",
"(",
"stream_stream",
"=",
"wrapper",
"(",
"handler",
".",
"stream_stream",
")",
")"
] |
https://github.com/grpc/grpc/blob/27bc6fe7797e43298dc931b96dc57322d0852a9f/src/python/grpcio/grpc/experimental/__init__.py#L82-L112
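A sketch of a behavior wrapper, e.g. applied from inside a server interceptor; `handler` is assumed to be an existing grpc.RpcMethodHandler obtained elsewhere:

def with_logging(behavior):
    def wrapped(request_or_iterator, context):
        print('RPC invoked')
        return behavior(request_or_iterator, context)
    return wrapped

logged_handler = wrap_server_method_handler(with_logging, handler)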
|
||
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/AWSPythonSDK/1.5.8/docutils/nodes.py
|
python
|
Element.copy_attr_convert
|
(self, attr, value, replace = True)
|
If attr is an attribute of self, set self[attr] to
[self[attr], value], otherwise set self[attr] to value.
NOTE: replace is not used by this function and is kept only for
compatibility with the other copy functions.
|
If attr is an attribute of self, set self[attr] to
[self[attr], value], otherwise set self[attr] to value.
|
[
"If",
"attr",
"is",
"an",
"attribute",
"of",
"self",
"set",
"self",
"[",
"attr",
"]",
"to",
"[",
"self",
"[",
"attr",
"]",
"value",
"]",
"otherwise",
"set",
"self",
"[",
"attr",
"]",
"to",
"value",
"."
] |
def copy_attr_convert(self, attr, value, replace = True):
"""
If attr is an attribute of self, set self[attr] to
[self[attr], value], otherwise set self[attr] to value.
NOTE: replace is not used by this function and is kept only for
compatibility with the other copy functions.
"""
if self.get(attr) is not value:
self.coerce_append_attr_list(attr, value)
|
[
"def",
"copy_attr_convert",
"(",
"self",
",",
"attr",
",",
"value",
",",
"replace",
"=",
"True",
")",
":",
"if",
"self",
".",
"get",
"(",
"attr",
")",
"is",
"not",
"value",
":",
"self",
".",
"coerce_append_attr_list",
"(",
"attr",
",",
"value",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/AWSPythonSDK/1.5.8/docutils/nodes.py#L744-L753
|
||
ChromiumWebApps/chromium
|
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
|
tools/metrics/histograms/update_policies.py
|
python
|
UpdateHistogramDefinitions
|
(policy_templates, doc)
|
Sets the children of <enum name="EnterprisePolicies" ...> node in |doc| to
values generated from policy ids contained in |policy_templates|.
Args:
policy_templates: A list of dictionaries, defining policies or policy
groups. The format is exactly the same as in
policy_templates.json file.
doc: A minidom.Document object representing parsed histogram definitions
XML file.
|
Sets the children of <enum name="EnterprisePolicies" ...> node in |doc| to
values generated from policy ids contained in |policy_templates|.
|
[
"Sets",
"the",
"children",
"of",
"<enum",
"name",
"=",
"EnterprisePolicies",
"...",
">",
"node",
"in",
"|doc|",
"to",
"values",
"generated",
"from",
"policy",
"ids",
"contained",
"in",
"|policy_templates|",
"."
] |
def UpdateHistogramDefinitions(policy_templates, doc):
"""Sets the children of <enum name="EnterprisePolicies" ...> node in |doc| to
values generated from policy ids contained in |policy_templates|.
Args:
policy_templates: A list of dictionaries, defining policies or policy
groups. The format is exactly the same as in
policy_templates.json file.
doc: A minidom.Document object representing parsed histogram definitions
XML file.
"""
# Find EnterprisePolicies enum.
for enum_node in doc.getElementsByTagName('enum'):
if enum_node.attributes['name'].value == ENUM_NAME:
policy_enum_node = enum_node
break
else:
raise UserError('No policy enum node found')
# Remove existing values.
while policy_enum_node.hasChildNodes():
policy_enum_node.removeChild(policy_enum_node.lastChild)
# Add a "Generated from (...)" comment
comment = ' Generated from {0} '.format(POLICY_TEMPLATES_PATH)
policy_enum_node.appendChild(doc.createComment(comment))
# Add values generated from policy templates.
ordered_policies = []
FlattenPolicies(policy_templates['policy_definitions'], ordered_policies)
ordered_policies.sort(key=lambda policy: policy['id'])
for policy in ordered_policies:
node = doc.createElement('int')
node.attributes['value'] = str(policy['id'])
node.attributes['label'] = ParsePlaceholders(policy['caption'])
policy_enum_node.appendChild(node)
|
[
"def",
"UpdateHistogramDefinitions",
"(",
"policy_templates",
",",
"doc",
")",
":",
"# Find EnterprisePolicies enum.",
"for",
"enum_node",
"in",
"doc",
".",
"getElementsByTagName",
"(",
"'enum'",
")",
":",
"if",
"enum_node",
".",
"attributes",
"[",
"'name'",
"]",
".",
"value",
"==",
"ENUM_NAME",
":",
"policy_enum_node",
"=",
"enum_node",
"break",
"else",
":",
"raise",
"UserError",
"(",
"'No policy enum node found'",
")",
"# Remove existing values.",
"while",
"policy_enum_node",
".",
"hasChildNodes",
"(",
")",
":",
"policy_enum_node",
".",
"removeChild",
"(",
"policy_enum_node",
".",
"lastChild",
")",
"# Add a \"Generated from (...)\" comment",
"comment",
"=",
"' Generated from {0} '",
".",
"format",
"(",
"POLICY_TEMPLATES_PATH",
")",
"policy_enum_node",
".",
"appendChild",
"(",
"doc",
".",
"createComment",
"(",
"comment",
")",
")",
"# Add values generated from policy templates.",
"ordered_policies",
"=",
"[",
"]",
"FlattenPolicies",
"(",
"policy_templates",
"[",
"'policy_definitions'",
"]",
",",
"ordered_policies",
")",
"ordered_policies",
".",
"sort",
"(",
"key",
"=",
"lambda",
"policy",
":",
"policy",
"[",
"'id'",
"]",
")",
"for",
"policy",
"in",
"ordered_policies",
":",
"node",
"=",
"doc",
".",
"createElement",
"(",
"'int'",
")",
"node",
".",
"attributes",
"[",
"'value'",
"]",
"=",
"str",
"(",
"policy",
"[",
"'id'",
"]",
")",
"node",
".",
"attributes",
"[",
"'label'",
"]",
"=",
"ParsePlaceholders",
"(",
"policy",
"[",
"'caption'",
"]",
")",
"policy_enum_node",
".",
"appendChild",
"(",
"node",
")"
] |
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/metrics/histograms/update_policies.py#L69-L104
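The same minidom pattern in isolation, run against a toy document; ENUM_NAME and the hard-coded policy list below are stand-ins for the script's real inputs:
from xml.dom import minidom

ENUM_NAME = 'EnterprisePolicies'   # stand-in for the script's constant
doc = minidom.parseString(
    '<histograms><enum name="EnterprisePolicies">'
    '<int value="9" label="Old"/></enum></histograms>')
for enum_node in doc.getElementsByTagName('enum'):
    if enum_node.attributes['name'].value == ENUM_NAME:
        break
else:
    raise RuntimeError('No policy enum node found')
# Drop stale values, then rebuild them in id order.
while enum_node.hasChildNodes():
    enum_node.removeChild(enum_node.lastChild)
for policy in sorted([{'id': 2, 'caption': 'B'}, {'id': 1, 'caption': 'A'}],
                     key=lambda p: p['id']):
    node = doc.createElement('int')
    node.attributes['value'] = str(policy['id'])
    node.attributes['label'] = policy['caption']
    enum_node.appendChild(node)
print(doc.toxml())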
|
||
yun-liu/RCF
|
91bfb054ad04187dbbe21e539e165ad9bd3ff00b
|
scripts/cpp_lint.py
|
python
|
CheckForNonConstReference
|
(filename, clean_lines, linenum,
nesting_state, error)
|
Check for non-const references.
Separate from CheckLanguage since it scans backwards from current
line, instead of scanning forward.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A _NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found.
|
Check for non-const references.
|
[
"Check",
"for",
"non",
"-",
"const",
"references",
"."
] |
def CheckForNonConstReference(filename, clean_lines, linenum,
nesting_state, error):
"""Check for non-const references.
Separate from CheckLanguage since it scans backwards from current
line, instead of scanning forward.
Args:
filename: The name of the current file.
clean_lines: A CleansedLines instance containing the file.
linenum: The number of the line to check.
nesting_state: A _NestingState instance which maintains information about
the current stack of nested blocks being parsed.
error: The function to call with any errors found.
"""
# Do nothing if there is no '&' on current line.
line = clean_lines.elided[linenum]
if '&' not in line:
return
# Long type names may be broken across multiple lines, usually in one
# of these forms:
# LongType
# ::LongTypeContinued &identifier
# LongType::
# LongTypeContinued &identifier
# LongType<
# ...>::LongTypeContinued &identifier
#
# If we detected a type split across two lines, join the previous
# line to current line so that we can match const references
# accordingly.
#
# Note that this only scans back one line, since scanning back
# arbitrary number of lines would be expensive. If you have a type
# that spans more than 2 lines, please use a typedef.
if linenum > 1:
previous = None
if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line):
# previous_line\n + ::current_line
previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$',
clean_lines.elided[linenum - 1])
elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line):
# previous_line::\n + current_line
previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$',
clean_lines.elided[linenum - 1])
if previous:
line = previous.group(1) + line.lstrip()
else:
# Check for templated parameter that is split across multiple lines
endpos = line.rfind('>')
if endpos > -1:
(_, startline, startpos) = ReverseCloseExpression(
clean_lines, linenum, endpos)
if startpos > -1 and startline < linenum:
# Found the matching < on an earlier line, collect all
# pieces up to current line.
line = ''
for i in xrange(startline, linenum + 1):
line += clean_lines.elided[i].strip()
# Check for non-const references in function parameters. A single '&' may
# be found in the following places:
# inside expression: binary & for bitwise AND
# inside expression: unary & for taking the address of something
# inside declarators: reference parameter
# We will exclude the first two cases by checking that we are not inside a
# function body, including one that was just introduced by a trailing '{'.
# TODO(unknown): Doesn't account for preprocessor directives.
# TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].
check_params = False
if not nesting_state.stack:
check_params = True # top level
elif (isinstance(nesting_state.stack[-1], _ClassInfo) or
isinstance(nesting_state.stack[-1], _NamespaceInfo)):
check_params = True # within class or namespace
elif Match(r'.*{\s*$', line):
if (len(nesting_state.stack) == 1 or
isinstance(nesting_state.stack[-2], _ClassInfo) or
isinstance(nesting_state.stack[-2], _NamespaceInfo)):
check_params = True # just opened global/class/namespace block
# We allow non-const references in a few standard places, like functions
# called "swap()" or iostream operators like "<<" or ">>". Do not check
# those function parameters.
#
# We also accept & in static_assert, which looks like a function but
# it's actually a declaration expression.
whitelisted_functions = (r'(?:[sS]wap(?:<\w:+>)?|'
r'operator\s*[<>][<>]|'
r'static_assert|COMPILE_ASSERT'
r')\s*\(')
if Search(whitelisted_functions, line):
check_params = False
elif not Search(r'\S+\([^)]*$', line):
# Don't see a whitelisted function on this line. Actually we
# didn't see any function name on this line, so this is likely a
# multi-line parameter list. Try a bit harder to catch this case.
for i in xrange(2):
if (linenum > i and
Search(whitelisted_functions, clean_lines.elided[linenum - i - 1])):
check_params = False
break
if check_params:
decls = ReplaceAll(r'{[^}]*}', ' ', line) # exclude function body
for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls):
if not Match(_RE_PATTERN_CONST_REF_PARAM, parameter):
error(filename, linenum, 'runtime/references', 2,
'Is this a non-const reference? '
'If so, make const or use a pointer: ' +
ReplaceAll(' *<', '<', parameter))
|
[
"def",
"CheckForNonConstReference",
"(",
"filename",
",",
"clean_lines",
",",
"linenum",
",",
"nesting_state",
",",
"error",
")",
":",
"# Do nothing if there is no '&' on current line.",
"line",
"=",
"clean_lines",
".",
"elided",
"[",
"linenum",
"]",
"if",
"'&'",
"not",
"in",
"line",
":",
"return",
"# Long type names may be broken across multiple lines, usually in one",
"# of these forms:",
"# LongType",
"# ::LongTypeContinued &identifier",
"# LongType::",
"# LongTypeContinued &identifier",
"# LongType<",
"# ...>::LongTypeContinued &identifier",
"#",
"# If we detected a type split across two lines, join the previous",
"# line to current line so that we can match const references",
"# accordingly.",
"#",
"# Note that this only scans back one line, since scanning back",
"# arbitrary number of lines would be expensive. If you have a type",
"# that spans more than 2 lines, please use a typedef.",
"if",
"linenum",
">",
"1",
":",
"previous",
"=",
"None",
"if",
"Match",
"(",
"r'\\s*::(?:[\\w<>]|::)+\\s*&\\s*\\S'",
",",
"line",
")",
":",
"# previous_line\\n + ::current_line",
"previous",
"=",
"Search",
"(",
"r'\\b((?:const\\s*)?(?:[\\w<>]|::)+[\\w<>])\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"1",
"]",
")",
"elif",
"Match",
"(",
"r'\\s*[a-zA-Z_]([\\w<>]|::)+\\s*&\\s*\\S'",
",",
"line",
")",
":",
"# previous_line::\\n + current_line",
"previous",
"=",
"Search",
"(",
"r'\\b((?:const\\s*)?(?:[\\w<>]|::)+::)\\s*$'",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"1",
"]",
")",
"if",
"previous",
":",
"line",
"=",
"previous",
".",
"group",
"(",
"1",
")",
"+",
"line",
".",
"lstrip",
"(",
")",
"else",
":",
"# Check for templated parameter that is split across multiple lines",
"endpos",
"=",
"line",
".",
"rfind",
"(",
"'>'",
")",
"if",
"endpos",
">",
"-",
"1",
":",
"(",
"_",
",",
"startline",
",",
"startpos",
")",
"=",
"ReverseCloseExpression",
"(",
"clean_lines",
",",
"linenum",
",",
"endpos",
")",
"if",
"startpos",
">",
"-",
"1",
"and",
"startline",
"<",
"linenum",
":",
"# Found the matching < on an earlier line, collect all",
"# pieces up to current line.",
"line",
"=",
"''",
"for",
"i",
"in",
"xrange",
"(",
"startline",
",",
"linenum",
"+",
"1",
")",
":",
"line",
"+=",
"clean_lines",
".",
"elided",
"[",
"i",
"]",
".",
"strip",
"(",
")",
"# Check for non-const references in function parameters. A single '&' may",
"# found in the following places:",
"# inside expression: binary & for bitwise AND",
"# inside expression: unary & for taking the address of something",
"# inside declarators: reference parameter",
"# We will exclude the first two cases by checking that we are not inside a",
"# function body, including one that was just introduced by a trailing '{'.",
"# TODO(unknwon): Doesn't account for preprocessor directives.",
"# TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare].",
"check_params",
"=",
"False",
"if",
"not",
"nesting_state",
".",
"stack",
":",
"check_params",
"=",
"True",
"# top level",
"elif",
"(",
"isinstance",
"(",
"nesting_state",
".",
"stack",
"[",
"-",
"1",
"]",
",",
"_ClassInfo",
")",
"or",
"isinstance",
"(",
"nesting_state",
".",
"stack",
"[",
"-",
"1",
"]",
",",
"_NamespaceInfo",
")",
")",
":",
"check_params",
"=",
"True",
"# within class or namespace",
"elif",
"Match",
"(",
"r'.*{\\s*$'",
",",
"line",
")",
":",
"if",
"(",
"len",
"(",
"nesting_state",
".",
"stack",
")",
"==",
"1",
"or",
"isinstance",
"(",
"nesting_state",
".",
"stack",
"[",
"-",
"2",
"]",
",",
"_ClassInfo",
")",
"or",
"isinstance",
"(",
"nesting_state",
".",
"stack",
"[",
"-",
"2",
"]",
",",
"_NamespaceInfo",
")",
")",
":",
"check_params",
"=",
"True",
"# just opened global/class/namespace block",
"# We allow non-const references in a few standard places, like functions",
"# called \"swap()\" or iostream operators like \"<<\" or \">>\". Do not check",
"# those function parameters.",
"#",
"# We also accept & in static_assert, which looks like a function but",
"# it's actually a declaration expression.",
"whitelisted_functions",
"=",
"(",
"r'(?:[sS]wap(?:<\\w:+>)?|'",
"r'operator\\s*[<>][<>]|'",
"r'static_assert|COMPILE_ASSERT'",
"r')\\s*\\('",
")",
"if",
"Search",
"(",
"whitelisted_functions",
",",
"line",
")",
":",
"check_params",
"=",
"False",
"elif",
"not",
"Search",
"(",
"r'\\S+\\([^)]*$'",
",",
"line",
")",
":",
"# Don't see a whitelisted function on this line. Actually we",
"# didn't see any function name on this line, so this is likely a",
"# multi-line parameter list. Try a bit harder to catch this case.",
"for",
"i",
"in",
"xrange",
"(",
"2",
")",
":",
"if",
"(",
"linenum",
">",
"i",
"and",
"Search",
"(",
"whitelisted_functions",
",",
"clean_lines",
".",
"elided",
"[",
"linenum",
"-",
"i",
"-",
"1",
"]",
")",
")",
":",
"check_params",
"=",
"False",
"break",
"if",
"check_params",
":",
"decls",
"=",
"ReplaceAll",
"(",
"r'{[^}]*}'",
",",
"' '",
",",
"line",
")",
"# exclude function body",
"for",
"parameter",
"in",
"re",
".",
"findall",
"(",
"_RE_PATTERN_REF_PARAM",
",",
"decls",
")",
":",
"if",
"not",
"Match",
"(",
"_RE_PATTERN_CONST_REF_PARAM",
",",
"parameter",
")",
":",
"error",
"(",
"filename",
",",
"linenum",
",",
"'runtime/references'",
",",
"2",
",",
"'Is this a non-const reference? '",
"'If so, make const or use a pointer: '",
"+",
"ReplaceAll",
"(",
"' *<'",
",",
"'<'",
",",
"parameter",
")",
")"
] |
https://github.com/yun-liu/RCF/blob/91bfb054ad04187dbbe21e539e165ad9bd3ff00b/scripts/cpp_lint.py#L4134-L4244
|
||
PaddlePaddle/PaddleOCR
|
b756bf5f8c90142e0d89d3db0163965c686b6ffe
|
ppocr/postprocess/locality_aware_nms.py
|
python
|
nms
|
(S, thres)
|
return keep
|
nms.
|
nms.
|
[
"nms",
"."
] |
def nms(S, thres):
"""
nms.
"""
order = np.argsort(S[:, 8])[::-1]
keep = []
while order.size > 0:
i = order[0]
keep.append(i)
ovr = np.array([intersection(S[i], S[t]) for t in order[1:]])
inds = np.where(ovr <= thres)[0]
order = order[inds + 1]
return keep
|
[
"def",
"nms",
"(",
"S",
",",
"thres",
")",
":",
"order",
"=",
"np",
".",
"argsort",
"(",
"S",
"[",
":",
",",
"8",
"]",
")",
"[",
":",
":",
"-",
"1",
"]",
"keep",
"=",
"[",
"]",
"while",
"order",
".",
"size",
">",
"0",
":",
"i",
"=",
"order",
"[",
"0",
"]",
"keep",
".",
"append",
"(",
"i",
")",
"ovr",
"=",
"np",
".",
"array",
"(",
"[",
"intersection",
"(",
"S",
"[",
"i",
"]",
",",
"S",
"[",
"t",
"]",
")",
"for",
"t",
"in",
"order",
"[",
"1",
":",
"]",
"]",
")",
"inds",
"=",
"np",
".",
"where",
"(",
"ovr",
"<=",
"thres",
")",
"[",
"0",
"]",
"order",
"=",
"order",
"[",
"inds",
"+",
"1",
"]",
"return",
"keep"
] |
https://github.com/PaddlePaddle/PaddleOCR/blob/b756bf5f8c90142e0d89d3db0163965c686b6ffe/ppocr/postprocess/locality_aware_nms.py#L89-L103
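The greedy loop in a self-contained form, applied to axis-aligned boxes [x1, y1, x2, y2, score] with a plain IoU standing in for the module's intersection() helper (the original scores quads at column 8; this sketch scores boxes at column 4):
import numpy as np

def iou(a, b):
    # Plain intersection-over-union for axis-aligned boxes.
    ix = max(0.0, min(a[2], b[2]) - max(a[0], b[0]))
    iy = max(0.0, min(a[3], b[3]) - max(a[1], b[1]))
    inter = ix * iy
    union = (a[2]-a[0])*(a[3]-a[1]) + (b[2]-b[0])*(b[3]-b[1]) - inter
    return inter / union if union > 0 else 0.0

def nms_boxes(S, thres):
    order = np.argsort(S[:, 4])[::-1]   # highest score first
    keep = []
    while order.size > 0:
        i = order[0]
        keep.append(int(i))
        # Overlap of the current best box with every remaining candidate.
        ovr = np.array([iou(S[i], S[t]) for t in order[1:]])
        order = order[np.where(ovr <= thres)[0] + 1]
    return keep

boxes = np.array([[0., 0., 10., 10., 0.9],
                  [1., 1., 10., 10., 0.8],    # mostly covered by box 0
                  [20., 20., 30., 30., 0.7]])
print(nms_boxes(boxes, 0.5))                  # -> [0, 2]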
|
|
tensorflow/tensorflow
|
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
|
tensorflow/python/ops/variable_scope.py
|
python
|
_maybe_wrap_custom_getter
|
(custom_getter, old_getter)
|
return wrapped_custom_getter
|
Wrap a call to a custom_getter to use the old_getter internally.
|
Wrap a call to a custom_getter to use the old_getter internally.
|
[
"Wrap",
"a",
"call",
"to",
"a",
"custom_getter",
"to",
"use",
"the",
"old_getter",
"internally",
"."
] |
def _maybe_wrap_custom_getter(custom_getter, old_getter):
"""Wrap a call to a custom_getter to use the old_getter internally."""
if old_getter is None:
return custom_getter
# The new custom_getter should call the old one
def wrapped_custom_getter(getter, *args, **kwargs):
# Call:
# custom_getter(
# lambda: old_getter(true_getter, ...), *args, **kwargs)
# which means custom_getter will call old_getter, which
# will call the true_getter, perform any intermediate
# processing, and return the results to the current
# getter, which will also perform additional processing.
return custom_getter(functools.partial(old_getter, getter), *args, **kwargs)
return wrapped_custom_getter
|
[
"def",
"_maybe_wrap_custom_getter",
"(",
"custom_getter",
",",
"old_getter",
")",
":",
"if",
"old_getter",
"is",
"None",
":",
"return",
"custom_getter",
"# The new custom_getter should call the old one",
"def",
"wrapped_custom_getter",
"(",
"getter",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# Call:",
"# custom_getter(",
"# lambda: old_getter(true_getter, ...), *args, **kwargs)",
"# which means custom_getter will call old_getter, which",
"# will call the true_getter, perform any intermediate",
"# processing, and return the results to the current",
"# getter, which will also perform additional processing.",
"return",
"custom_getter",
"(",
"functools",
".",
"partial",
"(",
"old_getter",
",",
"getter",
")",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"return",
"wrapped_custom_getter"
] |
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/variable_scope.py#L2107-L2123
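A stand-alone demonstration of the chaining trick; none of these getters are TensorFlow's, they just show how functools.partial threads "the getter below" into each layer:
import functools

def true_getter(name):
    return "var:" + name

def old_getter(getter, name):
    return getter(name) + "|old"

def new_getter(getter, name):
    return getter(name) + "|new"

def maybe_wrap_custom_getter(custom_getter, old):
    # Mirrors the source's logic: the new getter calls the old one,
    # which in turn calls whatever getter it is handed.
    if old is None:
        return custom_getter
    def wrapped(getter, *args, **kwargs):
        return custom_getter(functools.partial(old, getter), *args, **kwargs)
    return wrapped

combined = maybe_wrap_custom_getter(new_getter, old_getter)
print(combined(true_getter, "w"))   # -> var:w|old|new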
|
|
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/py/py/_path/svnwc.py
|
python
|
SvnWCCommandPath.propget
|
(self, name)
|
return res[:-1]
|
get property name on this path.
|
get property name on this path.
|
[
"get",
"property",
"name",
"on",
"this",
"path",
"."
] |
def propget(self, name):
""" get property name on this path. """
res = self._svn('propget', name)
return res[:-1]
|
[
"def",
"propget",
"(",
"self",
",",
"name",
")",
":",
"res",
"=",
"self",
".",
"_svn",
"(",
"'propget'",
",",
"name",
")",
"return",
"res",
"[",
":",
"-",
"1",
"]"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/py/py/_path/svnwc.py#L708-L711
|
|
BSVino/DoubleAction
|
c550b168a3e919926c198c30240f506538b92e75
|
mp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/text_format.py
|
python
|
_Tokenizer.ConsumeFloat
|
(self)
|
return result
|
Consumes a floating point number.
Returns:
The number parsed.
Raises:
ParseError: If a floating point number couldn't be consumed.
|
Consumes a floating point number.
|
[
"Consumes",
"an",
"floating",
"point",
"number",
"."
] |
def ConsumeFloat(self):
"""Consumes an floating point number.
Returns:
The number parsed.
Raises:
ParseError: If a floating point number couldn't be consumed.
"""
text = self.token
if re.match(self._FLOAT_INFINITY, text):
self.NextToken()
if text.startswith('-'):
return -_INFINITY
return _INFINITY
if re.match(self._FLOAT_NAN, text):
self.NextToken()
return _NAN
try:
result = float(text)
except ValueError, e:
raise self._FloatParseError(e)
self.NextToken()
return result
|
[
"def",
"ConsumeFloat",
"(",
"self",
")",
":",
"text",
"=",
"self",
".",
"token",
"if",
"re",
".",
"match",
"(",
"self",
".",
"_FLOAT_INFINITY",
",",
"text",
")",
":",
"self",
".",
"NextToken",
"(",
")",
"if",
"text",
".",
"startswith",
"(",
"'-'",
")",
":",
"return",
"-",
"_INFINITY",
"return",
"_INFINITY",
"if",
"re",
".",
"match",
"(",
"self",
".",
"_FLOAT_NAN",
",",
"text",
")",
":",
"self",
".",
"NextToken",
"(",
")",
"return",
"_NAN",
"try",
":",
"result",
"=",
"float",
"(",
"text",
")",
"except",
"ValueError",
",",
"e",
":",
"raise",
"self",
".",
"_FloatParseError",
"(",
"e",
")",
"self",
".",
"NextToken",
"(",
")",
"return",
"result"
] |
https://github.com/BSVino/DoubleAction/blob/c550b168a3e919926c198c30240f506538b92e75/mp/src/thirdparty/protobuf-2.3.0/python/google/protobuf/text_format.py#L474-L499
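The quoted source is Python 2 (note the `except ValueError, e:` syntax); a rough Python 3 rendering of the same parse order, with simplified stand-ins for protobuf's _FLOAT_INFINITY and _FLOAT_NAN patterns:
import re

# Simplified stand-ins for protobuf's infinity/NaN token patterns.
_FLOAT_INFINITY = re.compile(r'-?inf(inity)?f?$', re.IGNORECASE)
_FLOAT_NAN = re.compile(r'nanf?$', re.IGNORECASE)

def consume_float(text):
    # Check the special spellings before falling back to float().
    if _FLOAT_INFINITY.match(text):
        return float('-inf') if text.startswith('-') else float('inf')
    if _FLOAT_NAN.match(text):
        return float('nan')
    try:
        return float(text)
    except ValueError as e:            # Python 3 spelling of the except clause
        raise ValueError('could not parse float: %s' % e)

print(consume_float('-inf'), consume_float('3.14'))   # -> -inf 3.14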
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/dataview.py
|
python
|
DataViewCtrl.PrependToggleColumn
|
(*args, **kwargs)
|
return _dataview.DataViewCtrl_PrependToggleColumn(*args, **kwargs)
|
PrependToggleColumn(self, PyObject label_or_bitmap, unsigned int model_column,
int mode=DATAVIEW_CELL_INERT, int width=DVC_TOGGLE_DEFAULT_WIDTH,
int align=ALIGN_CENTER,
int flags=DATAVIEW_COL_RESIZABLE) -> DataViewColumn
|
PrependToggleColumn(self, PyObject label_or_bitmap, unsigned int model_column,
int mode=DATAVIEW_CELL_INERT, int width=DVC_TOGGLE_DEFAULT_WIDTH,
int align=ALIGN_CENTER,
int flags=DATAVIEW_COL_RESIZABLE) -> DataViewColumn
|
[
"PrependToggleColumn",
"(",
"self",
"PyObject",
"label_or_bitmap",
"unsigned",
"int",
"model_column",
"int",
"mode",
"=",
"DATAVIEW_CELL_INERT",
"int",
"width",
"=",
"DVC_TOGGLE_DEFAULT_WIDTH",
"int",
"align",
"=",
"ALIGN_CENTER",
"int",
"flags",
"=",
"DATAVIEW_COL_RESIZABLE",
")",
"-",
">",
"DataViewColumn"
] |
def PrependToggleColumn(*args, **kwargs):
"""
PrependToggleColumn(self, PyObject label_or_bitmap, unsigned int model_column,
int mode=DATAVIEW_CELL_INERT, int width=DVC_TOGGLE_DEFAULT_WIDTH,
int align=ALIGN_CENTER,
int flags=DATAVIEW_COL_RESIZABLE) -> DataViewColumn
"""
return _dataview.DataViewCtrl_PrependToggleColumn(*args, **kwargs)
|
[
"def",
"PrependToggleColumn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_dataview",
".",
"DataViewCtrl_PrependToggleColumn",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/dataview.py#L1605-L1612
|
|
vusec/vuzzer64
|
2b1b0ed757a3dca114db0192fa4ab1add92348bc
|
fuzzer-code/gautils.py
|
python
|
delete_out_file
|
(path)
|
this function receives a full path to a file and deletes any file with the same file name, but different extension in the same directory. This is called only when fuzzing creates different files while executing inputs.
|
this function receives a full path to a file and deletes any file with the same file name, but different extension in the same directory. This is called only when fuzzing creates different files while executing inputs.
|
[
"this",
"function",
"recives",
"a",
"full",
"path",
"to",
"a",
"file",
"and",
"deletes",
"any",
"file",
"with",
"the",
"same",
"file",
"name",
"but",
"different",
"extension",
"in",
"the",
"same",
"directory",
".",
"This",
"is",
"called",
"only",
"when",
"fuzzing",
"creates",
"different",
"files",
"while",
"executing",
"inputs",
"."
] |
def delete_out_file(path):
'''this function receives a full path to a file and deletes any file with the same file name, but different extension in the same directory. This is called only when fuzzing creates different files while executing inputs.'''
(h,t)=os.path.split(path)
bs,ex=splitFilename(t)
if ex == '':
die("Canot delete files as ther eis no extension")
files=os.listdir(h)
for fl in files:
b,e=splitFilename(fl)
if b==bs and e!=ex:
tfl=os.path.join(h,fl)
os.remove(tfl)
|
[
"def",
"delete_out_file",
"(",
"path",
")",
":",
"(",
"h",
",",
"t",
")",
"=",
"os",
".",
"path",
".",
"split",
"(",
"path",
")",
"bs",
",",
"ex",
"=",
"splitFilename",
"(",
"t",
")",
"if",
"ex",
"==",
"''",
":",
"die",
"(",
"\"Canot delete files as ther eis no extension\"",
")",
"files",
"=",
"os",
".",
"listdir",
"(",
"h",
")",
"for",
"fl",
"in",
"files",
":",
"b",
",",
"e",
"=",
"splitFilename",
"(",
"fl",
")",
"if",
"b",
"==",
"bs",
"and",
"e",
"!=",
"ex",
":",
"tfl",
"=",
"os",
".",
"path",
".",
"join",
"(",
"h",
",",
"fl",
")",
"os",
".",
"remove",
"(",
"tfl",
")"
] |
https://github.com/vusec/vuzzer64/blob/2b1b0ed757a3dca114db0192fa4ab1add92348bc/fuzzer-code/gautils.py#L54-L65
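Roughly the same behavior with only the standard library, replacing the module's splitFilename and die helpers; a sketch of the intent, not the original:
import os

def delete_sibling_outputs(path):
    # Remove files in the same directory that share the basename
    # but carry a different extension.
    head, tail = os.path.split(path)
    base, ext = os.path.splitext(tail)
    if ext == '':
        raise ValueError('cannot delete files: path has no extension')
    for fl in os.listdir(head):
        b, e = os.path.splitext(fl)
        if b == base and e != ext:
            os.remove(os.path.join(head, fl))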
|
||
hanpfei/chromium-net
|
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
|
third_party/catapult/third_party/gsutil/third_party/boto/boto/sqs/queue.py
|
python
|
Queue.load_from_file
|
(self, fp, sep='\n')
|
return n
|
Utility function to load messages from a file-like object to a queue
|
Utility function to load messages from a file-like object to a queue
|
[
"Utility",
"function",
"to",
"load",
"messages",
"from",
"a",
"file",
"-",
"like",
"object",
"to",
"a",
"queue"
] |
def load_from_file(self, fp, sep='\n'):
"""Utility function to load messages from a file-like object to a queue"""
n = 0
body = ''
l = fp.readline()
while l:
if l == sep:
m = Message(self, body)
self.write(m)
n += 1
print('writing message %d' % n)
body = ''
else:
body = body + l
l = fp.readline()
return n
|
[
"def",
"load_from_file",
"(",
"self",
",",
"fp",
",",
"sep",
"=",
"'\\n'",
")",
":",
"n",
"=",
"0",
"body",
"=",
"''",
"l",
"=",
"fp",
".",
"readline",
"(",
")",
"while",
"l",
":",
"if",
"l",
"==",
"sep",
":",
"m",
"=",
"Message",
"(",
"self",
",",
"body",
")",
"self",
".",
"write",
"(",
"m",
")",
"n",
"+=",
"1",
"print",
"(",
"'writing message %d'",
"%",
"n",
")",
"body",
"=",
"''",
"else",
":",
"body",
"=",
"body",
"+",
"l",
"l",
"=",
"fp",
".",
"readline",
"(",
")",
"return",
"n"
] |
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/sqs/queue.py#L466-L481
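The framing logic on its own: bodies accumulate line by line and flush whenever a bare separator line appears. io.StringIO stands in for the file and no SQS connection is involved:
import io

def split_messages(fp, sep='\n'):
    bodies, body = [], ''
    l = fp.readline()
    while l:
        if l == sep:          # a bare separator line ends the current body
            bodies.append(body)
            body = ''
        else:
            body += l
        l = fp.readline()
    return bodies

fp = io.StringIO('hello\nworld\n\nsecond message\n\n')
print(split_messages(fp))   # -> ['hello\nworld\n', 'second message\n']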
|
|
casadi/casadi
|
8d0f80a4d0fe2054384bfb9748f7a0f6bae540ff
|
swig/python/tools/structure3.py
|
python
|
repeated
|
(e)
|
return Repeater(e)
|
From the argument, constructs something that acts like a 'list' with the argument repeated the 'correct' number of times
s = struct_symSX([entry("x",repeat=6)])
s["x"] = repeated(12)
|
From the argument, constructs something that acts like a 'list' with the argument repeated the 'correct' number of times
|
[
"From",
"the",
"arguemnt",
"constructs",
"something",
"that",
"acts",
"like",
"a",
"list",
"with",
"the",
"argument",
"repeated",
"the",
"correct",
"number",
"of",
"times"
] |
def repeated(e):
"""
From the argument, constructs something that acts like a 'list' with the argument repeated the 'correct' number of times
s = struct_symSX([entry("x",repeat=6)])
s["x"] = repeated(12)
"""
return Repeater(e)
|
[
"def",
"repeated",
"(",
"e",
")",
":",
"return",
"Repeater",
"(",
"e",
")"
] |
https://github.com/casadi/casadi/blob/8d0f80a4d0fe2054384bfb9748f7a0f6bae540ff/swig/python/tools/structure3.py#L132-L140
|
|
snap-stanford/snap-python
|
d53c51b0a26aa7e3e7400b014cdf728948fde80a
|
setup/snap.py
|
python
|
TCnCom.Empty
|
(self)
|
return _snap.TCnCom_Empty(self)
|
Empty(TCnCom self) -> bool
Parameters:
self: TCnCom const *
|
Empty(TCnCom self) -> bool
|
[
"Empty",
"(",
"TCnCom",
"self",
")",
"-",
">",
"bool"
] |
def Empty(self):
"""
Empty(TCnCom self) -> bool
Parameters:
self: TCnCom const *
"""
return _snap.TCnCom_Empty(self)
|
[
"def",
"Empty",
"(",
"self",
")",
":",
"return",
"_snap",
".",
"TCnCom_Empty",
"(",
"self",
")"
] |
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L816-L824
|
|
SoarGroup/Soar
|
a1c5e249499137a27da60533c72969eef3b8ab6b
|
scons/scons-local-4.1.0/SCons/Tool/msgmerge.py
|
python
|
_POUpdateBuilderWrapper
|
(env, target=None, source=_null, **kw)
|
return env._POUpdateBuilder(target, source, **kw)
|
Wrapper for `POUpdate` builder - make user's life easier
|
Wrapper for `POUpdate` builder - make user's life easier
|
[
"Wrapper",
"for",
"POUpdate",
"builder",
"-",
"make",
"user",
"s",
"life",
"easier"
] |
def _POUpdateBuilderWrapper(env, target=None, source=_null, **kw):
""" Wrapper for `POUpdate` builder - make user's life easier """
if source is _null:
if 'POTDOMAIN' in kw:
domain = kw['POTDOMAIN']
elif 'POTDOMAIN' in env and env['POTDOMAIN']:
domain = env['POTDOMAIN']
else:
domain = 'messages'
source = [ domain ] # NOTE: Suffix shall be appended automatically
return env._POUpdateBuilder(target, source, **kw)
|
[
"def",
"_POUpdateBuilderWrapper",
"(",
"env",
",",
"target",
"=",
"None",
",",
"source",
"=",
"_null",
",",
"*",
"*",
"kw",
")",
":",
"if",
"source",
"is",
"_null",
":",
"if",
"'POTDOMAIN'",
"in",
"kw",
":",
"domain",
"=",
"kw",
"[",
"'POTDOMAIN'",
"]",
"elif",
"'POTDOMAIN'",
"in",
"env",
"and",
"env",
"[",
"'POTDOMAIN'",
"]",
":",
"domain",
"=",
"env",
"[",
"'POTDOMAIN'",
"]",
"else",
":",
"domain",
"=",
"'messages'",
"source",
"=",
"[",
"domain",
"]",
"# NOTE: Suffix shall be appended automatically",
"return",
"env",
".",
"_POUpdateBuilder",
"(",
"target",
",",
"source",
",",
"*",
"*",
"kw",
")"
] |
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Tool/msgmerge.py#L56-L66
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
wx/tools/Editra/src/ebmlib/cmenumgr.py
|
python
|
ContextMenuManager.SetMenu
|
(self, menu)
|
Set the menu that this manager should manage
@param menu: wxMenu
|
Set the menu that this manager should manage
@param menu: wxMenu
|
[
"Set",
"the",
"menu",
"that",
"this",
"manager",
"should",
"manage",
"@param",
"menu",
":",
"wxMenu"
] |
def SetMenu(self, menu):
"""Set the menu that this manager should manage
@param menu: wxMenu
"""
assert isinstance(menu, wx.Menu), "menu must be a wxMenu"
self._menu = menu
|
[
"def",
"SetMenu",
"(",
"self",
",",
"menu",
")",
":",
"assert",
"isinstance",
"(",
"menu",
",",
"wx",
".",
"Menu",
")",
",",
"\"menu must be a wxMenu\"",
"self",
".",
"_menu",
"=",
"menu"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/tools/Editra/src/ebmlib/cmenumgr.py#L93-L99
|
||
kamyu104/LeetCode-Solutions
|
77605708a927ea3b85aee5a479db733938c7c211
|
Python/find-the-middle-index-in-array.py
|
python
|
Solution.findMiddleIndex
|
(self, nums)
|
return -1
|
:type nums: List[int]
:rtype: int
|
:type nums: List[int]
:rtype: int
|
[
":",
"type",
"nums",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"int"
] |
def findMiddleIndex(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
total = sum(nums)
accu = 0
for i, x in enumerate(nums):
if accu*2 == total-x:
return i
accu += x
return -1
|
[
"def",
"findMiddleIndex",
"(",
"self",
",",
"nums",
")",
":",
"total",
"=",
"sum",
"(",
"nums",
")",
"accu",
"=",
"0",
"for",
"i",
",",
"x",
"in",
"enumerate",
"(",
"nums",
")",
":",
"if",
"accu",
"*",
"2",
"==",
"total",
"-",
"x",
":",
"return",
"i",
"accu",
"+=",
"x",
"return",
"-",
"1"
] |
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/find-the-middle-index-in-array.py#L5-L16
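The test accu*2 == total-x is the middle-index condition left_sum == right_sum written without a second pass, since right_sum = total - left_sum - x; a small worked example:
def find_middle_index(nums):
    total, accu = sum(nums), 0
    for i, x in enumerate(nums):
        if accu * 2 == total - x:   # i.e. accu == total - accu - x
            return i
        accu += x
    return -1

print(find_middle_index([2, 3, -1, 8, 4]))   # -> 3, since 2+3-1 == 4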
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/arrays/categorical.py
|
python
|
Categorical.replace
|
(self, to_replace, value, inplace: bool = False)
|
Replaces all instances of one value with another
Parameters
----------
to_replace: object
The value to be replaced
value: object
The value to replace it with
inplace: bool
Whether the operation is done in-place
Returns
-------
None if inplace is True, otherwise the new Categorical after replacement
Examples
--------
>>> s = pd.Categorical([1, 2, 1, 3])
>>> s.replace(1, 3)
[3, 3, 2, 3]
Categories (2, int64): [2, 3]
|
Replaces all instances of one value with another
|
[
"Replaces",
"all",
"instances",
"of",
"one",
"value",
"with",
"another"
] |
def replace(self, to_replace, value, inplace: bool = False):
"""
Replaces all instances of one value with another
Parameters
----------
to_replace: object
The value to be replaced
value: object
The value to replace it with
inplace: bool
Whether the operation is done in-place
Returns
-------
None if inplace is True, otherwise the new Categorical after replacement
Examples
--------
>>> s = pd.Categorical([1, 2, 1, 3])
>>> s.replace(1, 3)
[3, 3, 2, 3]
Categories (2, int64): [2, 3]
"""
inplace = validate_bool_kwarg(inplace, "inplace")
cat = self if inplace else self.copy()
# build a dict of (to replace -> value) pairs
if is_list_like(to_replace):
# if to_replace is list-like and value is scalar
replace_dict = {replace_value: value for replace_value in to_replace}
else:
# if both to_replace and value are scalar
replace_dict = {to_replace: value}
# other cases, like if both to_replace and value are list-like or if
# to_replace is a dict, are handled separately in NDFrame
for replace_value, new_value in replace_dict.items():
if replace_value in cat.categories:
if isna(new_value):
cat.remove_categories(replace_value, inplace=True)
continue
categories = cat.categories.tolist()
index = categories.index(replace_value)
if new_value in cat.categories:
value_index = categories.index(new_value)
cat._codes[cat._codes == index] = value_index
cat.remove_categories(replace_value, inplace=True)
else:
categories[index] = new_value
cat.rename_categories(categories, inplace=True)
if not inplace:
return cat
|
[
"def",
"replace",
"(",
"self",
",",
"to_replace",
",",
"value",
",",
"inplace",
":",
"bool",
"=",
"False",
")",
":",
"inplace",
"=",
"validate_bool_kwarg",
"(",
"inplace",
",",
"\"inplace\"",
")",
"cat",
"=",
"self",
"if",
"inplace",
"else",
"self",
".",
"copy",
"(",
")",
"# build a dict of (to replace -> value) pairs",
"if",
"is_list_like",
"(",
"to_replace",
")",
":",
"# if to_replace is list-like and value is scalar",
"replace_dict",
"=",
"{",
"replace_value",
":",
"value",
"for",
"replace_value",
"in",
"to_replace",
"}",
"else",
":",
"# if both to_replace and value are scalar",
"replace_dict",
"=",
"{",
"to_replace",
":",
"value",
"}",
"# other cases, like if both to_replace and value are list-like or if",
"# to_replace is a dict, are handled separately in NDFrame",
"for",
"replace_value",
",",
"new_value",
"in",
"replace_dict",
".",
"items",
"(",
")",
":",
"if",
"replace_value",
"in",
"cat",
".",
"categories",
":",
"if",
"isna",
"(",
"new_value",
")",
":",
"cat",
".",
"remove_categories",
"(",
"replace_value",
",",
"inplace",
"=",
"True",
")",
"continue",
"categories",
"=",
"cat",
".",
"categories",
".",
"tolist",
"(",
")",
"index",
"=",
"categories",
".",
"index",
"(",
"replace_value",
")",
"if",
"new_value",
"in",
"cat",
".",
"categories",
":",
"value_index",
"=",
"categories",
".",
"index",
"(",
"new_value",
")",
"cat",
".",
"_codes",
"[",
"cat",
".",
"_codes",
"==",
"index",
"]",
"=",
"value_index",
"cat",
".",
"remove_categories",
"(",
"replace_value",
",",
"inplace",
"=",
"True",
")",
"else",
":",
"categories",
"[",
"index",
"]",
"=",
"new_value",
"cat",
".",
"rename_categories",
"(",
"categories",
",",
"inplace",
"=",
"True",
")",
"if",
"not",
"inplace",
":",
"return",
"cat"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/arrays/categorical.py#L2421-L2476
|
||
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_core.py
|
python
|
Object.GetClassName
|
(*args, **kwargs)
|
return _core_.Object_GetClassName(*args, **kwargs)
|
GetClassName(self) -> String
Returns the class name of the C++ class using wxRTTI.
|
GetClassName(self) -> String
|
[
"GetClassName",
"(",
"self",
")",
"-",
">",
"String"
] |
def GetClassName(*args, **kwargs):
"""
GetClassName(self) -> String
Returns the class name of the C++ class using wxRTTI.
"""
return _core_.Object_GetClassName(*args, **kwargs)
|
[
"def",
"GetClassName",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Object_GetClassName",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_core.py#L804-L810
|
|
okex/V3-Open-API-SDK
|
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
|
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/pkg_resources/__init__.py
|
python
|
get_entry_map
|
(dist, group=None)
|
return get_distribution(dist).get_entry_map(group)
|
Return the entry point map for `group`, or the full entry map
|
Return the entry point map for `group`, or the full entry map
|
[
"Return",
"the",
"entry",
"point",
"map",
"for",
"group",
"or",
"the",
"full",
"entry",
"map"
] |
def get_entry_map(dist, group=None):
"""Return the entry point map for `group`, or the full entry map"""
return get_distribution(dist).get_entry_map(group)
|
[
"def",
"get_entry_map",
"(",
"dist",
",",
"group",
"=",
"None",
")",
":",
"return",
"get_distribution",
"(",
"dist",
")",
".",
"get_entry_map",
"(",
"group",
")"
] |
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/pkg_resources/__init__.py#L490-L492
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/ftplib.py
|
python
|
FTP.rmd
|
(self, dirname)
|
return self.voidcmd('RMD ' + dirname)
|
Remove a directory.
|
Remove a directory.
|
[
"Remove",
"a",
"directory",
"."
] |
def rmd(self, dirname):
'''Remove a directory.'''
return self.voidcmd('RMD ' + dirname)
|
[
"def",
"rmd",
"(",
"self",
",",
"dirname",
")",
":",
"return",
"self",
".",
"voidcmd",
"(",
"'RMD '",
"+",
"dirname",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/ftplib.py#L650-L652
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/osx_cocoa/_gdi.py
|
python
|
RendererNative.GetCheckBoxSize
|
(*args, **kwargs)
|
return _gdi_.RendererNative_GetCheckBoxSize(*args, **kwargs)
|
GetCheckBoxSize(self, Window win) -> Size
|
GetCheckBoxSize(self, Window win) -> Size
|
[
"GetCheckBoxSize",
"(",
"self",
"Window",
"win",
")",
"-",
">",
"Size"
] |
def GetCheckBoxSize(*args, **kwargs):
"""GetCheckBoxSize(self, Window win) -> Size"""
return _gdi_.RendererNative_GetCheckBoxSize(*args, **kwargs)
|
[
"def",
"GetCheckBoxSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_gdi_",
".",
"RendererNative_GetCheckBoxSize",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/_gdi.py#L7364-L7366
|
|
Jittor/jittor
|
e9aca0444c2bdc8e2389d99122954cd0903eec46
|
python/jittor/misc.py
|
python
|
meshgrid
|
(*tensors)
|
return grids
|
r'''
Take N tensors, each of which can be a 1-dimensional vector, and create N n-dimensional grids,
where the i th grid is defined by expanding the i th input over dimensions defined by other inputs.
|
r'''
Take N tensors, each of which can be a 1-dimensional vector, and create N n-dimensional grids,
where the i th grid is defined by expanding the i th input over dimensions defined by other inputs.
|
[
"r",
"Take",
"N",
"tensors",
"each",
"of",
"which",
"can",
"be",
"1",
"-",
"dimensional",
"vector",
"and",
"create",
"N",
"n",
"-",
"dimensional",
"grids",
"where",
"the",
"i",
"th",
"grid",
"is",
"defined",
"by",
"expanding",
"the",
"i",
"th",
"input",
"over",
"dimensions",
"defined",
"by",
"other",
"inputs",
"."
] |
def meshgrid(*tensors):
r'''
Take N tensors, each of which can be a 1-dimensional vector, and create N n-dimensional grids,
where the i th grid is defined by expanding the i th input over dimensions defined by other inputs.
'''
if len(tensors)==1 and isinstance(tensors[0], list):
tensors = tensors[0]
size = len(tensors)
shape = []
for i in range(size):
assert isinstance(tensors[i],jt.Var) and tensors[i].ndim==1
shape.append(tensors[i].shape[0])
grids = []
view_shape = [1]*size
for i in range(size):
vs = view_shape[:]
vs[i]=-1
grids.append(tensors[i].reshape(vs).expand(shape))
return grids
|
[
"def",
"meshgrid",
"(",
"*",
"tensors",
")",
":",
"if",
"len",
"(",
"tensors",
")",
"==",
"1",
"and",
"isinstance",
"(",
"tensors",
"[",
"0",
"]",
",",
"list",
")",
":",
"tensors",
"=",
"tensors",
"[",
"0",
"]",
"size",
"=",
"len",
"(",
"tensors",
")",
"shape",
"=",
"[",
"]",
"for",
"i",
"in",
"range",
"(",
"size",
")",
":",
"assert",
"isinstance",
"(",
"tensors",
"[",
"i",
"]",
",",
"jt",
".",
"Var",
")",
"and",
"tensors",
"[",
"i",
"]",
".",
"ndim",
"==",
"1",
"shape",
".",
"append",
"(",
"tensors",
"[",
"i",
"]",
".",
"shape",
"[",
"0",
"]",
")",
"grids",
"=",
"[",
"]",
"view_shape",
"=",
"[",
"1",
"]",
"*",
"size",
"for",
"i",
"in",
"range",
"(",
"size",
")",
":",
"vs",
"=",
"view_shape",
"[",
":",
"]",
"vs",
"[",
"i",
"]",
"=",
"-",
"1",
"grids",
".",
"append",
"(",
"tensors",
"[",
"i",
"]",
".",
"reshape",
"(",
"vs",
")",
".",
"expand",
"(",
"shape",
")",
")",
"return",
"grids"
] |
https://github.com/Jittor/jittor/blob/e9aca0444c2bdc8e2389d99122954cd0903eec46/python/jittor/misc.py#L534-L553
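The same reshape-then-broadcast construction expressed in NumPy, with np.broadcast_to standing in for jt.Var's expand():
import numpy as np

def meshgrid(*tensors):
    if len(tensors) == 1 and isinstance(tensors[0], list):
        tensors = tensors[0]
    shape = [t.shape[0] for t in tensors]
    grids = []
    for i, t in enumerate(tensors):
        vs = [1] * len(tensors)
        vs[i] = -1                    # keep this axis, broadcast the rest
        grids.append(np.broadcast_to(t.reshape(vs), shape))
    return grids

gx, gy = meshgrid(np.arange(3), np.arange(2))
print(gx.shape, gy.shape)   # -> (3, 2) (3, 2)
print(gx[:, 0], gy[0, :])   # -> [0 1 2] [0 1]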
|
|
chromiumembedded/cef
|
80caf947f3fe2210e5344713c5281d8af9bdc295
|
tools/file_util.py
|
python
|
copy_files
|
(src_glob, dst_folder, quiet=True)
|
Copy multiple files.
|
Copy multiple files.
|
[
"Copy",
"multiple",
"files",
"."
] |
def copy_files(src_glob, dst_folder, quiet=True):
""" Copy multiple files. """
for fname in iglob(src_glob):
dst = os.path.join(dst_folder, os.path.basename(fname))
if os.path.isdir(fname):
copy_dir(fname, dst, quiet)
else:
copy_file(fname, dst, quiet)
|
[
"def",
"copy_files",
"(",
"src_glob",
",",
"dst_folder",
",",
"quiet",
"=",
"True",
")",
":",
"for",
"fname",
"in",
"iglob",
"(",
"src_glob",
")",
":",
"dst",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dst_folder",
",",
"os",
".",
"path",
".",
"basename",
"(",
"fname",
")",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"fname",
")",
":",
"copy_dir",
"(",
"fname",
",",
"dst",
",",
"quiet",
")",
"else",
":",
"copy_file",
"(",
"fname",
",",
"dst",
",",
"quiet",
")"
] |
https://github.com/chromiumembedded/cef/blob/80caf947f3fe2210e5344713c5281d8af9bdc295/tools/file_util.py#L95-L102
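Roughly equivalent behavior using only the standard library; shutil.copytree/copy2 stand in for the module's copy_dir/copy_file helpers and the quiet flag is dropped (dirs_exist_ok needs Python 3.8+):
import os
import shutil
from glob import iglob

def copy_files(src_glob, dst_folder):
    for fname in iglob(src_glob):
        dst = os.path.join(dst_folder, os.path.basename(fname))
        if os.path.isdir(fname):
            shutil.copytree(fname, dst, dirs_exist_ok=True)  # Python 3.8+
        else:
            shutil.copy2(fname, dst)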
|
||
RLBot/RLBot
|
34332b12cf158b3ef8dbf174ae67c53683368a9d
|
src/main/python/rlbot/matchconfig/conversions.py
|
python
|
_load_bot_config
|
(index, config_bundle: BotConfigBundle,
looks_config_object: ConfigObject, overall_config: ConfigObject,
human_index_tracker: IncrementingInteger)
|
return bot_configuration
|
Loads the config data of a single bot
:param index: This is the bot index (where it appears in game_cars)
:param bot_configuration: A config object that will eventually be transformed and sent to the game.
:param config_bundle: A config object for a single bot
:param overall_config: This is the config for the entire session not one particular bot
:param human_index_tracker: An object of type HumanIndexManager that helps set human_index correctly.
:return:
|
Loads the config data of a single bot
:param index: This is the bot index (where it appears in game_cars)
:param bot_configuration: A config object that will eventually be transformed and sent to the game.
:param config_bundle: A config object for a single bot
:param overall_config: This is the config for the entire session not one particular bot
:param human_index_tracker: An object of type HumanIndexManager that helps set human_index correctly.
:return:
|
[
"Loads",
"the",
"config",
"data",
"of",
"a",
"single",
"bot",
":",
"param",
"index",
":",
"This",
"is",
"the",
"bot",
"index",
"(",
"where",
"it",
"appears",
"in",
"game_cars",
")",
":",
"param",
"bot_configuration",
":",
"A",
"config",
"object",
"that",
"will",
"eventually",
"be",
"transformed",
"and",
"sent",
"to",
"the",
"game",
".",
":",
"param",
"config_bundle",
":",
"A",
"config",
"object",
"for",
"a",
"single",
"bot",
":",
"param",
"overall_config",
":",
"This",
"is",
"the",
"config",
"for",
"the",
"entire",
"session",
"not",
"one",
"particular",
"bot",
":",
"param",
"human_index_tracker",
":",
"An",
"object",
"of",
"type",
"HumanIndexManager",
"that",
"helps",
"set",
"human_index",
"correctly",
".",
":",
"return",
":"
] |
def _load_bot_config(index, config_bundle: BotConfigBundle,
looks_config_object: ConfigObject, overall_config: ConfigObject,
human_index_tracker: IncrementingInteger) -> PlayerConfig:
"""
Loads the config data of a single bot
:param index: This is the bot index (where it appears in game_cars)
:param bot_configuration: A config object that will eventually be transformed and sent to the game.
:param config_bundle: A config object for a single bot
:param overall_config: This is the config for the entire session not one particular bot
:param human_index_tracker: An object of type HumanIndexManager that helps set human_index correctly.
:return:
"""
bot_configuration = PlayerConfig()
bot_configuration.config_path = config_bundle.config_path
team_num = get_team(overall_config, index)
bot_configuration.team = team_num
# Setting up data about what type of bot it is
bot_type = overall_config.get(PARTICIPANT_CONFIGURATION_HEADER, PARTICIPANT_TYPE_KEY, index)
bot_configuration.bot, bot_configuration.rlbot_controlled = get_bot_options(bot_type)
bot_configuration.bot_skill = overall_config.getfloat(
PARTICIPANT_CONFIGURATION_HEADER, PARTICIPANT_BOT_SKILL_KEY, index)
if not bot_configuration.bot:
bot_configuration.human_index = human_index_tracker.increment()
# Setting up the bot's name
bot_configuration.name = config_bundle.name
if looks_config_object:
loadout_config = load_bot_appearance(looks_config_object, team_num)
else:
loadout_config = config_bundle.generate_loadout_config(index, team_num)
bot_configuration.loadout_config = loadout_config
return bot_configuration
|
[
"def",
"_load_bot_config",
"(",
"index",
",",
"config_bundle",
":",
"BotConfigBundle",
",",
"looks_config_object",
":",
"ConfigObject",
",",
"overall_config",
":",
"ConfigObject",
",",
"human_index_tracker",
":",
"IncrementingInteger",
")",
"->",
"PlayerConfig",
":",
"bot_configuration",
"=",
"PlayerConfig",
"(",
")",
"bot_configuration",
".",
"config_path",
"=",
"config_bundle",
".",
"config_path",
"team_num",
"=",
"get_team",
"(",
"overall_config",
",",
"index",
")",
"bot_configuration",
".",
"team",
"=",
"team_num",
"# Setting up data about what type of bot it is",
"bot_type",
"=",
"overall_config",
".",
"get",
"(",
"PARTICIPANT_CONFIGURATION_HEADER",
",",
"PARTICIPANT_TYPE_KEY",
",",
"index",
")",
"bot_configuration",
".",
"bot",
",",
"bot_configuration",
".",
"rlbot_controlled",
"=",
"get_bot_options",
"(",
"bot_type",
")",
"bot_configuration",
".",
"bot_skill",
"=",
"overall_config",
".",
"getfloat",
"(",
"PARTICIPANT_CONFIGURATION_HEADER",
",",
"PARTICIPANT_BOT_SKILL_KEY",
",",
"index",
")",
"if",
"not",
"bot_configuration",
".",
"bot",
":",
"bot_configuration",
".",
"human_index",
"=",
"human_index_tracker",
".",
"increment",
"(",
")",
"# Setting up the bots name",
"bot_configuration",
".",
"name",
"=",
"config_bundle",
".",
"name",
"if",
"looks_config_object",
":",
"loadout_config",
"=",
"load_bot_appearance",
"(",
"looks_config_object",
",",
"team_num",
")",
"else",
":",
"loadout_config",
"=",
"config_bundle",
".",
"generate_loadout_config",
"(",
"index",
",",
"team_num",
")",
"bot_configuration",
".",
"loadout_config",
"=",
"loadout_config",
"return",
"bot_configuration"
] |
https://github.com/RLBot/RLBot/blob/34332b12cf158b3ef8dbf174ae67c53683368a9d/src/main/python/rlbot/matchconfig/conversions.py#L100-L139
|
|
wxWidgets/wxPython-Classic
|
19571e1ae65f1ac445f5491474121998c97a1bf0
|
src/gtk/_core.py
|
python
|
Window.GetMinWidth
|
(*args, **kwargs)
|
return _core_.Window_GetMinWidth(*args, **kwargs)
|
GetMinWidth(self) -> int
|
GetMinWidth(self) -> int
|
[
"GetMinWidth",
"(",
"self",
")",
"-",
">",
"int"
] |
def GetMinWidth(*args, **kwargs):
"""GetMinWidth(self) -> int"""
return _core_.Window_GetMinWidth(*args, **kwargs)
|
[
"def",
"GetMinWidth",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"_core_",
".",
"Window_GetMinWidth",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] |
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L9772-L9774
|
|
mantidproject/mantid
|
03deeb89254ec4289edb8771e0188c2090a02f32
|
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/fit_function_options_view.py
|
python
|
FitFunctionOptionsView.set_datasets_in_function_browser
|
(self, dataset_names: list)
|
Sets the datasets stored in the FunctionBrowser.
|
Sets the datasets stored in the FunctionBrowser.
|
[
"Sets",
"the",
"datasets",
"stored",
"in",
"the",
"FunctionBrowser",
"."
] |
def set_datasets_in_function_browser(self, dataset_names: list) -> None:
"""Sets the datasets stored in the FunctionBrowser."""
index_list = range(self.function_browser.getNumberOfDatasets())
self.function_browser.removeDatasets(index_list)
self.function_browser.addDatasets(dataset_names)
|
[
"def",
"set_datasets_in_function_browser",
"(",
"self",
",",
"dataset_names",
":",
"list",
")",
"->",
"None",
":",
"index_list",
"=",
"range",
"(",
"self",
".",
"function_browser",
".",
"getNumberOfDatasets",
"(",
")",
")",
"self",
".",
"function_browser",
".",
"removeDatasets",
"(",
"index_list",
")",
"self",
".",
"function_browser",
".",
"addDatasets",
"(",
"dataset_names",
")"
] |
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/basic_fitting/fit_function_options_view.py#L165-L169
|
||
microsoft/DirectXShaderCompiler
|
8348ff8d9e0287610ba05d3a828e10af981a1c05
|
tools/clang/bindings/python/clang/cindex.py
|
python
|
CompileCommand.arguments
|
(self)
|
Get an iterable object providing each argument in the
command line for the compiler invocation as a _CXString.
Invariant: the first argument is the compiler executable
|
Get an iterable object providing each argument in the
command line for the compiler invocation as a _CXString.
|
[
"Get",
"an",
"iterable",
"object",
"providing",
"each",
"argument",
"in",
"the",
"command",
"line",
"for",
"the",
"compiler",
"invocation",
"as",
"a",
"_CXString",
"."
] |
def arguments(self):
"""
Get an iterable object providing each argument in the
command line for the compiler invocation as a _CXString.
Invariant: the first argument is the compiler executable
"""
length = conf.lib.clang_CompileCommand_getNumArgs(self.cmd)
for i in xrange(length):
yield conf.lib.clang_CompileCommand_getArg(self.cmd, i)
|
[
"def",
"arguments",
"(",
"self",
")",
":",
"length",
"=",
"conf",
".",
"lib",
".",
"clang_CompileCommand_getNumArgs",
"(",
"self",
".",
"cmd",
")",
"for",
"i",
"in",
"xrange",
"(",
"length",
")",
":",
"yield",
"conf",
".",
"lib",
".",
"clang_CompileCommand_getArg",
"(",
"self",
".",
"cmd",
",",
"i",
")"
] |
https://github.com/microsoft/DirectXShaderCompiler/blob/8348ff8d9e0287610ba05d3a828e10af981a1c05/tools/clang/bindings/python/clang/cindex.py#L2675-L2684
|
||
kamyu104/LeetCode-Solutions
|
77605708a927ea3b85aee5a479db733938c7c211
|
Python/most-beautiful-item-for-each-query.py
|
python
|
Solution.maximumBeauty
|
(self, items, queries)
|
return result
|
:type items: List[List[int]]
:type queries: List[int]
:rtype: List[int]
|
:type items: List[List[int]]
:type queries: List[int]
:rtype: List[int]
|
[
":",
"type",
"items",
":",
"List",
"[",
"List",
"[",
"int",
"]]",
":",
"type",
"queries",
":",
"List",
"[",
"int",
"]",
":",
"rtype",
":",
"List",
"[",
"int",
"]"
] |
def maximumBeauty(self, items, queries):
"""
:type items: List[List[int]]
:type queries: List[int]
:rtype: List[int]
"""
items.sort()
for i in xrange(len(items)-1):
items[i+1][1] = max(items[i+1][1], items[i][1])
result = []
for q in queries:
i = bisect.bisect_left(items, [q+1])
result.append(items[i-1][1] if i else 0)
return result
|
[
"def",
"maximumBeauty",
"(",
"self",
",",
"items",
",",
"queries",
")",
":",
"items",
".",
"sort",
"(",
")",
"for",
"i",
"in",
"xrange",
"(",
"len",
"(",
"items",
")",
"-",
"1",
")",
":",
"items",
"[",
"i",
"+",
"1",
"]",
"[",
"1",
"]",
"=",
"max",
"(",
"items",
"[",
"i",
"+",
"1",
"]",
"[",
"1",
"]",
",",
"items",
"[",
"i",
"]",
"[",
"1",
"]",
")",
"result",
"=",
"[",
"]",
"for",
"q",
"in",
"queries",
":",
"i",
"=",
"bisect",
".",
"bisect_left",
"(",
"items",
",",
"[",
"q",
"+",
"1",
"]",
")",
"result",
".",
"append",
"(",
"items",
"[",
"i",
"-",
"1",
"]",
"[",
"1",
"]",
"if",
"i",
"else",
"0",
")",
"return",
"result"
] |
https://github.com/kamyu104/LeetCode-Solutions/blob/77605708a927ea3b85aee5a479db733938c7c211/Python/most-beautiful-item-for-each-query.py#L8-L21
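A Python 3 rendering of the same idea: sort by price, turn the beauty column into a running maximum, then binary-search each query with bisect (xrange becomes range); the sample data is LeetCode's example:
import bisect

def maximum_beauty(items, queries):
    items = sorted(items)
    # After this pass, items[i][1] is the best beauty at price <= items[i][0].
    for i in range(len(items) - 1):
        items[i + 1][1] = max(items[i + 1][1], items[i][1])
    result = []
    for q in queries:
        i = bisect.bisect_left(items, [q + 1])   # first item priced above q
        result.append(items[i - 1][1] if i else 0)
    return result

print(maximum_beauty([[1, 2], [3, 2], [2, 4], [5, 6], [3, 5]],
                     [1, 2, 3, 4, 5, 6]))        # -> [2, 4, 5, 5, 6, 6]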
|
|
hughperkins/tf-coriander
|
970d3df6c11400ad68405f22b0c42a52374e94ca
|
tensorflow/python/ops/gradients.py
|
python
|
gradients
|
(ys,
xs,
grad_ys=None,
name="gradients",
colocate_gradients_with_ops=False,
gate_gradients=False,
aggregation_method=None)
|
return [_GetGrad(grads, x) for x in xs]
|
Constructs symbolic partial derivatives of sum of `ys` w.r.t. x in `xs`.
`ys` and `xs` are each a `Tensor` or a list of tensors. `grad_ys`
is a list of `Tensor`, holding the gradients received by the
`ys`. The list must be the same length as `ys`.
`gradients()` adds ops to the graph to output the partial
derivatives of `ys` with respect to `xs`. It returns a list of
`Tensor` of length `len(xs)` where each tensor is the `sum(dy/dx)`
for y in `ys`.
`grad_ys` is a list of tensors of the same length as `ys` that holds
the initial gradients for each y in `ys`. When `grad_ys` is None,
we fill in a tensor of '1's of the shape of y for each y in `ys`. A
user can provide their own initial `grad_ys` to compute the
derivatives using a different initial gradient for each y (e.g., if
one wanted to weight the gradient differently for each value in
each y).
Args:
ys: A `Tensor` or list of tensors to be differentiated.
xs: A `Tensor` or list of tensors to be used for differentiation.
grad_ys: Optional. A `Tensor` or list of tensors the same size as
`ys` and holding the gradients computed for each y in `ys`.
name: Optional name to use for grouping all the gradient ops together.
defaults to 'gradients'.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
gate_gradients: If True, add a tuple around the gradients returned
for an operation. This avoids some race conditions.
aggregation_method: Specifies the method used to combine gradient terms.
Accepted values are constants defined in the class `AggregationMethod`.
Returns:
A list of `sum(dy/dx)` for each x in `xs`.
Raises:
LookupError: if one of the operations between `x` and `y` does not
have a registered gradient function.
ValueError: if the arguments are invalid.
|
Constructs symbolic partial derivatives of sum of `ys` w.r.t. x in `xs`.
|
[
"Constructs",
"symbolic",
"partial",
"derivatives",
"of",
"sum",
"of",
"ys",
"w",
".",
"r",
".",
"t",
".",
"x",
"in",
"xs",
"."
] |
def gradients(ys,
xs,
grad_ys=None,
name="gradients",
colocate_gradients_with_ops=False,
gate_gradients=False,
aggregation_method=None):
"""Constructs symbolic partial derivatives of sum of `ys` w.r.t. x in `xs`.
`ys` and `xs` are each a `Tensor` or a list of tensors. `grad_ys`
is a list of `Tensor`, holding the gradients received by the
`ys`. The list must be the same length as `ys`.
`gradients()` adds ops to the graph to output the partial
derivatives of `ys` with respect to `xs`. It returns a list of
`Tensor` of length `len(xs)` where each tensor is the `sum(dy/dx)`
for y in `ys`.
`grad_ys` is a list of tensors of the same length as `ys` that holds
the initial gradients for each y in `ys`. When `grad_ys` is None,
we fill in a tensor of '1's of the shape of y for each y in `ys`. A
user can provide their own initial `grad_ys` to compute the
derivatives using a different initial gradient for each y (e.g., if
one wanted to weight the gradient differently for each value in
each y).
Args:
ys: A `Tensor` or list of tensors to be differentiated.
xs: A `Tensor` or list of tensors to be used for differentiation.
grad_ys: Optional. A `Tensor` or list of tensors the same size as
`ys` and holding the gradients computed for each y in `ys`.
name: Optional name to use for grouping all the gradient ops together.
defaults to 'gradients'.
colocate_gradients_with_ops: If True, try colocating gradients with
the corresponding op.
gate_gradients: If True, add a tuple around the gradients returned
for an operation. This avoids some race conditions.
aggregation_method: Specifies the method used to combine gradient terms.
Accepted values are constants defined in the class `AggregationMethod`.
Returns:
A list of `sum(dy/dx)` for each x in `xs`.
Raises:
LookupError: if one of the operations between `x` and `y` does not
have a registered gradient function.
ValueError: if the arguments are invalid.
"""
ys = _AsList(ys)
xs = _AsList(xs)
if grad_ys is None:
grad_ys = [None] * len(ys)
else:
grad_ys = _AsList(grad_ys)
with ops.name_scope(name, "gradients", ys + xs + grad_ys):
ys = ops.convert_n_to_tensor_or_indexed_slices(ys, name="y")
xs = ops.convert_n_to_tensor_or_indexed_slices(xs, name="x")
grad_ys = _DefaultGradYs(grad_ys, ys, colocate_gradients_with_ops)
# The approach we take here is as follows: Create a list of all ops in the
# subgraph between the ys and xs. Visit these ops in reverse order of ids
# to ensure that when we visit an op the gradients w.r.t its outputs have
# been collected. Then aggregate these gradients if needed, call the op's
# gradient function, and add the generated gradients to the gradients for
# its input.
# Initialize the pending count for ops in the connected subgraph from ys
# to the xs.
to_ops = [t.op for t in ys]
from_ops = [t.op for t in xs]
pending_count, loop_state = _PendingCount(ops.get_default_graph(),
to_ops, from_ops,
colocate_gradients_with_ops)
# Iterate over the collected ops.
#
# grads: op => list of gradients received on each output endpoint of the
# op. The gradients for each endpoint are initially collected as a list.
# When it is time to call the op's gradient function, for each endpoint we
# aggregate the list of received gradients into a Add() Operation if there
# is more than one.
grads = {}
# Add the initial gradients for the ys.
for y, grad_y in zip(ys, grad_ys):
_SetGrad(grads, y, grad_y)
# Initialize queue with to_ops.
queue = collections.deque()
# Add the ops in 'to_ops' into the queue.
to_ops_set = set()
for op in to_ops:
# 'ready' handles the case where one output gradient relies on
# another output's gradient.
# pylint: disable=protected-access
ready = (pending_count[op._id] == 0)
if ready and op._id not in to_ops_set:
to_ops_set.add(op._id)
queue.append(op)
# pylint: enable=protected-access
if loop_state:
loop_exits = loop_state.ProcessUnusedLoopExits(pending_count, to_ops_set)
for y in loop_exits:
if _IsTrainable(y):
_SetGrad(grads, y, loop_state.ZerosLikeForExit(y))
queue.append(y.op)
# The set of 'from_ops'.
stop_ops = _StopOps(from_ops, pending_count)
while queue:
# generate gradient subgraph for op.
op = queue.popleft()
with _maybe_colocate_with(op, colocate_gradients_with_ops):
if loop_state:
loop_state.EnterGradWhileContext(op, before=True)
out_grads = _AggregatedGrads(grads, op, loop_state, aggregation_method)
if loop_state:
loop_state.ExitGradWhileContext(op, before=True)
grad_fn = None
# pylint: disable=protected-access
is_func_call = ops.get_default_graph()._is_function(op.type)
has_out_grads = any(isinstance(g, ops.Tensor) or g for g in out_grads)
if has_out_grads and (op._id not in stop_ops):
if is_func_call:
grad_fn = ops.get_default_graph()._get_function(
op.type).python_grad_func
# pylint: enable=protected-access
else:
# A grad_fn must be defined, either as a function or as None
# for ops that do not have gradients.
try:
grad_fn = ops.get_gradient_function(op)
except LookupError:
raise LookupError(
"No gradient defined for operation '%s' (op type: %s)" %
(op.name, op.type))
if loop_state:
loop_state.EnterGradWhileContext(op, before=False)
if (grad_fn or is_func_call) and has_out_grads:
# NOTE: If _AggregatedGrads didn't compute a value for the i'th
# output, it means that the cost does not depend on output[i],
# therefore dC/doutput[i] is 0.
for i, out_grad in enumerate(out_grads):
if (not isinstance(out_grad, ops.Tensor)
and not out_grad) and _IsTrainable(op.outputs[i]):
# Only floating-point outputs get a zero gradient. Gradient
# functions should ignore the gradient for other outputs.
if loop_state:
out_grads[i] = loop_state.ZerosLike(op, i)
else:
out_grads[i] = control_flow_ops.ZerosLikeOutsideLoop(op, i)
with ops.name_scope(op.name + "_grad"):
# pylint: disable=protected-access
with ops.get_default_graph()._original_op(op):
# pylint: enable=protected-access
if grad_fn:
# If grad_fn was found, do not use SymbolicGradient even for
# functions.
in_grads = _AsList(grad_fn(op, *out_grads))
else:
# For function call ops, we add a 'SymbolicGradient'
# node to the graph to compute gradients.
f_in = [x for x in op.inputs] + out_grads
f_types = [x.dtype for x in op.inputs]
# pylint: disable=protected-access
in_grads = _AsList(functional_ops._symbolic_gradient(
f_in, f_types, op.type))
# pylint: enable=protected-access
_VerifyGeneratedGradients(in_grads, op)
if gate_gradients and len(
[x for x in in_grads if x is not None]) > 1:
in_grads = control_flow_ops.tuple(in_grads)
_LogOpGradients(op, out_grads, in_grads)
else:
# If no grad_fn is defined or none of out_grads is available,
# just propagates a list of None backwards.
in_grads = [None] * len(op.inputs)
for t_in, in_grad in zip(op.inputs, in_grads):
if in_grad is not None:
if isinstance(in_grad, ops.Tensor):
in_grad.set_shape(t_in.get_shape())
_SetGrad(grads, t_in, in_grad)
if loop_state:
loop_state.ExitGradWhileContext(op, before=False)
# Update pending count for the inputs of op and enqueue ready ops.
_UpdatePendingAndEnqueueReady(grads, op, queue, pending_count, loop_state)
if loop_state:
loop_state.PostProcessing()
return [_GetGrad(grads, x) for x in xs]
|
[
"def",
"gradients",
"(",
"ys",
",",
"xs",
",",
"grad_ys",
"=",
"None",
",",
"name",
"=",
"\"gradients\"",
",",
"colocate_gradients_with_ops",
"=",
"False",
",",
"gate_gradients",
"=",
"False",
",",
"aggregation_method",
"=",
"None",
")",
":",
"ys",
"=",
"_AsList",
"(",
"ys",
")",
"xs",
"=",
"_AsList",
"(",
"xs",
")",
"if",
"grad_ys",
"is",
"None",
":",
"grad_ys",
"=",
"[",
"None",
"]",
"*",
"len",
"(",
"ys",
")",
"else",
":",
"grad_ys",
"=",
"_AsList",
"(",
"grad_ys",
")",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"gradients\"",
",",
"ys",
"+",
"xs",
"+",
"grad_ys",
")",
":",
"ys",
"=",
"ops",
".",
"convert_n_to_tensor_or_indexed_slices",
"(",
"ys",
",",
"name",
"=",
"\"y\"",
")",
"xs",
"=",
"ops",
".",
"convert_n_to_tensor_or_indexed_slices",
"(",
"xs",
",",
"name",
"=",
"\"x\"",
")",
"grad_ys",
"=",
"_DefaultGradYs",
"(",
"grad_ys",
",",
"ys",
",",
"colocate_gradients_with_ops",
")",
"# The approach we take here is as follows: Create a list of all ops in the",
"# subgraph between the ys and xs. Visit these ops in reverse order of ids",
"# to ensure that when we visit an op the gradients w.r.t its outputs have",
"# been collected. Then aggregate these gradients if needed, call the op's",
"# gradient function, and add the generated gradients to the gradients for",
"# its input.",
"# Initialize the pending count for ops in the connected subgraph from ys",
"# to the xs.",
"to_ops",
"=",
"[",
"t",
".",
"op",
"for",
"t",
"in",
"ys",
"]",
"from_ops",
"=",
"[",
"t",
".",
"op",
"for",
"t",
"in",
"xs",
"]",
"pending_count",
",",
"loop_state",
"=",
"_PendingCount",
"(",
"ops",
".",
"get_default_graph",
"(",
")",
",",
"to_ops",
",",
"from_ops",
",",
"colocate_gradients_with_ops",
")",
"# Iterate over the collected ops.",
"#",
"# grads: op => list of gradients received on each output endpoint of the",
"# op. The gradients for each endpoint are initially collected as a list.",
"# When it is time to call the op's gradient function, for each endpoint we",
"# aggregate the list of received gradients into a Add() Operation if there",
"# is more than one.",
"grads",
"=",
"{",
"}",
"# Add the initial gradients for the ys.",
"for",
"y",
",",
"grad_y",
"in",
"zip",
"(",
"ys",
",",
"grad_ys",
")",
":",
"_SetGrad",
"(",
"grads",
",",
"y",
",",
"grad_y",
")",
"# Initialize queue with to_ops.",
"queue",
"=",
"collections",
".",
"deque",
"(",
")",
"# Add the ops in 'to_ops' into the queue.",
"to_ops_set",
"=",
"set",
"(",
")",
"for",
"op",
"in",
"to_ops",
":",
"# 'ready' handles the case where one output gradient relies on",
"# another output's gradient.",
"# pylint: disable=protected-access",
"ready",
"=",
"(",
"pending_count",
"[",
"op",
".",
"_id",
"]",
"==",
"0",
")",
"if",
"ready",
"and",
"op",
".",
"_id",
"not",
"in",
"to_ops_set",
":",
"to_ops_set",
".",
"add",
"(",
"op",
".",
"_id",
")",
"queue",
".",
"append",
"(",
"op",
")",
"# pylint: enable=protected-access",
"if",
"loop_state",
":",
"loop_exits",
"=",
"loop_state",
".",
"ProcessUnusedLoopExits",
"(",
"pending_count",
",",
"to_ops_set",
")",
"for",
"y",
"in",
"loop_exits",
":",
"if",
"_IsTrainable",
"(",
"y",
")",
":",
"_SetGrad",
"(",
"grads",
",",
"y",
",",
"loop_state",
".",
"ZerosLikeForExit",
"(",
"y",
")",
")",
"queue",
".",
"append",
"(",
"y",
".",
"op",
")",
"# The set of 'from_ops'.",
"stop_ops",
"=",
"_StopOps",
"(",
"from_ops",
",",
"pending_count",
")",
"while",
"queue",
":",
"# generate gradient subgraph for op.",
"op",
"=",
"queue",
".",
"popleft",
"(",
")",
"with",
"_maybe_colocate_with",
"(",
"op",
",",
"colocate_gradients_with_ops",
")",
":",
"if",
"loop_state",
":",
"loop_state",
".",
"EnterGradWhileContext",
"(",
"op",
",",
"before",
"=",
"True",
")",
"out_grads",
"=",
"_AggregatedGrads",
"(",
"grads",
",",
"op",
",",
"loop_state",
",",
"aggregation_method",
")",
"if",
"loop_state",
":",
"loop_state",
".",
"ExitGradWhileContext",
"(",
"op",
",",
"before",
"=",
"True",
")",
"grad_fn",
"=",
"None",
"# pylint: disable=protected-access",
"is_func_call",
"=",
"ops",
".",
"get_default_graph",
"(",
")",
".",
"_is_function",
"(",
"op",
".",
"type",
")",
"has_out_grads",
"=",
"any",
"(",
"isinstance",
"(",
"g",
",",
"ops",
".",
"Tensor",
")",
"or",
"g",
"for",
"g",
"in",
"out_grads",
")",
"if",
"has_out_grads",
"and",
"(",
"op",
".",
"_id",
"not",
"in",
"stop_ops",
")",
":",
"if",
"is_func_call",
":",
"grad_fn",
"=",
"ops",
".",
"get_default_graph",
"(",
")",
".",
"_get_function",
"(",
"op",
".",
"type",
")",
".",
"python_grad_func",
"# pylint: enable=protected-access",
"else",
":",
"# A grad_fn must be defined, either as a function or as None",
"# for ops that do not have gradients.",
"try",
":",
"grad_fn",
"=",
"ops",
".",
"get_gradient_function",
"(",
"op",
")",
"except",
"LookupError",
":",
"raise",
"LookupError",
"(",
"\"No gradient defined for operation '%s' (op type: %s)\"",
"%",
"(",
"op",
".",
"name",
",",
"op",
".",
"type",
")",
")",
"if",
"loop_state",
":",
"loop_state",
".",
"EnterGradWhileContext",
"(",
"op",
",",
"before",
"=",
"False",
")",
"if",
"(",
"grad_fn",
"or",
"is_func_call",
")",
"and",
"has_out_grads",
":",
"# NOTE: If _AggregatedGrads didn't compute a value for the i'th",
"# output, it means that the cost does not depend on output[i],",
"# therefore dC/doutput[i] is 0.",
"for",
"i",
",",
"out_grad",
"in",
"enumerate",
"(",
"out_grads",
")",
":",
"if",
"(",
"not",
"isinstance",
"(",
"out_grad",
",",
"ops",
".",
"Tensor",
")",
"and",
"not",
"out_grad",
")",
"and",
"_IsTrainable",
"(",
"op",
".",
"outputs",
"[",
"i",
"]",
")",
":",
"# Only floating-point outputs get a zero gradient. Gradient",
"# functions should ignore the gradient for other outputs.",
"if",
"loop_state",
":",
"out_grads",
"[",
"i",
"]",
"=",
"loop_state",
".",
"ZerosLike",
"(",
"op",
",",
"i",
")",
"else",
":",
"out_grads",
"[",
"i",
"]",
"=",
"control_flow_ops",
".",
"ZerosLikeOutsideLoop",
"(",
"op",
",",
"i",
")",
"with",
"ops",
".",
"name_scope",
"(",
"op",
".",
"name",
"+",
"\"_grad\"",
")",
":",
"# pylint: disable=protected-access",
"with",
"ops",
".",
"get_default_graph",
"(",
")",
".",
"_original_op",
"(",
"op",
")",
":",
"# pylint: enable=protected-access",
"if",
"grad_fn",
":",
"# If grad_fn was found, do not use SymbolicGradient even for",
"# functions.",
"in_grads",
"=",
"_AsList",
"(",
"grad_fn",
"(",
"op",
",",
"*",
"out_grads",
")",
")",
"else",
":",
"# For function call ops, we add a 'SymbolicGradient'",
"# node to the graph to compute gradients.",
"f_in",
"=",
"[",
"x",
"for",
"x",
"in",
"op",
".",
"inputs",
"]",
"+",
"out_grads",
"f_types",
"=",
"[",
"x",
".",
"dtype",
"for",
"x",
"in",
"op",
".",
"inputs",
"]",
"# pylint: disable=protected-access",
"in_grads",
"=",
"_AsList",
"(",
"functional_ops",
".",
"_symbolic_gradient",
"(",
"f_in",
",",
"f_types",
",",
"op",
".",
"type",
")",
")",
"# pylint: enable=protected-access",
"_VerifyGeneratedGradients",
"(",
"in_grads",
",",
"op",
")",
"if",
"gate_gradients",
"and",
"len",
"(",
"[",
"x",
"for",
"x",
"in",
"in_grads",
"if",
"x",
"is",
"not",
"None",
"]",
")",
">",
"1",
":",
"in_grads",
"=",
"control_flow_ops",
".",
"tuple",
"(",
"in_grads",
")",
"_LogOpGradients",
"(",
"op",
",",
"out_grads",
",",
"in_grads",
")",
"else",
":",
"# If no grad_fn is defined or none of out_grads is available,",
"# just propagates a list of None backwards.",
"in_grads",
"=",
"[",
"None",
"]",
"*",
"len",
"(",
"op",
".",
"inputs",
")",
"for",
"t_in",
",",
"in_grad",
"in",
"zip",
"(",
"op",
".",
"inputs",
",",
"in_grads",
")",
":",
"if",
"in_grad",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"in_grad",
",",
"ops",
".",
"Tensor",
")",
":",
"in_grad",
".",
"set_shape",
"(",
"t_in",
".",
"get_shape",
"(",
")",
")",
"_SetGrad",
"(",
"grads",
",",
"t_in",
",",
"in_grad",
")",
"if",
"loop_state",
":",
"loop_state",
".",
"ExitGradWhileContext",
"(",
"op",
",",
"before",
"=",
"False",
")",
"# Update pending count for the inputs of op and enqueue ready ops.",
"_UpdatePendingAndEnqueueReady",
"(",
"grads",
",",
"op",
",",
"queue",
",",
"pending_count",
",",
"loop_state",
")",
"if",
"loop_state",
":",
"loop_state",
".",
"PostProcessing",
"(",
")",
"return",
"[",
"_GetGrad",
"(",
"grads",
",",
"x",
")",
"for",
"x",
"in",
"xs",
"]"
] |
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/ops/gradients.py#L307-L501
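A minimal usage sketch of the documented behavior, assuming a TF1-style graph runtime in which this function backs `tf.gradients` (the placeholder/session calls below are standard TF1 API, not part of this record):

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[3])
y = tf.reduce_sum(x * x)             # y = sum(x_i^2)
dy_dx = tf.gradients(y, [x])[0]      # symbolic gradient: 2 * x

with tf.Session() as sess:
    print(sess.run(dy_dx, feed_dict={x: [1.0, 2.0, 3.0]}))  # [2. 4. 6.]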
|
|
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/eager/python/examples/rnn_ptb/rnn_ptb.py
|
python
|
Datasets.__init__
|
(self, path)
|
Load the Penn Treebank dataset.
Args:
path: Path to the data/ directory of the dataset from Tomas Mikolov's
webpage - http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
|
Load the Penn Treebank dataset.
|
[
"Load",
"the",
"Penn",
"Treebank",
"dataset",
"."
] |
def __init__(self, path):
"""Load the Penn Treebank dataset.
Args:
path: Path to the data/ directory of the dataset from Tomas Mikolov's
webpage - http://www.fit.vutbr.cz/~imikolov/rnnlm/simple-examples.tgz
"""
self.word2idx = {} # string -> integer id
self.idx2word = [] # integer id -> word string
# Files represented as a list of integer ids (as opposed to list of string
# words).
self.train = self.tokenize(os.path.join(path, "ptb.train.txt"))
self.valid = self.tokenize(os.path.join(path, "ptb.valid.txt"))
|
[
"def",
"__init__",
"(",
"self",
",",
"path",
")",
":",
"self",
".",
"word2idx",
"=",
"{",
"}",
"# string -> integer id",
"self",
".",
"idx2word",
"=",
"[",
"]",
"# integer id -> word string",
"# Files represented as a list of integer ids (as opposed to list of string",
"# words).",
"self",
".",
"train",
"=",
"self",
".",
"tokenize",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"\"ptb.train.txt\"",
")",
")",
"self",
".",
"valid",
"=",
"self",
".",
"tokenize",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"\"ptb.valid.txt\"",
")",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/eager/python/examples/rnn_ptb/rnn_ptb.py#L219-L232
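A hypothetical usage sketch; the extraction path is illustrative, assuming the directory layout of the simple-examples tarball referenced in the docstring:

corpus = Datasets("/tmp/simple-examples/data")  # hypothetical path
print(len(corpus.idx2word))   # vocabulary size built while tokenizing
print(corpus.train[:10])      # first ten integer word ids of ptb.train.txt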
|
||
metashell/metashell
|
f4177e4854ea00c8dbc722cadab26ef413d798ea
|
3rd/templight/clang/utils/check_cfc/obj_diff.py
|
python
|
dump_debug
|
(objfile)
|
return [line for line in out.split(os.linesep) if keep_line(line)]
|
Dump all of the debug info from a file.
|
Dump all of the debug info from a file.
|
[
"Dump",
"all",
"of",
"the",
"debug",
"info",
"from",
"a",
"file",
"."
] |
def dump_debug(objfile):
"""Dump all of the debug info from a file."""
p = subprocess.Popen([disassembler, '-WliaprmfsoRt', objfile], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
if p.returncode or err:
print("Dump debug failed: {}".format(objfile))
sys.exit(1)
return [line for line in out.split(os.linesep) if keep_line(line)]
|
[
"def",
"dump_debug",
"(",
"objfile",
")",
":",
"p",
"=",
"subprocess",
".",
"Popen",
"(",
"[",
"disassembler",
",",
"'-WliaprmfsoRt'",
",",
"objfile",
"]",
",",
"stdout",
"=",
"subprocess",
".",
"PIPE",
",",
"stderr",
"=",
"subprocess",
".",
"PIPE",
")",
"(",
"out",
",",
"err",
")",
"=",
"p",
".",
"communicate",
"(",
")",
"if",
"p",
".",
"returncode",
"or",
"err",
":",
"print",
"(",
"\"Dump debug failed: {}\"",
".",
"format",
"(",
"objfile",
")",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"return",
"[",
"line",
"for",
"line",
"in",
"out",
".",
"split",
"(",
"os",
".",
"linesep",
")",
"if",
"keep_line",
"(",
"line",
")",
"]"
] |
https://github.com/metashell/metashell/blob/f4177e4854ea00c8dbc722cadab26ef413d798ea/3rd/templight/clang/utils/check_cfc/obj_diff.py#L30-L37
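A sketch of how two such dumps might be compared, pairing dump_debug with the stdlib difflib module; the object file names are hypothetical, and the module-level `disassembler` must already be configured:

import difflib

dump_a = dump_debug("foo_clean.o")     # hypothetical inputs
dump_b = dump_debug("foo_patched.o")
for line in difflib.unified_diff(dump_a, dump_b, lineterm=""):
    print(line)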
|
|
plumonito/dtslam
|
5994bb9cf7a11981b830370db206bceb654c085d
|
3rdparty/opencv-git/3rdparty/jinja2/runtime.py
|
python
|
Context.get_all
|
(self)
|
return dict(self.parent, **self.vars)
|
Return a copy of the complete context as dict including the
exported variables.
|
Return a copy of the complete context as dict including the
exported variables.
|
[
"Return",
"a",
"copy",
"of",
"the",
"complete",
"context",
"as",
"dict",
"including",
"the",
"exported",
"variables",
"."
] |
def get_all(self):
"""Return a copy of the complete context as dict including the
exported variables.
"""
return dict(self.parent, **self.vars)
|
[
"def",
"get_all",
"(",
"self",
")",
":",
"return",
"dict",
"(",
"self",
".",
"parent",
",",
"*",
"*",
"self",
".",
"vars",
")"
] |
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/3rdparty/jinja2/runtime.py#L160-L164
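The one-liner relies on `dict(mapping, **kwargs)` merging, where the keyword items override the base mapping; a self-contained illustration:

parent = {"a": 1, "b": 2}
local_vars = {"b": 20, "c": 3}
print(dict(parent, **local_vars))   # {'a': 1, 'b': 20, 'c': 3}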
|
|
openmm/openmm
|
cb293447c4fc8b03976dfe11399f107bab70f3d9
|
wrappers/python/openmm/app/desmonddmsfile.py
|
python
|
DesmondDMSFile._readSchemas
|
(self, conn)
|
return tables
|
Read and return the schemas of each of the tables in the dms file connection 'conn'
|
Read and return the schemas of each of the tables in the dms file connection 'conn'
|
[
"Read",
"and",
"return",
"the",
"schemas",
"of",
"each",
"of",
"the",
"tables",
"in",
"the",
"dms",
"file",
"connection",
"conn"
] |
def _readSchemas(self, conn):
"""Read and return the schemas of each of the tables in the dms file connection 'conn'"""
tables = {}
for table in conn.execute("SELECT name FROM sqlite_master WHERE type='table'"):
names = []
for e in conn.execute('PRAGMA table_info(%s)' % table):
names.append(str(e[1]))
tables[str(table[0])] = names
return tables
|
[
"def",
"_readSchemas",
"(",
"self",
",",
"conn",
")",
":",
"tables",
"=",
"{",
"}",
"for",
"table",
"in",
"conn",
".",
"execute",
"(",
"\"SELECT name FROM sqlite_master WHERE type='table'\"",
")",
":",
"names",
"=",
"[",
"]",
"for",
"e",
"in",
"conn",
".",
"execute",
"(",
"'PRAGMA table_info(%s)'",
"%",
"table",
")",
":",
"names",
".",
"append",
"(",
"str",
"(",
"e",
"[",
"1",
"]",
")",
")",
"tables",
"[",
"str",
"(",
"table",
"[",
"0",
"]",
")",
"]",
"=",
"names",
"return",
"tables"
] |
https://github.com/openmm/openmm/blob/cb293447c4fc8b03976dfe11399f107bab70f3d9/wrappers/python/openmm/app/desmonddmsfile.py#L871-L879
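The same introspection pattern, sketched against an in-memory SQLite database so it runs standalone:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE particle (id INTEGER, x REAL, y REAL)")
for (name,) in conn.execute("SELECT name FROM sqlite_master WHERE type='table'"):
    cols = [row[1] for row in conn.execute("PRAGMA table_info(%s)" % name)]
    print(name, cols)   # particle ['id', 'x', 'y']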
|
|
CRYTEK/CRYENGINE
|
232227c59a220cbbd311576f0fbeba7bb53b2a8c
|
Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py
|
python
|
OrderedDict.itervalues
|
(self)
|
od.itervalues -> an iterator over the values in od
|
od.itervalues -> an iterator over the values in od
|
[
"od",
".",
"itervalues",
"-",
">",
"an",
"iterator",
"over",
"the",
"values",
"in",
"od"
] |
def itervalues(self):
'od.itervalues -> an iterator over the values in od'
for k in self:
yield self[k]
|
[
"def",
"itervalues",
"(",
"self",
")",
":",
"for",
"k",
"in",
"self",
":",
"yield",
"self",
"[",
"k",
"]"
] |
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py#L132-L135
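The generator simply indexes by each key in iteration order; an equivalent check with the stdlib class (this vendored copy targets Python 2, where itervalues existed):

from collections import OrderedDict

od = OrderedDict([("a", 1), ("b", 2)])
print([od[k] for k in od])   # [1, 2] -- exactly what itervalues() would yield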
|
||
Xilinx/Vitis-AI
|
fc74d404563d9951b57245443c73bef389f3657f
|
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/seq2seq/python/ops/helper.py
|
python
|
ScheduledEmbeddingTrainingHelper.__init__
|
(self, inputs, sequence_length, embedding, sampling_probability,
time_major=False, seed=None, scheduling_seed=None, name=None)
|
Initializer.
Args:
inputs: A (structure of) input tensors.
sequence_length: An int32 vector tensor.
embedding: A callable that takes a vector tensor of `ids` (argmax ids),
or the `params` argument for `embedding_lookup`.
sampling_probability: A 0D `float32` tensor: the probability of sampling
categorically from the output ids instead of reading directly from the
inputs.
time_major: Python bool. Whether the tensors in `inputs` are time major.
If `False` (default), they are assumed to be batch major.
seed: The sampling seed.
scheduling_seed: The schedule decision rule sampling seed.
name: Name scope for any created operations.
Raises:
ValueError: if `sampling_probability` is not a scalar or vector.
|
Initializer.
|
[
"Initializer",
"."
] |
def __init__(self, inputs, sequence_length, embedding, sampling_probability,
time_major=False, seed=None, scheduling_seed=None, name=None):
"""Initializer.
Args:
inputs: A (structure of) input tensors.
sequence_length: An int32 vector tensor.
embedding: A callable that takes a vector tensor of `ids` (argmax ids),
or the `params` argument for `embedding_lookup`.
sampling_probability: A 0D `float32` tensor: the probability of sampling
categorically from the output ids instead of reading directly from the
inputs.
time_major: Python bool. Whether the tensors in `inputs` are time major.
If `False` (default), they are assumed to be batch major.
seed: The sampling seed.
scheduling_seed: The schedule decision rule sampling seed.
name: Name scope for any created operations.
Raises:
ValueError: if `sampling_probability` is not a scalar or vector.
"""
with ops.name_scope(name, "ScheduledEmbeddingSamplingWrapper",
[embedding, sampling_probability]):
if callable(embedding):
self._embedding_fn = embedding
else:
self._embedding_fn = (
lambda ids: embedding_ops.embedding_lookup(embedding, ids))
self._sampling_probability = ops.convert_to_tensor(
sampling_probability, name="sampling_probability")
if self._sampling_probability.get_shape().ndims not in (0, 1):
raise ValueError(
"sampling_probability must be either a scalar or a vector. "
"saw shape: %s" % (self._sampling_probability.get_shape()))
self._seed = seed
self._scheduling_seed = scheduling_seed
super(ScheduledEmbeddingTrainingHelper, self).__init__(
inputs=inputs,
sequence_length=sequence_length,
time_major=time_major,
name=name)
|
[
"def",
"__init__",
"(",
"self",
",",
"inputs",
",",
"sequence_length",
",",
"embedding",
",",
"sampling_probability",
",",
"time_major",
"=",
"False",
",",
"seed",
"=",
"None",
",",
"scheduling_seed",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"with",
"ops",
".",
"name_scope",
"(",
"name",
",",
"\"ScheduledEmbeddingSamplingWrapper\"",
",",
"[",
"embedding",
",",
"sampling_probability",
"]",
")",
":",
"if",
"callable",
"(",
"embedding",
")",
":",
"self",
".",
"_embedding_fn",
"=",
"embedding",
"else",
":",
"self",
".",
"_embedding_fn",
"=",
"(",
"lambda",
"ids",
":",
"embedding_ops",
".",
"embedding_lookup",
"(",
"embedding",
",",
"ids",
")",
")",
"self",
".",
"_sampling_probability",
"=",
"ops",
".",
"convert_to_tensor",
"(",
"sampling_probability",
",",
"name",
"=",
"\"sampling_probability\"",
")",
"if",
"self",
".",
"_sampling_probability",
".",
"get_shape",
"(",
")",
".",
"ndims",
"not",
"in",
"(",
"0",
",",
"1",
")",
":",
"raise",
"ValueError",
"(",
"\"sampling_probability must be either a scalar or a vector. \"",
"\"saw shape: %s\"",
"%",
"(",
"self",
".",
"_sampling_probability",
".",
"get_shape",
"(",
")",
")",
")",
"self",
".",
"_seed",
"=",
"seed",
"self",
".",
"_scheduling_seed",
"=",
"scheduling_seed",
"super",
"(",
"ScheduledEmbeddingTrainingHelper",
",",
"self",
")",
".",
"__init__",
"(",
"inputs",
"=",
"inputs",
",",
"sequence_length",
"=",
"sequence_length",
",",
"time_major",
"=",
"time_major",
",",
"name",
"=",
"name",
")"
] |
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/seq2seq/python/ops/helper.py#L322-L362
|
||
catboost/catboost
|
167f64f237114a4d10b2b4ee42adb4569137debe
|
contrib/python/setuptools/py3/pkg_resources/_vendor/pyparsing.py
|
python
|
ParserElement.__xor__
|
(self, other )
|
return Or( [ self, other ] )
|
Implementation of ^ operator - returns C{L{Or}}
|
Implementation of ^ operator - returns C{L{Or}}
|
[
"Implementation",
"of",
"^",
"operator",
"-",
"returns",
"C",
"{",
"L",
"{",
"Or",
"}}"
] |
def __xor__(self, other ):
"""
Implementation of ^ operator - returns C{L{Or}}
"""
if isinstance( other, basestring ):
other = ParserElement._literalStringClass( other )
if not isinstance( other, ParserElement ):
warnings.warn("Cannot combine element of type %s with ParserElement" % type(other),
SyntaxWarning, stacklevel=2)
return None
return Or( [ self, other ] )
|
[
"def",
"__xor__",
"(",
"self",
",",
"other",
")",
":",
"if",
"isinstance",
"(",
"other",
",",
"basestring",
")",
":",
"other",
"=",
"ParserElement",
".",
"_literalStringClass",
"(",
"other",
")",
"if",
"not",
"isinstance",
"(",
"other",
",",
"ParserElement",
")",
":",
"warnings",
".",
"warn",
"(",
"\"Cannot combine element of type %s with ParserElement\"",
"%",
"type",
"(",
"other",
")",
",",
"SyntaxWarning",
",",
"stacklevel",
"=",
"2",
")",
"return",
"None",
"return",
"Or",
"(",
"[",
"self",
",",
"other",
"]",
")"
] |
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py3/pkg_resources/_vendor/pyparsing.py#L1972-L1982
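A sketch of the resulting Or semantics (the longest matching alternative wins), using public pyparsing names:

from pyparsing import Word, nums, alphanums

parser = Word(nums) ^ Word(alphanums)   # __xor__ builds Or([...])
print(parser.parseString("123abc"))     # ['123abc'] -- Or keeps the longest match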
|
|
mindspore-ai/mindspore
|
fb8fd3338605bb34fa5cea054e535a8b1d753fab
|
mindspore/python/mindspore/ops/_op_impl/tbe/unsorted_segment_max.py
|
python
|
_unsorted_segment_max_tbe
|
()
|
return
|
UnsortedSegmentMax TBE register
|
UnsortedSegmentMax TBE register
|
[
"UnsortedSegmentMax",
"TBE",
"register"
] |
def _unsorted_segment_max_tbe():
"""UnsortedSegmentMax TBE register"""
return
|
[
"def",
"_unsorted_segment_max_tbe",
"(",
")",
":",
"return"
] |
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/ops/_op_impl/tbe/unsorted_segment_max.py#L47-L49
|
|
eerolanguage/clang
|
91360bee004a1cbdb95fe5eb605ef243152da41b
|
docs/tools/dump_ast_matchers.py
|
python
|
add_matcher
|
(result_type, name, args, comment, is_dyncast=False)
|
Adds a matcher to one of our categories.
|
Adds a matcher to one of our categories.
|
[
"Adds",
"a",
"matcher",
"to",
"one",
"of",
"our",
"categories",
"."
] |
def add_matcher(result_type, name, args, comment, is_dyncast=False):
"""Adds a matcher to one of our categories."""
if name == 'id':
# FIXME: Figure out whether we want to support the 'id' matcher.
return
matcher_id = '%s%d' % (name, ids[name])
ids[name] += 1
args = unify_arguments(args)
matcher_html = TD_TEMPLATE % {
'result': esc('Matcher<%s>' % result_type),
'name': name,
'args': esc(args),
'comment': esc(strip_doxygen(comment)),
'id': matcher_id,
}
if is_dyncast:
node_matchers[result_type + name] = matcher_html
# Use a heuristic to figure out whether a matcher is a narrowing or
# traversal matcher. By default, matchers that take other matchers as
# arguments (and are not node matchers) do traversal. We specifically
# exclude known narrowing matchers that also take other matchers as
# arguments.
elif ('Matcher<' not in args or
name in ['allOf', 'anyOf', 'anything', 'unless']):
narrowing_matchers[result_type + name] = matcher_html
else:
traversal_matchers[result_type + name] = matcher_html
|
[
"def",
"add_matcher",
"(",
"result_type",
",",
"name",
",",
"args",
",",
"comment",
",",
"is_dyncast",
"=",
"False",
")",
":",
"if",
"name",
"==",
"'id'",
":",
"# FIXME: Figure out whether we want to support the 'id' matcher.",
"return",
"matcher_id",
"=",
"'%s%d'",
"%",
"(",
"name",
",",
"ids",
"[",
"name",
"]",
")",
"ids",
"[",
"name",
"]",
"+=",
"1",
"args",
"=",
"unify_arguments",
"(",
"args",
")",
"matcher_html",
"=",
"TD_TEMPLATE",
"%",
"{",
"'result'",
":",
"esc",
"(",
"'Matcher<%s>'",
"%",
"result_type",
")",
",",
"'name'",
":",
"name",
",",
"'args'",
":",
"esc",
"(",
"args",
")",
",",
"'comment'",
":",
"esc",
"(",
"strip_doxygen",
"(",
"comment",
")",
")",
",",
"'id'",
":",
"matcher_id",
",",
"}",
"if",
"is_dyncast",
":",
"node_matchers",
"[",
"result_type",
"+",
"name",
"]",
"=",
"matcher_html",
"# Use a heuristic to figure out whether a matcher is a narrowing or",
"# traversal matcher. By default, matchers that take other matchers as",
"# arguments (and are not node matchers) do traversal. We specifically",
"# exclude known narrowing matchers that also take other matchers as",
"# arguments.",
"elif",
"(",
"'Matcher<'",
"not",
"in",
"args",
"or",
"name",
"in",
"[",
"'allOf'",
",",
"'anyOf'",
",",
"'anything'",
",",
"'unless'",
"]",
")",
":",
"narrowing_matchers",
"[",
"result_type",
"+",
"name",
"]",
"=",
"matcher_html",
"else",
":",
"traversal_matchers",
"[",
"result_type",
"+",
"name",
"]",
"=",
"matcher_html"
] |
https://github.com/eerolanguage/clang/blob/91360bee004a1cbdb95fe5eb605ef243152da41b/docs/tools/dump_ast_matchers.py#L98-L124
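The narrowing-vs-traversal heuristic distilled into a standalone predicate; the function name and sample arguments here are illustrative, not part of the script:

def is_traversal_matcher(args, name):
    # Traversal: takes other matchers as arguments and is not a known narrowing matcher.
    return 'Matcher<' in args and name not in ['allOf', 'anyOf', 'anything', 'unless']

print(is_traversal_matcher('Matcher<Stmt> InnerMatcher', 'hasDescendant'))  # True
print(is_traversal_matcher('Matcher<Stmt> InnerMatcher', 'anyOf'))          # False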
|
||
miyosuda/TensorFlowAndroidDemo
|
35903e0221aa5f109ea2dbef27f20b52e317f42d
|
jni-build/jni/include/tensorflow/python/framework/ops.py
|
python
|
Operation.graph
|
(self)
|
return self._graph
|
The `Graph` that contains this operation.
|
The `Graph` that contains this operation.
|
[
"The",
"Graph",
"that",
"contains",
"this",
"operation",
"."
] |
def graph(self):
"""The `Graph` that contains this operation."""
return self._graph
|
[
"def",
"graph",
"(",
"self",
")",
":",
"return",
"self",
".",
"_graph"
] |
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/python/framework/ops.py#L1481-L1483
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/generic.py
|
python
|
NDFrame.reindex_like
|
(
self: FrameOrSeries,
other,
method: Optional[str] = None,
copy: bool_t = True,
limit=None,
tolerance=None,
)
|
return self.reindex(**d)
|
Return an object with matching indices as other object.
Conform the object to the same index on all axes. Optional
filling logic, placing NaN in locations having no value
in the previous index. A new object is produced unless the
new index is equivalent to the current one and copy=False.
Parameters
----------
other : Object of the same data type
Its row and column indices are used to define the new indices
of this object.
method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
Method to use for filling holes in reindexed DataFrame.
Please note: this is only applicable to DataFrames/Series with a
monotonically increasing/decreasing index.
* None (default): don't fill gaps
* pad / ffill: propagate last valid observation forward to next
valid
* backfill / bfill: use next valid observation to fill gap
* nearest: use nearest valid observations to fill gap.
copy : bool, default True
Return a new object, even if the passed indexes are the same.
limit : int, default None
Maximum number of consecutive labels to fill for inexact matches.
tolerance : optional
Maximum distance between original and new labels for inexact
matches. The values of the index at the matching locations must
satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
Tolerance may be a scalar value, which applies the same tolerance
to all values, or list-like, which applies variable tolerance per
element. List-like includes list, tuple, array, Series, and must be
the same size as the index and its dtype must exactly match the
index's type.
.. versionadded:: 0.21.0 (list-like tolerance)
Returns
-------
Series or DataFrame
Same type as caller, but with changed indices on each axis.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex : Change to new indices or expand indices.
Notes
-----
Same as calling
``.reindex(index=other.index, columns=other.columns,...)``.
Examples
--------
>>> df1 = pd.DataFrame([[24.3, 75.7, 'high'],
... [31, 87.8, 'high'],
... [22, 71.6, 'medium'],
... [35, 95, 'medium']],
... columns=['temp_celsius', 'temp_fahrenheit',
... 'windspeed'],
... index=pd.date_range(start='2014-02-12',
... end='2014-02-15', freq='D'))
>>> df1
temp_celsius temp_fahrenheit windspeed
2014-02-12 24.3 75.7 high
2014-02-13 31.0 87.8 high
2014-02-14 22.0 71.6 medium
2014-02-15 35.0 95.0 medium
>>> df2 = pd.DataFrame([[28, 'low'],
... [30, 'low'],
... [35.1, 'medium']],
... columns=['temp_celsius', 'windspeed'],
... index=pd.DatetimeIndex(['2014-02-12', '2014-02-13',
... '2014-02-15']))
>>> df2
temp_celsius windspeed
2014-02-12 28.0 low
2014-02-13 30.0 low
2014-02-15 35.1 medium
>>> df2.reindex_like(df1)
temp_celsius temp_fahrenheit windspeed
2014-02-12 28.0 NaN low
2014-02-13 30.0 NaN low
2014-02-14 NaN NaN NaN
2014-02-15 35.1 NaN medium
|
Return an object with matching indices as other object.
|
[
"Return",
"an",
"object",
"with",
"matching",
"indices",
"as",
"other",
"object",
"."
] |
def reindex_like(
self: FrameOrSeries,
other,
method: Optional[str] = None,
copy: bool_t = True,
limit=None,
tolerance=None,
) -> FrameOrSeries:
"""
Return an object with matching indices as other object.
Conform the object to the same index on all axes. Optional
filling logic, placing NaN in locations having no value
in the previous index. A new object is produced unless the
new index is equivalent to the current one and copy=False.
Parameters
----------
other : Object of the same data type
Its row and column indices are used to define the new indices
of this object.
method : {None, 'backfill'/'bfill', 'pad'/'ffill', 'nearest'}
Method to use for filling holes in reindexed DataFrame.
Please note: this is only applicable to DataFrames/Series with a
monotonically increasing/decreasing index.
* None (default): don't fill gaps
* pad / ffill: propagate last valid observation forward to next
valid
* backfill / bfill: use next valid observation to fill gap
* nearest: use nearest valid observations to fill gap.
copy : bool, default True
Return a new object, even if the passed indexes are the same.
limit : int, default None
Maximum number of consecutive labels to fill for inexact matches.
tolerance : optional
Maximum distance between original and new labels for inexact
matches. The values of the index at the matching locations must
satisfy the equation ``abs(index[indexer] - target) <= tolerance``.
Tolerance may be a scalar value, which applies the same tolerance
to all values, or list-like, which applies variable tolerance per
element. List-like includes list, tuple, array, Series, and must be
the same size as the index and its dtype must exactly match the
index's type.
.. versionadded:: 0.21.0 (list-like tolerance)
Returns
-------
Series or DataFrame
Same type as caller, but with changed indices on each axis.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex : Change to new indices or expand indices.
Notes
-----
Same as calling
``.reindex(index=other.index, columns=other.columns,...)``.
Examples
--------
>>> df1 = pd.DataFrame([[24.3, 75.7, 'high'],
... [31, 87.8, 'high'],
... [22, 71.6, 'medium'],
... [35, 95, 'medium']],
... columns=['temp_celsius', 'temp_fahrenheit',
... 'windspeed'],
... index=pd.date_range(start='2014-02-12',
... end='2014-02-15', freq='D'))
>>> df1
temp_celsius temp_fahrenheit windspeed
2014-02-12 24.3 75.7 high
2014-02-13 31.0 87.8 high
2014-02-14 22.0 71.6 medium
2014-02-15 35.0 95.0 medium
>>> df2 = pd.DataFrame([[28, 'low'],
... [30, 'low'],
... [35.1, 'medium']],
... columns=['temp_celsius', 'windspeed'],
... index=pd.DatetimeIndex(['2014-02-12', '2014-02-13',
... '2014-02-15']))
>>> df2
temp_celsius windspeed
2014-02-12 28.0 low
2014-02-13 30.0 low
2014-02-15 35.1 medium
>>> df2.reindex_like(df1)
temp_celsius temp_fahrenheit windspeed
2014-02-12 28.0 NaN low
2014-02-13 30.0 NaN low
2014-02-14 NaN NaN NaN
2014-02-15 35.1 NaN medium
"""
d = other._construct_axes_dict(
axes=self._AXIS_ORDERS,
method=method,
copy=copy,
limit=limit,
tolerance=tolerance,
)
return self.reindex(**d)
|
[
"def",
"reindex_like",
"(",
"self",
":",
"FrameOrSeries",
",",
"other",
",",
"method",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"copy",
":",
"bool_t",
"=",
"True",
",",
"limit",
"=",
"None",
",",
"tolerance",
"=",
"None",
",",
")",
"->",
"FrameOrSeries",
":",
"d",
"=",
"other",
".",
"_construct_axes_dict",
"(",
"axes",
"=",
"self",
".",
"_AXIS_ORDERS",
",",
"method",
"=",
"method",
",",
"copy",
"=",
"copy",
",",
"limit",
"=",
"limit",
",",
"tolerance",
"=",
"tolerance",
",",
")",
"return",
"self",
".",
"reindex",
"(",
"*",
"*",
"d",
")"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/pandas/core/generic.py#L3794-L3905
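A small runnable check of the equivalence stated in the Notes section:

import pandas as pd

df1 = pd.DataFrame({"a": [1, 2]}, index=["x", "y"])
df2 = pd.DataFrame({"a": [10], "b": [20]}, index=["x"])
explicit = df2.reindex(index=df1.index, columns=df1.columns)
print(df2.reindex_like(df1).equals(explicit))   # True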
|
|
aws/lumberyard
|
f85344403c1c2e77ec8c75deb2c116e97b713217
|
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/stat.py
|
python
|
S_ISCHR
|
(mode)
|
return S_IFMT(mode) == S_IFCHR
|
Return True if mode is from a character special device file.
|
Return True if mode is from a character special device file.
|
[
"Return",
"True",
"if",
"mode",
"is",
"from",
"a",
"character",
"special",
"device",
"file",
"."
] |
def S_ISCHR(mode):
"""Return True if mode is from a character special device file."""
return S_IFMT(mode) == S_IFCHR
|
[
"def",
"S_ISCHR",
"(",
"mode",
")",
":",
"return",
"S_IFMT",
"(",
"mode",
")",
"==",
"S_IFCHR"
] |
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/stat.py#L54-L56
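A quick sketch on a Unix system, where /dev/null is a character device:

import os
import stat

print(stat.S_ISCHR(os.stat("/dev/null").st_mode))   # True
print(stat.S_ISCHR(os.stat(".").st_mode))           # False -- a directory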
|
|
NervanaSystems/ngraph
|
f677a119765ca30636cf407009dabd118664951f
|
python/src/ngraph/runtime.py
|
python
|
Computation.serialize
|
(self, indent: int = 0)
|
return serialize(self.function, indent)
|
Serialize function (compute graph) to a JSON string.
:param indent: set indent of serialized output
:return: serialized model
|
Serialize function (compute graph) to a JSON string.
|
[
"Serialize",
"function",
"(",
"compute",
"graph",
")",
"to",
"a",
"JSON",
"string",
"."
] |
def serialize(self, indent: int = 0) -> str:
"""Serialize function (compute graph) to a JSON string.
:param indent: set indent of serialized output
:return: serialized model
"""
return serialize(self.function, indent)
|
[
"def",
"serialize",
"(",
"self",
",",
"indent",
":",
"int",
"=",
"0",
")",
"->",
"str",
":",
"return",
"serialize",
"(",
"self",
".",
"function",
",",
"indent",
")"
] |
https://github.com/NervanaSystems/ngraph/blob/f677a119765ca30636cf407009dabd118664951f/python/src/ngraph/runtime.py#L127-L133
|
|
forkineye/ESPixelStick
|
22926f1c0d1131f1369fc7cad405689a095ae3cb
|
dist/bin/esptool/esptool.py
|
python
|
ESPLoader.connect
|
(self, mode='default_reset')
|
Try connecting repeatedly until successful, or giving up
|
Try connecting repeatedly until successful, or giving up
|
[
"Try",
"connecting",
"repeatedly",
"until",
"successful",
"or",
"giving",
"up"
] |
def connect(self, mode='default_reset'):
""" Try connecting repeatedly until successful, or giving up """
print('Connecting...', end='')
sys.stdout.flush()
last_error = None
try:
for _ in range(7):
last_error = self._connect_attempt(mode=mode, esp32r0_delay=False)
if last_error is None:
return
last_error = self._connect_attempt(mode=mode, esp32r0_delay=True)
if last_error is None:
return
finally:
print('') # end 'Connecting...' line
raise FatalError('Failed to connect to %s: %s' % (self.CHIP_NAME, last_error))
|
[
"def",
"connect",
"(",
"self",
",",
"mode",
"=",
"'default_reset'",
")",
":",
"print",
"(",
"'Connecting...'",
",",
"end",
"=",
"''",
")",
"sys",
".",
"stdout",
".",
"flush",
"(",
")",
"last_error",
"=",
"None",
"try",
":",
"for",
"_",
"in",
"range",
"(",
"7",
")",
":",
"last_error",
"=",
"self",
".",
"_connect_attempt",
"(",
"mode",
"=",
"mode",
",",
"esp32r0_delay",
"=",
"False",
")",
"if",
"last_error",
"is",
"None",
":",
"return",
"last_error",
"=",
"self",
".",
"_connect_attempt",
"(",
"mode",
"=",
"mode",
",",
"esp32r0_delay",
"=",
"True",
")",
"if",
"last_error",
"is",
"None",
":",
"return",
"finally",
":",
"print",
"(",
"''",
")",
"# end 'Connecting...' line",
"raise",
"FatalError",
"(",
"'Failed to connect to %s: %s'",
"%",
"(",
"self",
".",
"CHIP_NAME",
",",
"last_error",
")",
")"
] |
https://github.com/forkineye/ESPixelStick/blob/22926f1c0d1131f1369fc7cad405689a095ae3cb/dist/bin/esptool/esptool.py#L467-L483
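The retry structure distilled into a generic helper; this is a sketch of the pattern, not esptool API, and the names are illustrative:

def connect_with_retries(attempt, retries=7):
    # attempt(slow) returns None on success, else a description of the failure.
    last_error = None
    for _ in range(retries):
        for slow in (False, True):   # mirrors the esp32r0_delay toggle above
            last_error = attempt(slow)
            if last_error is None:
                return
    raise RuntimeError("Failed to connect: %s" % last_error)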
|