Dataset schema (one field per line in the records below; string fields show
the observed min/max value lengths):

  nwo                 stringlengths   5 .. 86
  sha                 stringlengths   40 .. 40
  path                stringlengths   4 .. 189
  language            stringclasses   1 value
  identifier          stringlengths   1 .. 94
  parameters          stringlengths   2 .. 4.03k
  argument_list       stringclasses   1 value
  return_statement    stringlengths   0 .. 11.5k
  docstring           stringlengths   1 .. 33.2k
  docstring_summary   stringlengths   0 .. 5.15k
  docstring_tokens    sequence
  function            stringlengths   34 .. 151k
  function_tokens     sequence
  url                 stringlengths   90 .. 278
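Each record pairs a Python function with its docstring, token streams, and a
source URL. As a hedged sketch of how such records are typically consumed,
assuming the rows are exported as JSON Lines (this dump does not name a file,
so the path below is a placeholder):

import json

# Placeholder path: the preview does not say where the raw rows live.
with open("code_docstring_corpus.jsonl") as f:
    for line in f:
        rec = json.loads(line)
        # Each record pairs a function with its docstring and source URL.
        print(rec["nwo"], rec["identifier"])
        print("  summary:", rec["docstring_summary"][:80])
        print("  source: ", rec["url"])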
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib2to3/refactor.py
python
RefactoringTool.traverse_by
(self, fixers, traversal)
Traverse an AST, applying a set of fixers to each node. This is a helper method for refactor_tree(). Args: fixers: a list of fixer instances. traversal: a generator that yields AST nodes. Returns: None
Traverse an AST, applying a set of fixers to each node.
[ "Traverse", "an", "AST", "applying", "a", "set", "of", "fixers", "to", "each", "node", "." ]
def traverse_by(self, fixers, traversal):
    """Traverse an AST, applying a set of fixers to each node.

    This is a helper method for refactor_tree().

    Args:
        fixers: a list of fixer instances.
        traversal: a generator that yields AST nodes.

    Returns:
        None
    """
    if not fixers:
        return
    for node in traversal:
        for fixer in fixers[node.type]:
            results = fixer.match(node)
            if results:
                new = fixer.transform(node, results)
                if new is not None:
                    node.replace(new)
                    node = new
[ "def", "traverse_by", "(", "self", ",", "fixers", ",", "traversal", ")", ":", "if", "not", "fixers", ":", "return", "for", "node", "in", "traversal", ":", "for", "fixer", "in", "fixers", "[", "node", ".", "type", "]", ":", "results", "=", "fixer", ".", "match", "(", "node", ")", "if", "results", ":", "new", "=", "fixer", ".", "transform", "(", "node", ",", "results", ")", "if", "new", "is", "not", "None", ":", "node", ".", "replace", "(", "new", ")", "node", "=", "new" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/lib2to3/refactor.py#L484-L505
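To make the dispatch pattern above concrete, here is a minimal, self-contained
sketch of the same idea: fixers are grouped by node type, each fixer exposes
match()/transform(), and a transformed node replaces the original. All names
here (ToyNode, DoubleFixer) are hypothetical illustrations, not lib2to3 API.

from collections import defaultdict

class ToyNode:
    """Hypothetical stand-in for a lib2to3 AST node."""
    def __init__(self, type_, value):
        self.type = type_
        self.value = value

class DoubleFixer:
    """Hypothetical fixer: matches even ints and doubles them."""
    def match(self, node):
        return node.value % 2 == 0
    def transform(self, node, results):
        return ToyNode(node.type, node.value * 2)

fixers = defaultdict(list, {"int": [DoubleFixer()]})
tree = [ToyNode("int", 2), ToyNode("int", 3)]

for i, node in enumerate(tree):        # stands in for the traversal generator
    for fixer in fixers[node.type]:
        results = fixer.match(node)
        if results:
            new = fixer.transform(node, results)
            if new is not None:
                tree[i] = new          # stands in for node.replace(new)

print([n.value for n in tree])         # [4, 3]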
eclipse/sumo
7132a9b8b6eea734bdec38479026b4d8c4336d03
tools/contributed/sumopy/coremodules/misc/shapeformat.py
python
get_shapefile
(filepath)
return sf
Returns shapefile handler and proj4 parameter string of shape file with given path.
Returns shapefile handler and proj4 parameter string of shape file with given path.
[ "Returns", "shapefile", "handler", "and", "proj4", "parameter", "string", "of", "shape", "file", "with", "given", "path", "." ]
def get_shapefile(filepath):
    """
    Returns shapefile handler and proj4 parameter string
    of shape file with given path.
    """
    parts = os.path.basename(filepath).split('.')
    basename = ''
    for part in parts[:-1]:
        basename += part

    dirname = os.path.dirname(filepath)
    shapefilepath = os.path.join(dirname, basename)
    print 'import_shapefile *%s*' % (shapefilepath), type(str(shapefilepath))
    sf = shapefile.Reader(str(shapefilepath))
    return sf
[ "def", "get_shapefile", "(", "filepath", ")", ":", "parts", "=", "os", ".", "path", ".", "basename", "(", "filepath", ")", ".", "split", "(", "'.'", ")", "basename", "=", "''", "for", "part", "in", "parts", "[", ":", "-", "1", "]", ":", "basename", "+=", "part", "dirname", "=", "os", ".", "path", ".", "dirname", "(", "filepath", ")", "shapefilepath", "=", "os", ".", "path", ".", "join", "(", "dirname", ",", "basename", ")", "print", "'import_shapefile *%s*'", "%", "(", "shapefilepath", ")", ",", "type", "(", "str", "(", "shapefilepath", ")", ")", "sf", "=", "shapefile", ".", "Reader", "(", "str", "(", "shapefilepath", ")", ")", "return", "sf" ]
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/contributed/sumopy/coremodules/misc/shapeformat.py#L230-L247
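Two side notes on get_shapefile(): the basename loop drops every dot, so
'a.b.shp' becomes 'ab' rather than 'a.b' (os.path.splitext would keep the
latter), and the print statement is Python 2 syntax, consistent with the
sumopy source tree. For context, a minimal reading sketch using the pyshp
package's documented Reader API (the path is a placeholder):

import shapefile  # the pyshp package

sf = shapefile.Reader("data/roads")       # placeholder path, extension optional
print(sf.fields)                          # attribute schema
for sr in sf.shapeRecords():
    print(sr.record, len(sr.shape.points))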
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
contrib/gizmos/osx_cocoa/gizmos.py
python
TreeListCtrl.GetFirstChild
(*args, **kwargs)
return _gizmos.TreeListCtrl_GetFirstChild(*args, **kwargs)
GetFirstChild(self, TreeItemId item) -> PyObject
GetFirstChild(self, TreeItemId item) -> PyObject
[ "GetFirstChild", "(", "self", "TreeItemId", "item", ")", "-", ">", "PyObject" ]
def GetFirstChild(*args, **kwargs):
    """GetFirstChild(self, TreeItemId item) -> PyObject"""
    return _gizmos.TreeListCtrl_GetFirstChild(*args, **kwargs)
[ "def", "GetFirstChild", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gizmos", ".", "TreeListCtrl_GetFirstChild", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/contrib/gizmos/osx_cocoa/gizmos.py#L770-L772
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/io/formats/info.py
python
TableBuilderAbstract.display_memory_usage
(self)
return bool(self.info.memory_usage)
Whether to display memory usage.
Whether to display memory usage.
[ "Whether", "to", "display", "memory", "usage", "." ]
def display_memory_usage(self) -> bool:
    """Whether to display memory usage."""
    return bool(self.info.memory_usage)
[ "def", "display_memory_usage", "(", "self", ")", "->", "bool", ":", "return", "bool", "(", "self", ".", "info", ".", "memory_usage", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/io/formats/info.py#L430-L432
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/difflib.py
python
SequenceMatcher.__init__
(self, isjunk=None, a='', b='', autojunk=True)
Construct a SequenceMatcher. Optional arg isjunk is None (the default), or a one-argument function that takes a sequence element and returns true iff the element is junk. None is equivalent to passing "lambda x: 0", i.e. no elements are considered to be junk. For example, pass lambda x: x in " \\t" if you're comparing lines as sequences of characters, and don't want to synch up on blanks or hard tabs. Optional arg a is the first of two sequences to be compared. By default, an empty string. The elements of a must be hashable. See also .set_seqs() and .set_seq1(). Optional arg b is the second of two sequences to be compared. By default, an empty string. The elements of b must be hashable. See also .set_seqs() and .set_seq2(). Optional arg autojunk should be set to False to disable the "automatic junk heuristic" that treats popular elements as junk (see module documentation for more information).
Construct a SequenceMatcher.
[ "Construct", "a", "SequenceMatcher", "." ]
def __init__(self, isjunk=None, a='', b='', autojunk=True):
    """Construct a SequenceMatcher.

    Optional arg isjunk is None (the default), or a one-argument
    function that takes a sequence element and returns true iff the
    element is junk.  None is equivalent to passing "lambda x: 0", i.e.
    no elements are considered to be junk.  For example, pass
        lambda x: x in " \\t"
    if you're comparing lines as sequences of characters, and don't
    want to synch up on blanks or hard tabs.

    Optional arg a is the first of two sequences to be compared.  By
    default, an empty string.  The elements of a must be hashable.  See
    also .set_seqs() and .set_seq1().

    Optional arg b is the second of two sequences to be compared.  By
    default, an empty string.  The elements of b must be hashable.  See
    also .set_seqs() and .set_seq2().

    Optional arg autojunk should be set to False to disable the
    "automatic junk heuristic" that treats popular elements as junk
    (see module documentation for more information).
    """

    # Members:
    # a
    #      first sequence
    # b
    #      second sequence; differences are computed as "what do
    #      we need to do to 'a' to change it into 'b'?"
    # b2j
    #      for x in b, b2j[x] is a list of the indices (into b)
    #      at which x appears; junk and popular elements do not appear
    # fullbcount
    #      for x in b, fullbcount[x] == the number of times x
    #      appears in b; only materialized if really needed (used
    #      only for computing quick_ratio())
    # matching_blocks
    #      a list of (i, j, k) triples, where a[i:i+k] == b[j:j+k];
    #      ascending & non-overlapping in i and in j; terminated by
    #      a dummy (len(a), len(b), 0) sentinel
    # opcodes
    #      a list of (tag, i1, i2, j1, j2) tuples, where tag is
    #      one of
    #          'replace'   a[i1:i2] should be replaced by b[j1:j2]
    #          'delete'    a[i1:i2] should be deleted
    #          'insert'    b[j1:j2] should be inserted
    #          'equal'     a[i1:i2] == b[j1:j2]
    # isjunk
    #      a user-supplied function taking a sequence element and
    #      returning true iff the element is "junk" -- this has
    #      subtle but helpful effects on the algorithm, which I'll
    #      get around to writing up someday <0.9 wink>.
    #      DON'T USE!  Only __chain_b uses this.  Use "in self.bjunk".
    # bjunk
    #      the items in b for which isjunk is True.
    # bpopular
    #      nonjunk items in b treated as junk by the heuristic (if used).

    self.isjunk = isjunk
    self.a = self.b = None
    self.autojunk = autojunk
    self.set_seqs(a, b)
[ "def", "__init__", "(", "self", ",", "isjunk", "=", "None", ",", "a", "=", "''", ",", "b", "=", "''", ",", "autojunk", "=", "True", ")", ":", "# Members:", "# a", "# first sequence", "# b", "# second sequence; differences are computed as \"what do", "# we need to do to 'a' to change it into 'b'?\"", "# b2j", "# for x in b, b2j[x] is a list of the indices (into b)", "# at which x appears; junk and popular elements do not appear", "# fullbcount", "# for x in b, fullbcount[x] == the number of times x", "# appears in b; only materialized if really needed (used", "# only for computing quick_ratio())", "# matching_blocks", "# a list of (i, j, k) triples, where a[i:i+k] == b[j:j+k];", "# ascending & non-overlapping in i and in j; terminated by", "# a dummy (len(a), len(b), 0) sentinel", "# opcodes", "# a list of (tag, i1, i2, j1, j2) tuples, where tag is", "# one of", "# 'replace' a[i1:i2] should be replaced by b[j1:j2]", "# 'delete' a[i1:i2] should be deleted", "# 'insert' b[j1:j2] should be inserted", "# 'equal' a[i1:i2] == b[j1:j2]", "# isjunk", "# a user-supplied function taking a sequence element and", "# returning true iff the element is \"junk\" -- this has", "# subtle but helpful effects on the algorithm, which I'll", "# get around to writing up someday <0.9 wink>.", "# DON'T USE! Only __chain_b uses this. Use \"in self.bjunk\".", "# bjunk", "# the items in b for which isjunk is True.", "# bpopular", "# nonjunk items in b treated as junk by the heuristic (if used).", "self", ".", "isjunk", "=", "isjunk", "self", ".", "a", "=", "self", ".", "b", "=", "None", "self", ".", "autojunk", "=", "autojunk", "self", ".", "set_seqs", "(", "a", ",", "b", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/difflib.py#L151-L213
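Since this record documents only the constructor, a quick usage example of
the class it builds (difflib is in the standard library, so this API is
certain):

from difflib import SequenceMatcher

sm = SequenceMatcher(None, "abcd", "bcde")
print(round(sm.ratio(), 2))    # 0.75 -- 2 * 3 matching elements / (4 + 4)
for tag, i1, i2, j1, j2 in sm.get_opcodes():
    print(tag, "a[{}:{}]".format(i1, i2), "b[{}:{}]".format(j1, j2))
# delete a[0:1] b[0:0]; equal a[1:4] b[0:3]; insert a[4:4] b[3:4]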
ltcmelo/psychec
46672204681d73b40772a7bf24137dca23175e81
cnippet/wrapper/Python/Version.py
python
Version.git_sha
()
return out[:7]
Get git HEAD's sha.
Get git HEAD's sha.
[ "Get", "git", "HEAD", "s", "sha", "." ]
def git_sha():
    """ Get git HEAD's sha. """
    cmd = ['git', 'rev-parse', 'HEAD']
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    out, err = process.communicate()
    if err:
        sys.exit(DiagnosticReporter.fatal(ERROR_FETCHING_GIT_SHA))
    return out[:7]
[ "def", "git_sha", "(", ")", ":", "cmd", "=", "[", "'git'", ",", "'rev-parse'", ",", "'HEAD'", "]", "process", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "PIPE", ")", "out", ",", "err", "=", "process", ".", "communicate", "(", ")", "if", "err", ":", "sys", ".", "exit", "(", "DiagnosticReporter", ".", "fatal", "(", "ERROR_FETCHING_GIT_SHA", ")", ")", "return", "out", "[", ":", "7", "]" ]
https://github.com/ltcmelo/psychec/blob/46672204681d73b40772a7bf24137dca23175e81/cnippet/wrapper/Python/Version.py#L45-L55
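Two caveats about git_sha() as written: communicate() returns err=None here
because stderr is never piped, so the error branch cannot trigger; and on
Python 3 out is bytes, so the caller gets b'abc1234' rather than a str. A
minimal sketch of the same idea with subprocess.run (not the project's code):

import subprocess

def git_sha():
    # check=True raises CalledProcessError on failure; text=True decodes to str.
    result = subprocess.run(["git", "rev-parse", "--short=7", "HEAD"],
                            capture_output=True, text=True, check=True)
    return result.stdout.strip()

print(git_sha())   # e.g. '4667220'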
alibaba/weex_js_engine
2bdf4b6f020c1fc99c63f649718f6faf7e27fdde
jni/v8core/v8/build/gyp/pylib/gyp/MSVSVersion.py
python
SelectVisualStudioVersion
(version='auto')
return versions[0]
Select which version of Visual Studio projects to generate. Arguments: version: Hook to allow caller to force a particular version (vs auto). Returns: An object representing a visual studio project format version.
Select which version of Visual Studio projects to generate.
[ "Select", "which", "version", "of", "Visual", "Studio", "projects", "to", "generate", "." ]
def SelectVisualStudioVersion(version='auto'):
    """Select which version of Visual Studio projects to generate.

    Arguments:
      version: Hook to allow caller to force a particular version (vs auto).
    Returns:
      An object representing a visual studio project format version.
    """
    # In auto mode, check environment variable for override.
    if version == 'auto':
        version = os.environ.get('GYP_MSVS_VERSION', 'auto')
    version_map = {
        'auto': ('10.0', '9.0', '8.0', '11.0'),
        '2005': ('8.0',),
        '2005e': ('8.0',),
        '2008': ('9.0',),
        '2008e': ('9.0',),
        '2010': ('10.0',),
        '2010e': ('10.0',),
        '2012': ('11.0',),
        '2012e': ('11.0',),
    }
    version = str(version)
    versions = _DetectVisualStudioVersions(version_map[version],
                                           'e' in version)
    if not versions:
        if version == 'auto':
            # Default to 2005 if we couldn't find anything
            return _CreateVersion('2005', None)
        else:
            return _CreateVersion(version, None)
    return versions[0]
[ "def", "SelectVisualStudioVersion", "(", "version", "=", "'auto'", ")", ":", "# In auto mode, check environment variable for override.", "if", "version", "==", "'auto'", ":", "version", "=", "os", ".", "environ", ".", "get", "(", "'GYP_MSVS_VERSION'", ",", "'auto'", ")", "version_map", "=", "{", "'auto'", ":", "(", "'10.0'", ",", "'9.0'", ",", "'8.0'", ",", "'11.0'", ")", ",", "'2005'", ":", "(", "'8.0'", ",", ")", ",", "'2005e'", ":", "(", "'8.0'", ",", ")", ",", "'2008'", ":", "(", "'9.0'", ",", ")", ",", "'2008e'", ":", "(", "'9.0'", ",", ")", ",", "'2010'", ":", "(", "'10.0'", ",", ")", ",", "'2010e'", ":", "(", "'10.0'", ",", ")", ",", "'2012'", ":", "(", "'11.0'", ",", ")", ",", "'2012e'", ":", "(", "'11.0'", ",", ")", ",", "}", "version", "=", "str", "(", "version", ")", "versions", "=", "_DetectVisualStudioVersions", "(", "version_map", "[", "version", "]", ",", "'e'", "in", "version", ")", "if", "not", "versions", ":", "if", "version", "==", "'auto'", ":", "# Default to 2005 if we couldn't find anything", "return", "_CreateVersion", "(", "'2005'", ",", "None", ")", "else", ":", "return", "_CreateVersion", "(", "version", ",", "None", ")", "return", "versions", "[", "0", "]" ]
https://github.com/alibaba/weex_js_engine/blob/2bdf4b6f020c1fc99c63f649718f6faf7e27fdde/jni/v8core/v8/build/gyp/pylib/gyp/MSVSVersion.py#L336-L366
scribusproject/scribus
41ec7c775a060912cf251682a8b1437f753f80f4
scribus/plugins/scriptplugin_py2x/scripts/FontSample.py
python
Application.__aboutDlgShow
(self)
Brings up a dialog with support url etc.
Brings up a dialog with support url etc.
[ "Brings", "up", "a", "dialog", "with", "support", "url", "etc", "." ]
def __aboutDlgShow(self):
    """Brings up a dialog with support url etc."""
    about = AboutDialog(self)
[ "def", "__aboutDlgShow", "(", "self", ")", ":", "about", "=", "AboutDialog", "(", "self", ")" ]
https://github.com/scribusproject/scribus/blob/41ec7c775a060912cf251682a8b1437f753f80f4/scribus/plugins/scriptplugin_py2x/scripts/FontSample.py#L1505-L1507
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/npdatetime.py
python
get_best_unit
(unit_a, unit_b)
return unit_a
Get the best (i.e. finer-grained) of two units.
Get the best (i.e. finer-grained) of two units.
[ "Get", "the", "best", "(", "i", ".", "e", ".", "finer", "-", "grained", ")", "of", "two", "units", "." ]
def get_best_unit(unit_a, unit_b):
    """
    Get the best (i.e. finer-grained) of two units.
    """
    a = DATETIME_UNITS[unit_a]
    b = DATETIME_UNITS[unit_b]
    if a == 14:
        return unit_b
    if b == 14:
        return unit_a
    if b > a:
        return unit_b
    return unit_a
[ "def", "get_best_unit", "(", "unit_a", ",", "unit_b", ")", ":", "a", "=", "DATETIME_UNITS", "[", "unit_a", "]", "b", "=", "DATETIME_UNITS", "[", "unit_b", "]", "if", "a", "==", "14", ":", "return", "unit_b", "if", "b", "==", "14", ":", "return", "unit_a", "if", "b", ">", "a", ":", "return", "unit_b", "return", "unit_a" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numba/npdatetime.py#L192-L204
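To see the selection logic in isolation, here is a self-contained sketch with
a hypothetical subset of the DATETIME_UNITS table (the real mapping lives in
numba; as I understand it, larger codes mean finer units and 14 marks the
generic/unit-less code, which is what the special-casing above handles):

# Hypothetical subset: larger index = finer grained, 14 = generic/unit-less.
DATETIME_UNITS = {'D': 4, 'h': 5, 's': 7, 'ms': 8, '': 14}

def get_best_unit(unit_a, unit_b):
    a = DATETIME_UNITS[unit_a]
    b = DATETIME_UNITS[unit_b]
    if a == 14:
        return unit_b
    if b == 14:
        return unit_a
    return unit_b if b > a else unit_a

print(get_best_unit('D', 'ms'))   # 'ms' -- finer of the two
print(get_best_unit('', 's'))     # 's'  -- generic defers to the concrete unit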
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/ipython/py2/IPython/core/crashhandler.py
python
CrashHandler.__call__
(self, etype, evalue, etb)
Handle an exception, call for compatible with sys.excepthook
Handle an exception, call for compatible with sys.excepthook
[ "Handle", "an", "exception", "call", "for", "compatible", "with", "sys", ".", "excepthook" ]
def __call__(self, etype, evalue, etb):
    """Handle an exception, call for compatible with sys.excepthook"""

    # do not allow the crash handler to be called twice without reinstalling it
    # this prevents unlikely errors in the crash handling from entering an
    # infinite loop.
    sys.excepthook = sys.__excepthook__

    # Report tracebacks shouldn't use color in general (safer for users)
    color_scheme = 'NoColor'

    # Use this ONLY for developer debugging (keep commented out for release)
    #color_scheme = 'Linux'   # dbg

    try:
        rptdir = self.app.ipython_dir
    except:
        rptdir = getcwd()
    if rptdir is None or not os.path.isdir(rptdir):
        rptdir = getcwd()
    report_name = os.path.join(rptdir, self.crash_report_fname)
    # write the report filename into the instance dict so it can get
    # properly expanded out in the user message template
    self.crash_report_fname = report_name
    self.info['crash_report_fname'] = report_name
    TBhandler = ultratb.VerboseTB(
        color_scheme=color_scheme,
        long_header=1,
        call_pdb=self.call_pdb,
    )
    if self.call_pdb:
        TBhandler(etype, evalue, etb)
        return
    else:
        traceback = TBhandler.text(etype, evalue, etb, context=31)

    # print traceback to screen
    if self.show_crash_traceback:
        print(traceback, file=sys.stderr)

    # and generate a complete report on disk
    try:
        report = open(report_name, 'w')
    except:
        print('Could not create crash report on disk.', file=sys.stderr)
        return

    # Inform user on stderr of what happened
    print('\n' + '*' * 70 + '\n', file=sys.stderr)
    print(self.message_template.format(**self.info), file=sys.stderr)

    # Construct report on disk
    report.write(self.make_report(traceback))
    report.close()
    input("Hit <Enter> to quit (your terminal may close):")
[ "def", "__call__", "(", "self", ",", "etype", ",", "evalue", ",", "etb", ")", ":", "# do not allow the crash handler to be called twice without reinstalling it", "# this prevents unlikely errors in the crash handling from entering an", "# infinite loop.", "sys", ".", "excepthook", "=", "sys", ".", "__excepthook__", "# Report tracebacks shouldn't use color in general (safer for users)", "color_scheme", "=", "'NoColor'", "# Use this ONLY for developer debugging (keep commented out for release)", "#color_scheme = 'Linux' # dbg", "try", ":", "rptdir", "=", "self", ".", "app", ".", "ipython_dir", "except", ":", "rptdir", "=", "getcwd", "(", ")", "if", "rptdir", "is", "None", "or", "not", "os", ".", "path", ".", "isdir", "(", "rptdir", ")", ":", "rptdir", "=", "getcwd", "(", ")", "report_name", "=", "os", ".", "path", ".", "join", "(", "rptdir", ",", "self", ".", "crash_report_fname", ")", "# write the report filename into the instance dict so it can get", "# properly expanded out in the user message template", "self", ".", "crash_report_fname", "=", "report_name", "self", ".", "info", "[", "'crash_report_fname'", "]", "=", "report_name", "TBhandler", "=", "ultratb", ".", "VerboseTB", "(", "color_scheme", "=", "color_scheme", ",", "long_header", "=", "1", ",", "call_pdb", "=", "self", ".", "call_pdb", ",", ")", "if", "self", ".", "call_pdb", ":", "TBhandler", "(", "etype", ",", "evalue", ",", "etb", ")", "return", "else", ":", "traceback", "=", "TBhandler", ".", "text", "(", "etype", ",", "evalue", ",", "etb", ",", "context", "=", "31", ")", "# print traceback to screen", "if", "self", ".", "show_crash_traceback", ":", "print", "(", "traceback", ",", "file", "=", "sys", ".", "stderr", ")", "# and generate a complete report on disk", "try", ":", "report", "=", "open", "(", "report_name", ",", "'w'", ")", "except", ":", "print", "(", "'Could not create crash report on disk.'", ",", "file", "=", "sys", ".", "stderr", ")", "return", "# Inform user on stderr of what happened", "print", "(", "'\\n'", "+", "'*'", "*", "70", "+", "'\\n'", ",", "file", "=", "sys", ".", "stderr", ")", "print", "(", "self", ".", "message_template", ".", "format", "(", "*", "*", "self", ".", "info", ")", ",", "file", "=", "sys", ".", "stderr", ")", "# Construct report on disk", "report", ".", "write", "(", "self", ".", "make_report", "(", "traceback", ")", ")", "report", ".", "close", "(", ")", "input", "(", "\"Hit <Enter> to quit (your terminal may close):\"", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/ipython/py2/IPython/core/crashhandler.py#L137-L190
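The handler above is installed as a process-wide exception hook. The mechanism
itself is plain Python; a minimal sketch of the same pattern with a
hypothetical handler (not IPython's):

import sys

def crash_handler(etype, evalue, etb):
    # De-install ourselves first so a bug in the handler can't loop.
    sys.excepthook = sys.__excepthook__
    print("fatal: %s: %s" % (etype.__name__, evalue), file=sys.stderr)

sys.excepthook = crash_handler
raise RuntimeError("boom")   # routed to crash_handler, not a raw traceback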
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/emr/connection.py
python
EmrConnection.list_bootstrap_actions
(self, cluster_id, marker=None)
return self.get_object('ListBootstrapActions', params, BootstrapActionList)
Get a list of bootstrap actions for an Elastic MapReduce cluster :type cluster_id: str :param cluster_id: The cluster id of interest :type marker: str :param marker: Pagination marker
Get a list of bootstrap actions for an Elastic MapReduce cluster
[ "Get", "a", "list", "of", "bootstrap", "actions", "for", "an", "Elastic", "MapReduce", "cluster" ]
def list_bootstrap_actions(self, cluster_id, marker=None):
    """
    Get a list of bootstrap actions for an Elastic MapReduce cluster

    :type cluster_id: str
    :param cluster_id: The cluster id of interest

    :type marker: str
    :param marker: Pagination marker
    """
    params = {
        'ClusterId': cluster_id
    }

    if marker:
        params['Marker'] = marker

    return self.get_object('ListBootstrapActions', params,
                           BootstrapActionList)
[ "def", "list_bootstrap_actions", "(", "self", ",", "cluster_id", ",", "marker", "=", "None", ")", ":", "params", "=", "{", "'ClusterId'", ":", "cluster_id", "}", "if", "marker", ":", "params", "[", "'Marker'", "]", "=", "marker", "return", "self", ".", "get_object", "(", "'ListBootstrapActions'", ",", "params", ",", "BootstrapActionList", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/emr/connection.py#L155-L171
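A usage sketch, assuming legacy boto is installed and AWS credentials are
configured. The region is arbitrary, the cluster id is a placeholder, and the
.actions/.name attribute layout is my reading of boto's EMR result objects,
so treat those as assumptions:

import boto.emr

conn = boto.emr.connect_to_region('us-east-1')             # assumes credentials
actions = conn.list_bootstrap_actions('j-1ABCDEFGHIJKL')   # placeholder id
for action in actions.actions:                             # assumed layout
    print(action.name)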
lhmRyan/deep-supervised-hashing-DSH
631901f82e2ab031fbac33f914a5b08ef8e21d57
scripts/cpp_lint.py
python
FindNextMatchingAngleBracket
(clean_lines, linenum, init_suffix)
return True
Find the corresponding > to close a template. Args: clean_lines: A CleansedLines instance containing the file. linenum: Current line number. init_suffix: Remainder of the current line after the initial <. Returns: True if a matching bracket exists.
Find the corresponding > to close a template.
[ "Find", "the", "corresponding", ">", "to", "close", "a", "template", "." ]
def FindNextMatchingAngleBracket(clean_lines, linenum, init_suffix):
    """Find the corresponding > to close a template.

    Args:
      clean_lines: A CleansedLines instance containing the file.
      linenum: Current line number.
      init_suffix: Remainder of the current line after the initial <.
    Returns:
      True if a matching bracket exists.
    """
    line = init_suffix
    nesting_stack = ['<']
    while True:
        # Find the next operator that can tell us whether < is used as an
        # opening bracket or as a less-than operator.  We only want to
        # warn on the latter case.
        #
        # We could also check all other operators and terminate the search
        # early, e.g. if we got something like this "a<b+c", the "<" is
        # most likely a less-than operator, but then we will get false
        # positives for default arguments and other template expressions.
        match = Search(r'^[^<>(),;\[\]]*([<>(),;\[\]])(.*)$', line)
        if match:
            # Found an operator, update nesting stack
            operator = match.group(1)
            line = match.group(2)

            if nesting_stack[-1] == '<':
                # Expecting closing angle bracket
                if operator in ('<', '(', '['):
                    nesting_stack.append(operator)
                elif operator == '>':
                    nesting_stack.pop()
                    if not nesting_stack:
                        # Found matching angle bracket
                        return True
                elif operator == ',':
                    # Got a comma after a bracket, this is most likely a
                    # template argument.  We have not seen a closing angle
                    # bracket yet, but it's probably a few lines later if we
                    # look for it, so just return early here.
                    return True
                else:
                    # Got some other operator.
                    return False

            else:
                # Expecting closing parenthesis or closing bracket
                if operator in ('<', '(', '['):
                    nesting_stack.append(operator)
                elif operator in (')', ']'):
                    # We don't bother checking for matching () or [].  If we
                    # got something like (] or [), it would have been a
                    # syntax error.
                    nesting_stack.pop()

        else:
            # Scan the next line
            linenum += 1
            if linenum >= len(clean_lines.elided):
                break
            line = clean_lines.elided[linenum]

    # Exhausted all remaining lines and still no matching angle bracket.
    # Most likely the input was incomplete, otherwise we should have
    # seen a semicolon and returned early.
    return True
[ "def", "FindNextMatchingAngleBracket", "(", "clean_lines", ",", "linenum", ",", "init_suffix", ")", ":", "line", "=", "init_suffix", "nesting_stack", "=", "[", "'<'", "]", "while", "True", ":", "# Find the next operator that can tell us whether < is used as an", "# opening bracket or as a less-than operator. We only want to", "# warn on the latter case.", "#", "# We could also check all other operators and terminate the search", "# early, e.g. if we got something like this \"a<b+c\", the \"<\" is", "# most likely a less-than operator, but then we will get false", "# positives for default arguments and other template expressions.", "match", "=", "Search", "(", "r'^[^<>(),;\\[\\]]*([<>(),;\\[\\]])(.*)$'", ",", "line", ")", "if", "match", ":", "# Found an operator, update nesting stack", "operator", "=", "match", ".", "group", "(", "1", ")", "line", "=", "match", ".", "group", "(", "2", ")", "if", "nesting_stack", "[", "-", "1", "]", "==", "'<'", ":", "# Expecting closing angle bracket", "if", "operator", "in", "(", "'<'", ",", "'('", ",", "'['", ")", ":", "nesting_stack", ".", "append", "(", "operator", ")", "elif", "operator", "==", "'>'", ":", "nesting_stack", ".", "pop", "(", ")", "if", "not", "nesting_stack", ":", "# Found matching angle bracket", "return", "True", "elif", "operator", "==", "','", ":", "# Got a comma after a bracket, this is most likely a template", "# argument. We have not seen a closing angle bracket yet, but", "# it's probably a few lines later if we look for it, so just", "# return early here.", "return", "True", "else", ":", "# Got some other operator.", "return", "False", "else", ":", "# Expecting closing parenthesis or closing bracket", "if", "operator", "in", "(", "'<'", ",", "'('", ",", "'['", ")", ":", "nesting_stack", ".", "append", "(", "operator", ")", "elif", "operator", "in", "(", "')'", ",", "']'", ")", ":", "# We don't bother checking for matching () or []. If we got", "# something like (] or [), it would have been a syntax error.", "nesting_stack", ".", "pop", "(", ")", "else", ":", "# Scan the next line", "linenum", "+=", "1", "if", "linenum", ">=", "len", "(", "clean_lines", ".", "elided", ")", ":", "break", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "# Exhausted all remaining lines and still no matching angle bracket.", "# Most likely the input was incomplete, otherwise we should have", "# seen a semicolon and returned early.", "return", "True" ]
https://github.com/lhmRyan/deep-supervised-hashing-DSH/blob/631901f82e2ab031fbac33f914a5b08ef8e21d57/scripts/cpp_lint.py#L2517-L2583
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/stc.py
python
StyledTextCtrl.StyleGetEOLFilled
(*args, **kwargs)
return _stc.StyledTextCtrl_StyleGetEOLFilled(*args, **kwargs)
StyleGetEOLFilled(self, int style) -> bool Get is a style to have its end of line filled or not.
StyleGetEOLFilled(self, int style) -> bool
[ "StyleGetEOLFilled", "(", "self", "int", "style", ")", "-", ">", "bool" ]
def StyleGetEOLFilled(*args, **kwargs):
    """
    StyleGetEOLFilled(self, int style) -> bool

    Get is a style to have its end of line filled or not.
    """
    return _stc.StyledTextCtrl_StyleGetEOLFilled(*args, **kwargs)
[ "def", "StyleGetEOLFilled", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_StyleGetEOLFilled", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/stc.py#L2634-L2640
ros-planning/moveit2
dd240ef6fd8b9932a7a53964140f2952786187a9
moveit_commander/src/moveit_commander/move_group.py
python
MoveGroupCommander.detach_object
(self, name="")
return self._g.detach_object(name)
Given the name of a link, detach the object(s) from that link. If no such link exists, the name is interpreted as an object name. If there is no name specified, an attempt is made to detach all objects attached to any link in the group.
Given the name of a link, detach the object(s) from that link. If no such link exists, the name is interpreted as an object name. If there is no name specified, an attempt is made to detach all objects attached to any link in the group.
[ "Given", "the", "name", "of", "a", "link", "detach", "the", "object", "(", "s", ")", "from", "that", "link", ".", "If", "no", "such", "link", "exists", "the", "name", "is", "interpreted", "as", "an", "object", "name", ".", "If", "there", "is", "no", "name", "specified", "an", "attempt", "is", "made", "to", "detach", "all", "objects", "attached", "to", "any", "link", "in", "the", "group", "." ]
def detach_object(self, name=""):
    """ Given the name of a link, detach the object(s) from that link.
    If no such link exists, the name is interpreted as an object name.
    If there is no name specified, an attempt is made to detach all
    objects attached to any link in the group."""
    return self._g.detach_object(name)
[ "def", "detach_object", "(", "self", ",", "name", "=", "\"\"", ")", ":", "return", "self", ".", "_g", ".", "detach_object", "(", "name", ")" ]
https://github.com/ros-planning/moveit2/blob/dd240ef6fd8b9932a7a53964140f2952786187a9/moveit_commander/src/moveit_commander/move_group.py#L693-L695
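A usage sketch exercising the three behaviors the docstring describes,
assuming a running ROS/MoveIt setup; the group, link, and object names are
placeholders:

from moveit_commander import MoveGroupCommander

group = MoveGroupCommander("arm")       # placeholder planning group
group.detach_object("gripper_link")     # detach from a specific link
group.detach_object("box")              # name taken as an object if no such link
group.detach_object()                   # detach everything attached in the group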
macchina-io/macchina.io
ef24ba0e18379c3dd48fb84e6dbf991101cb8db0
platform/JS/V8/tools/gyp/pylib/gyp/generator/ninja.py
python
QuoteShellArgument
(arg, flavor)
return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
Quote a string such that it will be interpreted as a single argument by the shell.
Quote a string such that it will be interpreted as a single argument by the shell.
[ "Quote", "a", "string", "such", "that", "it", "will", "be", "interpreted", "as", "a", "single", "argument", "by", "the", "shell", "." ]
def QuoteShellArgument(arg, flavor):
    """Quote a string such that it will be interpreted as a single argument
    by the shell."""
    # Rather than attempting to enumerate the bad shell characters, just
    # whitelist common OK ones and quote anything else.
    if re.match(r'^[a-zA-Z0-9_=.\\/-]+$', arg):
        return arg  # No quoting necessary.
    if flavor == 'win':
        return gyp.msvs_emulation.QuoteForRspFile(arg)
    return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"
[ "def", "QuoteShellArgument", "(", "arg", ",", "flavor", ")", ":", "# Rather than attempting to enumerate the bad shell characters, just", "# whitelist common OK ones and quote anything else.", "if", "re", ".", "match", "(", "r'^[a-zA-Z0-9_=.\\\\/-]+$'", ",", "arg", ")", ":", "return", "arg", "# No quoting necessary.", "if", "flavor", "==", "'win'", ":", "return", "gyp", ".", "msvs_emulation", ".", "QuoteForRspFile", "(", "arg", ")", "return", "\"'\"", "+", "arg", ".", "replace", "(", "\"'\"", ",", "\"'\"", "+", "'\"\\'\"'", "+", "\"'\"", ")", "+", "\"'\"" ]
https://github.com/macchina-io/macchina.io/blob/ef24ba0e18379c3dd48fb84e6dbf991101cb8db0/platform/JS/V8/tools/gyp/pylib/gyp/generator/ninja.py#L73-L82
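The last line implements the classic POSIX trick of closing the single-quoted
string, emitting a double-quoted single quote, and reopening it. A quick check
of that behavior next to the stdlib equivalent, shlex.quote, which uses the
same technique:

import shlex

def posix_quote(arg):
    # Same expression as QuoteShellArgument's fallback branch.
    return "'" + arg.replace("'", "'" + '"\'"' + "'") + "'"

print(posix_quote("it's here"))    # 'it'"'"'s here'
print(shlex.quote("it's here"))    # 'it'"'"'s here'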
triton-inference-server/server
11a11d9cb1e9734ed9fd305e752da70f07d1992f
qa/common/sequence_util.py
python
SequenceBatcherTestUtil.check_sequence_shape_tensor_io
(self, model_name, input_dtype, correlation_id, sequence_thresholds, values, expected_result, shm_region_handles, using_dynamic_batcher=False, sequence_name="<unknown>")
Perform sequence of inferences using async run. The 'values' holds a list of tuples, one for each inference with format: (flag_str, shape_value, value, pre_delay_ms)
Perform sequence of inferences using async run. The 'values' holds a list of tuples, one for each inference with format:
[ "Perform", "sequence", "of", "inferences", "using", "async", "run", ".", "The", "values", "holds", "a", "list", "of", "tuples", "one", "for", "each", "inference", "with", "format", ":" ]
def check_sequence_shape_tensor_io(self, model_name, input_dtype,
                                   correlation_id, sequence_thresholds,
                                   values, expected_result,
                                   shm_region_handles,
                                   using_dynamic_batcher=False,
                                   sequence_name="<unknown>"):
    """Perform sequence of inferences using async run. The 'values' holds
    a list of tuples, one for each inference with format:

    (flag_str, shape_value, value, pre_delay_ms)

    """
    tensor_shape = (1, 1)
    # shape tensor is 1-D tensor that doesn't contain batch size as first value
    shape_tensor_shape = (1,)
    self.assertFalse(_test_cuda_shared_memory,
                     "Shape tensors does not support CUDA shared memory")

    client_utils = grpcclient
    triton_client = client_utils.InferenceServerClient("localhost:8001",
                                                       verbose=True)
    user_data = UserData()
    triton_client.start_stream(partial(completion_callback, user_data))
    # Execute the sequence of inference...
    try:
        seq_start_ms = int(round(time.time() * 1000))
        sent_count = 0
        shape_values = list()
        for flag_str, shape_value, value, pre_delay_ms in values:
            seq_start = False
            seq_end = False
            if flag_str is not None:
                seq_start = ("start" in flag_str)
                seq_end = ("end" in flag_str)

            # Construct request IOs
            inputs = []
            outputs = []
            # input order: input, shape(, dummy)
            inputs.append(
                client_utils.InferInput(
                    "INPUT", tensor_shape,
                    np_to_triton_dtype(
                        np.int32 if using_dynamic_batcher else input_dtype)))
            inputs.append(
                client_utils.InferInput("SHAPE_INPUT", shape_tensor_shape,
                                        np_to_triton_dtype(np.int32)))
            if using_dynamic_batcher:
                inputs.append(
                    client_utils.InferInput("DUMMY_INPUT", tensor_shape,
                                            np_to_triton_dtype(input_dtype)))
            # output order: shape, output, resized
            outputs.append(client_utils.InferRequestedOutput("SHAPE_OUTPUT"))
            outputs.append(client_utils.InferRequestedOutput("OUTPUT"))
            outputs.append(client_utils.InferRequestedOutput("RESIZED_OUTPUT"))

            # Set IO values
            shape_values.append(
                np.full(shape_tensor_shape, shape_value, dtype=np.int32))
            if not _test_system_shared_memory:
                if using_dynamic_batcher:
                    if input_dtype == np.object_:
                        dummy_in0 = np.full(tensor_shape, value,
                                            dtype=np.int32)
                        dummy_in0n = np.array(
                            [str(x) for x in in0.reshape(dummy_in0.size)],
                            dtype=object)
                        dummy_in0 = dummy_in0n.reshape(tensor_shape)
                    else:
                        dummy_in0 = np.full(tensor_shape, value,
                                            dtype=input_dtype)
                    in0 = np.full(tensor_shape, value, dtype=np.int32)
                else:
                    if input_dtype == np.object_:
                        in0 = np.full(tensor_shape, value, dtype=np.int32)
                        in0n = np.array(
                            [str(x) for x in in0.reshape(in0.size)],
                            dtype=object)
                        in0 = in0n.reshape(tensor_shape)
                    else:
                        in0 = np.full(tensor_shape, value, dtype=input_dtype)
                inputs[0].set_data_from_numpy(in0)
                inputs[1].set_data_from_numpy(shape_values[-1])
                if using_dynamic_batcher:
                    inputs[2].set_data_from_numpy(dummy_in0)
            else:
                if using_dynamic_batcher:
                    input_offset = 6 * sent_count
                    output_offset = 6 * sent_count + 3
                else:
                    input_offset = 5 * sent_count
                    output_offset = 5 * sent_count + 2
                for i in range(len(inputs)):
                    inputs[i].set_shared_memory(
                        shm_region_handles[input_offset + i][0],
                        shm_region_handles[input_offset + i][1])
                for i in range(len(outputs)):
                    outputs[i].set_shared_memory(
                        shm_region_handles[output_offset + i][0],
                        shm_region_handles[output_offset + i][1])

            if pre_delay_ms is not None:
                time.sleep(pre_delay_ms / 1000.0)

            triton_client.async_stream_infer(model_name,
                                             inputs,
                                             outputs=outputs,
                                             sequence_id=correlation_id,
                                             sequence_start=seq_start,
                                             sequence_end=seq_end)

            sent_count += 1

        # Wait for the results in the order sent
        result = None
        processed_count = 0
        while processed_count < sent_count:
            (results, error) = user_data._completed_requests.get()
            if error is not None:
                raise error

            # Get value of "OUTPUT", for shared memory, need to get it via
            # shared memory utils
            if (not _test_system_shared_memory):
                out = results.as_numpy("OUTPUT")
            else:
                output = results.get_output("OUTPUT")
                output_offset = 6 * processed_count + 4 if using_dynamic_batcher else 5 * processed_count + 3
                output_shape = output.shape
                output_type = np.int32 if using_dynamic_batcher else np.float32
                out = shm.get_contents_as_numpy(
                    shm_region_handles[output_offset][2], output_type,
                    output_shape)
            result = out[0][0]

            # Validate the (debatched) shape of the resized output matches
            # with the shape input values
            resized_shape = results.get_output("RESIZED_OUTPUT").shape[1:]
            self.assertTrue(
                np.array_equal(resized_shape, shape_values[processed_count]),
                "{}, {}, slot {}, expected: {}, got {}".format(
                    model_name, "RESIZED_OUTPUT", processed_count,
                    shape_values[processed_count], resized_shape))
            print("{}: {}".format(sequence_name, result))
            processed_count += 1

        seq_end_ms = int(round(time.time() * 1000))

        if input_dtype == np.object_:
            self.assertEqual(int(result), expected_result)
        else:
            self.assertEqual(result, expected_result)

        if sequence_thresholds is not None:
            lt_ms = sequence_thresholds[0]
            gt_ms = sequence_thresholds[1]
            if lt_ms is not None:
                if _test_jetson:
                    lt_ms *= _jetson_slowdown_factor
                self.assertTrue((seq_end_ms - seq_start_ms) < lt_ms,
                                "sequence expected less than " + str(lt_ms) +
                                "ms response time, got " +
                                str(seq_end_ms - seq_start_ms) + " ms")
            if gt_ms is not None:
                self.assertTrue((seq_end_ms - seq_start_ms) > gt_ms,
                                "sequence expected greater than " +
                                str(gt_ms) + "ms response time, got " +
                                str(seq_end_ms - seq_start_ms) + " ms")
    except Exception as ex:
        self.add_deferred_exception(ex)
    triton_client.stop_stream()
[ "def", "check_sequence_shape_tensor_io", "(", "self", ",", "model_name", ",", "input_dtype", ",", "correlation_id", ",", "sequence_thresholds", ",", "values", ",", "expected_result", ",", "shm_region_handles", ",", "using_dynamic_batcher", "=", "False", ",", "sequence_name", "=", "\"<unknown>\"", ")", ":", "tensor_shape", "=", "(", "1", ",", "1", ")", "# shape tensor is 1-D tensor that doesn't contain batch size as first value", "shape_tensor_shape", "=", "(", "1", ",", ")", "self", ".", "assertFalse", "(", "_test_cuda_shared_memory", ",", "\"Shape tensors does not support CUDA shared memory\"", ")", "client_utils", "=", "grpcclient", "triton_client", "=", "client_utils", ".", "InferenceServerClient", "(", "\"localhost:8001\"", ",", "verbose", "=", "True", ")", "user_data", "=", "UserData", "(", ")", "triton_client", ".", "start_stream", "(", "partial", "(", "completion_callback", ",", "user_data", ")", ")", "# Execute the sequence of inference...", "try", ":", "seq_start_ms", "=", "int", "(", "round", "(", "time", ".", "time", "(", ")", "*", "1000", ")", ")", "sent_count", "=", "0", "shape_values", "=", "list", "(", ")", "for", "flag_str", ",", "shape_value", ",", "value", ",", "pre_delay_ms", "in", "values", ":", "seq_start", "=", "False", "seq_end", "=", "False", "if", "flag_str", "is", "not", "None", ":", "seq_start", "=", "(", "\"start\"", "in", "flag_str", ")", "seq_end", "=", "(", "\"end\"", "in", "flag_str", ")", "# Construct request IOs", "inputs", "=", "[", "]", "outputs", "=", "[", "]", "# input order: input, shape(, dummy)", "inputs", ".", "append", "(", "client_utils", ".", "InferInput", "(", "\"INPUT\"", ",", "tensor_shape", ",", "np_to_triton_dtype", "(", "np", ".", "int32", "if", "using_dynamic_batcher", "else", "input_dtype", ")", ")", ")", "inputs", ".", "append", "(", "client_utils", ".", "InferInput", "(", "\"SHAPE_INPUT\"", ",", "shape_tensor_shape", ",", "np_to_triton_dtype", "(", "np", ".", "int32", ")", ")", ")", "if", "using_dynamic_batcher", ":", "inputs", ".", "append", "(", "client_utils", ".", "InferInput", "(", "\"DUMMY_INPUT\"", ",", "tensor_shape", ",", "np_to_triton_dtype", "(", "input_dtype", ")", ")", ")", "# output order: shape, output, resized", "outputs", ".", "append", "(", "client_utils", ".", "InferRequestedOutput", "(", "\"SHAPE_OUTPUT\"", ")", ")", "outputs", ".", "append", "(", "client_utils", ".", "InferRequestedOutput", "(", "\"OUTPUT\"", ")", ")", "outputs", ".", "append", "(", "client_utils", ".", "InferRequestedOutput", "(", "\"RESIZED_OUTPUT\"", ")", ")", "# Set IO values", "shape_values", ".", "append", "(", "np", ".", "full", "(", "shape_tensor_shape", ",", "shape_value", ",", "dtype", "=", "np", ".", "int32", ")", ")", "if", "not", "_test_system_shared_memory", ":", "if", "using_dynamic_batcher", ":", "if", "input_dtype", "==", "np", ".", "object_", ":", "dummy_in0", "=", "np", ".", "full", "(", "tensor_shape", ",", "value", ",", "dtype", "=", "np", ".", "int32", ")", "dummy_in0n", "=", "np", ".", "array", "(", "[", "str", "(", "x", ")", "for", "x", "in", "in0", ".", "reshape", "(", "dummy_in0", ".", "size", ")", "]", ",", "dtype", "=", "object", ")", "dummy_in0", "=", "dummy_in0n", ".", "reshape", "(", "tensor_shape", ")", "else", ":", "dummy_in0", "=", "np", ".", "full", "(", "tensor_shape", ",", "value", ",", "dtype", "=", "input_dtype", ")", "in0", "=", "np", ".", "full", "(", "tensor_shape", ",", "value", ",", "dtype", "=", "np", ".", "int32", ")", "else", ":", "if", "input_dtype", "==", "np", ".", "object_", ":", "in0", "=", 
"np", ".", "full", "(", "tensor_shape", ",", "value", ",", "dtype", "=", "np", ".", "int32", ")", "in0n", "=", "np", ".", "array", "(", "[", "str", "(", "x", ")", "for", "x", "in", "in0", ".", "reshape", "(", "in0", ".", "size", ")", "]", ",", "dtype", "=", "object", ")", "in0", "=", "in0n", ".", "reshape", "(", "tensor_shape", ")", "else", ":", "in0", "=", "np", ".", "full", "(", "tensor_shape", ",", "value", ",", "dtype", "=", "input_dtype", ")", "inputs", "[", "0", "]", ".", "set_data_from_numpy", "(", "in0", ")", "inputs", "[", "1", "]", ".", "set_data_from_numpy", "(", "shape_values", "[", "-", "1", "]", ")", "if", "using_dynamic_batcher", ":", "inputs", "[", "2", "]", ".", "set_data_from_numpy", "(", "dummy_in0", ")", "else", ":", "if", "using_dynamic_batcher", ":", "input_offset", "=", "6", "*", "sent_count", "output_offset", "=", "6", "*", "sent_count", "+", "3", "else", ":", "input_offset", "=", "5", "*", "sent_count", "output_offset", "=", "5", "*", "sent_count", "+", "2", "for", "i", "in", "range", "(", "len", "(", "inputs", ")", ")", ":", "inputs", "[", "i", "]", ".", "set_shared_memory", "(", "shm_region_handles", "[", "input_offset", "+", "i", "]", "[", "0", "]", ",", "shm_region_handles", "[", "input_offset", "+", "i", "]", "[", "1", "]", ")", "for", "i", "in", "range", "(", "len", "(", "outputs", ")", ")", ":", "outputs", "[", "i", "]", ".", "set_shared_memory", "(", "shm_region_handles", "[", "output_offset", "+", "i", "]", "[", "0", "]", ",", "shm_region_handles", "[", "output_offset", "+", "i", "]", "[", "1", "]", ")", "if", "pre_delay_ms", "is", "not", "None", ":", "time", ".", "sleep", "(", "pre_delay_ms", "/", "1000.0", ")", "triton_client", ".", "async_stream_infer", "(", "model_name", ",", "inputs", ",", "outputs", "=", "outputs", ",", "sequence_id", "=", "correlation_id", ",", "sequence_start", "=", "seq_start", ",", "sequence_end", "=", "seq_end", ")", "sent_count", "+=", "1", "# Wait for the results in the order sent", "result", "=", "None", "processed_count", "=", "0", "while", "processed_count", "<", "sent_count", ":", "(", "results", ",", "error", ")", "=", "user_data", ".", "_completed_requests", ".", "get", "(", ")", "if", "error", "is", "not", "None", ":", "raise", "error", "# Get value of \"OUTPUT\", for shared memory, need to get it via", "# shared memory utils", "if", "(", "not", "_test_system_shared_memory", ")", ":", "out", "=", "results", ".", "as_numpy", "(", "\"OUTPUT\"", ")", "else", ":", "output", "=", "results", ".", "get_output", "(", "\"OUTPUT\"", ")", "output_offset", "=", "6", "*", "processed_count", "+", "4", "if", "using_dynamic_batcher", "else", "5", "*", "processed_count", "+", "3", "output_shape", "=", "output", ".", "shape", "output_type", "=", "np", ".", "int32", "if", "using_dynamic_batcher", "else", "np", ".", "float32", "out", "=", "shm", ".", "get_contents_as_numpy", "(", "shm_region_handles", "[", "output_offset", "]", "[", "2", "]", ",", "output_type", ",", "output_shape", ")", "result", "=", "out", "[", "0", "]", "[", "0", "]", "# Validate the (debatched) shape of the resized output matches", "# with the shape input values", "resized_shape", "=", "results", ".", "get_output", "(", "\"RESIZED_OUTPUT\"", ")", ".", "shape", "[", "1", ":", "]", "self", ".", "assertTrue", "(", "np", ".", "array_equal", "(", "resized_shape", ",", "shape_values", "[", "processed_count", "]", ")", ",", "\"{}, {}, slot {}, expected: {}, got {}\"", ".", "format", "(", "model_name", ",", "\"RESIZED_OUTPUT\"", ",", "processed_count", ",", "shape_values", 
"[", "processed_count", "]", ",", "resized_shape", ")", ")", "print", "(", "\"{}: {}\"", ".", "format", "(", "sequence_name", ",", "result", ")", ")", "processed_count", "+=", "1", "seq_end_ms", "=", "int", "(", "round", "(", "time", ".", "time", "(", ")", "*", "1000", ")", ")", "if", "input_dtype", "==", "np", ".", "object_", ":", "self", ".", "assertEqual", "(", "int", "(", "result", ")", ",", "expected_result", ")", "else", ":", "self", ".", "assertEqual", "(", "result", ",", "expected_result", ")", "if", "sequence_thresholds", "is", "not", "None", ":", "lt_ms", "=", "sequence_thresholds", "[", "0", "]", "gt_ms", "=", "sequence_thresholds", "[", "1", "]", "if", "lt_ms", "is", "not", "None", ":", "if", "_test_jetson", ":", "lt_ms", "*=", "_jetson_slowdown_factor", "self", ".", "assertTrue", "(", "(", "seq_end_ms", "-", "seq_start_ms", ")", "<", "lt_ms", ",", "\"sequence expected less than \"", "+", "str", "(", "lt_ms", ")", "+", "\"ms response time, got \"", "+", "str", "(", "seq_end_ms", "-", "seq_start_ms", ")", "+", "\" ms\"", ")", "if", "gt_ms", "is", "not", "None", ":", "self", ".", "assertTrue", "(", "(", "seq_end_ms", "-", "seq_start_ms", ")", ">", "gt_ms", ",", "\"sequence expected greater than \"", "+", "str", "(", "gt_ms", ")", "+", "\"ms response time, got \"", "+", "str", "(", "seq_end_ms", "-", "seq_start_ms", ")", "+", "\" ms\"", ")", "except", "Exception", "as", "ex", ":", "self", ".", "add_deferred_exception", "(", "ex", ")", "triton_client", ".", "stop_stream", "(", ")" ]
https://github.com/triton-inference-server/server/blob/11a11d9cb1e9734ed9fd305e752da70f07d1992f/qa/common/sequence_util.py#L785-L968
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
current/deps/v8/third_party/jinja2/environment.py
python
Environment.lex
(self, source, name=None, filename=None)
Lex the given sourcecode and return a generator that yields tokens as tuples in the form ``(lineno, token_type, value)``. This can be useful for :ref:`extension development <writing-extensions>` and debugging templates. This does not perform preprocessing. If you want the preprocessing of the extensions to be applied you have to filter source through the :meth:`preprocess` method.
Lex the given sourcecode and return a generator that yields tokens as tuples in the form ``(lineno, token_type, value)``. This can be useful for :ref:`extension development <writing-extensions>` and debugging templates.
[ "Lex", "the", "given", "sourcecode", "and", "return", "a", "generator", "that", "yields", "tokens", "as", "tuples", "in", "the", "form", "(", "lineno", "token_type", "value", ")", ".", "This", "can", "be", "useful", "for", ":", "ref", ":", "extension", "development", "<writing", "-", "extensions", ">", "and", "debugging", "templates", "." ]
def lex(self, source, name=None, filename=None):
    """Lex the given sourcecode and return a generator that yields
    tokens as tuples in the form ``(lineno, token_type, value)``.
    This can be useful for :ref:`extension development <writing-extensions>`
    and debugging templates.

    This does not perform preprocessing.  If you want the preprocessing
    of the extensions to be applied you have to filter source through
    the :meth:`preprocess` method.
    """
    source = text_type(source)
    try:
        return self.lexer.tokeniter(source, name, filename)
    except TemplateSyntaxError:
        exc_info = sys.exc_info()
        self.handle_exception(exc_info, source_hint=source)
[ "def", "lex", "(", "self", ",", "source", ",", "name", "=", "None", ",", "filename", "=", "None", ")", ":", "source", "=", "text_type", "(", "source", ")", "try", ":", "return", "self", ".", "lexer", ".", "tokeniter", "(", "source", ",", "name", ",", "filename", ")", "except", "TemplateSyntaxError", ":", "exc_info", "=", "sys", ".", "exc_info", "(", ")", "self", ".", "handle_exception", "(", "exc_info", ",", "source_hint", "=", "source", ")" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/deps/v8/third_party/jinja2/environment.py#L499-L514
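A quick usage sketch of Environment.lex (standard Jinja2 API; the template
string is arbitrary):

from jinja2 import Environment

env = Environment()
for lineno, token_type, value in env.lex("Hello {{ name }}!"):
    print(lineno, token_type, repr(value))
# 1 data 'Hello '
# 1 variable_begin '{{'
# ... then name, variable_end, and the trailing '!' as data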
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/core/arrayprint.py
python
set_printoptions
(precision=None, threshold=None, edgeitems=None, linewidth=None, suppress=None, nanstr=None, infstr=None, formatter=None, sign=None, floatmode=None, **kwarg)
Set printing options.

These options determine the way floating point numbers, arrays and
other NumPy objects are displayed.

Parameters
----------
precision : int or None, optional
    Number of digits of precision for floating point output (default 8).
    May be None if `floatmode` is not `fixed`, to print as many digits as
    necessary to uniquely specify the value.
threshold : int, optional
    Total number of array elements which trigger summarization
    rather than full repr (default 1000).
    To always use the full repr without summarization, pass `sys.maxsize`.
edgeitems : int, optional
    Number of array items in summary at beginning and end of
    each dimension (default 3).
linewidth : int, optional
    The number of characters per line for the purpose of inserting
    line breaks (default 75).
suppress : bool, optional
    If True, always print floating point numbers using fixed point
    notation, in which case numbers equal to zero in the current precision
    will print as zero.  If False, then scientific notation is used when
    absolute value of the smallest number is < 1e-4 or the ratio of the
    maximum absolute value to the minimum is > 1e3.  The default is False.
nanstr : str, optional
    String representation of floating point not-a-number (default nan).
infstr : str, optional
    String representation of floating point infinity (default inf).
sign : string, either '-', '+', or ' ', optional
    Controls printing of the sign of floating-point types.  If '+', always
    print the sign of positive values.  If ' ', always prints a space
    (whitespace character) in the sign position of positive values.  If
    '-', omit the sign character of positive values. (default '-')
formatter : dict of callables, optional
    If not None, the keys should indicate the type(s) that the respective
    formatting function applies to.  Callables should return a string.
    Types that are not specified (by their corresponding keys) are handled
    by the default formatters.  Individual types for which a formatter
    can be set are:

    - 'bool'
    - 'int'
    - 'timedelta' : a `numpy.timedelta64`
    - 'datetime' : a `numpy.datetime64`
    - 'float'
    - 'longfloat' : 128-bit floats
    - 'complexfloat'
    - 'longcomplexfloat' : composed of two 128-bit floats
    - 'numpystr' : types `numpy.string_` and `numpy.unicode_`
    - 'object' : `np.object_` arrays
    - 'str' : all other strings

    Other keys that can be used to set a group of types at once are:

    - 'all' : sets all types
    - 'int_kind' : sets 'int'
    - 'float_kind' : sets 'float' and 'longfloat'
    - 'complex_kind' : sets 'complexfloat' and 'longcomplexfloat'
    - 'str_kind' : sets 'str' and 'numpystr'
floatmode : str, optional
    Controls the interpretation of the `precision` option for
    floating-point types.  Can take the following values
    (default maxprec_equal):

    * 'fixed': Always print exactly `precision` fractional digits,
      even if this would print more or fewer digits than
      necessary to specify the value uniquely.
    * 'unique': Print the minimum number of fractional digits necessary
      to represent each value uniquely.  Different elements may
      have a different number of digits.  The value of the
      `precision` option is ignored.
    * 'maxprec': Print at most `precision` fractional digits, but if
      an element can be uniquely represented with fewer digits
      only print it with that many.
    * 'maxprec_equal': Print at most `precision` fractional digits,
      but if every element in the array can be uniquely
      represented with an equal number of fewer digits, use that
      many digits for all elements.
legacy : string or `False`, optional
    If set to the string `'1.13'` enables 1.13 legacy printing mode.  This
    approximates numpy 1.13 print output by including a space in the sign
    position of floats and different behavior for 0d arrays.  If set to
    `False`, disables legacy mode.  Unrecognized strings will be ignored
    with a warning for forward compatibility.

    .. versionadded:: 1.14.0

See Also
--------
get_printoptions, printoptions, set_string_function, array2string

Notes
-----
`formatter` is always reset with a call to `set_printoptions`.

Use `printoptions` as a context manager to set the values temporarily.

Examples
--------
Floating point precision can be set:

>>> np.set_printoptions(precision=4)
>>> np.array([1.123456789])
[1.1235]

Long arrays can be summarised:

>>> np.set_printoptions(threshold=5)
>>> np.arange(10)
array([0, 1, 2, ..., 7, 8, 9])

Small results can be suppressed:

>>> eps = np.finfo(float).eps
>>> x = np.arange(4.)
>>> x**2 - (x + eps)**2
array([-4.9304e-32, -4.4409e-16,  0.0000e+00,  0.0000e+00])
>>> np.set_printoptions(suppress=True)
>>> x**2 - (x + eps)**2
array([-0., -0.,  0.,  0.])

A custom formatter can be used to display array elements as desired:

>>> np.set_printoptions(formatter={'all':lambda x: 'int: '+str(-x)})
>>> x = np.arange(3)
>>> x
array([int: 0, int: -1, int: -2])
>>> np.set_printoptions()  # formatter gets reset
>>> x
array([0, 1, 2])

To put back the default options, you can use:

>>> np.set_printoptions(edgeitems=3, infstr='inf',
...                     linewidth=75, nanstr='nan', precision=8,
...                     suppress=False, threshold=1000, formatter=None)

Also to temporarily override options, use `printoptions` as a context
manager:

>>> with np.printoptions(precision=2, suppress=True, threshold=5):
...     np.linspace(0, 10, 10)
array([ 0.  ,  1.11,  2.22, ...,  7.78,  8.89, 10.  ])
Set printing options.
[ "Set", "printing", "options", "." ]
def set_printoptions(precision=None, threshold=None, edgeitems=None,
                     linewidth=None, suppress=None, nanstr=None, infstr=None,
                     formatter=None, sign=None, floatmode=None, **kwarg):
    """
    Set printing options.

    These options determine the way floating point numbers, arrays and
    other NumPy objects are displayed.

    Parameters
    ----------
    precision : int or None, optional
        Number of digits of precision for floating point output (default 8).
        May be None if `floatmode` is not `fixed`, to print as many digits
        as necessary to uniquely specify the value.
    threshold : int, optional
        Total number of array elements which trigger summarization
        rather than full repr (default 1000).
        To always use the full repr without summarization, pass `sys.maxsize`.
    edgeitems : int, optional
        Number of array items in summary at beginning and end of
        each dimension (default 3).
    linewidth : int, optional
        The number of characters per line for the purpose of inserting
        line breaks (default 75).
    suppress : bool, optional
        If True, always print floating point numbers using fixed point
        notation, in which case numbers equal to zero in the current
        precision will print as zero.  If False, then scientific notation
        is used when absolute value of the smallest number is < 1e-4 or the
        ratio of the maximum absolute value to the minimum is > 1e3.  The
        default is False.
    nanstr : str, optional
        String representation of floating point not-a-number (default nan).
    infstr : str, optional
        String representation of floating point infinity (default inf).
    sign : string, either '-', '+', or ' ', optional
        Controls printing of the sign of floating-point types.  If '+',
        always print the sign of positive values.  If ' ', always prints a
        space (whitespace character) in the sign position of positive
        values.  If '-', omit the sign character of positive values.
        (default '-')
    formatter : dict of callables, optional
        If not None, the keys should indicate the type(s) that the
        respective formatting function applies to.  Callables should return
        a string.  Types that are not specified (by their corresponding
        keys) are handled by the default formatters.  Individual types for
        which a formatter can be set are:

        - 'bool'
        - 'int'
        - 'timedelta' : a `numpy.timedelta64`
        - 'datetime' : a `numpy.datetime64`
        - 'float'
        - 'longfloat' : 128-bit floats
        - 'complexfloat'
        - 'longcomplexfloat' : composed of two 128-bit floats
        - 'numpystr' : types `numpy.string_` and `numpy.unicode_`
        - 'object' : `np.object_` arrays
        - 'str' : all other strings

        Other keys that can be used to set a group of types at once are:

        - 'all' : sets all types
        - 'int_kind' : sets 'int'
        - 'float_kind' : sets 'float' and 'longfloat'
        - 'complex_kind' : sets 'complexfloat' and 'longcomplexfloat'
        - 'str_kind' : sets 'str' and 'numpystr'
    floatmode : str, optional
        Controls the interpretation of the `precision` option for
        floating-point types.  Can take the following values
        (default maxprec_equal):

        * 'fixed': Always print exactly `precision` fractional digits,
          even if this would print more or fewer digits than
          necessary to specify the value uniquely.
        * 'unique': Print the minimum number of fractional digits necessary
          to represent each value uniquely.  Different elements may
          have a different number of digits.  The value of the
          `precision` option is ignored.
        * 'maxprec': Print at most `precision` fractional digits, but if
          an element can be uniquely represented with fewer digits
          only print it with that many.
        * 'maxprec_equal': Print at most `precision` fractional digits,
          but if every element in the array can be uniquely
          represented with an equal number of fewer digits, use that
          many digits for all elements.
    legacy : string or `False`, optional
        If set to the string `'1.13'` enables 1.13 legacy printing mode.
        This approximates numpy 1.13 print output by including a space in
        the sign position of floats and different behavior for 0d arrays.
        If set to `False`, disables legacy mode.  Unrecognized strings will
        be ignored with a warning for forward compatibility.

        .. versionadded:: 1.14.0

    See Also
    --------
    get_printoptions, printoptions, set_string_function, array2string

    Notes
    -----
    `formatter` is always reset with a call to `set_printoptions`.

    Use `printoptions` as a context manager to set the values temporarily.

    Examples
    --------
    Floating point precision can be set:

    >>> np.set_printoptions(precision=4)
    >>> np.array([1.123456789])
    [1.1235]

    Long arrays can be summarised:

    >>> np.set_printoptions(threshold=5)
    >>> np.arange(10)
    array([0, 1, 2, ..., 7, 8, 9])

    Small results can be suppressed:

    >>> eps = np.finfo(float).eps
    >>> x = np.arange(4.)
    >>> x**2 - (x + eps)**2
    array([-4.9304e-32, -4.4409e-16,  0.0000e+00,  0.0000e+00])
    >>> np.set_printoptions(suppress=True)
    >>> x**2 - (x + eps)**2
    array([-0., -0.,  0.,  0.])

    A custom formatter can be used to display array elements as desired:

    >>> np.set_printoptions(formatter={'all':lambda x: 'int: '+str(-x)})
    >>> x = np.arange(3)
    >>> x
    array([int: 0, int: -1, int: -2])
    >>> np.set_printoptions()  # formatter gets reset
    >>> x
    array([0, 1, 2])

    To put back the default options, you can use:

    >>> np.set_printoptions(edgeitems=3, infstr='inf',
    ...                     linewidth=75, nanstr='nan', precision=8,
    ...                     suppress=False, threshold=1000, formatter=None)

    Also to temporarily override options, use `printoptions` as a context
    manager:

    >>> with np.printoptions(precision=2, suppress=True, threshold=5):
    ...     np.linspace(0, 10, 10)
    array([ 0.  ,  1.11,  2.22, ...,  7.78,  8.89, 10.  ])
    """
    legacy = kwarg.pop('legacy', None)
    if kwarg:
        msg = "set_printoptions() got unexpected keyword argument '{}'"
        raise TypeError(msg.format(kwarg.popitem()[0]))

    opt = _make_options_dict(precision, threshold, edgeitems, linewidth,
                             suppress, nanstr, infstr, sign, formatter,
                             floatmode, legacy)
    # formatter is always reset
    opt['formatter'] = formatter
    _format_options.update(opt)

    # set the C variable for legacy mode
    if _format_options['legacy'] == '1.13':
        set_legacy_print_mode(113)
        # reset the sign option in legacy mode to avoid confusion
        _format_options['sign'] = '-'
    elif _format_options['legacy'] is False:
        set_legacy_print_mode(0)
[ "def", "set_printoptions", "(", "precision", "=", "None", ",", "threshold", "=", "None", ",", "edgeitems", "=", "None", ",", "linewidth", "=", "None", ",", "suppress", "=", "None", ",", "nanstr", "=", "None", ",", "infstr", "=", "None", ",", "formatter", "=", "None", ",", "sign", "=", "None", ",", "floatmode", "=", "None", ",", "*", "*", "kwarg", ")", ":", "legacy", "=", "kwarg", ".", "pop", "(", "'legacy'", ",", "None", ")", "if", "kwarg", ":", "msg", "=", "\"set_printoptions() got unexpected keyword argument '{}'\"", "raise", "TypeError", "(", "msg", ".", "format", "(", "kwarg", ".", "popitem", "(", ")", "[", "0", "]", ")", ")", "opt", "=", "_make_options_dict", "(", "precision", ",", "threshold", ",", "edgeitems", ",", "linewidth", ",", "suppress", ",", "nanstr", ",", "infstr", ",", "sign", ",", "formatter", ",", "floatmode", ",", "legacy", ")", "# formatter is always reset", "opt", "[", "'formatter'", "]", "=", "formatter", "_format_options", ".", "update", "(", "opt", ")", "# set the C variable for legacy mode", "if", "_format_options", "[", "'legacy'", "]", "==", "'1.13'", ":", "set_legacy_print_mode", "(", "113", ")", "# reset the sign option in legacy mode to avoid confusion", "_format_options", "[", "'sign'", "]", "=", "'-'", "elif", "_format_options", "[", "'legacy'", "]", "is", "False", ":", "set_legacy_print_mode", "(", "0", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numpy/core/arrayprint.py#L101-L270
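The floatmode values above are easiest to compare side by side. A minimal sketch, assuming NumPy 1.14+ (the printed outputs in the comments are approximate):

import numpy as np

x = np.array([0.5, 0.123456789])
np.set_printoptions(precision=4, floatmode='fixed')
print(x)   # [0.5000 0.1235] -- always exactly 4 fractional digits
np.set_printoptions(precision=4, floatmode='maxprec')
print(x)   # [0.5    0.1235] -- at most 4; fewer where the value is already unique
np.set_printoptions()  # note: this resets only the formatter, not the other options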
tomahawk-player/tomahawk-resolvers
7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d
archive/spotify/breakpad/third_party/protobuf/protobuf/python/mox.py
python
MockMethod.__init__
(self, method_name, call_queue, replay_mode)
Construct a new mock method. Args: method_name: str; the name of the method. call_queue: list or deque of calls; verify this call against the head, or add this call to the queue. replay_mode: bool; False if we are recording, True if we are verifying calls against the call queue.
Construct a new mock method.
[ "Construct", "a", "new", "mock", "method", "." ]
def __init__(self, method_name, call_queue, replay_mode):
    """Construct a new mock method.

    Args:
      method_name: str; the name of the method.
      call_queue: list or deque of calls; verify this call against the
        head, or add this call to the queue.
      replay_mode: bool; False if we are recording, True if we are
        verifying calls against the call queue.
    """

    self._name = method_name
    self._call_queue = call_queue
    if not isinstance(call_queue, deque):
      self._call_queue = deque(self._call_queue)
    self._replay_mode = replay_mode

    self._params = None
    self._named_params = None
    self._return_value = None
    self._exception = None
    self._side_effects = None
[ "def", "__init__", "(", "self", ",", "method_name", ",", "call_queue", ",", "replay_mode", ")", ":", "self", ".", "_name", "=", "method_name", "self", ".", "_call_queue", "=", "call_queue", "if", "not", "isinstance", "(", "call_queue", ",", "deque", ")", ":", "self", ".", "_call_queue", "=", "deque", "(", "self", ".", "_call_queue", ")", "self", ".", "_replay_mode", "=", "replay_mode", "self", ".", "_params", "=", "None", "self", ".", "_named_params", "=", "None", "self", ".", "_return_value", "=", "None", "self", ".", "_exception", "=", "None", "self", ".", "_side_effects", "=", "None" ]
https://github.com/tomahawk-player/tomahawk-resolvers/blob/7f827bbe410ccfdb0446f7d6a91acc2199c9cc8d/archive/spotify/breakpad/third_party/protobuf/protobuf/python/mox.py#L519-L543
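A construction sketch for the two modes documented above; the queue is shared so that methods recorded first are verified first (MockMethod is mox's module-level class, normally driven by Mox/MockObject rather than used directly):

from collections import deque
from mox import MockMethod   # the class documented above

calls = deque()
# replay_mode=False (recording): calls made through the mock are added to the queue
expected = MockMethod('Save', calls, replay_mode=False)
# replay_mode=True (replaying): calls are verified against the head of the same queue
actual = MockMethod('Save', calls, replay_mode=True)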
tensorflow/deepmath
b5b721f54de1d5d6a02d78f5da5995237f9995f9
deepmath/premises/model_definition_hybrid_pool2.py
python
Model.axiom_embedding
(self, axioms)
Compute the embedding for each of the axioms.
Compute the embedding for each of the axioms.
[ "Compute", "the", "embedding", "for", "each", "of", "the", "axioms", "." ]
def axiom_embedding(self, axioms): """Compute the embedding for each of the axioms.""" with tf.variable_scope('axiom'): return self.make_embedding(axioms)
[ "def", "axiom_embedding", "(", "self", ",", "axioms", ")", ":", "with", "tf", ".", "variable_scope", "(", "'axiom'", ")", ":", "return", "self", ".", "make_embedding", "(", "axioms", ")" ]
https://github.com/tensorflow/deepmath/blob/b5b721f54de1d5d6a02d78f5da5995237f9995f9/deepmath/premises/model_definition_hybrid_pool2.py#L81-L84
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/session_bundle/bundle_shim.py
python
_convert_signatures_to_signature_defs
(metagraph_def)
return default_signature_def, named_signature_def
Produce default and named upconverted SignatureDef objects from Signatures. Args: metagraph_def: object of type meta_graph_pb2.MetaGraphDef containing legacy format Session Bundle signatures Returns: default_signature_def: object of type SignatureDef which contains an upconverted version of default signatures in metagraph_def named_signature_def: object of type SignatureDef which contains an upconverted version of named signatures in metagraph_def
Produce default and named upconverted SignatureDef objects from Signatures.
[ "Produce", "default", "and", "named", "upconverted", "SignatureDef", "objects", "from", "Signatures", "." ]
def _convert_signatures_to_signature_defs(metagraph_def): """Produce default and named upconverted SignatureDef objects from Signatures. Args: metagraph_def: object of type meta_graph_pb2.MetaGraphDef containing legacy format Session Bundle signatures Returns: default_signature_def: object of type SignatureDef which contains an upconverted version of default signatures in metagraph_def named_signature_def: object of type SignatureDef which contains an upconverted version of named signatures in metagraph_def """ collection_def = metagraph_def.collection_def signatures_proto = manifest_pb2.Signatures() signatures = collection_def[legacy_constants.SIGNATURES_KEY].any_list.value[0] signatures.Unpack(signatures_proto) default_signature_def = None named_signature_def = None if signatures_proto.HasField("default_signature"): default_signature_def = _convert_default_signature_to_signature_def( signatures_proto) if len(signatures_proto.named_signatures) > 1: named_signature_def = _convert_named_signatures_to_signature_def( signatures_proto) return default_signature_def, named_signature_def
[ "def", "_convert_signatures_to_signature_defs", "(", "metagraph_def", ")", ":", "collection_def", "=", "metagraph_def", ".", "collection_def", "signatures_proto", "=", "manifest_pb2", ".", "Signatures", "(", ")", "signatures", "=", "collection_def", "[", "legacy_constants", ".", "SIGNATURES_KEY", "]", ".", "any_list", ".", "value", "[", "0", "]", "signatures", ".", "Unpack", "(", "signatures_proto", ")", "default_signature_def", "=", "None", "named_signature_def", "=", "None", "if", "signatures_proto", ".", "HasField", "(", "\"default_signature\"", ")", ":", "default_signature_def", "=", "_convert_default_signature_to_signature_def", "(", "signatures_proto", ")", "if", "len", "(", "signatures_proto", ".", "named_signatures", ")", ">", "1", ":", "named_signature_def", "=", "_convert_named_signatures_to_signature_def", "(", "signatures_proto", ")", "return", "default_signature_def", ",", "named_signature_def" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/session_bundle/bundle_shim.py#L151-L178
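A hedged usage sketch for the module-private helper above, assuming a legacy Session Bundle MetaGraphDef already serialized on disk (the path is hypothetical; the proto calls are standard protobuf API):

from tensorflow.core.protobuf import meta_graph_pb2

metagraph_def = meta_graph_pb2.MetaGraphDef()
with open('export.meta', 'rb') as f:        # hypothetical export path
    metagraph_def.ParseFromString(f.read())

default_sig, named_sig = _convert_signatures_to_signature_defs(metagraph_def)
if default_sig is not None:
    print(default_sig.method_name)          # upconverted default signature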
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/richtext.py
python
RichTextFileHandler.SaveStream
(*args, **kwargs)
return _richtext.RichTextFileHandler_SaveStream(*args, **kwargs)
SaveStream(self, RichTextBuffer buffer, wxOutputStream stream) -> bool
SaveStream(self, RichTextBuffer buffer, wxOutputStream stream) -> bool
[ "SaveStream", "(", "self", "RichTextBuffer", "buffer", "wxOutputStream", "stream", ")", "-", ">", "bool" ]
def SaveStream(*args, **kwargs): """SaveStream(self, RichTextBuffer buffer, wxOutputStream stream) -> bool""" return _richtext.RichTextFileHandler_SaveStream(*args, **kwargs)
[ "def", "SaveStream", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_richtext", ".", "RichTextFileHandler_SaveStream", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/richtext.py#L2756-L2758
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
tools/mo/openvino/tools/mo/utils/graph.py
python
bfs_search
(graph: Graph, start_nodes: list = list())
return result
Performs breadth-first search over a graph and returns a list of nodes in the BFS order. :param graph: networkx graph to traverse. :param start_nodes: list of start nodes of the graph. If the list is empty then start from all nodes that do not have input nodes. :return: the list of nodes in the BFS order.
Performs breadth-first search over a graph and returns a list of nodes in the BFS order. :param graph: networkx graph to traverse. :param start_nodes: list of start nodes of the graph. If the list is empty then start from all nodes that do not have input nodes. :return: the list of nodes in the BFS order.
[ "Performs", "breadth", "-", "first", "search", "over", "a", "graph", "and", "returns", "a", "list", "of", "nodes", "in", "the", "BFS", "order", ".", ":", "param", "graph", ":", "networkx", "graph", "to", "traverse", ".", ":", "param", "start_nodes", ":", "list", "of", "start", "nodes", "of", "the", "graph", ".", "If", "the", "list", "is", "empty", "then", "start", "from", "all", "nodes", "that", "do", "not", "have", "input", "nodes", ".", ":", "return", ":", "the", "list", "of", "nodes", "in", "the", "BFS", "order", "." ]
def bfs_search(graph: Graph, start_nodes: list = list()): """ Performs breadth-first search over a graph and returns a list of nodes in the BFS order. :param graph: networkx graph to traverse. :param start_nodes: list of start nodes of the graph. If the list is empty then start from all nodes that do not have input nodes. :return: the list of nodes in the BFS order. """ result = list() if len(start_nodes) == 0: start_nodes = [node_name for node_name in graph.nodes() if len(graph.in_edges(node_name)) == 0] visited = set(start_nodes) d = deque(start_nodes) while len(d) != 0: cur_node_name = d.popleft() result.append(cur_node_name) for src_node, dst_node in graph.out_edges(cur_node_name): if dst_node not in visited: d.append(dst_node) visited.add(dst_node) return result
[ "def", "bfs_search", "(", "graph", ":", "Graph", ",", "start_nodes", ":", "list", "=", "list", "(", ")", ")", ":", "result", "=", "list", "(", ")", "if", "len", "(", "start_nodes", ")", "==", "0", ":", "start_nodes", "=", "[", "node_name", "for", "node_name", "in", "graph", ".", "nodes", "(", ")", "if", "len", "(", "graph", ".", "in_edges", "(", "node_name", ")", ")", "==", "0", "]", "visited", "=", "set", "(", "start_nodes", ")", "d", "=", "deque", "(", "start_nodes", ")", "while", "len", "(", "d", ")", "!=", "0", ":", "cur_node_name", "=", "d", ".", "popleft", "(", ")", "result", ".", "append", "(", "cur_node_name", ")", "for", "src_node", ",", "dst_node", "in", "graph", ".", "out_edges", "(", "cur_node_name", ")", ":", "if", "dst_node", "not", "in", "visited", ":", "d", ".", "append", "(", "dst_node", ")", "visited", ".", "add", "(", "dst_node", ")", "return", "result" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/utils/graph.py#L46-L68
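A small usage sketch; Graph here is the Model Optimizer's networkx MultiDiGraph subclass, and the node names are made up:

g = Graph()
g.add_edges_from([('in', 'conv'), ('conv', 'relu'), ('relu', 'out')])

print(bfs_search(g))             # ['in', 'conv', 'relu', 'out'] -- starts at the only source node
print(bfs_search(g, ['conv']))   # ['conv', 'relu', 'out']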
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/distutils/sysconfig.py
python
expand_makefile_vars
(s, vars)
return s
Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in 'string' according to 'vars' (a dictionary mapping variable names to values). Variables not present in 'vars' are silently expanded to the empty string. The variable values in 'vars' should not contain further variable expansions; if 'vars' is the output of 'parse_makefile()', you're fine. Returns a variable-expanded version of 's'.
Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in 'string' according to 'vars' (a dictionary mapping variable names to values). Variables not present in 'vars' are silently expanded to the empty string. The variable values in 'vars' should not contain further variable expansions; if 'vars' is the output of 'parse_makefile()', you're fine. Returns a variable-expanded version of 's'.
[ "Expand", "Makefile", "-", "style", "variables", "--", "$", "{", "foo", "}", "or", "$", "(", "foo", ")", "--", "in", "string", "according", "to", "vars", "(", "a", "dictionary", "mapping", "variable", "names", "to", "values", ")", ".", "Variables", "not", "present", "in", "vars", "are", "silently", "expanded", "to", "the", "empty", "string", ".", "The", "variable", "values", "in", "vars", "should", "not", "contain", "further", "variable", "expansions", ";", "if", "vars", "is", "the", "output", "of", "parse_makefile", "()", "you", "re", "fine", ".", "Returns", "a", "variable", "-", "expanded", "version", "of", "s", "." ]
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    'string' according to 'vars' (a dictionary mapping variable names to
    values).  Variables not present in 'vars' are silently expanded to the
    empty string.  The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine.  Returns a variable-expanded version of 's'.
    """

    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.
    while True:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if m:
            (beg, end) = m.span()
            # default missing names to '' so they really do expand to the
            # empty string, as the docstring promises
            s = s[0:beg] + vars.get(m.group(1), '') + s[end:]
        else:
            break
    return s
[ "def", "expand_makefile_vars", "(", "s", ",", "vars", ")", ":", "# This algorithm does multiple expansion, so if vars['foo'] contains", "# \"${bar}\", it will expand ${foo} to ${bar}, and then expand", "# ${bar}... and so forth. This is fine as long as 'vars' comes from", "# 'parse_makefile()', which takes care of such expansions eagerly,", "# according to make's variable expansion semantics.", "while", "True", ":", "m", "=", "_findvar1_rx", ".", "search", "(", "s", ")", "or", "_findvar2_rx", ".", "search", "(", "s", ")", "if", "m", ":", "(", "beg", ",", "end", ")", "=", "m", ".", "span", "(", ")", "s", "=", "s", "[", "0", ":", "beg", "]", "+", "vars", ".", "get", "(", "m", ".", "group", "(", "1", ")", ")", "+", "s", "[", "end", ":", "]", "else", ":", "break", "return", "s" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/distutils/sysconfig.py#L411-L433
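Both variable syntaxes in action (the last line relies on the missing-key default noted in the function above):

makefile_vars = {'prefix': '/usr/local', 'exec_prefix': '/usr/local'}
print(expand_makefile_vars('${prefix}/lib', makefile_vars))       # /usr/local/lib
print(expand_makefile_vars('$(exec_prefix)/bin', makefile_vars))  # /usr/local/bin
print(expand_makefile_vars('$(missing)/bin', makefile_vars))      # /bin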
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
buildscripts/validate_mongocryptd.py
python
can_validation_be_skipped
(evg_config, variant)
return False
Determine if the given build variant needs to be validated. A build variant does not need to be validated if it does not run the 'push' task or if it does not exist in the configuration (it is dynamically created). :param evg_config: Evergreen configuration. :param variant: Build variant to check. :return: True if validation can be skipped.
Determine if the given build variant needs to be validated.
[ "Determine", "if", "the", "given", "build", "variant", "needs", "to", "be", "validated", "." ]
def can_validation_be_skipped(evg_config, variant): """ Determine if the given build variant needs to be validated. A build variant does not need to be validated if it does not run the 'push' task or if it does not exist in the configuration (it is dynamically created). :param evg_config: Evergreen configuration. :param variant: Build variant to check. :return: True if validation can be skipped. """ variant_config = evg_config.get_variant(variant) if not variant_config: return True if PUSH_TASK_NAME not in variant_config.task_names: return True return False
[ "def", "can_validation_be_skipped", "(", "evg_config", ",", "variant", ")", ":", "variant_config", "=", "evg_config", ".", "get_variant", "(", "variant", ")", "if", "not", "variant_config", ":", "return", "True", "if", "PUSH_TASK_NAME", "not", "in", "variant_config", ".", "task_names", ":", "return", "True", "return", "False" ]
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/buildscripts/validate_mongocryptd.py#L49-L67
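A usage sketch; evg_config is whatever object the surrounding script builds from the parsed Evergreen YAML (it only needs get_variant() returning an object with task_names), and the variant names are hypothetical:

for variant in ['enterprise-rhel-80', 'dynamically-created-variant']:
    # evg_config: parsed Evergreen configuration, assumed to exist in scope
    if can_validation_be_skipped(evg_config, variant):
        print('skipping', variant)
    else:
        print('validating mongocryptd for', variant)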
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/tlslite/tlslite/utils/Python_RSAKey.py
python
Python_RSAKey.parsePEM
(s, passwordCallback=None)
Parse a string containing a <privateKey> or <publicKey>, or PEM-encoded key.
Parse a string containing a <privateKey> or <publicKey>, or PEM-encoded key.
[ "Parse", "a", "string", "containing", "a", "<privateKey", ">", "or", "<publicKey", ">", "or", "PEM", "-", "encoded", "key", "." ]
def parsePEM(s, passwordCallback=None):
    """Parse a string containing a <privateKey> or <publicKey>, or
    PEM-encoded key."""

    start = s.find("-----BEGIN PRIVATE KEY-----")
    if start != -1:
        end = s.find("-----END PRIVATE KEY-----")
        if end == -1:
            raise SyntaxError("Missing PEM Postfix")
        # slice with the exact header length; base64 decoding ignores the
        # newline that follows the header
        s = s[start+len("-----BEGIN PRIVATE KEY-----") : end]
        bytes = base64ToBytes(s)
        return Python_RSAKey._parsePKCS8(bytes)
    else:
        start = s.find("-----BEGIN RSA PRIVATE KEY-----")
        if start != -1:
            end = s.find("-----END RSA PRIVATE KEY-----")
            if end == -1:
                raise SyntaxError("Missing PEM Postfix")
            s = s[start+len("-----BEGIN RSA PRIVATE KEY-----") : end]
            bytes = base64ToBytes(s)
            return Python_RSAKey._parseSSLeay(bytes)
    raise SyntaxError("Missing PEM Prefix")
[ "def", "parsePEM", "(", "s", ",", "passwordCallback", "=", "None", ")", ":", "start", "=", "s", ".", "find", "(", "\"-----BEGIN PRIVATE KEY-----\"", ")", "if", "start", "!=", "-", "1", ":", "end", "=", "s", ".", "find", "(", "\"-----END PRIVATE KEY-----\"", ")", "if", "end", "==", "-", "1", ":", "raise", "SyntaxError", "(", "\"Missing PEM Postfix\"", ")", "s", "=", "s", "[", "start", "+", "len", "(", "\"-----BEGIN PRIVATE KEY -----\"", ")", ":", "end", "]", "bytes", "=", "base64ToBytes", "(", "s", ")", "return", "Python_RSAKey", ".", "_parsePKCS8", "(", "bytes", ")", "else", ":", "start", "=", "s", ".", "find", "(", "\"-----BEGIN RSA PRIVATE KEY-----\"", ")", "if", "start", "!=", "-", "1", ":", "end", "=", "s", ".", "find", "(", "\"-----END RSA PRIVATE KEY-----\"", ")", "if", "end", "==", "-", "1", ":", "raise", "SyntaxError", "(", "\"Missing PEM Postfix\"", ")", "s", "=", "s", "[", "start", "+", "len", "(", "\"-----BEGIN RSA PRIVATE KEY -----\"", ")", ":", "end", "]", "bytes", "=", "base64ToBytes", "(", "s", ")", "return", "Python_RSAKey", ".", "_parseSSLeay", "(", "bytes", ")", "raise", "SyntaxError", "(", "\"Missing PEM Prefix\"", ")" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/tlslite/tlslite/utils/Python_RSAKey.py#L112-L133
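Usage sketch, assuming key.pem holds an unencrypted PKCS#8 or SSLeay RSA key (the two PEM variants this parser recognizes) and the usual staticmethod wrapper tlslite applies to parsePEM:

with open('key.pem') as f:          # hypothetical key file
    pem_text = f.read()
key = Python_RSAKey.parsePEM(pem_text)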
bristolcrypto/SPDZ-2
721abfae849625a02ea49aabc534f9cf41ca643f
Compiler/comparison.py
python
PRandM
(r_dprime, r_prime, b, k, m, kappa)
r_dprime = random secret integer in range [0, 2^(k + kappa - m) - 1] r_prime = random secret integer in range [0, 2^m - 1] b = array containing bits of r_prime
r_dprime = random secret integer in range [0, 2^(k + kappa - m) - 1] r_prime = random secret integer in range [0, 2^m - 1] b = array containing bits of r_prime
[ "r_dprime", "=", "random", "secret", "integer", "in", "range", "[", "0", "2^", "(", "k", "+", "kappa", "-", "m", ")", "-", "1", "]", "r_prime", "=", "random", "secret", "integer", "in", "range", "[", "0", "2^m", "-", "1", "]", "b", "=", "array", "containing", "bits", "of", "r_prime" ]
def PRandM(r_dprime, r_prime, b, k, m, kappa): """ r_dprime = random secret integer in range [0, 2^(k + kappa - m) - 1] r_prime = random secret integer in range [0, 2^m - 1] b = array containing bits of r_prime """ t = [[program.curr_block.new_reg('s') for j in range(2)] for i in range(m)] t[0][1] = b[-1] PRandInt(r_dprime, k + kappa - m) # r_dprime is always multiplied by 2^m program.curr_tape.require_bit_length(k + kappa) bit(b[-1]) for i in range(1,m): adds(t[i][0], t[i-1][1], t[i-1][1]) bit(b[-i-1]) adds(t[i][1], t[i][0], b[-i-1]) movs(r_prime, t[m-1][1])
[ "def", "PRandM", "(", "r_dprime", ",", "r_prime", ",", "b", ",", "k", ",", "m", ",", "kappa", ")", ":", "t", "=", "[", "[", "program", ".", "curr_block", ".", "new_reg", "(", "'s'", ")", "for", "j", "in", "range", "(", "2", ")", "]", "for", "i", "in", "range", "(", "m", ")", "]", "t", "[", "0", "]", "[", "1", "]", "=", "b", "[", "-", "1", "]", "PRandInt", "(", "r_dprime", ",", "k", "+", "kappa", "-", "m", ")", "# r_dprime is always multiplied by 2^m", "program", ".", "curr_tape", ".", "require_bit_length", "(", "k", "+", "kappa", ")", "bit", "(", "b", "[", "-", "1", "]", ")", "for", "i", "in", "range", "(", "1", ",", "m", ")", ":", "adds", "(", "t", "[", "i", "]", "[", "0", "]", ",", "t", "[", "i", "-", "1", "]", "[", "1", "]", ",", "t", "[", "i", "-", "1", "]", "[", "1", "]", ")", "bit", "(", "b", "[", "-", "i", "-", "1", "]", ")", "adds", "(", "t", "[", "i", "]", "[", "1", "]", ",", "t", "[", "i", "]", "[", "0", "]", ",", "b", "[", "-", "i", "-", "1", "]", ")", "movs", "(", "r_prime", ",", "t", "[", "m", "-", "1", "]", "[", "1", "]", ")" ]
https://github.com/bristolcrypto/SPDZ-2/blob/721abfae849625a02ea49aabc534f9cf41ca643f/Compiler/comparison.py#L188-L204
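The point of the decomposition is that r_dprime * 2^m + r_prime is a uniform (k + kappa)-bit value whose low m bits are individually available as secret bits. A plain-Python illustration of the same arithmetic, with no secret sharing, purely to show the ranges:

import random

k, kappa, m = 32, 40, 8
r_dprime = random.randrange(2 ** (k + kappa - m))
r_prime = random.randrange(2 ** m)
b = [(r_prime >> i) & 1 for i in range(m)]    # b[0] is the least significant bit

r = r_dprime * 2 ** m + r_prime               # the combined (k + kappa)-bit value
assert r < 2 ** (k + kappa)
assert sum(bit << i for i, bit in enumerate(b)) == r_prime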
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/ssl.py
python
SSLObject.pending
(self)
return self._sslobj.pending()
Return the number of bytes that can be read immediately.
Return the number of bytes that can be read immediately.
[ "Return", "the", "number", "of", "bytes", "that", "can", "be", "read", "immediately", "." ]
def pending(self): """Return the number of bytes that can be read immediately.""" return self._sslobj.pending()
[ "def", "pending", "(", "self", ")", ":", "return", "self", ".", "_sslobj", ".", "pending", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/ssl.py#L956-L958
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/graphy/graphy/common.py
python
BaseChart._GetAxes
(self)
Return a generator of (position_code, Axis) tuples for this chart's axes. The axes will be sorted by position using the canonical ordering sequence, _POSITION_CODES.
Return a generator of (position_code, Axis) tuples for this chart's axes.
[ "Return", "a", "generator", "of", "(", "position_code", "Axis", ")", "tuples", "for", "this", "chart", "s", "axes", "." ]
def _GetAxes(self): """Return a generator of (position_code, Axis) tuples for this chart's axes. The axes will be sorted by position using the canonical ordering sequence, _POSITION_CODES. """ for code in self._POSITION_CODES: for axis in self._axes.get(code, []): yield (code, axis)
[ "def", "_GetAxes", "(", "self", ")", ":", "for", "code", "in", "self", ".", "_POSITION_CODES", ":", "for", "axis", "in", "self", ".", "_axes", ".", "get", "(", "code", ",", "[", "]", ")", ":", "yield", "(", "code", ",", "axis", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/graphy/graphy/common.py#L384-L392
google/llvm-propeller
45c226984fe8377ebfb2ad7713c680d652ba678d
lldb/third_party/Python/module/six/six.py
python
add_metaclass
(metaclass)
return wrapper
Class decorator for creating a class with a metaclass.
Class decorator for creating a class with a metaclass.
[ "Class", "decorator", "for", "creating", "a", "class", "with", "a", "metaclass", "." ]
def add_metaclass(metaclass): """Class decorator for creating a class with a metaclass.""" def wrapper(cls): orig_vars = cls.__dict__.copy() slots = orig_vars.get('__slots__') if slots is not None: if isinstance(slots, str): slots = [slots] for slots_var in slots: orig_vars.pop(slots_var) orig_vars.pop('__dict__', None) orig_vars.pop('__weakref__', None) return metaclass(cls.__name__, cls.__bases__, orig_vars) return wrapper
[ "def", "add_metaclass", "(", "metaclass", ")", ":", "def", "wrapper", "(", "cls", ")", ":", "orig_vars", "=", "cls", ".", "__dict__", ".", "copy", "(", ")", "slots", "=", "orig_vars", ".", "get", "(", "'__slots__'", ")", "if", "slots", "is", "not", "None", ":", "if", "isinstance", "(", "slots", ",", "str", ")", ":", "slots", "=", "[", "slots", "]", "for", "slots_var", "in", "slots", ":", "orig_vars", ".", "pop", "(", "slots_var", ")", "orig_vars", ".", "pop", "(", "'__dict__'", ",", "None", ")", "orig_vars", ".", "pop", "(", "'__weakref__'", ",", "None", ")", "return", "metaclass", "(", "cls", ".", "__name__", ",", "cls", ".", "__bases__", ",", "orig_vars", ")", "return", "wrapper" ]
https://github.com/google/llvm-propeller/blob/45c226984fe8377ebfb2ad7713c680d652ba678d/lldb/third_party/Python/module/six/six.py#L831-L844
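Typical use, the pattern six documents for code that must run on both Python 2 and 3:

class Meta(type):
    pass

@add_metaclass(Meta)
class MyClass(object):
    pass

assert type(MyClass) is Meta   # the class was rebuilt with Meta as its metaclass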
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/imputil.py
python
_compile
(pathname, timestamp)
return code
Compile (and cache) a Python source file. The file specified by <pathname> is compiled to a code object and returned. Presuming the appropriate privileges exist, the bytecodes will be saved back to the filesystem for future imports. The source file's modification timestamp must be provided as a Long value.
Compile (and cache) a Python source file.
[ "Compile", "(", "and", "cache", ")", "a", "Python", "source", "file", "." ]
def _compile(pathname, timestamp): """Compile (and cache) a Python source file. The file specified by <pathname> is compiled to a code object and returned. Presuming the appropriate privileges exist, the bytecodes will be saved back to the filesystem for future imports. The source file's modification timestamp must be provided as a Long value. """ codestring = open(pathname, 'rU').read() if codestring and codestring[-1] != '\n': codestring = codestring + '\n' code = __builtin__.compile(codestring, pathname, 'exec') # try to cache the compiled code try: f = open(pathname + _suffix_char, 'wb') except IOError: pass else: f.write('\0\0\0\0') f.write(struct.pack('<I', timestamp)) marshal.dump(code, f) f.flush() f.seek(0, 0) f.write(imp.get_magic()) f.close() return code
[ "def", "_compile", "(", "pathname", ",", "timestamp", ")", ":", "codestring", "=", "open", "(", "pathname", ",", "'rU'", ")", ".", "read", "(", ")", "if", "codestring", "and", "codestring", "[", "-", "1", "]", "!=", "'\\n'", ":", "codestring", "=", "codestring", "+", "'\\n'", "code", "=", "__builtin__", ".", "compile", "(", "codestring", ",", "pathname", ",", "'exec'", ")", "# try to cache the compiled code", "try", ":", "f", "=", "open", "(", "pathname", "+", "_suffix_char", ",", "'wb'", ")", "except", "IOError", ":", "pass", "else", ":", "f", ".", "write", "(", "'\\0\\0\\0\\0'", ")", "f", ".", "write", "(", "struct", ".", "pack", "(", "'<I'", ",", "timestamp", ")", ")", "marshal", ".", "dump", "(", "code", ",", "f", ")", "f", ".", "flush", "(", ")", "f", ".", "seek", "(", "0", ",", "0", ")", "f", ".", "write", "(", "imp", ".", "get_magic", "(", ")", ")", "f", ".", "close", "(", ")", "return", "code" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/imputil.py#L415-L444
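A usage sketch in Python 2 style, matching this module; the source path is hypothetical, and the timestamp is the source file's mtime as the docstring requires:

import os

pathname = 'mymodule.py'                       # hypothetical source file
timestamp = long(os.stat(pathname).st_mtime)   # Python 2 long, per the docstring
code = _compile(pathname, timestamp)
exec code in {}                                # run the freshly compiled module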
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/oauthlib/oauth1/rfc5849/request_validator.py
python
RequestValidator.validate_client_key
(self, client_key, request)
Validates that supplied client key is a registered and valid client. :param client_key: The client/consumer key. :param request: An oauthlib.common.Request object. :returns: True or False Note that if the dummy client is supplied it should validate in the same or nearly the same amount of time as a valid one. Ensure latency inducing tasks are mimicked even for dummy clients. For example, use:: from your_datastore import Client try: return Client.exists(client_key, access_token) except DoesNotExist: return False Rather than:: from your_datastore import Client if access_token == self.dummy_access_token: return False else: return Client.exists(client_key, access_token) This method is used by * AccessTokenEndpoint * RequestTokenEndpoint * ResourceEndpoint * SignatureOnlyEndpoint
Validates that supplied client key is a registered and valid client.
[ "Validates", "that", "supplied", "client", "key", "is", "a", "registered", "and", "valid", "client", "." ]
def validate_client_key(self, client_key, request):
    """Validates that supplied client key is a registered and valid client.

    :param client_key: The client/consumer key.
    :param request: An oauthlib.common.Request object.
    :returns: True or False

    Note that if the dummy client is supplied it should validate in the same
    or nearly the same amount of time as a valid one.

    Ensure latency inducing tasks are mimicked even for dummy clients. For
    example, use::

        from your_datastore import Client
        try:
            return Client.exists(client_key, access_token)
        except DoesNotExist:
            return False

    Rather than::

        from your_datastore import Client
        if access_token == self.dummy_access_token:
            return False
        else:
            return Client.exists(client_key, access_token)

    This method is used by

    * AccessTokenEndpoint
    * RequestTokenEndpoint
    * ResourceEndpoint
    * SignatureOnlyEndpoint
    """
    raise self._subclass_must_implement("validate_client_key")
[ "def", "validate_client_key", "(", "self", ",", "client_key", ",", "request", ")", ":", "raise", "self", ".", "_subclass_must_implement", "(", "\"validate_client_key\"", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/oauthlib/oauth1/rfc5849/request_validator.py#L461-L495
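A sketch of the subclass contract; your_datastore, Client and DoesNotExist are the hypothetical names from the docstring's own example:

class MyRequestValidator(RequestValidator):

    def validate_client_key(self, client_key, request):
        from your_datastore import Client, DoesNotExist   # hypothetical datastore
        try:
            return Client.exists(client_key)
        except DoesNotExist:
            return False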
y123456yz/reading-and-annotate-mongodb-3.6
93280293672ca7586dc24af18132aa61e4ed7fcf
mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/PathList.py
python
_PathList.__init__
(self, pathlist)
Initializes a PathList object, canonicalizing the input and pre-processing it for quicker substitution later. The stored representation of the PathList is a list of tuples containing (type, value), where the "type" is one of the TYPE_* variables defined above. We distinguish between: strings that contain no '$' and therefore need no delayed-evaluation string substitution (we expect that there will be many of these and that we therefore get a pretty big win from avoiding string substitution) strings that contain '$' and therefore need substitution (the hard case is things like '${TARGET.dir}/include', which require re-evaluation for every target + source) other objects (which may be something like an EntryProxy that needs a method called to return a Node) Pre-identifying the type of each element in the PathList up-front and storing the type in the list of tuples is intended to reduce the amount of calculation when we actually do the substitution over and over for each target.
Initializes a PathList object, canonicalizing the input and pre-processing it for quicker substitution later.
[ "Initializes", "a", "PathList", "object", "canonicalizing", "the", "input", "and", "pre", "-", "processing", "it", "for", "quicker", "substitution", "later", "." ]
def __init__(self, pathlist): """ Initializes a PathList object, canonicalizing the input and pre-processing it for quicker substitution later. The stored representation of the PathList is a list of tuples containing (type, value), where the "type" is one of the TYPE_* variables defined above. We distinguish between: strings that contain no '$' and therefore need no delayed-evaluation string substitution (we expect that there will be many of these and that we therefore get a pretty big win from avoiding string substitution) strings that contain '$' and therefore need substitution (the hard case is things like '${TARGET.dir}/include', which require re-evaluation for every target + source) other objects (which may be something like an EntryProxy that needs a method called to return a Node) Pre-identifying the type of each element in the PathList up-front and storing the type in the list of tuples is intended to reduce the amount of calculation when we actually do the substitution over and over for each target. """ if SCons.Util.is_String(pathlist): pathlist = pathlist.split(os.pathsep) elif not SCons.Util.is_Sequence(pathlist): pathlist = [pathlist] pl = [] for p in pathlist: try: index = p.find('$') except (AttributeError, TypeError): type = TYPE_OBJECT else: if index == -1: type = TYPE_STRING_NO_SUBST else: type = TYPE_STRING_SUBST pl.append((type, p)) self.pathlist = tuple(pl)
[ "def", "__init__", "(", "self", ",", "pathlist", ")", ":", "if", "SCons", ".", "Util", ".", "is_String", "(", "pathlist", ")", ":", "pathlist", "=", "pathlist", ".", "split", "(", "os", ".", "pathsep", ")", "elif", "not", "SCons", ".", "Util", ".", "is_Sequence", "(", "pathlist", ")", ":", "pathlist", "=", "[", "pathlist", "]", "pl", "=", "[", "]", "for", "p", "in", "pathlist", ":", "try", ":", "index", "=", "p", ".", "find", "(", "'$'", ")", "except", "(", "AttributeError", ",", "TypeError", ")", ":", "type", "=", "TYPE_OBJECT", "else", ":", "if", "index", "==", "-", "1", ":", "type", "=", "TYPE_STRING_NO_SUBST", "else", ":", "type", "=", "TYPE_STRING_SUBST", "pl", ".", "append", "(", "(", "type", ",", "p", ")", ")", "self", ".", "pathlist", "=", "tuple", "(", "pl", ")" ]
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/src/third_party/scons-2.5.0/scons-local-2.5.0/SCons/PathList.py#L73-L117
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/ndimage/filters.py
python
generic_filter1d
(input, function, filter_size, axis=-1, output=None, mode="reflect", cval=0.0, origin=0, extra_arguments=(), extra_keywords = None)
return return_value
Calculate a one-dimensional filter along the given axis. `generic_filter1d` iterates over the lines of the array, calling the given function at each line. The arguments of the line are the input line, and the output line. The input and output lines are 1D double arrays. The input line is extended appropriately according to the filter size and origin. The output line must be modified in-place with the result. Parameters ---------- %(input)s function : callable Function to apply along given axis. filter_size : scalar Length of the filter. %(axis)s %(output)s %(mode)s %(cval)s %(origin)s %(extra_arguments)s %(extra_keywords)s
Calculate a one-dimensional filter along the given axis.
[ "Calculate", "a", "one", "-", "dimensional", "filter", "along", "the", "given", "axis", "." ]
def generic_filter1d(input, function, filter_size, axis=-1, output=None, mode="reflect", cval=0.0, origin=0, extra_arguments=(), extra_keywords = None): """Calculate a one-dimensional filter along the given axis. `generic_filter1d` iterates over the lines of the array, calling the given function at each line. The arguments of the line are the input line, and the output line. The input and output lines are 1D double arrays. The input line is extended appropriately according to the filter size and origin. The output line must be modified in-place with the result. Parameters ---------- %(input)s function : callable Function to apply along given axis. filter_size : scalar Length of the filter. %(axis)s %(output)s %(mode)s %(cval)s %(origin)s %(extra_arguments)s %(extra_keywords)s """ if extra_keywords is None: extra_keywords = {} input = numpy.asarray(input) if numpy.iscomplexobj(input): raise TypeError('Complex type not supported') output, return_value = _ni_support._get_output(output, input) if filter_size < 1: raise RuntimeError('invalid filter size') axis = _ni_support._check_axis(axis, input.ndim) if (filter_size // 2 + origin < 0) or (filter_size // 2 + origin >= filter_size): raise ValueError('invalid origin') mode = _ni_support._extend_mode_to_code(mode) _nd_image.generic_filter1d(input, function, filter_size, axis, output, mode, cval, origin, extra_arguments, extra_keywords) return return_value
[ "def", "generic_filter1d", "(", "input", ",", "function", ",", "filter_size", ",", "axis", "=", "-", "1", ",", "output", "=", "None", ",", "mode", "=", "\"reflect\"", ",", "cval", "=", "0.0", ",", "origin", "=", "0", ",", "extra_arguments", "=", "(", ")", ",", "extra_keywords", "=", "None", ")", ":", "if", "extra_keywords", "is", "None", ":", "extra_keywords", "=", "{", "}", "input", "=", "numpy", ".", "asarray", "(", "input", ")", "if", "numpy", ".", "iscomplexobj", "(", "input", ")", ":", "raise", "TypeError", "(", "'Complex type not supported'", ")", "output", ",", "return_value", "=", "_ni_support", ".", "_get_output", "(", "output", ",", "input", ")", "if", "filter_size", "<", "1", ":", "raise", "RuntimeError", "(", "'invalid filter size'", ")", "axis", "=", "_ni_support", ".", "_check_axis", "(", "axis", ",", "input", ".", "ndim", ")", "if", "(", "filter_size", "//", "2", "+", "origin", "<", "0", ")", "or", "(", "filter_size", "//", "2", "+", "origin", ">=", "filter_size", ")", ":", "raise", "ValueError", "(", "'invalid origin'", ")", "mode", "=", "_ni_support", ".", "_extend_mode_to_code", "(", "mode", ")", "_nd_image", ".", "generic_filter1d", "(", "input", ",", "function", ",", "filter_size", ",", "axis", ",", "output", ",", "mode", ",", "cval", ",", "origin", ",", "extra_arguments", ",", "extra_keywords", ")", "return", "return_value" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/ndimage/filters.py#L1152-L1194
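A runnable sketch of the line-function contract: the callable receives the boundary-padded input line and must fill the output line in place. Here it computes a centred moving average of width 3:

import numpy as np
from scipy.ndimage import generic_filter1d

def moving_average(in_line, out_line):
    # in_line carries filter_size - 1 extra boundary elements
    width = in_line.size - out_line.size + 1
    for i in range(out_line.size):
        out_line[i] = in_line[i:i + width].mean()

x = np.arange(6, dtype=float)
print(generic_filter1d(x, moving_average, filter_size=3))
# ~[0.333 1. 2. 3. 4. 4.667] with the default 'reflect' boundary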
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/mailbox.py
python
Maildir.__len__
(self)
return len(self._toc)
Return a count of messages in the mailbox.
Return a count of messages in the mailbox.
[ "Return", "a", "count", "of", "messages", "in", "the", "mailbox", "." ]
def __len__(self): """Return a count of messages in the mailbox.""" self._refresh() return len(self._toc)
[ "def", "__len__", "(", "self", ")", ":", "self", ".", "_refresh", "(", ")", "return", "len", "(", "self", ".", "_toc", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/mailbox.py#L410-L413
klzgrad/naiveproxy
ed2c513637c77b18721fe428d7ed395b4d284c83
src/build/android/gyp/util/resource_utils.py
python
RJavaBuildOptions._MaybeRewriteRTxtPackageIds
(self, r_txt_path)
Rewrites package IDs in the R.txt file if necessary. If SetFinalPackageId() was called, some of the resource IDs may have had their package ID changed. This function rewrites the R.txt file to match those changes.
Rewrites package IDs in the R.txt file if necessary.
[ "Rewrites", "package", "IDs", "in", "the", "R", ".", "txt", "file", "if", "necessary", "." ]
def _MaybeRewriteRTxtPackageIds(self, r_txt_path): """Rewrites package IDs in the R.txt file if necessary. If SetFinalPackageId() was called, some of the resource IDs may have had their package ID changed. This function rewrites the R.txt file to match those changes. """ if self.final_package_id is None: return entries = _ParseTextSymbolsFile(r_txt_path) with open(r_txt_path, 'w') as f: for entry in entries: value = entry.value if self._IsResourceFinal(entry): value = re.sub(r'0x(?:00|7f)', '0x{:02x}'.format(self.final_package_id), value) f.write('{} {} {} {}\n'.format(entry.java_type, entry.resource_type, entry.name, value))
[ "def", "_MaybeRewriteRTxtPackageIds", "(", "self", ",", "r_txt_path", ")", ":", "if", "self", ".", "final_package_id", "is", "None", ":", "return", "entries", "=", "_ParseTextSymbolsFile", "(", "r_txt_path", ")", "with", "open", "(", "r_txt_path", ",", "'w'", ")", "as", "f", ":", "for", "entry", "in", "entries", ":", "value", "=", "entry", ".", "value", "if", "self", ".", "_IsResourceFinal", "(", "entry", ")", ":", "value", "=", "re", ".", "sub", "(", "r'0x(?:00|7f)'", ",", "'0x{:02x}'", ".", "format", "(", "self", ".", "final_package_id", ")", ",", "value", ")", "f", ".", "write", "(", "'{} {} {} {}\\n'", ".", "format", "(", "entry", ".", "java_type", ",", "entry", ".", "resource_type", ",", "entry", ".", "name", ",", "value", ")", ")" ]
https://github.com/klzgrad/naiveproxy/blob/ed2c513637c77b18721fe428d7ed395b4d284c83/src/build/android/gyp/util/resource_utils.py#L491-L509
OSGeo/gdal
3748fc4ba4fba727492774b2b908a2130c864a83
swig/python/osgeo/gdal.py
python
Dataset.WriteRaster
(self, *args, **kwargs)
return _gdal.Dataset_WriteRaster(self, *args, **kwargs)
r"""WriteRaster(Dataset self, int xoff, int yoff, int xsize, int ysize, GIntBig buf_len, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, int band_list=0, GIntBig * buf_pixel_space=None, GIntBig * buf_line_space=None, GIntBig * buf_band_space=None) -> CPLErr
r"""WriteRaster(Dataset self, int xoff, int yoff, int xsize, int ysize, GIntBig buf_len, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, int band_list=0, GIntBig * buf_pixel_space=None, GIntBig * buf_line_space=None, GIntBig * buf_band_space=None) -> CPLErr
[ "r", "WriteRaster", "(", "Dataset", "self", "int", "xoff", "int", "yoff", "int", "xsize", "int", "ysize", "GIntBig", "buf_len", "int", "*", "buf_xsize", "=", "None", "int", "*", "buf_ysize", "=", "None", "GDALDataType", "*", "buf_type", "=", "None", "int", "band_list", "=", "0", "GIntBig", "*", "buf_pixel_space", "=", "None", "GIntBig", "*", "buf_line_space", "=", "None", "GIntBig", "*", "buf_band_space", "=", "None", ")", "-", ">", "CPLErr" ]
def WriteRaster(self, *args, **kwargs): r"""WriteRaster(Dataset self, int xoff, int yoff, int xsize, int ysize, GIntBig buf_len, int * buf_xsize=None, int * buf_ysize=None, GDALDataType * buf_type=None, int band_list=0, GIntBig * buf_pixel_space=None, GIntBig * buf_line_space=None, GIntBig * buf_band_space=None) -> CPLErr""" return _gdal.Dataset_WriteRaster(self, *args, **kwargs)
[ "def", "WriteRaster", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_gdal", ".", "Dataset_WriteRaster", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/OSGeo/gdal/blob/3748fc4ba4fba727492774b2b908a2130c864a83/swig/python/osgeo/gdal.py#L2217-L2219
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
TStr_AddToFMid
(*args)
return _snap.TStr_AddToFMid(*args)
TStr_AddToFMid(TStr FNm, TStr ExtFMid) -> TStr Parameters: FNm: TStr const & ExtFMid: TStr const &
TStr_AddToFMid(TStr FNm, TStr ExtFMid) -> TStr
[ "TStr_AddToFMid", "(", "TStr", "FNm", "TStr", "ExtFMid", ")", "-", ">", "TStr" ]
def TStr_AddToFMid(*args): """ TStr_AddToFMid(TStr FNm, TStr ExtFMid) -> TStr Parameters: FNm: TStr const & ExtFMid: TStr const & """ return _snap.TStr_AddToFMid(*args)
[ "def", "TStr_AddToFMid", "(", "*", "args", ")", ":", "return", "_snap", ".", "TStr_AddToFMid", "(", "*", "args", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L11257-L11266
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/arguments.py
python
normalize_args_for_analyze
(args, from_build_command)
Normalize parsed arguments for analyze-build and scan-build. :param args: Parsed argument object. (Will be mutated.) :param from_build_command: Boolean value that tells whether the command is supposed to run the analyzer against a build command or a compilation db.
Normalize parsed arguments for analyze-build and scan-build.
[ "Normalize", "parsed", "arguments", "for", "analyze", "-", "build", "and", "scan", "-", "build", "." ]
def normalize_args_for_analyze(args, from_build_command):
    """ Normalize parsed arguments for analyze-build and scan-build.

    :param args: Parsed argument object. (Will be mutated.)
    :param from_build_command: Boolean value that tells whether the command is
    supposed to run the analyzer against a build command or a compilation db.
    """

    # make plugins always a list. (it might be None when not specified.)
    if args.plugins is None:
        args.plugins = []

    # make exclude directory list unique and absolute.
    uniq_excludes = set(os.path.abspath(entry) for entry in args.excludes)
    args.excludes = list(uniq_excludes)

    # because the tools share code, some commonly used methods expect certain
    # arguments to be present. so, instead of querying the args object about
    # the presence of the flag, we fake it here to make those methods more
    # readable. (it's an arguable choice, took it only for those which have a
    # good default value.)
    if from_build_command:
        # add cdb parameter invisibly to make report module working.
        args.cdb = 'compile_commands.json'

    # Make ctu_dir an abspath as it is needed inside clang
    if not from_build_command and hasattr(args, 'ctu_phases') \
            and hasattr(args.ctu_phases, 'dir'):
        args.ctu_dir = os.path.abspath(args.ctu_dir)
[ "def", "normalize_args_for_analyze", "(", "args", ",", "from_build_command", ")", ":", "# make plugins always a list. (it might be None when not specified.)", "if", "args", ".", "plugins", "is", "None", ":", "args", ".", "plugins", "=", "[", "]", "# make exclude directory list unique and absolute.", "uniq_excludes", "=", "set", "(", "os", ".", "path", ".", "abspath", "(", "entry", ")", "for", "entry", "in", "args", ".", "excludes", ")", "args", ".", "excludes", "=", "list", "(", "uniq_excludes", ")", "# because shared codes for all tools, some common used methods are", "# expecting some argument to be present. so, instead of query the args", "# object about the presence of the flag, we fake it here. to make those", "# methods more readable. (it's an arguable choice, took it only for those", "# which have good default value.)", "if", "from_build_command", ":", "# add cdb parameter invisibly to make report module working.", "args", ".", "cdb", "=", "'compile_commands.json'", "# Make ctu_dir an abspath as it is needed inside clang", "if", "not", "from_build_command", "and", "hasattr", "(", "args", ",", "'ctu_phases'", ")", "and", "hasattr", "(", "args", ".", "ctu_phases", ",", "'dir'", ")", ":", "args", ".", "ctu_dir", "=", "os", ".", "path", ".", "abspath", "(", "args", ".", "ctu_dir", ")" ]
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/interpreter/llvm/src/tools/clang/tools/scan-build-py/libscanbuild/arguments.py#L77-L104
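A sketch of the mutation, using a bare argparse.Namespace in place of real parser output (field values are hypothetical):

import argparse

args = argparse.Namespace(plugins=None, excludes=['build', './build'])
normalize_args_for_analyze(args, from_build_command=True)

print(args.plugins)    # [] -- None replaced by an empty list
print(args.excludes)   # one absolute path; the duplicate collapsed
print(args.cdb)        # compile_commands.json, added for the report module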
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py
python
FancyGetopt.get_attr_name
(self, long_option)
return long_option.translate(longopt_xlate)
Translate long option name 'long_option' to the form it has as an attribute of some object: i.e., translate hyphens to underscores.
Translate long option name 'long_option' to the form it has as an attribute of some object: i.e., translate hyphens to underscores.
[ "Translate", "long", "option", "name", "long_option", "to", "the", "form", "it", "has", "as", "an", "attribute", "of", "some", "object", ":", "ie", ".", "translate", "hyphens", "to", "underscores", "." ]
def get_attr_name(self, long_option):
    """Translate long option name 'long_option' to the form it
    has as an attribute of some object: i.e., translate hyphens
    to underscores."""
    return long_option.translate(longopt_xlate)
[ "def", "get_attr_name", "(", "self", ",", "long_option", ")", ":", "return", "long_option", ".", "translate", "(", "longopt_xlate", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/setuptools/py3/setuptools/_distutils/fancy_getopt.py#L104-L108
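For example:

from distutils.fancy_getopt import FancyGetopt

print(FancyGetopt().get_attr_name('with-libs'))   # with_libs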
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Alignment/MuonAlignment/python/svgfig.py
python
Ticks.regular_miniticks
(self, N)
return output
Return exactly N linear ticks. Normally only used internally.
Return exactly N linear ticks.
[ "Return", "exactly", "N", "linear", "ticks", "." ]
def regular_miniticks(self, N): """Return exactly N linear ticks. Normally only used internally. """ output = [] x = self.low for i in range(N): output.append(x) x += (self.high - self.low)/(N-1.) return output
[ "def", "regular_miniticks", "(", "self", ",", "N", ")", ":", "output", "=", "[", "]", "x", "=", "self", ".", "low", "for", "i", "in", "range", "(", "N", ")", ":", "output", ".", "append", "(", "x", ")", "x", "+=", "(", "self", ".", "high", "-", "self", ".", "low", ")", "/", "(", "N", "-", "1.", ")", "return", "output" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/MuonAlignment/python/svgfig.py#L2678-L2688
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_grad.py
python
_AcoshGrad
(op, grad)
Returns grad * 1/sinh(y).
Returns grad * 1/sinh(y).
[ "Returns", "grad", "*", "1", "/", "sinh", "(", "y", ")", "." ]
def _AcoshGrad(op, grad): """Returns grad * 1/sinh(y).""" y = op.outputs[0] with ops.control_dependencies([grad]): y = math_ops.conj(y) if compat.forward_compatible(2019, 9, 14): return math_ops.xdivy(grad, math_ops.sinh(y)) else: return grad / math_ops.sinh(y)
[ "def", "_AcoshGrad", "(", "op", ",", "grad", ")", ":", "y", "=", "op", ".", "outputs", "[", "0", "]", "with", "ops", ".", "control_dependencies", "(", "[", "grad", "]", ")", ":", "y", "=", "math_ops", ".", "conj", "(", "y", ")", "if", "compat", ".", "forward_compatible", "(", "2019", ",", "9", ",", "14", ")", ":", "return", "math_ops", ".", "xdivy", "(", "grad", ",", "math_ops", ".", "sinh", "(", "y", ")", ")", "else", ":", "return", "grad", "/", "math_ops", ".", "sinh", "(", "y", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_grad.py#L727-L735
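The formula is just the inverse-function rule. With y = acosh(x) we have x = cosh(y), so in LaTeX:

\frac{dy}{dx} = \frac{1}{dx/dy} = \frac{1}{\sinh(y)},
\qquad
\bar{x} = \bar{y} \cdot \frac{1}{\sinh(y)},

which is exactly grad / sinh(y); the xdivy branch only changes the corner case, returning 0 instead of nan when the incoming grad is 0.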
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_windows.py
python
Printout._setCallbackInfo
(*args, **kwargs)
return _windows_.Printout__setCallbackInfo(*args, **kwargs)
_setCallbackInfo(self, PyObject self, PyObject _class)
_setCallbackInfo(self, PyObject self, PyObject _class)
[ "_setCallbackInfo", "(", "self", "PyObject", "self", "PyObject", "_class", ")" ]
def _setCallbackInfo(*args, **kwargs): """_setCallbackInfo(self, PyObject self, PyObject _class)""" return _windows_.Printout__setCallbackInfo(*args, **kwargs)
[ "def", "_setCallbackInfo", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_windows_", ".", "Printout__setCallbackInfo", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_windows.py#L5275-L5277
s9xie/hed
94fb22f10cbfec8d84fbc0642b224022014b6bd6
scripts/cpp_lint.py
python
_FunctionState.Check
(self, error, filename, linenum)
Report if too many lines in function body. Args: error: The function to call with any errors found. filename: The name of the current file. linenum: The number of the line to check.
Report if too many lines in function body.
[ "Report", "if", "too", "many", "lines", "in", "function", "body", "." ]
def Check(self, error, filename, linenum): """Report if too many lines in function body. Args: error: The function to call with any errors found. filename: The name of the current file. linenum: The number of the line to check. """ if Match(r'T(EST|est)', self.current_function): base_trigger = self._TEST_TRIGGER else: base_trigger = self._NORMAL_TRIGGER trigger = base_trigger * 2**_VerboseLevel() if self.lines_in_function > trigger: error_level = int(math.log(self.lines_in_function / base_trigger, 2)) # 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ... if error_level > 5: error_level = 5 error(filename, linenum, 'readability/fn_size', error_level, 'Small and focused functions are preferred:' ' %s has %d non-comment lines' ' (error triggered by exceeding %d lines).' % ( self.current_function, self.lines_in_function, trigger))
[ "def", "Check", "(", "self", ",", "error", ",", "filename", ",", "linenum", ")", ":", "if", "Match", "(", "r'T(EST|est)'", ",", "self", ".", "current_function", ")", ":", "base_trigger", "=", "self", ".", "_TEST_TRIGGER", "else", ":", "base_trigger", "=", "self", ".", "_NORMAL_TRIGGER", "trigger", "=", "base_trigger", "*", "2", "**", "_VerboseLevel", "(", ")", "if", "self", ".", "lines_in_function", ">", "trigger", ":", "error_level", "=", "int", "(", "math", ".", "log", "(", "self", ".", "lines_in_function", "/", "base_trigger", ",", "2", ")", ")", "# 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ...", "if", "error_level", ">", "5", ":", "error_level", "=", "5", "error", "(", "filename", ",", "linenum", ",", "'readability/fn_size'", ",", "error_level", ",", "'Small and focused functions are preferred:'", "' %s has %d non-comment lines'", "' (error triggered by exceeding %d lines).'", "%", "(", "self", ".", "current_function", ",", "self", ".", "lines_in_function", ",", "trigger", ")", ")" ]
https://github.com/s9xie/hed/blob/94fb22f10cbfec8d84fbc0642b224022014b6bd6/scripts/cpp_lint.py#L836-L859
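Worked arithmetic for the trigger, using the base of 50 from the code comment (the class's actual _NORMAL_TRIGGER constant may differ):

import math

base_trigger = 50                       # illustrative; see the class constants
trigger = base_trigger * 2 ** 0         # verbosity level 0
lines_in_function = 450
error_level = int(math.log(lines_in_function / base_trigger, 2))
print(trigger, error_level)             # 50 3 -- a 450-line function reports at level 3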
google/usd_from_gltf
6d288cce8b68744494a226574ae1d7ba6a9c46eb
tools/ufgbatch/ufgcommon/diff.py
python
handle_crc_difference
(golden_zip, golden_info, test_zip, test_info)
Returns results of more relaxed comparisons for the files.
Returns results of more relaxed comparisons for the files.
[ "Returns", "results", "of", "more", "relaxed", "comparisons", "for", "the", "files", "." ]
def handle_crc_difference(golden_zip, golden_info, test_zip, test_info): """Returns results of more relaxed comparisons for the files.""" try: with golden_zip.open(golden_info) as golden, test_zip.open( test_info) as test: return compare_images(golden, test) except IOError: # Non-images always return false. return False
[ "def", "handle_crc_difference", "(", "golden_zip", ",", "golden_info", ",", "test_zip", ",", "test_info", ")", ":", "try", ":", "with", "golden_zip", ".", "open", "(", "golden_info", ")", "as", "golden", ",", "test_zip", ".", "open", "(", "test_info", ")", "as", "test", ":", "return", "compare_images", "(", "golden", ",", "test", ")", "except", "IOError", ":", "# Non-images always return false.", "return", "False" ]
https://github.com/google/usd_from_gltf/blob/6d288cce8b68744494a226574ae1d7ba6a9c46eb/tools/ufgbatch/ufgcommon/diff.py#L102-L110
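A minimal usage sketch for handle_crc_difference (archive and entry names are hypothetical; compare_images is the module's own helper used internally):

    import zipfile

    with zipfile.ZipFile('golden.zip') as golden_zip, \
         zipfile.ZipFile('test.zip') as test_zip:
        # ZipInfo entries whose CRCs were found to differ between the builds.
        golden_info = golden_zip.getinfo('textures/base.png')
        test_info = test_zip.getinfo('textures/base.png')
        # True only when both entries decode as images that compare equal;
        # any IOError (e.g. a non-image entry) yields False.
        print(handle_crc_difference(golden_zip, golden_info, test_zip, test_info))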
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
media/webrtc/trunk/tools/gyp/pylib/gyp/xcode_emulation.py
python
XcodeSettings.GetWrapperName
(self)
return self.GetProductName() + self.GetWrapperExtension()
Returns the directory name of the bundle represented by this target. Only valid for bundles.
Returns the directory name of the bundle represented by this target. Only valid for bundles.
[ "Returns", "the", "directory", "name", "of", "the", "bundle", "represented", "by", "this", "target", ".", "Only", "valid", "for", "bundles", "." ]
def GetWrapperName(self): """Returns the directory name of the bundle represented by this target. Only valid for bundles.""" assert self._IsBundle() return self.GetProductName() + self.GetWrapperExtension()
[ "def", "GetWrapperName", "(", "self", ")", ":", "assert", "self", ".", "_IsBundle", "(", ")", "return", "self", ".", "GetProductName", "(", ")", "+", "self", ".", "GetWrapperExtension", "(", ")" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/media/webrtc/trunk/tools/gyp/pylib/gyp/xcode_emulation.py#L95-L99
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/ops/tensor_array_ops.py
python
_GraphTensorArrayV2.split
(self, value, lengths, name=None)
See TensorArray.
See TensorArray.
[ "See", "TensorArray", "." ]
def split(self, value, lengths, name=None): """See TensorArray.""" with ops.name_scope(name, "TensorArraySplit", [self._flow, value, lengths]): # TODO(b/129870929): Fix after all callers provide proper init dtype. value = ops.convert_to_tensor( value, preferred_dtype=self._dtype, name="value") _check_dtypes(value, self._dtype) lengths_64 = math_ops.cast(lengths, dtypes.int64) if not context.executing_eagerly(): clengths = tensor_util.constant_value(lengths_64) if value.shape.dims is not None and clengths is not None: if clengths.shape and clengths.max() == clengths.min(): self._check_element_shape( tensor_shape.TensorShape([clengths[0] ]).concatenate(value.shape[1:])) flow_out = list_ops.tensor_list_split( tensor=value, lengths=lengths_64, element_shape=self.element_shape, name=name) return build_ta_with_new_flow(self, flow_out)
[ "def", "split", "(", "self", ",", "value", ",", "lengths", ",", "name", "=", "None", ")", ":", "with", "ops", ".", "name_scope", "(", "name", ",", "\"TensorArraySplit\"", ",", "[", "self", ".", "_flow", ",", "value", ",", "lengths", "]", ")", ":", "# TODO(b/129870929): Fix after all callers provide proper init dtype.", "value", "=", "ops", ".", "convert_to_tensor", "(", "value", ",", "preferred_dtype", "=", "self", ".", "_dtype", ",", "name", "=", "\"value\"", ")", "_check_dtypes", "(", "value", ",", "self", ".", "_dtype", ")", "lengths_64", "=", "math_ops", ".", "cast", "(", "lengths", ",", "dtypes", ".", "int64", ")", "if", "not", "context", ".", "executing_eagerly", "(", ")", ":", "clengths", "=", "tensor_util", ".", "constant_value", "(", "lengths_64", ")", "if", "value", ".", "shape", ".", "dims", "is", "not", "None", "and", "clengths", "is", "not", "None", ":", "if", "clengths", ".", "shape", "and", "clengths", ".", "max", "(", ")", "==", "clengths", ".", "min", "(", ")", ":", "self", ".", "_check_element_shape", "(", "tensor_shape", ".", "TensorShape", "(", "[", "clengths", "[", "0", "]", "]", ")", ".", "concatenate", "(", "value", ".", "shape", "[", "1", ":", "]", ")", ")", "flow_out", "=", "list_ops", ".", "tensor_list_split", "(", "tensor", "=", "value", ",", "lengths", "=", "lengths_64", ",", "element_shape", "=", "self", ".", "element_shape", ",", "name", "=", "name", ")", "return", "build_ta_with_new_flow", "(", "self", ",", "flow_out", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/tensor_array_ops.py#L623-L643
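A small eager-mode sketch of the split contract, assuming the public tf.TensorArray wrapper in TF 2.x (values are illustrative); the value tensor is cut along axis 0 into len(lengths) rows:

    import tensorflow as tf

    ta = tf.TensorArray(tf.float32, size=2)
    # Equal row lengths keep the inferred element shape consistent.
    ta = ta.split(tf.constant([1., 2., 3., 4.]), lengths=[2, 2])
    print(ta.read(1).numpy())  # [3. 4.]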
LiXizhi/NPLRuntime
a42720e5fe9a6960e0a9ce40bbbcd809192906be
Server/trunk/flann-1.8.4-src/src/python/pyflann/index.py
python
FLANN.save_index
(self, filename)
This saves the index to a disk file.
This saves the index to a disk file.
[ "This", "saves", "the", "index", "to", "a", "disk", "file", "." ]
def save_index(self, filename): """ This saves the index to a disk file. """ if self.__curindex != None: flann.save_index[self.__curindex_type](self.__curindex, c_char_p(to_bytes(filename)))
[ "def", "save_index", "(", "self", ",", "filename", ")", ":", "if", "self", ".", "__curindex", "!=", "None", ":", "flann", ".", "save_index", "[", "self", ".", "__curindex_type", "]", "(", "self", ".", "__curindex", ",", "c_char_p", "(", "to_bytes", "(", "filename", ")", ")", ")" ]
https://github.com/LiXizhi/NPLRuntime/blob/a42720e5fe9a6960e0a9ce40bbbcd809192906be/Server/trunk/flann-1.8.4-src/src/python/pyflann/index.py#L175-L180
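A usage sketch against the pyflann API (dataset contents are illustrative). Note that, per the body above, save_index silently does nothing when no index has been built yet:

    import numpy as np
    from pyflann import FLANN

    dataset = np.random.rand(1000, 128).astype(np.float32)
    flann = FLANN()
    flann.build_index(dataset)         # builds the index that will be persisted
    flann.save_index('dataset.flann')  # writes it to disk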
snap-stanford/snap-python
d53c51b0a26aa7e3e7400b014cdf728948fde80a
setup/snap.py
python
TUCh.__lt__
(self, *args)
return _snap.TUCh___lt__(self, *args)
__lt__(TUCh self, TUCh UCh) -> bool Parameters: UCh: TUCh const &
__lt__(TUCh self, TUCh UCh) -> bool
[ "__lt__", "(", "TUCh", "self", "TUCh", "UCh", ")", "-", ">", "bool" ]
def __lt__(self, *args): """ __lt__(TUCh self, TUCh UCh) -> bool Parameters: UCh: TUCh const & """ return _snap.TUCh___lt__(self, *args)
[ "def", "__lt__", "(", "self", ",", "*", "args", ")", ":", "return", "_snap", ".", "TUCh___lt__", "(", "self", ",", "*", "args", ")" ]
https://github.com/snap-stanford/snap-python/blob/d53c51b0a26aa7e3e7400b014cdf728948fde80a/setup/snap.py#L12789-L12797
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/win32comext/axdebug/gateways.py
python
DebugDocumentContext.GetDocument
(self)
Return value must be a PyIDebugDocument object
Return value must be a PyIDebugDocument object
[ "Return", "value", "must", "be", "a", "PyIDebugDocument", "object" ]
def GetDocument(self): """Return value must be a PyIDebugDocument object """ RaiseNotImpl("GetDocument")
[ "def", "GetDocument", "(", "self", ")", ":", "RaiseNotImpl", "(", "\"GetDocument\"", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/site-packages/win32comext/axdebug/gateways.py#L235-L238
pyne/pyne
0c2714d7c0d1b5e20be6ae6527da2c660dd6b1b3
pyne/mcnp.py
python
Xsdir.find_table
(self, name)
return tables
Find all tables for a given ZAID. Parameters ---------- name : str The ZAID name. Returns ------- tables : list All XsdirTable objects for a given ZAID.
Find all tables for a given ZAID.
[ "Find", "all", "tables", "for", "a", "given", "ZAID", "." ]
def find_table(self, name): """Find all tables for a given ZAID. Parameters ---------- name : str The ZAID name. Returns ------- tables : list All XsdirTable objects for a given ZAID. """ tables = [] for table in self: if name in table.name: tables.append(table) return tables
[ "def", "find_table", "(", "self", ",", "name", ")", ":", "tables", "=", "[", "]", "for", "table", "in", "self", ":", "if", "name", "in", "table", ".", "name", ":", "tables", ".", "append", "(", "table", ")", "return", "tables" ]
https://github.com/pyne/pyne/blob/0c2714d7c0d1b5e20be6ae6527da2c660dd6b1b3/pyne/mcnp.py#L821-L839
metashell/metashell
f4177e4854ea00c8dbc722cadab26ef413d798ea
3rd/templight/clang/tools/scan-build-py/libscanbuild/report.py
python
reindent
(text, indent)
return result
Utility function to format html output and keep indentation.
Utility function to format html output and keep indentation.
[ "Utility", "function", "to", "format", "html", "output", "and", "keep", "indentation", "." ]
def reindent(text, indent): """ Utility function to format html output and keep indentation. """ result = '' for line in text.splitlines(): if len(line.strip()): result += ' ' * indent + line.split('|')[1] + os.linesep return result
[ "def", "reindent", "(", "text", ",", "indent", ")", ":", "result", "=", "''", "for", "line", "in", "text", ".", "splitlines", "(", ")", ":", "if", "len", "(", "line", ".", "strip", "(", ")", ")", ":", "result", "+=", "' '", "*", "indent", "+", "line", ".", "split", "(", "'|'", ")", "[", "1", "]", "+", "os", ".", "linesep", "return", "result" ]
https://github.com/metashell/metashell/blob/f4177e4854ea00c8dbc722cadab26ef413d798ea/3rd/templight/clang/tools/scan-build-py/libscanbuild/report.py#L461-L468
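reindent keeps only what follows the first '|' on each non-blank line and prefixes it with `indent` spaces; a quick sketch of the expected input shape (content is illustrative):

    template = """
        |<a href="report.html">
        |  details
        |</a>"""
    print(reindent(template, indent=4))
    # prints, each line indented four spaces:
    #     <a href="report.html">
    #       details
    #     </a>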
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/build/waf-1.7.13/waflib/Runner.py
python
TaskConsumer.loop
(self)
Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call :py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call :py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it.
[ "Obtain", "tasks", "from", ":", "py", ":", "attr", ":", "waflib", ".", "Runner", ".", "TaskConsumer", ".", "ready", "and", "call", ":", "py", ":", "meth", ":", "waflib", ".", "Task", ".", "TaskBase", ".", "process", ".", "If", "the", "object", "is", "a", "function", "execute", "it", "." ]
def loop(self): """ Obtain tasks from :py:attr:`waflib.Runner.TaskConsumer.ready` and call :py:meth:`waflib.Task.TaskBase.process`. If the object is a function, execute it. """ while 1: tsk = self.ready.get() if not isinstance(tsk, Task.TaskBase): tsk(self) else: tsk.process()
[ "def", "loop", "(", "self", ")", ":", "while", "1", ":", "tsk", "=", "self", ".", "ready", ".", "get", "(", ")", "if", "not", "isinstance", "(", "tsk", ",", "Task", ".", "TaskBase", ")", ":", "tsk", "(", "self", ")", "else", ":", "tsk", ".", "process", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/waflib/Runner.py#L45-L55
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/numpy/multiarray.py
python
atleast_1d
(*arys)
return _mx_nd_np.atleast_1d(*res)
Convert inputs to arrays with at least one dimension. Scalar inputs are converted to 1-dimensional arrays, whilst higher-dimensional inputs are preserved. Parameters ---------- arys1, arys2, ... : ndarray One or more input arrays. Returns ------- ret : ndarray An array, or list of arrays, each with a.ndim >= 1. Copies are made only if necessary. See also -------- atleast_2d, atleast_3d Examples -------- >>> np.atleast_1d(1.0) array([1.]) >>> x = np.arange(9.0).reshape(3,3) >>> np.atleast_1d(x) array([[0., 1., 2.], [3., 4., 5.], [6., 7., 8.]]) >>> np.atleast_1d(np.array(1), np.array([3, 4])) [array([1.]), array([3., 4.])]
Convert inputs to arrays with at least one dimension.
[ "Convert", "inputs", "to", "arrays", "with", "at", "least", "one", "dimension", "." ]
def atleast_1d(*arys): """ Convert inputs to arrays with at least one dimension. Scalar inputs are converted to 1-dimensional arrays, whilst higher-dimensional inputs are preserved. Parameters ---------- arys1, arys2, ... : ndarray One or more input arrays. Returns ------- ret : ndarray An array, or list of arrays, each with a.ndim >= 1. Copies are made only if necessary. See also -------- atleast_2d, atleast_3d Examples -------- >>> np.atleast_1d(1.0) array([1.]) >>> x = np.arange(9.0).reshape(3,3) >>> np.atleast_1d(x) array([[0., 1., 2.], [3., 4., 5.], [6., 7., 8.]]) >>> np.atleast_1d(np.array(1), np.array([3, 4])) [array([1.]), array([3., 4.])] """ res = [] for ary in arys: if not isinstance(ary, NDArray): ary = array(ary) res.append(ary) return _mx_nd_np.atleast_1d(*res)
[ "def", "atleast_1d", "(", "*", "arys", ")", ":", "res", "=", "[", "]", "for", "ary", "in", "arys", ":", "if", "not", "isinstance", "(", "ary", ",", "NDArray", ")", ":", "ary", "=", "array", "(", "ary", ")", "res", ".", "append", "(", "ary", ")", "return", "_mx_nd_np", ".", "atleast_1d", "(", "*", "res", ")" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/numpy/multiarray.py#L12407-L12444
0ad/0ad
f58db82e0e925016d83f4e3fa7ca599e3866e2af
source/tools/i18n/extractors/jslexer.py
python
indicates_division
(token)
return token.type in ('name', 'number', 'string', 'regexp')
A helper function that helps the tokenizer to decide if the current token may be followed by a division operator.
A helper function that helps the tokenizer to decide if the current token may be followed by a division operator.
[ "A", "helper", "function", "that", "helps", "the", "tokenizer", "to", "decide", "if", "the", "current", "token", "may", "be", "followed", "by", "a", "division", "operator", "." ]
def indicates_division(token): """A helper function that helps the tokenizer to decide if the current token may be followed by a division operator. """ if token.type == 'operator': return token.value in (')', ']', '}', '++', '--') return token.type in ('name', 'number', 'string', 'regexp')
[ "def", "indicates_division", "(", "token", ")", ":", "if", "token", ".", "type", "==", "'operator'", ":", "return", "token", ".", "value", "in", "(", "')'", ",", "']'", ",", "'}'", ",", "'++'", ",", "'--'", ")", "return", "token", ".", "type", "in", "(", "'name'", ",", "'number'", ",", "'string'", ",", "'regexp'", ")" ]
https://github.com/0ad/0ad/blob/f58db82e0e925016d83f4e3fa7ca599e3866e2af/source/tools/i18n/extractors/jslexer.py#L83-L89
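A quick check of the decision table, assuming the lexer's Token is a plain (type, value, lineno) tuple as elsewhere in this module:

    from collections import namedtuple

    Token = namedtuple('Token', 'type value lineno')  # assumed shape

    assert indicates_division(Token('number', '42', 1))       # 42 / 2 is division
    assert indicates_division(Token('operator', ')', 1))      # (a + b) / 2 as well
    assert not indicates_division(Token('operator', '=', 1))  # x = /re/ is a regexp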
SIPp/sipp
f44d0cf5dec0013eff8fd7b4da885d455aa82e0e
cpplint.py
python
CheckEmptyLoopBody
(filename, clean_lines, linenum, error)
Looks for an empty loop body with only a single semicolon. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Looks for an empty loop body with only a single semicolon.
[ "Looks", "for", "an", "empty", "loop", "body", "with", "only", "a", "single", "semicolon", "." ]
def CheckEmptyLoopBody(filename, clean_lines, linenum, error): """Looks for an empty loop body with only a single semicolon. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ # Search for loop keywords at the beginning of the line. Because only # whitespaces are allowed before the keywords, this will also ignore most # do-while-loops, since those lines should start with closing brace. line = clean_lines.elided[linenum] if Match(r'\s*(for|while)\s*\(', line): # Find the end of the conditional expression (end_line, end_linenum, end_pos) = CloseExpression( clean_lines, linenum, line.find('(')) # Output warning if what follows the condition expression is a semicolon. # No warning for all other cases, including whitespace or newline, since we # have a separate check for semicolons preceded by whitespace. if end_pos >= 0 and Match(r';', end_line[end_pos:]): error(filename, end_linenum, 'whitespace/empty_loop_body', 5, 'Empty loop bodies should use {} or continue')
[ "def", "CheckEmptyLoopBody", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "# Search for loop keywords at the beginning of the line. Because only", "# whitespaces are allowed before the keywords, this will also ignore most", "# do-while-loops, since those lines should start with closing brace.", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "if", "Match", "(", "r'\\s*(for|while)\\s*\\('", ",", "line", ")", ":", "# Find the end of the conditional expression", "(", "end_line", ",", "end_linenum", ",", "end_pos", ")", "=", "CloseExpression", "(", "clean_lines", ",", "linenum", ",", "line", ".", "find", "(", "'('", ")", ")", "# Output warning if what follows the condition expression is a semicolon.", "# No warning for all other cases, including whitespace or newline, since we", "# have a separate check for semicolons preceded by whitespace.", "if", "end_pos", ">=", "0", "and", "Match", "(", "r';'", ",", "end_line", "[", "end_pos", ":", "]", ")", ":", "error", "(", "filename", ",", "end_linenum", ",", "'whitespace/empty_loop_body'", ",", "5", ",", "'Empty loop bodies should use {} or continue'", ")" ]
https://github.com/SIPp/sipp/blob/f44d0cf5dec0013eff8fd7b4da885d455aa82e0e/cpplint.py#L2643-L2667
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/debug/lib/debug_graphs.py
python
DebugGraph._prune_nodes_from_input_and_recipient_maps
(self, nodes_to_prune)
Prune nodes out of input and recipient maps. Args: nodes_to_prune: (`list` of `str`) Names of the nodes to be pruned.
Prune nodes out of input and recipient maps.
[ "Prune", "nodes", "out", "of", "input", "and", "recipient", "maps", "." ]
def _prune_nodes_from_input_and_recipient_maps(self, nodes_to_prune): """Prune nodes out of input and recipient maps. Args: nodes_to_prune: (`list` of `str`) Names of the nodes to be pruned. """ for node in nodes_to_prune: del self._node_inputs[node] del self._node_ctrl_inputs[node] del self._node_recipients[node] del self._node_ctrl_recipients[node]
[ "def", "_prune_nodes_from_input_and_recipient_maps", "(", "self", ",", "nodes_to_prune", ")", ":", "for", "node", "in", "nodes_to_prune", ":", "del", "self", ".", "_node_inputs", "[", "node", "]", "del", "self", ".", "_node_ctrl_inputs", "[", "node", "]", "del", "self", ".", "_node_recipients", "[", "node", "]", "del", "self", ".", "_node_ctrl_recipients", "[", "node", "]" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/debug/lib/debug_graphs.py#L391-L401
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/stc.py
python
StyledTextCtrl.StyleSetFaceName
(*args, **kwargs)
return _stc.StyledTextCtrl_StyleSetFaceName(*args, **kwargs)
StyleSetFaceName(self, int style, String fontName) Set the font of a style.
StyleSetFaceName(self, int style, String fontName)
[ "StyleSetFaceName", "(", "self", "int", "style", "String", "fontName", ")" ]
def StyleSetFaceName(*args, **kwargs): """ StyleSetFaceName(self, int style, String fontName) Set the font of a style. """ return _stc.StyledTextCtrl_StyleSetFaceName(*args, **kwargs)
[ "def", "StyleSetFaceName", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_StyleSetFaceName", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/stc.py#L2554-L2560
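A minimal wxPython sketch (untested; the face name is Windows-specific and purely illustrative):

    import wx
    import wx.stc as stc

    app = wx.App(False)
    frame = wx.Frame(None, title="StyledTextCtrl demo")
    editor = stc.StyledTextCtrl(frame)
    editor.StyleSetFaceName(stc.STC_STYLE_DEFAULT, "Courier New")
    frame.Show()
    app.MainLoop()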
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2class.py
python
parserCtxt.parseVersionNum
(self)
return ret
parse the XML version value. [26] VersionNum ::= '1.' [0-9]+ In practice allow [0-9].[0-9]+ at that level
parse the XML version value. [26] VersionNum ::= '1.' [0-9]+ In practice allow [0-9].[0-9]+ at that level
[ "parse", "the", "XML", "version", "value", ".", "[", "26", "]", "VersionNum", "::", "=", "1", ".", "[", "0", "-", "9", "]", "+", "In", "practice", "allow", "[", "0", "-", "9", "]", ".", "[", "0", "-", "9", "]", "+", "at", "that", "level" ]
def parseVersionNum(self): """parse the XML version value. [26] VersionNum ::= '1.' [0-9]+ In practice allow [0-9].[0-9]+ at that level """ ret = libxml2mod.xmlParseVersionNum(self._o) return ret
[ "def", "parseVersionNum", "(", "self", ")", ":", "ret", "=", "libxml2mod", ".", "xmlParseVersionNum", "(", "self", ".", "_o", ")", "return", "ret" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2class.py#L4689-L4693
GJDuck/LowFat
ecf6a0f0fa1b73a27a626cf493cc39e477b6faea
llvm-4.0.0.src/utils/llvm-build/llvmbuild/configutil.py
python
configure_file
(input_path, output_path, substitutions)
return True
configure_file(input_path, output_path, substitutions) -> bool Given an input and output path, "configure" the file at the given input path by replacing variables in the file with those given in the substitutions list. Returns true if the output file was written. The substitutions list should be given as a list of tuples (regex string, replacement), where the regex and replacement will be used as in 're.sub' to execute the variable replacement. The output path's parent directory need not exist (it will be created). If the output path does exist and the configured data is not different from its current contents, the output file will not be modified. This is designed to limit the impact of configured files on build dependencies.
configure_file(input_path, output_path, substitutions) -> bool
[ "configure_file", "(", "input_path", "output_path", "substitutions", ")", "-", ">", "bool" ]
def configure_file(input_path, output_path, substitutions): """configure_file(input_path, output_path, substitutions) -> bool Given an input and output path, "configure" the file at the given input path by replacing variables in the file with those given in the substitutions list. Returns true if the output file was written. The substitutions list should be given as a list of tuples (regex string, replacement), where the regex and replacement will be used as in 're.sub' to execute the variable replacement. The output path's parent directory need not exist (it will be created). If the output path does exist and the configured data is not different from its current contents, the output file will not be modified. This is designed to limit the impact of configured files on build dependencies. """ # Read in the input data. f = open(input_path, "rb") try: data = f.read() finally: f.close() # Perform the substitutions. for regex_string,replacement in substitutions: regex = re.compile(regex_string) data = regex.sub(replacement, data) # Ensure the output parent directory exists. output_parent_path = os.path.dirname(os.path.abspath(output_path)) if not os.path.exists(output_parent_path): os.makedirs(output_parent_path) # If the output path exists, load it and compare to the configured contents. if os.path.exists(output_path): current_data = None try: f = open(output_path, "rb") try: current_data = f.read() except: current_data = None f.close() except: current_data = None if current_data is not None and current_data == data: return False # Write the output contents. f = open(output_path, "wb") try: f.write(data) finally: f.close() return True
[ "def", "configure_file", "(", "input_path", ",", "output_path", ",", "substitutions", ")", ":", "# Read in the input data.", "f", "=", "open", "(", "input_path", ",", "\"rb\"", ")", "try", ":", "data", "=", "f", ".", "read", "(", ")", "finally", ":", "f", ".", "close", "(", ")", "# Perform the substitutions.", "for", "regex_string", ",", "replacement", "in", "substitutions", ":", "regex", "=", "re", ".", "compile", "(", "regex_string", ")", "data", "=", "regex", ".", "sub", "(", "replacement", ",", "data", ")", "# Ensure the output parent directory exists.", "output_parent_path", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "output_path", ")", ")", "if", "not", "os", ".", "path", ".", "exists", "(", "output_parent_path", ")", ":", "os", ".", "makedirs", "(", "output_parent_path", ")", "# If the output path exists, load it and compare to the configured contents.", "if", "os", ".", "path", ".", "exists", "(", "output_path", ")", ":", "current_data", "=", "None", "try", ":", "f", "=", "open", "(", "output_path", ",", "\"rb\"", ")", "try", ":", "current_data", "=", "f", ".", "read", "(", ")", "except", ":", "current_data", "=", "None", "f", ".", "close", "(", ")", "except", ":", "current_data", "=", "None", "if", "current_data", "is", "not", "None", "and", "current_data", "==", "data", ":", "return", "False", "# Write the output contents.", "f", "=", "open", "(", "output_path", ",", "\"wb\"", ")", "try", ":", "f", ".", "write", "(", "data", ")", "finally", ":", "f", ".", "close", "(", ")", "return", "True" ]
https://github.com/GJDuck/LowFat/blob/ecf6a0f0fa1b73a27a626cf493cc39e477b6faea/llvm-4.0.0.src/utils/llvm-build/llvmbuild/configutil.py#L8-L66
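A usage sketch with hypothetical file names and placeholders; each (regex, replacement) pair is applied with re.sub over the whole file contents (Python 2-era code: the file is read as bytes, so patterns, replacements, and contents are all byte strings there):

    substitutions = [
        (r'@LLVM_VERSION@', '4.0.0'),
        (r'@BUILD_MODE@', 'Release'),
    ]
    wrote = configure_file('config.h.in', 'config.h', substitutions)
    print('written' if wrote else 'already up to date')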
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemPlayerAccount/AWS/resource-manager-code/pa_service_api.py
python
command_reset_player_password
(context, args)
Reset a player's password
Reset a player's password
[ "Reset", "a", "player", "s", "password" ]
def command_reset_player_password(context, args) -> None: """ Reset a player's password """ client = __get_service_api_client(context, args) try: response = client.navigate('admin', 'identityProviders', 'Cognito', 'users', __encode_query_parameter(args.username), 'resetUserPassword').POST(body=None) pprint.pprint(response.DATA) except cgf_service_client.error.NotFoundError as nfe: print(f"[Error] {args.username} not found. {str(nfe)}")
[ "def", "command_reset_player_password", "(", "context", ",", "args", ")", "->", "None", ":", "client", "=", "__get_service_api_client", "(", "context", ",", "args", ")", "try", ":", "response", "=", "client", ".", "navigate", "(", "'admin'", ",", "'identityProviders'", ",", "'Cognito'", ",", "'users'", ",", "__encode_query_parameter", "(", "args", ".", "username", ")", ",", "'resetUserPassword'", ")", ".", "POST", "(", "body", "=", "None", ")", "pprint", ".", "pprint", "(", "response", ".", "DATA", ")", "except", "cgf_service_client", ".", "error", ".", "NotFoundError", "as", "nfe", ":", "print", "(", "f\"[Error] {args.username} not found. {str(nfe)}\"", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemPlayerAccount/AWS/resource-manager-code/pa_service_api.py#L181-L191
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/contrib/slim/python/slim/data/data_provider.py
python
DataProvider._validate_items
(self, items)
Verifies that each given item is a member of the list from ListItems(). Args: items: a list or tuple of strings. Raises: ValueError: if `items` is not a tuple or list or if any of the elements of `items` is not found in the list provided by self.ListItems().
Verifies that each given item is a member of the list from ListItems().
[ "Verifies", "that", "each", "given", "item", "is", "a", "member", "of", "the", "list", "from", "ListItems", "()", "." ]
def _validate_items(self, items): """Verifies that each given item is a member of the list from ListItems(). Args: items: a list or tuple of strings. Raises: ValueError: if `items` is not a tuple or list or if any of the elements of `items` is not found in the list provided by self.ListItems(). """ if not isinstance(items, (list, tuple)): raise ValueError('items must be a list or tuple') valid_items = self.list_items() for item in items: if item not in valid_items: raise ValueError( 'Item [%s] is invalid. Valid entries include: %s' % (item, valid_items))
[ "def", "_validate_items", "(", "self", ",", "items", ")", ":", "if", "not", "isinstance", "(", "items", ",", "(", "list", ",", "tuple", ")", ")", ":", "raise", "ValueError", "(", "'items must be a list or tuple'", ")", "valid_items", "=", "self", ".", "list_items", "(", ")", "for", "item", "in", "items", ":", "if", "item", "not", "in", "valid_items", ":", "raise", "ValueError", "(", "'Item [%s] is invalid. Valid entries include: %s'", "%", "(", "item", ",", "valid_items", ")", ")" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/contrib/slim/python/slim/data/data_provider.py#L99-L117
tkn-tub/ns3-gym
19bfe0a583e641142609939a090a09dfc63a095f
utils/check-style.py
python
PatchChunkLine.set_src
(self,line)
! Set source @param self The current class @param line source line @return none
! Set source
[ "!", "Set", "source" ]
def set_src(self,line): """! Set source @param self The current class @param line source line @return none """ self.__type = self.SRC self.__line = line
[ "def", "set_src", "(", "self", ",", "line", ")", ":", "self", ".", "__type", "=", "self", ".", "SRC", "self", ".", "__line", "=", "line" ]
https://github.com/tkn-tub/ns3-gym/blob/19bfe0a583e641142609939a090a09dfc63a095f/utils/check-style.py#L155-L162
ROCmSoftwarePlatform/hipCaffe
4ec5d482515cce532348553b6db6d00d015675d5
scripts/cpp_lint.py
python
_SetOutputFormat
(output_format)
Sets the module's output format.
Sets the module's output format.
[ "Sets", "the", "module", "s", "output", "format", "." ]
def _SetOutputFormat(output_format): """Sets the module's output format.""" _cpplint_state.SetOutputFormat(output_format)
[ "def", "_SetOutputFormat", "(", "output_format", ")", ":", "_cpplint_state", ".", "SetOutputFormat", "(", "output_format", ")" ]
https://github.com/ROCmSoftwarePlatform/hipCaffe/blob/4ec5d482515cce532348553b6db6d00d015675d5/scripts/cpp_lint.py#L772-L774
eventql/eventql
7ca0dbb2e683b525620ea30dc40540a22d5eb227
deps/3rdparty/spidermonkey/mozjs/config/configobj.py
python
Section.popitem
(self)
return key, val
Pops the first (key,val)
Pops the first (key,val)
[ "Pops", "the", "first", "(", "key", "val", ")" ]
def popitem(self): """Pops the first (key,val)""" sequence = (self.scalars + self.sections) if not sequence: raise KeyError, ": 'popitem(): dictionary is empty'" key = sequence[0] val = self[key] del self[key] return key, val
[ "def", "popitem", "(", "self", ")", ":", "sequence", "=", "(", "self", ".", "scalars", "+", "self", ".", "sections", ")", "if", "not", "sequence", ":", "raise", "KeyError", ",", "\": 'popitem(): dictionary is empty'\"", "key", "=", "sequence", "[", "0", "]", "val", "=", "self", "[", "key", "]", "del", "self", "[", "key", "]", "return", "key", ",", "val" ]
https://github.com/eventql/eventql/blob/7ca0dbb2e683b525620ea30dc40540a22d5eb227/deps/3rdparty/spidermonkey/mozjs/config/configobj.py#L634-L642
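A sketch against the configobj API this file vendors; a Section is an ordered mapping whose scalars precede subsections, so popitem removes and returns the first scalar:

    from configobj import ConfigObj  # Section is its base class

    config = ConfigObj()
    config['name'] = 'demo'
    config['retries'] = '3'
    key, val = config.popitem()
    print(key, val)  # name demo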
mhammond/pywin32
44afd86ba8485194df93234639243252deeb40d5
Pythonwin/pywin/mfc/dialog.py
python
PropertySheet.DoAddSinglePage
(self, page)
Page may be page, or int ID. Assumes DLL setup
Page may be page, or int ID. Assumes DLL setup
[ "Page", "may", "be", "page", "or", "int", "ID", ".", "Assumes", "DLL", "setup" ]
def DoAddSinglePage(self, page): "Page may be page, or int ID. Assumes DLL setup" if type(page) == type(0): self.sheet.AddPage(win32ui.CreatePropertyPage(page)) else: self.sheet.AddPage(page)
[ "def", "DoAddSinglePage", "(", "self", ",", "page", ")", ":", "if", "type", "(", "page", ")", "==", "type", "(", "0", ")", ":", "self", ".", "sheet", ".", "AddPage", "(", "win32ui", ".", "CreatePropertyPage", "(", "page", ")", ")", "else", ":", "self", ".", "sheet", ".", "AddPage", "(", "page", ")" ]
https://github.com/mhammond/pywin32/blob/44afd86ba8485194df93234639243252deeb40d5/Pythonwin/pywin/mfc/dialog.py#L244-L249
ArduPilot/ardupilot
6e684b3496122b8158ac412b609d00004b7ac306
libraries/AP_MSP/Tools/msposd.py
python
display_text
(item, message, x_offset=0)
display a string message for an item
display a string message for an item
[ "display", "a", "string", "message", "for", "an", "item" ]
def display_text(item, message, x_offset=0): '''display a string message for an item''' XY = item_to_pos(item) if XY is None: return (X,Y) = XY text = font.render(message, True, white, black) textRect = text.get_rect() slen = len(message) px = X * FontWidth + x_offset py = Y * FontHeight textRect.center = (px+textRect.width//2, py+textRect.height//2) display_surface.blit(text, textRect)
[ "def", "display_text", "(", "item", ",", "message", ",", "x_offset", "=", "0", ")", ":", "XY", "=", "item_to_pos", "(", "item", ")", "if", "XY", "is", "None", ":", "return", "(", "X", ",", "Y", ")", "=", "XY", "text", "=", "font", ".", "render", "(", "message", ",", "True", ",", "white", ",", "black", ")", "textRect", "=", "text", ".", "get_rect", "(", ")", "slen", "=", "len", "(", "message", ")", "px", "=", "X", "*", "FontWidth", "+", "x_offset", "py", "=", "Y", "*", "FontHeight", "textRect", ".", "center", "=", "(", "px", "+", "textRect", ".", "width", "//", "2", ",", "py", "+", "textRect", ".", "height", "//", "2", ")", "display_surface", ".", "blit", "(", "text", ",", "textRect", ")" ]
https://github.com/ArduPilot/ardupilot/blob/6e684b3496122b8158ac412b609d00004b7ac306/libraries/AP_MSP/Tools/msposd.py#L85-L98
Kitware/ParaView
f760af9124ff4634b23ebbeab95a4f56e0261955
Plugins/pvblot/blotish.py
python
rotate
(*rotations)
Rotates the 3D mesh. Each (axis, ndeg) parameter pair specifies an axis of rotation (x, y, or z) or (elevation, azimuth, or roll) and the number of degrees to rotate. The axes have the following meanings: * x or elevation: rotate the camera around the focal point in the horizontal direction. * y or azimuth: rotate the camera around the focal point in the vertical direction. * z or roll: roll the camera about the view direction. The identifiers elevation, azimuth, and roll can be abbreviated with any unique prefix.
Rotates the 3D mesh. Each (axis, ndeg) parameter pair specifies an axis of rotation (x, y, or z) or (elevation, azimuth, or roll) and the number of degrees to rotate. The axes have the following meanings:
[ "Rotates", "the", "3D", "mesh", ".", "Each", "(", "axis", "ndeg", ")", "parameter", "pair", "specifies", "an", "axis", "of", "rotation", "(", "x", "y", "or", "z", ")", "or", "(", "elevation", "azimuth", "or", "roll", ")", "and", "the", "number", "of", "degrees", "to", "rotate", ".", "The", "axes", "have", "the", "following", "meanings", ":" ]
def rotate(*rotations): """ Rotates the 3D mesh. Each (axis, ndeg) parameter pair specifies an axis of rotation (x, y, or z) or (elevation, azimuth, or roll) and the number of degrees to rotate. The axes have the following meanings: * x or elevation: rotate the camera around the focal point in the horizontal direction. * y or azimuth: rotate the camera around the focal point in the vertical direction. * z or roll: roll the camera about the view direction. The identifiers elevation, azimuth, and roll can be abbreviated with any unique prefix. """ camera = paraview.simple.GetActiveCamera() # Some manging to rotation into list of pairs. rpairs = zip(rotations[::2], rotations[1::2]) for axis, degrees in rpairs: axis = axis.lower() degrees = float(degrees) if axis == x or elevation.startswith(axis): sign = degrees/abs(degrees) degrees = abs(degrees) while degrees > 0: d = min(45, degrees) degrees = degrees - d camera.Elevation(sign*d) camera.OrthogonalizeViewUp() elif axis == y or azimuth.startswith(axis): camera.Azimuth(-degrees) elif axis == z or roll.startswith(axis): camera.Roll(degrees) else: print "Unknown axis: ", axis _finish_plot_change()
[ "def", "rotate", "(", "*", "rotations", ")", ":", "camera", "=", "paraview", ".", "simple", ".", "GetActiveCamera", "(", ")", "# Some manging to rotation into list of pairs.", "rpairs", "=", "zip", "(", "rotations", "[", ":", ":", "2", "]", ",", "rotations", "[", "1", ":", ":", "2", "]", ")", "for", "axis", ",", "degrees", "in", "rpairs", ":", "axis", "=", "axis", ".", "lower", "(", ")", "degrees", "=", "float", "(", "degrees", ")", "if", "axis", "==", "x", "or", "elevation", ".", "startswith", "(", "axis", ")", ":", "sign", "=", "degrees", "/", "abs", "(", "degrees", ")", "degrees", "=", "abs", "(", "degrees", ")", "while", "degrees", ">", "0", ":", "d", "=", "min", "(", "45", ",", "degrees", ")", "degrees", "=", "degrees", "-", "d", "camera", ".", "Elevation", "(", "sign", "*", "d", ")", "camera", ".", "OrthogonalizeViewUp", "(", ")", "elif", "axis", "==", "y", "or", "azimuth", ".", "startswith", "(", "axis", ")", ":", "camera", ".", "Azimuth", "(", "-", "degrees", ")", "elif", "axis", "==", "z", "or", "roll", ".", "startswith", "(", "axis", ")", ":", "camera", ".", "Roll", "(", "degrees", ")", "else", ":", "print", "\"Unknown axis: \"", ",", "axis", "_finish_plot_change", "(", ")" ]
https://github.com/Kitware/ParaView/blob/f760af9124ff4634b23ebbeab95a4f56e0261955/Plugins/pvblot/blotish.py#L981-L1018
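The axis dispatch above compares against bare names (x, elevation, azimuth, roll, ...) that must be defined elsewhere in blotish.py; a self-contained sketch of the same prefix matching, assuming those names are simply the corresponding strings:

    def match_axis(axis):
        """Map an axis spelling, or any unique prefix of it, to a rotation kind."""
        axis = axis.lower()
        if not axis:
            raise ValueError('empty axis')
        if axis == 'x' or 'elevation'.startswith(axis):
            return 'elevation'
        elif axis == 'y' or 'azimuth'.startswith(axis):
            return 'azimuth'
        elif axis == 'z' or 'roll'.startswith(axis):
            return 'roll'
        raise ValueError('Unknown axis: %s' % axis)

    assert match_axis('ELEV') == 'elevation'
    assert match_axis('y') == 'azimuth'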
apiaryio/snowcrash
b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3
tools/gyp/pylib/gyp/xcodeproj_file.py
python
XCConfigurationList.HasBuildSetting
(self, key)
return 1
Determines the state of a build setting in all XCBuildConfiguration child objects. If all child objects have key in their build settings, and the value is the same in all child objects, returns 1. If no child objects have the key in their build settings, returns 0. If some, but not all, child objects have the key in their build settings, or if any children have different values for the key, returns -1.
Determines the state of a build setting in all XCBuildConfiguration child objects.
[ "Determines", "the", "state", "of", "a", "build", "setting", "in", "all", "XCBuildConfiguration", "child", "objects", "." ]
def HasBuildSetting(self, key): """Determines the state of a build setting in all XCBuildConfiguration child objects. If all child objects have key in their build settings, and the value is the same in all child objects, returns 1. If no child objects have the key in their build settings, returns 0. If some, but not all, child objects have the key in their build settings, or if any children have different values for the key, returns -1. """ has = None value = None for configuration in self._properties['buildConfigurations']: configuration_has = configuration.HasBuildSetting(key) if has is None: has = configuration_has elif has != configuration_has: return -1 if configuration_has: configuration_value = configuration.GetBuildSetting(key) if value is None: value = configuration_value elif value != configuration_value: return -1 if not has: return 0 return 1
[ "def", "HasBuildSetting", "(", "self", ",", "key", ")", ":", "has", "=", "None", "value", "=", "None", "for", "configuration", "in", "self", ".", "_properties", "[", "'buildConfigurations'", "]", ":", "configuration_has", "=", "configuration", ".", "HasBuildSetting", "(", "key", ")", "if", "has", "is", "None", ":", "has", "=", "configuration_has", "elif", "has", "!=", "configuration_has", ":", "return", "-", "1", "if", "configuration_has", ":", "configuration_value", "=", "configuration", ".", "GetBuildSetting", "(", "key", ")", "if", "value", "is", "None", ":", "value", "=", "configuration_value", "elif", "value", "!=", "configuration_value", ":", "return", "-", "1", "if", "not", "has", ":", "return", "0", "return", "1" ]
https://github.com/apiaryio/snowcrash/blob/b5b39faa85f88ee17459edf39fdc6fe4fc70d2e3/tools/gyp/pylib/gyp/xcodeproj_file.py#L1617-L1649
google/clif
cab24d6a105609a65c95a36a1712ae3c20c7b5df
clif/python/pyext.py
python
Module.GenerateBase
(self, ast, more_headers)
Extension module generation.
Extension module generation.
[ "Extension", "module", "generation", "." ]
def GenerateBase(self, ast, more_headers): """Extension module generation.""" ast_manipulations.MoveExtendsBackIntoClassesInPlace(ast) self.init += ast.extra_init for s in gen.Headlines( ast.source, [ 'PYTHON', 'absl/memory/memory.h', 'absl/types/optional.h' ] + more_headers + # Container templates calling PyObj* go last. [ 'clif/python/stltypes.h', 'clif/python/slots.h' ], open_ns=self.wrap_namespace): yield s yield '' yield 'using namespace clif;' yield '' yield 'static const char* ThisModuleName = "%s";' % self.path for s in postconv.GenPostConvTable(self.typemap): yield s if astutils.HaveEnum(ast.decls): yield '' yield 'static PyObject *_Enum{}, *_IntEnum{}; // set below in Init()' self.catch_cpp_exceptions = ast.catch_exceptions for d in ast.decls: for s in self.WrapDecl(d): yield s assert not self.nested, 'decl stack not exhausted (in GenBase)' yield '' yield '// Initialize module' if self.methods: for s in gen.MethodDef(self.methods): yield s for s in self.GenTypesReady(): # extends self.init yield s for s in self.GenInitFunction(ast.source): # consumes self.init yield s yield '' yield '} // namespace %s' % self.wrap_namespace if self.types: # Assumed we always want a deterministic output. Since dict/set order # is not, do sorted() order traversal. # # Save sorted types for GenerateHeader. self.types = sorted(self.types, key=types.Order) for ns, ts in itertools.groupby(self.types, types.Namespace): for s in gen.TypeConverters(ns, ts, self.wrap_namespace): yield s if self.static_init: for s in gen.PyModInitFunction( init_name=self.static_init, ns=self.wrap_namespace): yield s
[ "def", "GenerateBase", "(", "self", ",", "ast", ",", "more_headers", ")", ":", "ast_manipulations", ".", "MoveExtendsBackIntoClassesInPlace", "(", "ast", ")", "self", ".", "init", "+=", "ast", ".", "extra_init", "for", "s", "in", "gen", ".", "Headlines", "(", "ast", ".", "source", ",", "[", "'PYTHON'", ",", "'absl/memory/memory.h'", ",", "'absl/types/optional.h'", "]", "+", "more_headers", "+", "# Container templates calling PyObj* go last.", "[", "'clif/python/stltypes.h'", ",", "'clif/python/slots.h'", "]", ",", "open_ns", "=", "self", ".", "wrap_namespace", ")", ":", "yield", "s", "yield", "''", "yield", "'using namespace clif;'", "yield", "''", "yield", "'static const char* ThisModuleName = \"%s\";'", "%", "self", ".", "path", "for", "s", "in", "postconv", ".", "GenPostConvTable", "(", "self", ".", "typemap", ")", ":", "yield", "s", "if", "astutils", ".", "HaveEnum", "(", "ast", ".", "decls", ")", ":", "yield", "''", "yield", "'static PyObject *_Enum{}, *_IntEnum{}; // set below in Init()'", "self", ".", "catch_cpp_exceptions", "=", "ast", ".", "catch_exceptions", "for", "d", "in", "ast", ".", "decls", ":", "for", "s", "in", "self", ".", "WrapDecl", "(", "d", ")", ":", "yield", "s", "assert", "not", "self", ".", "nested", ",", "'decl stack not exhausted (in GenBase)'", "yield", "''", "yield", "'// Initialize module'", "if", "self", ".", "methods", ":", "for", "s", "in", "gen", ".", "MethodDef", "(", "self", ".", "methods", ")", ":", "yield", "s", "for", "s", "in", "self", ".", "GenTypesReady", "(", ")", ":", "# extends self.init", "yield", "s", "for", "s", "in", "self", ".", "GenInitFunction", "(", "ast", ".", "source", ")", ":", "# consumes self.init", "yield", "s", "yield", "''", "yield", "'} // namespace %s'", "%", "self", ".", "wrap_namespace", "if", "self", ".", "types", ":", "# Assumed we always want a deterministic output. Since dict/set order", "# is not, do sorted() order traversal.", "#", "# Save sorted types for GenerateHeader.", "self", ".", "types", "=", "sorted", "(", "self", ".", "types", ",", "key", "=", "types", ".", "Order", ")", "for", "ns", ",", "ts", "in", "itertools", ".", "groupby", "(", "self", ".", "types", ",", "types", ".", "Namespace", ")", ":", "for", "s", "in", "gen", ".", "TypeConverters", "(", "ns", ",", "ts", ",", "self", ".", "wrap_namespace", ")", ":", "yield", "s", "if", "self", ".", "static_init", ":", "for", "s", "in", "gen", ".", "PyModInitFunction", "(", "init_name", "=", "self", ".", "static_init", ",", "ns", "=", "self", ".", "wrap_namespace", ")", ":", "yield", "s" ]
https://github.com/google/clif/blob/cab24d6a105609a65c95a36a1712ae3c20c7b5df/clif/python/pyext.py#L676-L731
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/re2/lib/codereview/codereview.py
python
AbstractRpcServer._CreateRequest
(self, url, data=None)
return req
Creates a new urllib request.
Creates a new urllib request.
[ "Creates", "a", "new", "urllib", "request", "." ]
def _CreateRequest(self, url, data=None): """Creates a new urllib request.""" logging.debug("Creating request for: '%s' with payload:\n%s", url, data) req = urllib2.Request(url, data=data) if self.host_override: req.add_header("Host", self.host_override) for key, value in self.extra_headers.iteritems(): req.add_header(key, value) return req
[ "def", "_CreateRequest", "(", "self", ",", "url", ",", "data", "=", "None", ")", ":", "logging", ".", "debug", "(", "\"Creating request for: '%s' with payload:\\n%s\"", ",", "url", ",", "data", ")", "req", "=", "urllib2", ".", "Request", "(", "url", ",", "data", "=", "data", ")", "if", "self", ".", "host_override", ":", "req", ".", "add_header", "(", "\"Host\"", ",", "self", ".", "host_override", ")", "for", "key", ",", "value", "in", "self", ".", "extra_headers", ".", "iteritems", "(", ")", ":", "req", ".", "add_header", "(", "key", ",", "value", ")", "return", "req" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/re2/lib/codereview/codereview.py#L2798-L2806
okex/V3-Open-API-SDK
c5abb0db7e2287718e0055e17e57672ce0ec7fd9
okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/distlib/locators.py
python
Locator.prefer_url
(self, url1, url2)
return result
Choose one of two URLs where both are candidates for distribution archives for the same version of a distribution (for example, .tar.gz vs. zip). The current implementation favours https:// URLs over http://, archives from PyPI over those from other locations, wheel compatibility (if a wheel) and then the archive name.
Choose one of two URLs where both are candidates for distribution archives for the same version of a distribution (for example, .tar.gz vs. zip).
[ "Choose", "one", "of", "two", "URLs", "where", "both", "are", "candidates", "for", "distribution", "archives", "for", "the", "same", "version", "of", "a", "distribution", "(", "for", "example", ".", "tar", ".", "gz", "vs", ".", "zip", ")", "." ]
def prefer_url(self, url1, url2): """ Choose one of two URLs where both are candidates for distribution archives for the same version of a distribution (for example, .tar.gz vs. zip). The current implementation favours https:// URLs over http://, archives from PyPI over those from other locations, wheel compatibility (if a wheel) and then the archive name. """ result = url2 if url1: s1 = self.score_url(url1) s2 = self.score_url(url2) if s1 > s2: result = url1 if result != url2: logger.debug('Not replacing %r with %r', url1, url2) else: logger.debug('Replacing %r with %r', url1, url2) return result
[ "def", "prefer_url", "(", "self", ",", "url1", ",", "url2", ")", ":", "result", "=", "url2", "if", "url1", ":", "s1", "=", "self", ".", "score_url", "(", "url1", ")", "s2", "=", "self", ".", "score_url", "(", "url2", ")", "if", "s1", ">", "s2", ":", "result", "=", "url1", "if", "result", "!=", "url2", ":", "logger", ".", "debug", "(", "'Not replacing %r with %r'", ",", "url1", ",", "url2", ")", "else", ":", "logger", ".", "debug", "(", "'Replacing %r with %r'", ",", "url1", ",", "url2", ")", "return", "result" ]
https://github.com/okex/V3-Open-API-SDK/blob/c5abb0db7e2287718e0055e17e57672ce0ec7fd9/okex-python-sdk-api/venv/Lib/site-packages/pip-19.0.3-py3.8.egg/pip/_vendor/distlib/locators.py#L203-L223
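A rough sketch (URLs are illustrative; Locator is normally used through its concrete subclasses, but prefer_url only needs score_url):

    from pip._vendor.distlib.locators import Locator

    loc = Locator()
    best = loc.prefer_url(
        'http://mirror.example.com/demo-1.0.zip',
        'https://files.pythonhosted.org/packages/demo-1.0.tar.gz')
    # per the docstring, the https PyPI-hosted archive is preferred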
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/grid.py
python
Grid.EnableDragCell
(*args, **kwargs)
return _grid.Grid_EnableDragCell(*args, **kwargs)
EnableDragCell(self, bool enable=True)
EnableDragCell(self, bool enable=True)
[ "EnableDragCell", "(", "self", "bool", "enable", "=", "True", ")" ]
def EnableDragCell(*args, **kwargs): """EnableDragCell(self, bool enable=True)""" return _grid.Grid_EnableDragCell(*args, **kwargs)
[ "def", "EnableDragCell", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_grid", ".", "Grid_EnableDragCell", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/grid.py#L1654-L1656
htcondor/htcondor
4829724575176d1d6c936e4693dfd78a728569b0
src/condor_scripts/gdrive_plugin.py
python
parse_args
()
return {'infile': infile, 'outfile': outfile, 'upload': is_upload}
The optparse library can't handle the types of arguments that the file transfer plugin sends, the argparse library can't be expected to be found on machines running EL 6 (Python 2.6), and a plugin should not reach outside the standard library, so the plugin must roll its own argument parser. The expected input is very rigid, so this isn't too awful.
The optparse library can't handle the types of arguments that the file transfer plugin sends, the argparse library can't be expected to be found on machines running EL 6 (Python 2.6), and a plugin should not reach outside the standard library, so the plugin must roll its own argument parser. The expected input is very rigid, so this isn't too awful.
[ "The", "optparse", "library", "can", "t", "handle", "the", "types", "of", "arguments", "that", "the", "file", "transfer", "plugin", "sends", "the", "argparse", "library", "can", "t", "be", "expected", "to", "be", "found", "on", "machines", "running", "EL", "6", "(", "Python", "2", ".", "6", ")", "and", "a", "plugin", "should", "not", "reach", "outside", "the", "standard", "library", "so", "the", "plugin", "must", "roll", "its", "own", "argument", "parser", ".", "The", "expected", "input", "is", "very", "rigid", "so", "this", "isn", "t", "too", "awful", "." ]
def parse_args(): '''The optparse library can't handle the types of arguments that the file transfer plugin sends, the argparse library can't be expected to be found on machines running EL 6 (Python 2.6), and a plugin should not reach outside the standard library, so the plugin must roll its own argument parser. The expected input is very rigid, so this isn't too awful.''' # The only argument lists that are acceptable are # <this> -classad # <this> -infile <input-filename> -outfile <output-filename> # <this> -outfile <output-filename> -infile <input-filename> if not len(sys.argv) in [2, 5, 6]: print_help() sys.exit(1) # If -classad, print the capabilities of the plugin and exit early elif (len(sys.argv) == 2) and (sys.argv[1] == '-classad'): print_capabilities() sys.exit(0) # If -upload, set is_upload to True and remove it from the args list is_upload = False if '-upload' in sys.argv[1:]: is_upload = True sys.argv.remove('-upload') # -infile and -outfile must be in the first and third position if not ( ('-infile' in sys.argv[1:]) and ('-outfile' in sys.argv[1:]) and (sys.argv[1] in ['-infile', '-outfile']) and (sys.argv[3] in ['-infile', '-outfile']) and (len(sys.argv) == 5)): print_help() sys.exit(1) infile = None outfile = None try: for i, arg in enumerate(sys.argv): if i == 0: continue elif arg == '-infile': infile = sys.argv[i+1] elif arg == '-outfile': outfile = sys.argv[i+1] except IndexError: print_help() sys.exit(1) return {'infile': infile, 'outfile': outfile, 'upload': is_upload}
[ "def", "parse_args", "(", ")", ":", "# The only argument lists that are acceptable are", "# <this> -classad", "# <this> -infile <input-filename> -outfile <output-filename>", "# <this> -outfile <output-filename> -infile <input-filename>", "if", "not", "len", "(", "sys", ".", "argv", ")", "in", "[", "2", ",", "5", ",", "6", "]", ":", "print_help", "(", ")", "sys", ".", "exit", "(", "1", ")", "# If -classad, print the capabilities of the plugin and exit early", "elif", "(", "len", "(", "sys", ".", "argv", ")", "==", "2", ")", "and", "(", "sys", ".", "argv", "[", "1", "]", "==", "'-classad'", ")", ":", "print_capabilities", "(", ")", "sys", ".", "exit", "(", "0", ")", "# If -upload, set is_upload to True and remove it from the args list", "is_upload", "=", "False", "if", "'-upload'", "in", "sys", ".", "argv", "[", "1", ":", "]", ":", "is_upload", "=", "True", "sys", ".", "argv", ".", "remove", "(", "'-upload'", ")", "# -infile and -outfile must be in the first and third position", "if", "not", "(", "(", "'-infile'", "in", "sys", ".", "argv", "[", "1", ":", "]", ")", "and", "(", "'-outfile'", "in", "sys", ".", "argv", "[", "1", ":", "]", ")", "and", "(", "sys", ".", "argv", "[", "1", "]", "in", "[", "'-infile'", ",", "'-outfile'", "]", ")", "and", "(", "sys", ".", "argv", "[", "3", "]", "in", "[", "'-infile'", ",", "'-outfile'", "]", ")", "and", "(", "len", "(", "sys", ".", "argv", ")", "==", "5", ")", ")", ":", "print_help", "(", ")", "sys", ".", "exit", "(", "1", ")", "infile", "=", "None", "outfile", "=", "None", "try", ":", "for", "i", ",", "arg", "in", "enumerate", "(", "sys", ".", "argv", ")", ":", "if", "i", "==", "0", ":", "continue", "elif", "arg", "==", "'-infile'", ":", "infile", "=", "sys", ".", "argv", "[", "i", "+", "1", "]", "elif", "arg", "==", "'-outfile'", ":", "outfile", "=", "sys", ".", "argv", "[", "i", "+", "1", "]", "except", "IndexError", ":", "print_help", "(", ")", "sys", ".", "exit", "(", "1", ")", "return", "{", "'infile'", ":", "infile", ",", "'outfile'", ":", "outfile", ",", "'upload'", ":", "is_upload", "}" ]
https://github.com/htcondor/htcondor/blob/4829724575176d1d6c936e4693dfd78a728569b0/src/condor_scripts/gdrive_plugin.py#L50-L99
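The parser reads sys.argv directly, so the accepted shapes can be simulated in-process (file names are hypothetical):

    import sys

    sys.argv = ['gdrive_plugin.py', '-infile', 'xfer.in',
                '-outfile', 'xfer.out', '-upload']
    print(parse_args())
    # {'infile': 'xfer.in', 'outfile': 'xfer.out', 'upload': True}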
ceph/ceph
959663007321a369c83218414a29bd9dbc8bda3a
qa/tasks/ceph_manager.py
python
CephManager.get_osd_df
(self, osdid)
return j['nodes'][0]
Get the osd df stats
Get the osd df stats
[ "Get", "the", "osd", "df", "stats" ]
def get_osd_df(self, osdid): """ Get the osd df stats """ out = self.raw_cluster_cmd('osd', 'df', 'name', 'osd.{}'.format(osdid), '--format=json') j = json.loads('\n'.join(out.split('\n')[1:])) return j['nodes'][0]
[ "def", "get_osd_df", "(", "self", ",", "osdid", ")", ":", "out", "=", "self", ".", "raw_cluster_cmd", "(", "'osd'", ",", "'df'", ",", "'name'", ",", "'osd.{}'", ".", "format", "(", "osdid", ")", ",", "'--format=json'", ")", "j", "=", "json", ".", "loads", "(", "'\\n'", ".", "join", "(", "out", ".", "split", "(", "'\\n'", ")", "[", "1", ":", "]", ")", ")", "return", "j", "[", "'nodes'", "]", "[", "0", "]" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/qa/tasks/ceph_manager.py#L2340-L2347
OpenChemistry/tomviz
0a903679318f191cb7dd3eb5ff5bc3a7d3320d9a
tomviz/python/tomviz/operators.py
python
Progress.value
(self, value)
Updates the progress of the operator. :param value The current progress value. :type value: int
Updates the progress of the operator.
[ "Updates", "the", "progress", "of", "the", "operator", "." ]
def value(self, value): """ Updates the progress of the operator. :param value The current progress value. :type value: int """ self._operator._operator_wrapper.progress_value = value
[ "def", "value", "(", "self", ",", "value", ")", ":", "self", ".", "_operator", ".", "_operator_wrapper", ".", "progress_value", "=", "value" ]
https://github.com/OpenChemistry/tomviz/blob/0a903679318f191cb7dd3eb5ff5bc3a7d3320d9a/tomviz/python/tomviz/operators.py#L36-L43
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/xml/etree/ElementTree.py
python
Comment
(text=None)
return element
Comment element factory. This function creates a special element which the standard serializer serializes as an XML comment. *text* is a string containing the comment string.
Comment element factory.
[ "Comment", "element", "factory", "." ]
def Comment(text=None): """Comment element factory. This function creates a special element which the standard serializer serializes as an XML comment. *text* is a string containing the comment string. """ element = Element(Comment) element.text = text return element
[ "def", "Comment", "(", "text", "=", "None", ")", ":", "element", "=", "Element", "(", "Comment", ")", "element", ".", "text", "=", "text", "return", "element" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/xml/etree/ElementTree.py#L444-L455
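Standard-library usage for reference: the special element serializes as an XML comment:

    from xml.etree import ElementTree as ET

    root = ET.Element('config')
    root.append(ET.Comment(' generated automatically '))
    ET.SubElement(root, 'option', name='debug').text = 'off'
    print(ET.tostring(root, encoding='unicode'))
    # <config><!-- generated automatically --><option name="debug">off</option></config>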
facebook/redex
fac189a289bca2647061f9e364016afc1096500d
tools/python/dex.py
python
Progard.lookup_class
(self, new_class)
return None
Translate a new class name to the old class name.
Translate a new class name to the old class name.
[ "Translate", "a", "new", "class", "name", "to", "the", "old", "class", "name", "." ]
def lookup_class(self, new_class): """Translate a new class name to the old class name.""" if new_class in self.classes_dict: (old_class, class_dict) = self.classes_dict[new_class] if old_class is not None: return old_class return None
[ "def", "lookup_class", "(", "self", ",", "new_class", ")", ":", "if", "new_class", "in", "self", ".", "classes_dict", ":", "(", "old_class", ",", "class_dict", ")", "=", "self", ".", "classes_dict", "[", "new_class", "]", "if", "old_class", "is", "not", "None", ":", "return", "old_class", "return", "None" ]
https://github.com/facebook/redex/blob/fac189a289bca2647061f9e364016afc1096500d/tools/python/dex.py#L1180-L1186
bundy-dns/bundy
3d41934996b82b0cd2fe22dd74d2abc1daba835d
src/bin/bundyctl/moduleinfo.py
python
CommandInfo.command_help
(self)
Prints the help info for this command to stdout
Prints the help info for this command to stdout
[ "Prints", "the", "help", "info", "for", "this", "command", "to", "stdout" ]
def command_help(self): """Prints the help info for this command to stdout""" print("Command ", self) print("\t\thelp (Get help for command)") params = self.params.copy() del params["help"] if len(params) == 0: print("This command has no parameters") return print("Parameters:") for info in params.values(): print(" %s" % info.get_basic_info()) description = info.get_desc() if description != "": print(textwrap.fill(description, initial_indent=" ", subsequent_indent=" ", width=70))
[ "def", "command_help", "(", "self", ")", ":", "print", "(", "\"Command \"", ",", "self", ")", "print", "(", "\"\\t\\thelp (Get help for command)\"", ")", "params", "=", "self", ".", "params", ".", "copy", "(", ")", "del", "params", "[", "\"help\"", "]", "if", "len", "(", "params", ")", "==", "0", ":", "print", "(", "\"This command has no parameters\"", ")", "return", "print", "(", "\"Parameters:\"", ")", "for", "info", "in", "params", ".", "values", "(", ")", ":", "print", "(", "\" %s\"", "%", "info", ".", "get_basic_info", "(", ")", ")", "description", "=", "info", ".", "get_desc", "(", ")", "if", "description", "!=", "\"\"", ":", "print", "(", "textwrap", ".", "fill", "(", "description", ",", "initial_indent", "=", "\" \"", ",", "subsequent_indent", "=", "\" \"", ",", "width", "=", "70", ")", ")" ]
https://github.com/bundy-dns/bundy/blob/3d41934996b82b0cd2fe22dd74d2abc1daba835d/src/bin/bundyctl/moduleinfo.py#L158-L178
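The command_help above leans on textwrap.fill's indent parameters to align wrapped parameter descriptions. A tiny standalone check of that same call (the description text is made up):

import textwrap

desc = "Maximum number of retries before the command gives up entirely."
# Every wrapped line, first and subsequent, carries the eight-space indent.
print(textwrap.fill(desc, width=40,
                    initial_indent="        ",
                    subsequent_indent="        "))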
GarageGames/Torque2D
72c8891f192b44d58a8bd5ec2b293a3b48a818f4
engine/lib/freetype/android/freetype-2.4.12/src/tools/glnames.py
python
adobe_glyph_values
()
return glyphs, values
return the list of glyph names and their unicode values
return the list of glyph names and their unicode values
[ "return", "the", "list", "of", "glyph", "names", "and", "their", "unicode", "values" ]
def adobe_glyph_values(): """return the list of glyph names and their unicode values""" lines = string.split( adobe_glyph_list, '\n' ) glyphs = [] values = [] for line in lines: if line: fields = string.split( line, ';' ) # print fields[1] + ' - ' + fields[0] subfields = string.split( fields[1], ' ' ) if len( subfields ) == 1: glyphs.append( fields[0] ) values.append( fields[1] ) return glyphs, values
[ "def", "adobe_glyph_values", "(", ")", ":", "lines", "=", "string", ".", "split", "(", "adobe_glyph_list", ",", "'\\n'", ")", "glyphs", "=", "[", "]", "values", "=", "[", "]", "for", "line", "in", "lines", ":", "if", "line", ":", "fields", "=", "string", ".", "split", "(", "line", ",", "';'", ")", "# print fields[1] + ' - ' + fields[0]", "subfields", "=", "string", ".", "split", "(", "fields", "[", "1", "]", ",", "' '", ")", "if", "len", "(", "subfields", ")", "==", "1", ":", "glyphs", ".", "append", "(", "fields", "[", "0", "]", ")", "values", ".", "append", "(", "fields", "[", "1", "]", ")", "return", "glyphs", ",", "values" ]
https://github.com/GarageGames/Torque2D/blob/72c8891f192b44d58a8bd5ec2b293a3b48a818f4/engine/lib/freetype/android/freetype-2.4.12/src/tools/glnames.py#L5152-L5168
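The function above is Python 2 (`string.split` and print statements). A hedged Python 3 rendering of the same parse, where `adobe_glyph_list` is assumed to be the newline-delimited "name;codepoint(s)" table the original reads:

def adobe_glyph_values_py3(adobe_glyph_list):
    glyphs, values = [], []
    for line in adobe_glyph_list.split("\n"):
        if line:
            fields = line.split(";")
            # keep only single-codepoint entries, exactly as the original does
            if len(fields[1].split(" ")) == 1:
                glyphs.append(fields[0])
                values.append(fields[1])
    return glyphs, values

print(adobe_glyph_values_py3("A;0041\nAE;00C6\nAcute;F6C9 0301"))
# (['A', 'AE'], ['0041', '00C6'])  -- the two-codepoint entry is dropped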
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/setuptools/command/easy_install.py
python
WindowsScriptWriter.best
(cls)
return writer_lookup[launcher]
Select the best ScriptWriter suitable for Windows
Select the best ScriptWriter suitable for Windows
[ "Select", "the", "best", "ScriptWriter", "suitable", "for", "Windows" ]
def best(cls): """ Select the best ScriptWriter suitable for Windows """ writer_lookup = dict( executable=WindowsExecutableLauncherWriter, natural=cls, ) # for compatibility, use the executable launcher by default launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') return writer_lookup[launcher]
[ "def", "best", "(", "cls", ")", ":", "writer_lookup", "=", "dict", "(", "executable", "=", "WindowsExecutableLauncherWriter", ",", "natural", "=", "cls", ",", ")", "# for compatibility, use the executable launcher by default", "launcher", "=", "os", ".", "environ", ".", "get", "(", "'SETUPTOOLS_LAUNCHER'", ",", "'executable'", ")", "return", "writer_lookup", "[", "launcher", "]" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemDefectReporter/v1/AWS/common-code/Lib/setuptools/command/easy_install.py#L2158-L2168
mongodb/mongo
d8ff665343ad29cf286ee2cf4a1960d29371937b
src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Builder.py
python
BuilderBase.subst_src_suffixes
(self, env)
return suffixes
The suffix list may contain construction variable expansions, so we have to evaluate the individual strings. To avoid doing this over and over, we memoize the results for each construction environment.
The suffix list may contain construction variable expansions, so we have to evaluate the individual strings. To avoid doing this over and over, we memoize the results for each construction environment.
[ "The", "suffix", "list", "may", "contain", "construction", "variable", "expansions", "so", "we", "have", "to", "evaluate", "the", "individual", "strings", ".", "To", "avoid", "doing", "this", "over", "and", "over", "we", "memoize", "the", "results", "for", "each", "construction", "environment", "." ]
def subst_src_suffixes(self, env): """ The suffix list may contain construction variable expansions, so we have to evaluate the individual strings. To avoid doing this over and over, we memoize the results for each construction environment. """ memo_key = id(env) try: memo_dict = self._memo['subst_src_suffixes'] except KeyError: memo_dict = {} self._memo['subst_src_suffixes'] = memo_dict else: try: return memo_dict[memo_key] except KeyError: pass suffixes = [env.subst(x) for x in self.src_suffix] memo_dict[memo_key] = suffixes return suffixes
[ "def", "subst_src_suffixes", "(", "self", ",", "env", ")", ":", "memo_key", "=", "id", "(", "env", ")", "try", ":", "memo_dict", "=", "self", ".", "_memo", "[", "'subst_src_suffixes'", "]", "except", "KeyError", ":", "memo_dict", "=", "{", "}", "self", ".", "_memo", "[", "'subst_src_suffixes'", "]", "=", "memo_dict", "else", ":", "try", ":", "return", "memo_dict", "[", "memo_key", "]", "except", "KeyError", ":", "pass", "suffixes", "=", "[", "env", ".", "subst", "(", "x", ")", "for", "x", "in", "self", ".", "src_suffix", "]", "memo_dict", "[", "memo_key", "]", "=", "suffixes", "return", "suffixes" ]
https://github.com/mongodb/mongo/blob/d8ff665343ad29cf286ee2cf4a1960d29371937b/src/third_party/scons-3.1.2/scons-local-3.1.2/SCons/Builder.py#L820-L840
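The memoization above keys a per-method dict on id(env) so each construction environment gets its own cache slot. A stripped-down sketch of the same pattern, where `env` is assumed to provide a `subst` method like an SCons construction environment:

class SuffixCache:
    def __init__(self, src_suffix):
        self.src_suffix = src_suffix
        self._memo = {}

    def subst_src_suffixes(self, env):
        memo_dict = self._memo.setdefault('subst_src_suffixes', {})
        memo_key = id(env)  # one cache slot per construction environment
        if memo_key not in memo_dict:
            memo_dict[memo_key] = [env.subst(x) for x in self.src_suffix]
        return memo_dict[memo_key]

One caveat of the design: id()-keyed caches are only valid while the keyed object stays alive, since ids can be reused after garbage collection; SCons gets away with it because environments live for the whole build.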
mindspore-ai/mindspore
fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/nn/metrics/metric.py
python
Metric.set_indexes
(self, indexes)
return self
This interface is to rearrange the inputs of `update`. Given (label0, label1, logits), set the `indexes` to [2, 1] then the (logits, label1) will be the actual inputs of `update`. Note: When customizing a metric, decorate the `update` function with the decorator :func:`mindspore.nn.rearrange_inputs` for the `indexes` to take effect. Args: indexes (List(int)): The order of logits and labels to be rearranged. Outputs: :class:`Metric`, its original class instance. Examples: >>> import numpy as np >>> from mindspore import nn, Tensor >>> >>> x = Tensor(np.array([[0.2, 0.5], [0.3, 0.1], [0.9, 0.6]])) >>> y = Tensor(np.array([1, 0, 1])) >>> y2 = Tensor(np.array([0, 0, 1])) >>> metric = nn.Accuracy('classification').set_indexes([0, 2]) >>> metric.clear() >>> # indexes is [0, 2], using x as logits, y2 as label. >>> metric.update(x, y, y2) >>> accuracy = metric.eval() >>> print(accuracy) 0.3333333333333333
This interface is to rearrange the inputs of `update`.
[ "This", "interface", "is", "to", "rearrange", "the", "inputs", "of", "update", "." ]
def set_indexes(self, indexes): """ This interface is to rearrange the inputs of `update`. Given (label0, label1, logits), set the `indexes` to [2, 1] then the (logits, label1) will be the actual inputs of `update`. Note: When customizing a metric, decorate the `update` function with the decorator :func:`mindspore.nn.rearrange_inputs` for the `indexes` to take effect. Args: indexes (List(int)): The order of logits and labels to be rearranged. Outputs: :class:`Metric`, its original class instance. Examples: >>> import numpy as np >>> from mindspore import nn, Tensor >>> >>> x = Tensor(np.array([[0.2, 0.5], [0.3, 0.1], [0.9, 0.6]])) >>> y = Tensor(np.array([1, 0, 1])) >>> y2 = Tensor(np.array([0, 0, 1])) >>> metric = nn.Accuracy('classification').set_indexes([0, 2]) >>> metric.clear() >>> # indexes is [0, 2], using x as logits, y2 as label. >>> metric.update(x, y, y2) >>> accuracy = metric.eval() >>> print(accuracy) 0.3333333333333333 """ if not isinstance(indexes, list) or not all(isinstance(i, int) for i in indexes): raise ValueError("For 'set_indexes', the argument 'indexes' should be a list and all its elements should " "be int, please check whether it is correct.") self._indexes = indexes return self
[ "def", "set_indexes", "(", "self", ",", "indexes", ")", ":", "if", "not", "isinstance", "(", "indexes", ",", "list", ")", "or", "not", "all", "(", "isinstance", "(", "i", ",", "int", ")", "for", "i", "in", "indexes", ")", ":", "raise", "ValueError", "(", "\"For 'set_indexes', the argument 'indexes' should be a list and all its elements should \"", "\"be int, please check whether it is correct.\"", ")", "self", ".", "_indexes", "=", "indexes", "return", "self" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/metrics/metric.py#L152-L188
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/polynomial/polyutils.py
python
getdomain
(x)
Return a domain suitable for given abscissae. Find a domain suitable for a polynomial or Chebyshev series defined at the values supplied. Parameters ---------- x : array_like 1-d array of abscissae whose domain will be determined. Returns ------- domain : ndarray 1-d array containing two values. If the inputs are complex, then the two returned points are the lower left and upper right corners of the smallest rectangle (aligned with the axes) in the complex plane containing the points `x`. If the inputs are real, then the two points are the ends of the smallest interval containing the points `x`. See Also -------- mapparms, mapdomain Examples -------- >>> from numpy.polynomial import polyutils as pu >>> points = np.arange(4)**2 - 5; points array([-5, -4, -1, 4]) >>> pu.getdomain(points) array([-5., 4.]) >>> c = np.exp(complex(0,1)*np.pi*np.arange(12)/6) # unit circle >>> pu.getdomain(c) array([-1.-1.j, 1.+1.j])
Return a domain suitable for given abscissae.
[ "Return", "a", "domain", "suitable", "for", "given", "abscissae", "." ]
def getdomain(x): """ Return a domain suitable for given abscissae. Find a domain suitable for a polynomial or Chebyshev series defined at the values supplied. Parameters ---------- x : array_like 1-d array of abscissae whose domain will be determined. Returns ------- domain : ndarray 1-d array containing two values. If the inputs are complex, then the two returned points are the lower left and upper right corners of the smallest rectangle (aligned with the axes) in the complex plane containing the points `x`. If the inputs are real, then the two points are the ends of the smallest interval containing the points `x`. See Also -------- mapparms, mapdomain Examples -------- >>> from numpy.polynomial import polyutils as pu >>> points = np.arange(4)**2 - 5; points array([-5, -4, -1, 4]) >>> pu.getdomain(points) array([-5., 4.]) >>> c = np.exp(complex(0,1)*np.pi*np.arange(12)/6) # unit circle >>> pu.getdomain(c) array([-1.-1.j, 1.+1.j]) """ [x] = as_series([x], trim=False) if x.dtype.char in np.typecodes['Complex']: rmin, rmax = x.real.min(), x.real.max() imin, imax = x.imag.min(), x.imag.max() return np.array((complex(rmin, imin), complex(rmax, imax))) else: return np.array((x.min(), x.max()))
[ "def", "getdomain", "(", "x", ")", ":", "[", "x", "]", "=", "as_series", "(", "[", "x", "]", ",", "trim", "=", "False", ")", "if", "x", ".", "dtype", ".", "char", "in", "np", ".", "typecodes", "[", "'Complex'", "]", ":", "rmin", ",", "rmax", "=", "x", ".", "real", ".", "min", "(", ")", ",", "x", ".", "real", ".", "max", "(", ")", "imin", ",", "imax", "=", "x", ".", "imag", ".", "min", "(", ")", ",", "x", ".", "imag", ".", "max", "(", ")", "return", "np", ".", "array", "(", "(", "complex", "(", "rmin", ",", "imin", ")", ",", "complex", "(", "rmax", ",", "imax", ")", ")", ")", "else", ":", "return", "np", ".", "array", "(", "(", "x", ".", "min", "(", ")", ",", "x", ".", "max", "(", ")", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/polynomial/polyutils.py#L258-L302
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_controls.py
python
InfoBar.SetShowHideEffects
(*args, **kwargs)
return _controls_.InfoBar_SetShowHideEffects(*args, **kwargs)
SetShowHideEffects(self, int showEffect, int hideEffect)
SetShowHideEffects(self, int showEffect, int hideEffect)
[ "SetShowHideEffects", "(", "self", "int", "showEffect", "int", "hideEffect", ")" ]
def SetShowHideEffects(*args, **kwargs): """SetShowHideEffects(self, int showEffect, int hideEffect)""" return _controls_.InfoBar_SetShowHideEffects(*args, **kwargs)
[ "def", "SetShowHideEffects", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "InfoBar_SetShowHideEffects", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_controls.py#L7777-L7779
microsoft/clang
86d4513d3e0daa4d5a29b0b1de7c854ca15f9fe5
bindings/python/clang/cindex.py
python
Cursor.linkage
(self)
return LinkageKind.from_id(self._linkage)
Return the linkage of this cursor.
Return the linkage of this cursor.
[ "Return", "the", "linkage", "of", "this", "cursor", "." ]
def linkage(self): """Return the linkage of this cursor.""" if not hasattr(self, '_linkage'): self._linkage = conf.lib.clang_getCursorLinkage(self) return LinkageKind.from_id(self._linkage)
[ "def", "linkage", "(", "self", ")", ":", "if", "not", "hasattr", "(", "self", ",", "'_linkage'", ")", ":", "self", ".", "_linkage", "=", "conf", ".", "lib", ".", "clang_getCursorLinkage", "(", "self", ")", "return", "LinkageKind", ".", "from_id", "(", "self", ".", "_linkage", ")" ]
https://github.com/microsoft/clang/blob/86d4513d3e0daa4d5a29b0b1de7c854ca15f9fe5/bindings/python/clang/cindex.py#L1567-L1572
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/importlib/_bootstrap.py
python
_exec
(spec, module)
return module
Execute the spec's specified module in an existing module's namespace.
Execute the spec's specified module in an existing module's namespace.
[ "Execute", "the", "spec", "s", "specified", "module", "in", "an", "existing", "module", "s", "namespace", "." ]
def _exec(spec, module): """Execute the spec's specified module in an existing module's namespace.""" name = spec.name with _ModuleLockManager(name): if sys.modules.get(name) is not module: msg = 'module {!r} not in sys.modules'.format(name) raise ImportError(msg, name=name) try: if spec.loader is None: if spec.submodule_search_locations is None: raise ImportError('missing loader', name=spec.name) # Namespace package. _init_module_attrs(spec, module, override=True) else: _init_module_attrs(spec, module, override=True) if not hasattr(spec.loader, 'exec_module'): # (issue19713) Once BuiltinImporter and ExtensionFileLoader # have exec_module() implemented, we can add a deprecation # warning here. spec.loader.load_module(name) else: spec.loader.exec_module(module) finally: # Update the order of insertion into sys.modules for module # clean-up at shutdown. module = sys.modules.pop(spec.name) sys.modules[spec.name] = module return module
[ "def", "_exec", "(", "spec", ",", "module", ")", ":", "name", "=", "spec", ".", "name", "with", "_ModuleLockManager", "(", "name", ")", ":", "if", "sys", ".", "modules", ".", "get", "(", "name", ")", "is", "not", "module", ":", "msg", "=", "'module {!r} not in sys.modules'", ".", "format", "(", "name", ")", "raise", "ImportError", "(", "msg", ",", "name", "=", "name", ")", "try", ":", "if", "spec", ".", "loader", "is", "None", ":", "if", "spec", ".", "submodule_search_locations", "is", "None", ":", "raise", "ImportError", "(", "'missing loader'", ",", "name", "=", "spec", ".", "name", ")", "# Namespace package.", "_init_module_attrs", "(", "spec", ",", "module", ",", "override", "=", "True", ")", "else", ":", "_init_module_attrs", "(", "spec", ",", "module", ",", "override", "=", "True", ")", "if", "not", "hasattr", "(", "spec", ".", "loader", ",", "'exec_module'", ")", ":", "# (issue19713) Once BuiltinImporter and ExtensionFileLoader", "# have exec_module() implemented, we can add a deprecation", "# warning here.", "spec", ".", "loader", ".", "load_module", "(", "name", ")", "else", ":", "spec", ".", "loader", ".", "exec_module", "(", "module", ")", "finally", ":", "# Update the order of insertion into sys.modules for module", "# clean-up at shutdown.", "module", "=", "sys", ".", "modules", ".", "pop", "(", "spec", ".", "name", ")", "sys", ".", "modules", "[", "spec", ".", "name", "]", "=", "module", "return", "module" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/importlib/_bootstrap.py#L592-L619
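_exec above is the private bootstrap path; the documented public recipe below exercises the same loader protocol (module_from_spec, then loader.exec_module), registering the module in sys.modules before executing it, just as the bootstrap requires. The module name and file path are hypothetical.

import importlib.util
import sys

# Hypothetical path; spec_from_file_location only fails at exec time if missing.
spec = importlib.util.spec_from_file_location("demo_mod", "/tmp/demo_mod.py")
module = importlib.util.module_from_spec(spec)
sys.modules[spec.name] = module   # register before executing, as _exec expects
spec.loader.exec_module(module)   # the same exec_module hook _exec dispatches to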
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/spatial/transform/rotation.py
python
Rotation.as_euler
(self, seq, degrees=False)
return angles[0] if self._single else angles
Represent as Euler angles. Any orientation can be expressed as a composition of 3 elementary rotations. Once the axis sequence has been chosen, Euler angles define the angle of rotation around each respective axis [1]_. The algorithm from [2]_ has been used to calculate Euler angles for the rotation about a given sequence of axes. Euler angles suffer from the problem of gimbal lock [3]_, where the representation loses a degree of freedom and it is not possible to determine the first and third angles uniquely. In this case, a warning is raised, and the third angle is set to zero. Note however that the returned angles still represent the correct rotation. Parameters ---------- seq : string, length 3 3 characters belonging to the set {'X', 'Y', 'Z'} for intrinsic rotations, or {'x', 'y', 'z'} for extrinsic rotations [1]_. Adjacent axes cannot be the same. Extrinsic and intrinsic rotations cannot be mixed in one function call. degrees : boolean, optional Returned angles are in degrees if this flag is True, else they are in radians. Default is False. Returns ------- angles : `numpy.ndarray`, shape (3,) or (N, 3) Shape depends on shape of inputs used to initialize object. The returned angles are in the range: - First angle belongs to [-180, 180] degrees (both inclusive) - Third angle belongs to [-180, 180] degrees (both inclusive) - Second angle belongs to: - [-90, 90] degrees if all axes are different (like xyz) - [0, 180] degrees if first and third axes are the same (like zxz) References ---------- .. [1] `Euler angle definitions <https://en.wikipedia.org/wiki/Euler_angles#Definition_by_intrinsic_rotations>`_ .. [2] Malcolm D. Shuster, F. Landis Markley `General Formula for Euler Angles <https://arc.aiaa.org/doi/abs/10.2514/1.16622>`_ .. [3] `Gimbal lock <https://en.wikipedia.org/wiki/Gimbal_lock#In_applied_mathematics>`_ Examples -------- >>> from scipy.spatial.transform import Rotation as R Represent a single rotation: >>> r = R.from_rotvec([0, 0, np.pi/2]) >>> r.as_euler('zxy', degrees=True) array([90., 0., 0.]) >>> r.as_euler('zxy', degrees=True).shape (3,) Represent a stack of single rotation: >>> r = R.from_rotvec([[0, 0, np.pi/2]]) >>> r.as_euler('zxy', degrees=True) array([[90., 0., 0.]]) >>> r.as_euler('zxy', degrees=True).shape (1, 3) Represent multiple rotations in a single object: >>> r = R.from_rotvec([ ... [0, 0, np.pi/2], ... [0, -np.pi/3, 0], ... [np.pi/4, 0, 0]]) >>> r.as_euler('zxy', degrees=True) array([[ 90., 0., 0.], [ 0., 0., -60.], [ 0., 45., 0.]]) >>> r.as_euler('zxy', degrees=True).shape (3, 3)
Represent as Euler angles.
[ "Represent", "as", "Euler", "angles", "." ]
def as_euler(self, seq, degrees=False): """Represent as Euler angles. Any orientation can be expressed as a composition of 3 elementary rotations. Once the axis sequence has been chosen, Euler angles define the angle of rotation around each respective axis [1]_. The algorithm from [2]_ has been used to calculate Euler angles for the rotation about a given sequence of axes. Euler angles suffer from the problem of gimbal lock [3]_, where the representation loses a degree of freedom and it is not possible to determine the first and third angles uniquely. In this case, a warning is raised, and the third angle is set to zero. Note however that the returned angles still represent the correct rotation. Parameters ---------- seq : string, length 3 3 characters belonging to the set {'X', 'Y', 'Z'} for intrinsic rotations, or {'x', 'y', 'z'} for extrinsic rotations [1]_. Adjacent axes cannot be the same. Extrinsic and intrinsic rotations cannot be mixed in one function call. degrees : boolean, optional Returned angles are in degrees if this flag is True, else they are in radians. Default is False. Returns ------- angles : `numpy.ndarray`, shape (3,) or (N, 3) Shape depends on shape of inputs used to initialize object. The returned angles are in the range: - First angle belongs to [-180, 180] degrees (both inclusive) - Third angle belongs to [-180, 180] degrees (both inclusive) - Second angle belongs to: - [-90, 90] degrees if all axes are different (like xyz) - [0, 180] degrees if first and third axes are the same (like zxz) References ---------- .. [1] `Euler angle definitions <https://en.wikipedia.org/wiki/Euler_angles#Definition_by_intrinsic_rotations>`_ .. [2] Malcolm D. Shuster, F. Landis Markley `General Formula for Euler Angles <https://arc.aiaa.org/doi/abs/10.2514/1.16622>`_ .. [3] `Gimbal lock <https://en.wikipedia.org/wiki/Gimbal_lock#In_applied_mathematics>`_ Examples -------- >>> from scipy.spatial.transform import Rotation as R Represent a single rotation: >>> r = R.from_rotvec([0, 0, np.pi/2]) >>> r.as_euler('zxy', degrees=True) array([90., 0., 0.]) >>> r.as_euler('zxy', degrees=True).shape (3,) Represent a stack of single rotation: >>> r = R.from_rotvec([[0, 0, np.pi/2]]) >>> r.as_euler('zxy', degrees=True) array([[90., 0., 0.]]) >>> r.as_euler('zxy', degrees=True).shape (1, 3) Represent multiple rotations in a single object: >>> r = R.from_rotvec([ ... [0, 0, np.pi/2], ... [0, -np.pi/3, 0], ... [np.pi/4, 0, 0]]) >>> r.as_euler('zxy', degrees=True) array([[ 90., 0., 0.], [ 0., 0., -60.], [ 0., 45., 0.]]) >>> r.as_euler('zxy', degrees=True).shape (3, 3) """ if len(seq) != 3: raise ValueError("Expected 3 axes, got {}.".format(seq)) intrinsic = (re.match(r'^[XYZ]{1,3}$', seq) is not None) extrinsic = (re.match(r'^[xyz]{1,3}$', seq) is not None) if not (intrinsic or extrinsic): raise ValueError("Expected axes from `seq` to be from " "['x', 'y', 'z'] or ['X', 'Y', 'Z'], " "got {}".format(seq)) if any(seq[i] == seq[i+1] for i in range(2)): raise ValueError("Expected consecutive axes to be different, " "got {}".format(seq)) seq = seq.lower() angles = _compute_euler_from_dcm(self.as_dcm(), seq, extrinsic) if degrees: angles = np.rad2deg(angles) return angles[0] if self._single else angles
[ "def", "as_euler", "(", "self", ",", "seq", ",", "degrees", "=", "False", ")", ":", "if", "len", "(", "seq", ")", "!=", "3", ":", "raise", "ValueError", "(", "\"Expected 3 axes, got {}.\"", ".", "format", "(", "seq", ")", ")", "intrinsic", "=", "(", "re", ".", "match", "(", "r'^[XYZ]{1,3}$'", ",", "seq", ")", "is", "not", "None", ")", "extrinsic", "=", "(", "re", ".", "match", "(", "r'^[xyz]{1,3}$'", ",", "seq", ")", "is", "not", "None", ")", "if", "not", "(", "intrinsic", "or", "extrinsic", ")", ":", "raise", "ValueError", "(", "\"Expected axes from `seq` to be from \"", "\"['x', 'y', 'z'] or ['X', 'Y', 'Z'], \"", "\"got {}\"", ".", "format", "(", "seq", ")", ")", "if", "any", "(", "seq", "[", "i", "]", "==", "seq", "[", "i", "+", "1", "]", "for", "i", "in", "range", "(", "2", ")", ")", ":", "raise", "ValueError", "(", "\"Expected consecutive axes to be different, \"", "\"got {}\"", ".", "format", "(", "seq", ")", ")", "seq", "=", "seq", ".", "lower", "(", ")", "angles", "=", "_compute_euler_from_dcm", "(", "self", ".", "as_dcm", "(", ")", ",", "seq", ",", "extrinsic", ")", "if", "degrees", ":", "angles", "=", "np", ".", "rad2deg", "(", "angles", ")", "return", "angles", "[", "0", "]", "if", "self", ".", "_single", "else", "angles" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/spatial/transform/rotation.py#L1058-L1165
root-project/root
fcd3583bb14852bf2e8cd2415717cbaac0e75896
bindings/pyroot/pythonizations/python/ROOT/_pythonization/_roofit/_rooabspdf.py
python
RooAbsPdf.plotOn
(self, *args, **kwargs)
return self._plotOn(*args, **kwargs)
r"""The RooAbsPdf::plotOn() function is pythonized with the command argument pythonization. The keywords must correspond to the CmdArgs of the function.
r"""The RooAbsPdf::plotOn() function is pythonized with the command argument pythonization. The keywords must correspond to the CmdArgs of the function.
[ "r", "The", "RooAbsPdf", "::", "plotOn", "()", "function", "is", "pythonized", "with", "the", "command", "argument", "pythonization", ".", "The", "keywords", "must", "correspond", "to", "the", "CmdArgs", "of", "the", "function", "." ]
def plotOn(self, *args, **kwargs): r"""The RooAbsPdf::plotOn() function is pythonized with the command argument pythonization. The keywords must correspond to the CmdArgs of the function. """ # Redefinition of `RooAbsPdf.plotOn` for keyword arguments. args, kwargs = _kwargs_to_roocmdargs(*args, **kwargs) return self._plotOn(*args, **kwargs)
[ "def", "plotOn", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "# Redefinition of `RooAbsPdf.plotOn` for keyword arguments.", "args", ",", "kwargs", "=", "_kwargs_to_roocmdargs", "(", "*", "args", ",", "*", "*", "kwargs", ")", "return", "self", ".", "_plotOn", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/root-project/root/blob/fcd3583bb14852bf2e8cd2415717cbaac0e75896/bindings/pyroot/pythonizations/python/ROOT/_pythonization/_roofit/_rooabspdf.py#L51-L57
ValveSoftware/source-sdk-2013
0d8dceea4310fde5706b3ce1c70609d72a38efdf
mp/src/thirdparty/protobuf-2.3.0/python/mox.py
python
IsAlmost.__init__
(self, float_value, places=7)
Initialize IsAlmost. Args: float_value: The value for making the comparison. places: The number of decimal places to round to.
Initialize IsAlmost.
[ "Initialize", "IsAlmost", "." ]
def __init__(self, float_value, places=7): """Initialize IsAlmost. Args: float_value: The value for making the comparison. places: The number of decimal places to round to. """ self._float_value = float_value self._places = places
[ "def", "__init__", "(", "self", ",", "float_value", ",", "places", "=", "7", ")", ":", "self", ".", "_float_value", "=", "float_value", "self", ".", "_places", "=", "places" ]
https://github.com/ValveSoftware/source-sdk-2013/blob/0d8dceea4310fde5706b3ce1c70609d72a38efdf/mp/src/thirdparty/protobuf-2.3.0/python/mox.py#L835-L844
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/ops/math_grad.py
python
_IgammaGrad
(op, grad)
return (None, array_ops.reshape(math_ops.reduce_sum(partial_x * grad, rx), sx))
Returns gradient of igamma(a, x) with respect to x.
Returns gradient of igamma(a, x) with respect to x.
[ "Returns", "gradient", "of", "igamma", "(", "a", "x", ")", "with", "respect", "to", "x", "." ]
def _IgammaGrad(op, grad): """Returns gradient of igamma(a, x) with respect to x.""" # TODO(ebrevdo): Perhaps add the derivative w.r.t. a a = op.inputs[0] x = op.inputs[1] sa = array_ops.shape(a) sx = array_ops.shape(x) unused_ra, rx = gen_array_ops._broadcast_gradient_args(sa, sx) # Perform operations in log space before summing, because Gamma(a) # and Gamma'(a) can grow large. partial_x = math_ops.exp(-x + (a - 1) * math_ops.log(x) - math_ops.lgamma(a)) # TODO(b/36815900): Mark None return values as NotImplemented return (None, array_ops.reshape(math_ops.reduce_sum(partial_x * grad, rx), sx))
[ "def", "_IgammaGrad", "(", "op", ",", "grad", ")", ":", "# TODO(ebrevdo): Perhaps add the derivative w.r.t. a", "a", "=", "op", ".", "inputs", "[", "0", "]", "x", "=", "op", ".", "inputs", "[", "1", "]", "sa", "=", "array_ops", ".", "shape", "(", "a", ")", "sx", "=", "array_ops", ".", "shape", "(", "x", ")", "unused_ra", ",", "rx", "=", "gen_array_ops", ".", "_broadcast_gradient_args", "(", "sa", ",", "sx", ")", "# Perform operations in log space before summing, because Gamma(a)", "# and Gamma'(a) can grow large.", "partial_x", "=", "math_ops", ".", "exp", "(", "-", "x", "+", "(", "a", "-", "1", ")", "*", "math_ops", ".", "log", "(", "x", ")", "-", "math_ops", ".", "lgamma", "(", "a", ")", ")", "# TODO(b/36815900): Mark None return values as NotImplemented", "return", "(", "None", ",", "array_ops", ".", "reshape", "(", "math_ops", ".", "reduce_sum", "(", "partial_x", "*", "grad", ",", "rx", ")", ",", "sx", ")", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/math_grad.py#L481-L495
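The gradient above is the closed form d/dx P(a, x) = exp(-x + (a - 1)·log x - lgamma(a)) for the regularized lower incomplete gamma, evaluated in log space for stability. A quick SciPy sanity check of that identity against a central finite difference (the values of a and x are arbitrary):

import numpy as np
from scipy import special

a, x = 2.5, 1.7
# closed form, evaluated in log space as in the gradient above
analytic = np.exp(-x + (a - 1.0) * np.log(x) - special.gammaln(a))
# central finite difference of P(a, x) = scipy.special.gammainc(a, x)
h = 1e-6
numeric = (special.gammainc(a, x + h) - special.gammainc(a, x - h)) / (2.0 * h)
print(analytic, numeric)  # the two agree to many decimal places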
HKUST-Aerial-Robotics/Fast-Planner
2ddd7793eecd573dbb5b47e2c985aa06606df3cf
uav_simulator/Utils/pose_utils/build/devel/_setup_util.py
python
rollback_env_variables
(environ, env_var_subfolders)
return lines
Generate shell code to reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH. This does not cover modifications performed by environment hooks.
Generate shell code to reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH. This does not cover modifications performed by environment hooks.
[ "Generate", "shell", "code", "to", "reset", "environment", "variables", "by", "unrolling", "modifications", "based", "on", "all", "workspaces", "in", "CMAKE_PREFIX_PATH", ".", "This", "does", "not", "cover", "modifications", "performed", "by", "environment", "hooks", "." ]
def rollback_env_variables(environ, env_var_subfolders): ''' Generate shell code to reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH. This does not cover modifications performed by environment hooks. ''' lines = [] unmodified_environ = copy.copy(environ) for key in sorted(env_var_subfolders.keys()): subfolders = env_var_subfolders[key] if not isinstance(subfolders, list): subfolders = [subfolders] for subfolder in subfolders: value = _rollback_env_variable(unmodified_environ, key, subfolder) if value is not None: environ[key] = value lines.append(assignment(key, value)) if lines: lines.insert(0, comment('reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH')) return lines
[ "def", "rollback_env_variables", "(", "environ", ",", "env_var_subfolders", ")", ":", "lines", "=", "[", "]", "unmodified_environ", "=", "copy", ".", "copy", "(", "environ", ")", "for", "key", "in", "sorted", "(", "env_var_subfolders", ".", "keys", "(", ")", ")", ":", "subfolders", "=", "env_var_subfolders", "[", "key", "]", "if", "not", "isinstance", "(", "subfolders", ",", "list", ")", ":", "subfolders", "=", "[", "subfolders", "]", "for", "subfolder", "in", "subfolders", ":", "value", "=", "_rollback_env_variable", "(", "unmodified_environ", ",", "key", ",", "subfolder", ")", "if", "value", "is", "not", "None", ":", "environ", "[", "key", "]", "=", "value", "lines", ".", "append", "(", "assignment", "(", "key", ",", "value", ")", ")", "if", "lines", ":", "lines", ".", "insert", "(", "0", ",", "comment", "(", "'reset environment variables by unrolling modifications based on all workspaces in CMAKE_PREFIX_PATH'", ")", ")", "return", "lines" ]
https://github.com/HKUST-Aerial-Robotics/Fast-Planner/blob/2ddd7793eecd573dbb5b47e2c985aa06606df3cf/uav_simulator/Utils/pose_utils/build/devel/_setup_util.py#L62-L81
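The helpers _rollback_env_variable, assignment, and comment are defined elsewhere in that file. A simplified, hedged sketch of the core idea for a single PATH-style variable, where the variable name and workspace prefixes are made up and the separator is platform-dependent:

import os

def rollback_path_variable(environ, key, workspaces, sep=os.pathsep):
    # drop entries rooted in any known workspace, keep everything else
    kept = [p for p in environ.get(key, "").split(sep)
            if not any(p.startswith(ws) for ws in workspaces)]
    return sep.join(kept)

env = {"CMAKE_PREFIX_PATH": "/opt/ws/devel:/opt/ros/noetic"}
print(rollback_path_variable(env, "CMAKE_PREFIX_PATH", ["/opt/ws"]))
# /opt/ros/noetic  (on a POSIX host, where os.pathsep is ':')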
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/html.py
python
HtmlCell.GetLastTerminal
(*args, **kwargs)
return _html.HtmlCell_GetLastTerminal(*args, **kwargs)
GetLastTerminal(self) -> HtmlCell
GetLastTerminal(self) -> HtmlCell
[ "GetLastTerminal", "(", "self", ")", "-", ">", "HtmlCell" ]
def GetLastTerminal(*args, **kwargs): """GetLastTerminal(self) -> HtmlCell""" return _html.HtmlCell_GetLastTerminal(*args, **kwargs)
[ "def", "GetLastTerminal", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_html", ".", "HtmlCell_GetLastTerminal", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/html.py#L730-L732
Z3Prover/z3
d745d03afdfdf638d66093e2bfbacaf87187f35b
src/api/python/z3/z3.py
python
Fixedpoint.help
(self)
Display a string describing all available options.
Display a string describing all available options.
[ "Display", "a", "string", "describing", "all", "available", "options", "." ]
def help(self): """Display a string describing all available options.""" print(Z3_fixedpoint_get_help(self.ctx.ref(), self.fixedpoint))
[ "def", "help", "(", "self", ")", ":", "print", "(", "Z3_fixedpoint_get_help", "(", "self", ".", "ctx", ".", "ref", "(", ")", ",", "self", ".", "fixedpoint", ")", ")" ]
https://github.com/Z3Prover/z3/blob/d745d03afdfdf638d66093e2bfbacaf87187f35b/src/api/python/z3/z3.py#L7379-L7381
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/ops/clip_ops.py
python
clip_by_global_norm
(t_list, clip_norm, use_norm=None, name=None)
return list_clipped, use_norm
Clips values of multiple tensors by the ratio of the sum of their norms. Given a tuple or list of tensors `t_list`, and a clipping ratio `clip_norm`, this operation returns a list of clipped tensors `list_clipped` and the global norm (`global_norm`) of all tensors in `t_list`. Optionally, if you've already computed the global norm for `t_list`, you can specify the global norm with `use_norm`. To perform the clipping, the values `t_list[i]` are set to: t_list[i] * clip_norm / max(global_norm, clip_norm) where: global_norm = sqrt(sum([l2norm(t)**2 for t in t_list])) If `clip_norm > global_norm` then the entries in `t_list` remain as they are, otherwise they're all shrunk by the global ratio. If `global_norm == infinity` then the entries in `t_list` are all set to `NaN` to signal that an error occurred. Any of the entries of `t_list` that are of type `None` are ignored. This is the correct way to perform gradient clipping (Pascanu et al., 2012). However, it is slower than `clip_by_norm()` because all the parameters must be ready before the clipping operation can be performed. Args: t_list: A tuple or list of mixed `Tensors`, `IndexedSlices`, or None. clip_norm: A 0-D (scalar) `Tensor` > 0. The clipping ratio. use_norm: A 0-D (scalar) `Tensor` of type `float` (optional). The global norm to use. If not provided, `global_norm()` is used to compute the norm. name: A name for the operation (optional). Returns: list_clipped: A list of `Tensors` of the same type as `list_t`. global_norm: A 0-D (scalar) `Tensor` representing the global norm. Raises: TypeError: If `t_list` is not a sequence. References: On the difficulty of training Recurrent Neural Networks: [Pascanu et al., 2012](http://proceedings.mlr.press/v28/pascanu13.html) ([pdf](http://proceedings.mlr.press/v28/pascanu13.pdf))
Clips values of multiple tensors by the ratio of the sum of their norms.
[ "Clips", "values", "of", "multiple", "tensors", "by", "the", "ratio", "of", "the", "sum", "of", "their", "norms", "." ]
def clip_by_global_norm(t_list, clip_norm, use_norm=None, name=None): """Clips values of multiple tensors by the ratio of the sum of their norms. Given a tuple or list of tensors `t_list`, and a clipping ratio `clip_norm`, this operation returns a list of clipped tensors `list_clipped` and the global norm (`global_norm`) of all tensors in `t_list`. Optionally, if you've already computed the global norm for `t_list`, you can specify the global norm with `use_norm`. To perform the clipping, the values `t_list[i]` are set to: t_list[i] * clip_norm / max(global_norm, clip_norm) where: global_norm = sqrt(sum([l2norm(t)**2 for t in t_list])) If `clip_norm > global_norm` then the entries in `t_list` remain as they are, otherwise they're all shrunk by the global ratio. If `global_norm == infinity` then the entries in `t_list` are all set to `NaN` to signal that an error occurred. Any of the entries of `t_list` that are of type `None` are ignored. This is the correct way to perform gradient clipping (Pascanu et al., 2012). However, it is slower than `clip_by_norm()` because all the parameters must be ready before the clipping operation can be performed. Args: t_list: A tuple or list of mixed `Tensors`, `IndexedSlices`, or None. clip_norm: A 0-D (scalar) `Tensor` > 0. The clipping ratio. use_norm: A 0-D (scalar) `Tensor` of type `float` (optional). The global norm to use. If not provided, `global_norm()` is used to compute the norm. name: A name for the operation (optional). Returns: list_clipped: A list of `Tensors` of the same type as `list_t`. global_norm: A 0-D (scalar) `Tensor` representing the global norm. Raises: TypeError: If `t_list` is not a sequence. References: On the difficulty of training Recurrent Neural Networks: [Pascanu et al., 2012](http://proceedings.mlr.press/v28/pascanu13.html) ([pdf](http://proceedings.mlr.press/v28/pascanu13.pdf)) """ if (not isinstance(t_list, collections_abc.Sequence) or isinstance(t_list, str)): raise TypeError("`t_list` should be a sequence of tensors. Received " f"{type(t_list)}.") t_list = list(t_list) if use_norm is None: use_norm = global_norm(t_list, name) with ops.name_scope(name, "clip_by_global_norm", t_list + [clip_norm]) as name: # Calculate L2-norm, clip elements by ratio of clip_norm to L2-norm scale_for_finite = clip_norm * math_ops.minimum( 1.0 / use_norm, constant_op.constant(1.0, dtype=use_norm.dtype) / clip_norm) # If use_norm is any finite number, this is a no-op. For inf/-inf/NaN, # this will make scale NaN. scale = scale_for_finite + (use_norm - use_norm) values = [ ops.convert_to_tensor( t.values if isinstance(t, indexed_slices.IndexedSlices) else t, name="t_%d" % i) if t is not None else t for i, t in enumerate(t_list) ] values_clipped = [] for i, v in enumerate(values): if v is None: values_clipped.append(None) else: with ops.colocate_with(v): values_clipped.append( array_ops.identity(v * scale, name="%s_%d" % (name, i))) list_clipped = [ indexed_slices.IndexedSlices(c_v, t.indices, t.dense_shape) if isinstance(t, indexed_slices.IndexedSlices) else c_v for (c_v, t) in zip(values_clipped, t_list) ] return list_clipped, use_norm
[ "def", "clip_by_global_norm", "(", "t_list", ",", "clip_norm", ",", "use_norm", "=", "None", ",", "name", "=", "None", ")", ":", "if", "(", "not", "isinstance", "(", "t_list", ",", "collections_abc", ".", "Sequence", ")", "or", "isinstance", "(", "t_list", ",", "str", ")", ")", ":", "raise", "TypeError", "(", "\"`t_list` should be a sequence of tensors. Received \"", "f\"{type(t_list)}.\"", ")", "t_list", "=", "list", "(", "t_list", ")", "if", "use_norm", "is", "None", ":", "use_norm", "=", "global_norm", "(", "t_list", ",", "name", ")", "with", "ops", ".", "name_scope", "(", "name", ",", "\"clip_by_global_norm\"", ",", "t_list", "+", "[", "clip_norm", "]", ")", "as", "name", ":", "# Calculate L2-norm, clip elements by ratio of clip_norm to L2-norm", "scale_for_finite", "=", "clip_norm", "*", "math_ops", ".", "minimum", "(", "1.0", "/", "use_norm", ",", "constant_op", ".", "constant", "(", "1.0", ",", "dtype", "=", "use_norm", ".", "dtype", ")", "/", "clip_norm", ")", "# If use_norm is any finite number, this is a no-op. For inf/-inf/NaN,", "# this will make scale NaN.", "scale", "=", "scale_for_finite", "+", "(", "use_norm", "-", "use_norm", ")", "values", "=", "[", "ops", ".", "convert_to_tensor", "(", "t", ".", "values", "if", "isinstance", "(", "t", ",", "indexed_slices", ".", "IndexedSlices", ")", "else", "t", ",", "name", "=", "\"t_%d\"", "%", "i", ")", "if", "t", "is", "not", "None", "else", "t", "for", "i", ",", "t", "in", "enumerate", "(", "t_list", ")", "]", "values_clipped", "=", "[", "]", "for", "i", ",", "v", "in", "enumerate", "(", "values", ")", ":", "if", "v", "is", "None", ":", "values_clipped", ".", "append", "(", "None", ")", "else", ":", "with", "ops", ".", "colocate_with", "(", "v", ")", ":", "values_clipped", ".", "append", "(", "array_ops", ".", "identity", "(", "v", "*", "scale", ",", "name", "=", "\"%s_%d\"", "%", "(", "name", ",", "i", ")", ")", ")", "list_clipped", "=", "[", "indexed_slices", ".", "IndexedSlices", "(", "c_v", ",", "t", ".", "indices", ",", "t", ".", "dense_shape", ")", "if", "isinstance", "(", "t", ",", "indexed_slices", ".", "IndexedSlices", ")", "else", "c_v", "for", "(", "c_v", ",", "t", ")", "in", "zip", "(", "values_clipped", ",", "t_list", ")", "]", "return", "list_clipped", ",", "use_norm" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/ops/clip_ops.py#L290-L379
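The docstring's scaling rule, restated in plain NumPy (a sketch only: no IndexedSlices or None handling, and no NaN propagation for non-finite norms). Every tensor is multiplied by clip_norm / max(global_norm, clip_norm), so nothing changes when the global norm is already within bounds:

import numpy as np

def clip_by_global_norm_np(tensors, clip_norm):
    # global_norm = sqrt(sum(l2norm(t)**2 for t in tensors))
    global_norm = np.sqrt(sum(np.sum(np.square(t)) for t in tensors))
    scale = clip_norm / max(global_norm, clip_norm)
    return [t * scale for t in tensors], global_norm

grads = [np.array([3.0, 4.0]), np.array([0.0, 12.0])]
clipped, gnorm = clip_by_global_norm_np(grads, clip_norm=6.5)
print(gnorm, clipped[0])  # 13.0 [1.5 2. ]  -- everything scaled by 0.5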
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/lowering.py
python
BaseLower.init_pyapi
(self)
Init the Python API and Environment Manager for the function being lowered.
Init the Python API and Environment Manager for the function being lowered.
[ "Init", "the", "Python", "API", "and", "Environment", "Manager", "for", "the", "function", "being", "lowered", "." ]
def init_pyapi(self): """ Init the Python API and Environment Manager for the function being lowered. """ if self.pyapi is not None: return self.pyapi = self.context.get_python_api(self.builder) # Store environment argument for later use self.env_manager = self.context.get_env_manager(self.builder) self.env_body = self.env_manager.env_body self.envarg = self.env_manager.env_ptr
[ "def", "init_pyapi", "(", "self", ")", ":", "if", "self", ".", "pyapi", "is", "not", "None", ":", "return", "self", ".", "pyapi", "=", "self", ".", "context", ".", "get_python_api", "(", "self", ".", "builder", ")", "# Store environment argument for later use", "self", ".", "env_manager", "=", "self", ".", "context", ".", "get_env_manager", "(", "self", ".", "builder", ")", "self", ".", "env_body", "=", "self", ".", "env_manager", ".", "env_body", "self", ".", "envarg", "=", "self", ".", "env_manager", ".", "env_ptr" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/lowering.py#L125-L137
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
media/webrtc/trunk/tools/gyp/pylib/gyp/MSVSVersion.py
python
_RegistryQuery
(key, value=None)
return text
Use reg.exe to read a particular key through _RegistryQueryBase. First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If that fails, it falls back to System32. Sysnative is available on Vista and up and available on Windows Server 2003 and XP through KB patch 942589. Note that Sysnative will always fail if using 64-bit python due to it being a virtual directory and System32 will work correctly in the first place. KB 942589 - http://support.microsoft.com/kb/942589/en-us. Arguments: key: The registry key. value: The particular registry value to read (optional). Return: stdout from reg.exe, or None for failure.
Use reg.exe to read a particular key through _RegistryQueryBase.
[ "Use", "reg", ".", "exe", "to", "read", "a", "particular", "key", "through", "_RegistryQueryBase", "." ]
def _RegistryQuery(key, value=None): """Use reg.exe to read a particular key through _RegistryQueryBase. First tries to launch from %WinDir%\Sysnative to avoid WoW64 redirection. If that fails, it falls back to System32. Sysnative is available on Vista and up and available on Windows Server 2003 and XP through KB patch 942589. Note that Sysnative will always fail if using 64-bit python due to it being a virtual directory and System32 will work correctly in the first place. KB 942589 - http://support.microsoft.com/kb/942589/en-us. Arguments: key: The registry key. value: The particular registry value to read (optional). Return: stdout from reg.exe, or None for failure. """ text = None try: text = _RegistryQueryBase('Sysnative', key, value) except OSError, e: if e.errno == errno.ENOENT: text = _RegistryQueryBase('System32', key, value) else: raise return text
[ "def", "_RegistryQuery", "(", "key", ",", "value", "=", "None", ")", ":", "text", "=", "None", "try", ":", "text", "=", "_RegistryQueryBase", "(", "'Sysnative'", ",", "key", ",", "value", ")", "except", "OSError", ",", "e", ":", "if", "e", ".", "errno", "==", "errno", ".", "ENOENT", ":", "text", "=", "_RegistryQueryBase", "(", "'System32'", ",", "key", ",", "value", ")", "else", ":", "raise", "return", "text" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/media/webrtc/trunk/tools/gyp/pylib/gyp/MSVSVersion.py#L130-L155
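_RegistryQueryBase is defined elsewhere in that file. Below is a hypothetical Python 3 stand-in that folds the base call and the Sysnative-then-System32 fallback into one helper; `reg.exe query` with `/v` is the real CLI being shelled out to.

import errno
import os
import subprocess

def registry_query(key, value=None, windir=r"C:\Windows"):
    for subdir in ("Sysnative", "System32"):  # Sysnative first, to dodge WoW64
        cmd = [os.path.join(windir, subdir, "reg.exe"), "query", key]
        if value is not None:
            cmd += ["/v", value]
        try:
            return subprocess.check_output(cmd, text=True)
        except OSError as e:
            if e.errno != errno.ENOENT:  # missing reg.exe: try the next folder
                raise
    return None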
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/cookielib.py
python
CookieJar._cookie_attrs
(self, cookies)
return attrs
Return a list of cookie-attributes to be returned to server. like ['foo="bar"; $Path="/"', ...] The $Version attribute is also added when appropriate (currently only once per request).
Return a list of cookie-attributes to be returned to server.
[ "Return", "a", "list", "of", "cookie", "-", "attributes", "to", "be", "returned", "to", "server", "." ]
def _cookie_attrs(self, cookies): """Return a list of cookie-attributes to be returned to server. like ['foo="bar"; $Path="/"', ...] The $Version attribute is also added when appropriate (currently only once per request). """ # add cookies in order of most specific (ie. longest) path first cookies.sort(key=lambda arg: len(arg.path), reverse=True) version_set = False attrs = [] for cookie in cookies: # set version of Cookie header # XXX # What should it be if multiple matching Set-Cookie headers have # different versions themselves? # Answer: there is no answer; was supposed to be settled by # RFC 2965 errata, but that may never appear... version = cookie.version if not version_set: version_set = True if version > 0: attrs.append("$Version=%s" % version) # quote cookie value if necessary # (not for Netscape protocol, which already has any quotes # intact, due to the poorly-specified Netscape Cookie: syntax) if ((cookie.value is not None) and self.non_word_re.search(cookie.value) and version > 0): value = self.quote_re.sub(r"\\\1", cookie.value) else: value = cookie.value # add cookie-attributes to be returned in Cookie header if cookie.value is None: attrs.append(cookie.name) else: attrs.append("%s=%s" % (cookie.name, value)) if version > 0: if cookie.path_specified: attrs.append('$Path="%s"' % cookie.path) if cookie.domain.startswith("."): domain = cookie.domain if (not cookie.domain_initial_dot and domain.startswith(".")): domain = domain[1:] attrs.append('$Domain="%s"' % domain) if cookie.port is not None: p = "$Port" if cookie.port_specified: p = p + ('="%s"' % cookie.port) attrs.append(p) return attrs
[ "def", "_cookie_attrs", "(", "self", ",", "cookies", ")", ":", "# add cookies in order of most specific (ie. longest) path first", "cookies", ".", "sort", "(", "key", "=", "lambda", "arg", ":", "len", "(", "arg", ".", "path", ")", ",", "reverse", "=", "True", ")", "version_set", "=", "False", "attrs", "=", "[", "]", "for", "cookie", "in", "cookies", ":", "# set version of Cookie header", "# XXX", "# What should it be if multiple matching Set-Cookie headers have", "# different versions themselves?", "# Answer: there is no answer; was supposed to be settled by", "# RFC 2965 errata, but that may never appear...", "version", "=", "cookie", ".", "version", "if", "not", "version_set", ":", "version_set", "=", "True", "if", "version", ">", "0", ":", "attrs", ".", "append", "(", "\"$Version=%s\"", "%", "version", ")", "# quote cookie value if necessary", "# (not for Netscape protocol, which already has any quotes", "# intact, due to the poorly-specified Netscape Cookie: syntax)", "if", "(", "(", "cookie", ".", "value", "is", "not", "None", ")", "and", "self", ".", "non_word_re", ".", "search", "(", "cookie", ".", "value", ")", "and", "version", ">", "0", ")", ":", "value", "=", "self", ".", "quote_re", ".", "sub", "(", "r\"\\\\\\1\"", ",", "cookie", ".", "value", ")", "else", ":", "value", "=", "cookie", ".", "value", "# add cookie-attributes to be returned in Cookie header", "if", "cookie", ".", "value", "is", "None", ":", "attrs", ".", "append", "(", "cookie", ".", "name", ")", "else", ":", "attrs", ".", "append", "(", "\"%s=%s\"", "%", "(", "cookie", ".", "name", ",", "value", ")", ")", "if", "version", ">", "0", ":", "if", "cookie", ".", "path_specified", ":", "attrs", ".", "append", "(", "'$Path=\"%s\"'", "%", "cookie", ".", "path", ")", "if", "cookie", ".", "domain", ".", "startswith", "(", "\".\"", ")", ":", "domain", "=", "cookie", ".", "domain", "if", "(", "not", "cookie", ".", "domain_initial_dot", "and", "domain", ".", "startswith", "(", "\".\"", ")", ")", ":", "domain", "=", "domain", "[", "1", ":", "]", "attrs", ".", "append", "(", "'$Domain=\"%s\"'", "%", "domain", ")", "if", "cookie", ".", "port", "is", "not", "None", ":", "p", "=", "\"$Port\"", "if", "cookie", ".", "port_specified", ":", "p", "=", "p", "+", "(", "'=\"%s\"'", "%", "cookie", ".", "port", ")", "attrs", ".", "append", "(", "p", ")", "return", "attrs" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/cookielib.py#L1253-L1310
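_cookie_attrs is internal; callers reach it through CookieJar.add_cookie_header. A minimal usage sketch in Python 3, where the module is named http.cookiejar (the URL is a placeholder):

import http.cookiejar
import urllib.request

jar = http.cookiejar.CookieJar()
req = urllib.request.Request("http://example.com/")
jar.add_cookie_header(req)        # builds the Cookie: header via _cookie_attrs
print(req.get_header("Cookie"))   # None here, since the jar holds no cookies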
apiaryio/drafter
4634ebd07f6c6f257cc656598ccd535492fdfb55
tools/gyp/pylib/gyp/input.py
python
PruneUnwantedTargets
(targets, flat_list, dependency_nodes, root_targets, data)
return wanted_targets, wanted_flat_list
Return only the targets that are deep dependencies of |root_targets|.
Return only the targets that are deep dependencies of |root_targets|.
[ "Return", "only", "the", "targets", "that", "are", "deep", "dependencies", "of", "|root_targets|", "." ]
def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, data): """Return only the targets that are deep dependencies of |root_targets|.""" qualified_root_targets = [] for target in root_targets: target = target.strip() qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list) if not qualified_targets: raise GypError("Could not find target %s" % target) qualified_root_targets.extend(qualified_targets) wanted_targets = {} for target in qualified_root_targets: wanted_targets[target] = targets[target] for dependency in dependency_nodes[target].DeepDependencies(): wanted_targets[dependency] = targets[dependency] wanted_flat_list = [t for t in flat_list if t in wanted_targets] # Prune unwanted targets from each build_file's data dict. for build_file in data['target_build_files']: if not 'targets' in data[build_file]: continue new_targets = [] for target in data[build_file]['targets']: qualified_name = gyp.common.QualifiedTarget(build_file, target['target_name'], target['toolset']) if qualified_name in wanted_targets: new_targets.append(target) data[build_file]['targets'] = new_targets return wanted_targets, wanted_flat_list
[ "def", "PruneUnwantedTargets", "(", "targets", ",", "flat_list", ",", "dependency_nodes", ",", "root_targets", ",", "data", ")", ":", "qualified_root_targets", "=", "[", "]", "for", "target", "in", "root_targets", ":", "target", "=", "target", ".", "strip", "(", ")", "qualified_targets", "=", "gyp", ".", "common", ".", "FindQualifiedTargets", "(", "target", ",", "flat_list", ")", "if", "not", "qualified_targets", ":", "raise", "GypError", "(", "\"Could not find target %s\"", "%", "target", ")", "qualified_root_targets", ".", "extend", "(", "qualified_targets", ")", "wanted_targets", "=", "{", "}", "for", "target", "in", "qualified_root_targets", ":", "wanted_targets", "[", "target", "]", "=", "targets", "[", "target", "]", "for", "dependency", "in", "dependency_nodes", "[", "target", "]", ".", "DeepDependencies", "(", ")", ":", "wanted_targets", "[", "dependency", "]", "=", "targets", "[", "dependency", "]", "wanted_flat_list", "=", "[", "t", "for", "t", "in", "flat_list", "if", "t", "in", "wanted_targets", "]", "# Prune unwanted targets from each build_file's data dict.", "for", "build_file", "in", "data", "[", "'target_build_files'", "]", ":", "if", "not", "'targets'", "in", "data", "[", "build_file", "]", ":", "continue", "new_targets", "=", "[", "]", "for", "target", "in", "data", "[", "build_file", "]", "[", "'targets'", "]", ":", "qualified_name", "=", "gyp", ".", "common", ".", "QualifiedTarget", "(", "build_file", ",", "target", "[", "'target_name'", "]", ",", "target", "[", "'toolset'", "]", ")", "if", "qualified_name", "in", "wanted_targets", ":", "new_targets", ".", "append", "(", "target", ")", "data", "[", "build_file", "]", "[", "'targets'", "]", "=", "new_targets", "return", "wanted_targets", ",", "wanted_flat_list" ]
https://github.com/apiaryio/drafter/blob/4634ebd07f6c6f257cc656598ccd535492fdfb55/tools/gyp/pylib/gyp/input.py#L2673-L2705
metashell/metashell
f4177e4854ea00c8dbc722cadab26ef413d798ea
3rd/templight/llvm/bindings/python/llvm/object.py
python
Relocation.offset
(self)
return lib.LLVMGetRelocationOffset(self)
The offset of this relocation, in long bytes.
The offset of this relocation, in long bytes.
[ "The", "offset", "of", "this", "relocation", "in", "long", "bytes", "." ]
def offset(self): """The offset of this relocation, in long bytes.""" if self.expired: raise Exception('Relocation instance has expired.') return lib.LLVMGetRelocationOffset(self)
[ "def", "offset", "(", "self", ")", ":", "if", "self", ".", "expired", ":", "raise", "Exception", "(", "'Relocation instance has expired.'", ")", "return", "lib", ".", "LLVMGetRelocationOffset", "(", "self", ")" ]
https://github.com/metashell/metashell/blob/f4177e4854ea00c8dbc722cadab26ef413d798ea/3rd/templight/llvm/bindings/python/llvm/object.py#L374-L379