Dataset schema (one Python function per record):

nwo — string, 5–86 chars (repository name with owner)
sha — string, 40 chars (commit hash)
path — string, 4–189 chars (file path within the repository)
language — string, 1 class (always "python" here)
identifier — string, 1–94 chars (qualified function name)
parameters — string, 2–4.03k chars (parameter list)
argument_list — string, 1 class
return_statement — string, 0–11.5k chars
docstring — string, 1–33.2k chars
docstring_summary — string, 0–5.15k chars
docstring_tokens — list (tokenized docstring summary)
function — string, 34–151k chars (full function source)
function_tokens — list (tokenized function source)
url — string, 90–278 chars (GitHub permalink)

mantidproject/mantid @ 03deeb89254ec4289edb8771e0188c2090a02f32
scripts/Inelastic/Direct/RunDescriptor.py (python)
RunDescriptor.is_monws_separate(self)
Summary: Is the monitor workspace separated from the data workspace or not.
[ "Is", "monitor", "workspace", "is", "separated", "from", "data", "workspace", "or", "not" ]
def is_monws_separate(self): """Is monitor workspace is separated from data workspace or not""" try: data_ws = self.get_workspace() mon_ws = data_ws.getMonitorWorkspace() return True except: pass # lets go long but reliable way mon_ws = self.get_monitors_ws() if mon_ws: name = mon_ws.name() else: return False if name.endswith('_monitors'): return True else: return False
[ "def", "is_monws_separate", "(", "self", ")", ":", "try", ":", "data_ws", "=", "self", ".", "get_workspace", "(", ")", "mon_ws", "=", "data_ws", ".", "getMonitorWorkspace", "(", ")", "return", "True", "except", ":", "pass", "# lets go long but reliable way", "mon_ws", "=", "self", ".", "get_monitors_ws", "(", ")", "if", "mon_ws", ":", "name", "=", "mon_ws", ".", "name", "(", ")", "else", ":", "return", "False", "if", "name", ".", "endswith", "(", "'_monitors'", ")", ":", "return", "True", "else", ":", "return", "False" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/Inelastic/Direct/RunDescriptor.py#L600-L618

hanpfei/chromium-net @ 392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/apiclient/googleapiclient/http.py (python)
HttpRequest.add_response_callback(self, cb)
Summary: Registers a callback to be called on receiving the response headers.
[ "add_response_headers_callback" ]
def add_response_callback(self, cb): """add_response_headers_callback Args: cb: Callback to be called on receiving the response headers, of signature: def cb(resp): # Where resp is an instance of httplib2.Response """ self.response_callbacks.append(cb)
[ "def", "add_response_callback", "(", "self", ",", "cb", ")", ":", "self", ".", "response_callbacks", ".", "append", "(", "cb", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/apiclient/googleapiclient/http.py#L733-L742

aws/lumberyard @ f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/timeit.py (python)
Timer.repeat(self, repeat=default_repeat, number=default_number)
Summary: Call timeit() a few times.
[ "Call", "timeit", "()", "a", "few", "times", "." ]
def repeat(self, repeat=default_repeat, number=default_number): """Call timeit() a few times. This is a convenience function that calls the timeit() repeatedly, returning a list of results. The first argument specifies how many times to call timeit(), defaulting to 5; the second argument specifies the timer argument, defaulting to one million. Note: it's tempting to calculate mean and standard deviation from the result vector and report these. However, this is not very useful. In a typical case, the lowest value gives a lower bound for how fast your machine can run the given code snippet; higher values in the result vector are typically not caused by variability in Python's speed, but by other processes interfering with your timing accuracy. So the min() of the result is probably the only number you should be interested in. After that, you should look at the entire vector and apply common sense rather than statistics. """ r = [] for i in range(repeat): t = self.timeit(number) r.append(t) return r
[ "def", "repeat", "(", "self", ",", "repeat", "=", "default_repeat", ",", "number", "=", "default_number", ")", ":", "r", "=", "[", "]", "for", "i", "in", "range", "(", "repeat", ")", ":", "t", "=", "self", ".", "timeit", "(", "number", ")", "r", ".", "append", "(", "t", ")", "return", "r" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/timeit.py#L183-L207
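The docstring's advice to trust min() over the mean is easy to act on; a minimal usage sketch (the timed statement is arbitrary, the printed value machine-dependent):

import timeit

t = timeit.Timer("sorted(range(100))")
results = t.repeat(repeat=5, number=10000)
# Per the docstring: higher entries reflect other processes interfering,
# not Python's variability, so only the minimum is meaningful.
print(min(results))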

mindspore-ai/mindspore @ fb8fd3338605bb34fa5cea054e535a8b1d753fab
mindspore/python/mindspore/nn/transformer/transformer.py (python)
MultiHeadAttention._merge_heads(self, x)
Summary: Convert a 4d input to a 2d output.
[ "convert", "a", "4d", "input", "to", "a", "2d", "output" ]
def _merge_heads(self, x): """ convert a 4d input to a 2d output Inputs: x: input tensor Output: x_merge: the 2d output """ x = self.merger_head_transpose( x, (0, 2, 1, 3)) # bs, seq_length, head, size_per_head x_shape = P.Shape()(x) new_shape = (-1, x_shape[-2] * x_shape[-1]) x_merge = self.reshape(x, new_shape) return x_merge
[ "def", "_merge_heads", "(", "self", ",", "x", ")", ":", "x", "=", "self", ".", "merger_head_transpose", "(", "x", ",", "(", "0", ",", "2", ",", "1", ",", "3", ")", ")", "# bs, seq_length, head, size_per_head", "x_shape", "=", "P", ".", "Shape", "(", ")", "(", "x", ")", "new_shape", "=", "(", "-", "1", ",", "x_shape", "[", "-", "2", "]", "*", "x_shape", "[", "-", "1", "]", ")", "x_merge", "=", "self", ".", "reshape", "(", "x", ",", "new_shape", ")", "return", "x_merge" ]
https://github.com/mindspore-ai/mindspore/blob/fb8fd3338605bb34fa5cea054e535a8b1d753fab/mindspore/python/mindspore/nn/transformer/transformer.py#L1056-L1071
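The transpose-then-reshape arithmetic above can be checked with a plain NumPy sketch (illustrative shapes; not MindSpore code):

import numpy as np

bs, head, seq_length, size_per_head = 2, 4, 8, 16
x = np.zeros((bs, head, seq_length, size_per_head))
x = x.transpose(0, 2, 1, 3)  # -> (bs, seq_length, head, size_per_head)
x_merge = x.reshape(-1, x.shape[-2] * x.shape[-1])
print(x_merge.shape)  # (bs * seq_length, head * size_per_head) = (16, 64)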

baidu-research/tensorflow-allreduce @ 66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/keras/python/keras/backend.py (python)
get_value(x)
Summary: Returns the value of a variable.
[ "Returns", "the", "value", "of", "a", "variable", "." ]
def get_value(x): """Returns the value of a variable. Arguments: x: input variable. Returns: A Numpy array. """ return x.eval(session=get_session())
[ "def", "get_value", "(", "x", ")", ":", "return", "x", ".", "eval", "(", "session", "=", "get_session", "(", ")", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/keras/python/keras/backend.py#L2271-L2280

CRYTEK/CRYENGINE @ 232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/utils.py (python)
parse_header_links(value)
Summary: Return a list of parsed link headers.
[ "Return", "a", "dict", "of", "parsed", "link", "headers", "proxies", "." ]
def parse_header_links(value): """Return a dict of parsed link headers proxies. i.e. Link: <http:/.../front.jpeg>; rel=front; type="image/jpeg",<http://.../back.jpeg>; rel=back;type="image/jpeg" """ links = [] replace_chars = " '\"" for val in re.split(", *<", value): try: url, params = val.split(";", 1) except ValueError: url, params = val, '' link = {} link["url"] = url.strip("<> '\"") for param in params.split(";"): try: key, value = param.split("=") except ValueError: break link[key.strip(replace_chars)] = value.strip(replace_chars) links.append(link) return links
[ "def", "parse_header_links", "(", "value", ")", ":", "links", "=", "[", "]", "replace_chars", "=", "\" '\\\"\"", "for", "val", "in", "re", ".", "split", "(", "\", *<\"", ",", "value", ")", ":", "try", ":", "url", ",", "params", "=", "val", ".", "split", "(", "\";\"", ",", "1", ")", "except", "ValueError", ":", "url", ",", "params", "=", "val", ",", "''", "link", "=", "{", "}", "link", "[", "\"url\"", "]", "=", "url", ".", "strip", "(", "\"<> '\\\"\"", ")", "for", "param", "in", "params", ".", "split", "(", "\";\"", ")", ":", "try", ":", "key", ",", "value", "=", "param", ".", "split", "(", "\"=\"", ")", "except", "ValueError", ":", "break", "link", "[", "key", ".", "strip", "(", "replace_chars", ")", "]", "=", "value", ".", "strip", "(", "replace_chars", ")", "links", ".", "append", "(", "link", ")", "return", "links" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/utils.py#L580-L611
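A worked example of the Link header format the docstring sketches (in a regular requests install this function is importable as requests.utils.parse_header_links):

from requests.utils import parse_header_links

header = ('<http://example.com/front.jpeg>; rel=front; type="image/jpeg", '
          '<http://example.com/back.jpeg>; rel=back; type="image/jpeg"')
print(parse_header_links(header))
# [{'url': 'http://example.com/front.jpeg', 'rel': 'front', 'type': 'image/jpeg'},
#  {'url': 'http://example.com/back.jpeg', 'rel': 'back', 'type': 'image/jpeg'}]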

ChromiumWebApps/chromium @ c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
tools/json_schema_compiler/model.py (python)
_GetFunctions(parent, json, namespace)
Summary: Creates Function objects extracted from |json|.
[ "Creates", "Function", "objects", "extracted", "from", "|json|", "." ]
def _GetFunctions(parent, json, namespace): """Creates Function objects extracted from |json|. """ functions = OrderedDict() for function_json in json.get('functions', []): function = Function(parent, function_json['name'], function_json, namespace, Origin(from_json=True)) functions[function.name] = function return functions
[ "def", "_GetFunctions", "(", "parent", ",", "json", ",", "namespace", ")", ":", "functions", "=", "OrderedDict", "(", ")", "for", "function_json", "in", "json", ".", "get", "(", "'functions'", ",", "[", "]", ")", ":", "function", "=", "Function", "(", "parent", ",", "function_json", "[", "'name'", "]", ",", "function_json", ",", "namespace", ",", "Origin", "(", "from_json", "=", "True", ")", ")", "functions", "[", "function", ".", "name", "]", "=", "function", "return", "functions" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/tools/json_schema_compiler/model.py#L515-L526

tensorflow/tensorflow @ 419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/debug/cli/debugger_cli_common.py (python)
CommandHistory.most_recent_n(self, n)
Summary: Look up the n most recent commands.
[ "Look", "up", "the", "n", "most", "recent", "commands", "." ]
def most_recent_n(self, n): """Look up the n most recent commands. Args: n: Number of most recent commands to look up. Returns: A list of n most recent commands, or all available most recent commands, if n exceeds size of the command history, in chronological order. """ return self._commands[-n:]
[ "def", "most_recent_n", "(", "self", ",", "n", ")", ":", "return", "self", ".", "_commands", "[", "-", "n", ":", "]" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/debug/cli/debugger_cli_common.py#L1073-L1084
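The promise that n may exceed the history size rests on Python's slice clamping; a quick check with a plain list standing in for self._commands:

commands = ["run", "list", "print x"]
print(commands[-2:])   # ['list', 'print x'] -- the 2 most recent
print(commands[-10:])  # all three entries: slicing past the start is clamped
# Caveat: n=0 gives commands[-0:], i.e. the whole list, not [].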

FreeCAD/FreeCAD @ ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/drafttaskpanels/task_polararray.py (python)
TaskPanelPolarArray.reject(self)
Summary: Execute when clicking the Cancel button or pressing Escape.
[ "Execute", "when", "clicking", "the", "Cancel", "button", "or", "pressing", "Escape", "." ]
def reject(self): """Execute when clicking the Cancel button or pressing Escape.""" _msg(translate("draft","Aborted:") + " {}".format(translate("draft","Polar array"))) self.finish()
[ "def", "reject", "(", "self", ")", ":", "_msg", "(", "translate", "(", "\"draft\"", ",", "\"Aborted:\"", ")", "+", "\" {}\"", ".", "format", "(", "translate", "(", "\"draft\"", ",", "\"Polar array\"", ")", ")", ")", "self", ".", "finish", "(", ")" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/drafttaskpanels/task_polararray.py#L436-L439

baidu/bigflow @ 449245016c0df7d1252e85581e588bfc60cefad3
bigflow_python/python/bigflow/pipeline/pipeline_base.py (python)
PipelineBase._set_before_run_hook(self, name, callback)
Summary: Register a hook that runs before pipeline.run() executes; hooks run in the sorted order of their registered names.
[ "注册一个在", "pipeline", ".", "run", "()", "执行之前的", "hook", ".", "hook", "执行顺序", ":", "注册的", "name", "进行", "sorted", "排序结果" ]
def _set_before_run_hook(self, name, callback): """ 注册一个在 pipeline.run() 执行之前的 hook. hook 执行顺序: 注册的 name 进行 sorted 排序结果 todo: deal with callback with parameters. Users can always use closure to convert a callback with parameters to a zero-parameter callback :param name: 钩子名称 :param callback: 无参的 callback :return: None ..Note: This function is provided for advanced usage, please make sure you know what you are doing. """ if callable(callback): self._before_run_hooks[name] = (callback, ) else: raise error.BigflowPlanningException("Cannot register a non-callable object: %s" % str(callback))
[ "def", "_set_before_run_hook", "(", "self", ",", "name", ",", "callback", ")", ":", "if", "callable", "(", "callback", ")", ":", "self", ".", "_before_run_hooks", "[", "name", "]", "=", "(", "callback", ",", ")", "else", ":", "raise", "error", ".", "BigflowPlanningException", "(", "\"Cannot register a non-callable object: %s\"", "%", "str", "(", "callback", ")", ")" ]
https://github.com/baidu/bigflow/blob/449245016c0df7d1252e85581e588bfc60cefad3/bigflow_python/python/bigflow/pipeline/pipeline_base.py#L406-L423
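A minimal usage sketch (pipeline stands for an existing PipelineBase instance; the hook name is hypothetical and only matters for sorted ordering):

def announce():  # zero-parameter callback, as the docstring requires
    print("about to run the pipeline")

pipeline._set_before_run_hook("00_announce", announce)
# Registering a non-callable raises error.BigflowPlanningException:
# pipeline._set_before_run_hook("bad", 42)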

aws/lumberyard @ f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/msvc.py (python)
PlatformInfo.cross_dir(self, forcex86=False)
Summary: Cross platform specific subfolder.
[ "r", "Cross", "platform", "specific", "subfolder", "." ]
def cross_dir(self, forcex86=False): r""" Cross platform specific subfolder. Parameters ---------- forcex86: bool Use 'x86' as current architecture even if current architecture is not x86. Return ------ str subfolder: '' if target architecture is current architecture, '\current_target' if not. """ current = 'x86' if forcex86 else self.current_cpu return ( '' if self.target_cpu == current else self.target_dir().replace('\\', '\\%s_' % current) )
[ "def", "cross_dir", "(", "self", ",", "forcex86", "=", "False", ")", ":", "current", "=", "'x86'", "if", "forcex86", "else", "self", ".", "current_cpu", "return", "(", "''", "if", "self", ".", "target_cpu", "==", "current", "else", "self", ".", "target_dir", "(", ")", ".", "replace", "(", "'\\\\'", ",", "'\\\\%s_'", "%", "current", ")", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/msvc.py#L461-L481
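The replace call at the end does all the work: every backslash in the target subfolder gains a "current_" prefix. A worked example with illustrative values (assuming target_dir() yields r'\amd64' on an x86 host):

current = 'x86'
target_dir = '\\amd64'  # assumed result of self.target_dir()
print(target_dir.replace('\\', '\\%s_' % current))  # \x86_amd64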

microsoft/ivy @ 9f3c7ecc0b2383129fdd0953e10890d98d09a82d
ivy/ivy_parser.py (python)
p_top_state_symbol_eq_state_expr(p)
Summary: top : top STATE SYMBOL EQ state_expr
[ "top", ":", "top", "STATE", "SYMBOL", "EQ", "state_expr" ]
def p_top_state_symbol_eq_state_expr(p): 'top : top STATE SYMBOL EQ state_expr' p[0] = p[1] p[0].declare(StateDecl(StateDef(p[3],p[5])))
[ "def", "p_top_state_symbol_eq_state_expr", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]", "p", "[", "0", "]", ".", "declare", "(", "StateDecl", "(", "StateDef", "(", "p", "[", "3", "]", ",", "p", "[", "5", "]", ")", ")", ")" ]
https://github.com/microsoft/ivy/blob/9f3c7ecc0b2383129fdd0953e10890d98d09a82d/ivy/ivy_parser.py#L1789-L1792

PaddlePaddle/Paddle @ 1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c
python/paddle/dataset/mnist.py (python)
train()
Summary: MNIST training set creator.
[ "MNIST", "training", "set", "creator", "." ]
def train(): """ MNIST training set creator. It returns a reader creator, each sample in the reader is image pixels in [-1, 1] and label in [0, 9]. :return: Training reader creator :rtype: callable """ return reader_creator( paddle.dataset.common.download(TRAIN_IMAGE_URL, 'mnist', TRAIN_IMAGE_MD5), paddle.dataset.common.download(TRAIN_LABEL_URL, 'mnist', TRAIN_LABEL_MD5), 100)
[ "def", "train", "(", ")", ":", "return", "reader_creator", "(", "paddle", ".", "dataset", ".", "common", ".", "download", "(", "TRAIN_IMAGE_URL", ",", "'mnist'", ",", "TRAIN_IMAGE_MD5", ")", ",", "paddle", ".", "dataset", ".", "common", ".", "download", "(", "TRAIN_LABEL_URL", ",", "'mnist'", ",", "TRAIN_LABEL_MD5", ")", ",", "100", ")" ]
https://github.com/PaddlePaddle/Paddle/blob/1252f4bb3e574df80aa6d18c7ddae1b3a90bd81c/python/paddle/dataset/mnist.py#L98-L112
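A hedged usage sketch (assumes this legacy paddle.dataset API is importable; the sample layout follows the docstring):

import paddle.dataset.mnist as mnist

train_reader = mnist.train()  # the returned reader creator is callable
for image, label in train_reader():
    # image: pixel values in [-1, 1]; label: an int in [0, 9]
    break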

twtygqyy/caffe-augmentation @ c76600d247e5132fa5bd89d87bb5df458341fa84
scripts/cpp_lint.py (python)
ParseArguments(args)
Summary: Parses the command line arguments.
[ "Parses", "the", "command", "line", "arguments", "." ]
def ParseArguments(args): """Parses the command line arguments. This may set the output format and verbosity level as side-effects. Args: args: The command line arguments: Returns: The list of filenames to lint. """ try: (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=', 'counting=', 'filter=', 'root=', 'linelength=', 'extensions=']) except getopt.GetoptError: PrintUsage('Invalid arguments.') verbosity = _VerboseLevel() output_format = _OutputFormat() filters = '' counting_style = '' for (opt, val) in opts: if opt == '--help': PrintUsage(None) elif opt == '--output': if val not in ('emacs', 'vs7', 'eclipse'): PrintUsage('The only allowed output formats are emacs, vs7 and eclipse.') output_format = val elif opt == '--verbose': verbosity = int(val) elif opt == '--filter': filters = val if not filters: PrintCategories() elif opt == '--counting': if val not in ('total', 'toplevel', 'detailed'): PrintUsage('Valid counting options are total, toplevel, and detailed') counting_style = val elif opt == '--root': global _root _root = val elif opt == '--linelength': global _line_length try: _line_length = int(val) except ValueError: PrintUsage('Line length must be digits.') elif opt == '--extensions': global _valid_extensions try: _valid_extensions = set(val.split(',')) except ValueError: PrintUsage('Extensions must be comma separated list.') if not filenames: PrintUsage('No files were specified.') _SetOutputFormat(output_format) _SetVerboseLevel(verbosity) _SetFilters(filters) _SetCountingStyle(counting_style) return filenames
[ "def", "ParseArguments", "(", "args", ")", ":", "try", ":", "(", "opts", ",", "filenames", ")", "=", "getopt", ".", "getopt", "(", "args", ",", "''", ",", "[", "'help'", ",", "'output='", ",", "'verbose='", ",", "'counting='", ",", "'filter='", ",", "'root='", ",", "'linelength='", ",", "'extensions='", "]", ")", "except", "getopt", ".", "GetoptError", ":", "PrintUsage", "(", "'Invalid arguments.'", ")", "verbosity", "=", "_VerboseLevel", "(", ")", "output_format", "=", "_OutputFormat", "(", ")", "filters", "=", "''", "counting_style", "=", "''", "for", "(", "opt", ",", "val", ")", "in", "opts", ":", "if", "opt", "==", "'--help'", ":", "PrintUsage", "(", "None", ")", "elif", "opt", "==", "'--output'", ":", "if", "val", "not", "in", "(", "'emacs'", ",", "'vs7'", ",", "'eclipse'", ")", ":", "PrintUsage", "(", "'The only allowed output formats are emacs, vs7 and eclipse.'", ")", "output_format", "=", "val", "elif", "opt", "==", "'--verbose'", ":", "verbosity", "=", "int", "(", "val", ")", "elif", "opt", "==", "'--filter'", ":", "filters", "=", "val", "if", "not", "filters", ":", "PrintCategories", "(", ")", "elif", "opt", "==", "'--counting'", ":", "if", "val", "not", "in", "(", "'total'", ",", "'toplevel'", ",", "'detailed'", ")", ":", "PrintUsage", "(", "'Valid counting options are total, toplevel, and detailed'", ")", "counting_style", "=", "val", "elif", "opt", "==", "'--root'", ":", "global", "_root", "_root", "=", "val", "elif", "opt", "==", "'--linelength'", ":", "global", "_line_length", "try", ":", "_line_length", "=", "int", "(", "val", ")", "except", "ValueError", ":", "PrintUsage", "(", "'Line length must be digits.'", ")", "elif", "opt", "==", "'--extensions'", ":", "global", "_valid_extensions", "try", ":", "_valid_extensions", "=", "set", "(", "val", ".", "split", "(", "','", ")", ")", "except", "ValueError", ":", "PrintUsage", "(", "'Extensions must be comma separated list.'", ")", "if", "not", "filenames", ":", "PrintUsage", "(", "'No files were specified.'", ")", "_SetOutputFormat", "(", "output_format", ")", "_SetVerboseLevel", "(", "verbosity", ")", "_SetFilters", "(", "filters", ")", "_SetCountingStyle", "(", "counting_style", ")", "return", "filenames" ]
https://github.com/twtygqyy/caffe-augmentation/blob/c76600d247e5132fa5bd89d87bb5df458341fa84/scripts/cpp_lint.py#L4783-L4850
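A usage sketch grounded in the getopt spec above (flag values drawn from the long-options list in the code; side effects are global):

filenames = ParseArguments(['--output=vs7', '--linelength=100', 'foo.cc', 'bar.h'])
# Sets the output format and line-length limit as side effects and
# returns the files to lint: ['foo.cc', 'bar.h']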

aws/lumberyard @ f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/bdb.py (python)
Bdb.stop_here(self, frame)
Summary: Return True if frame is below the starting frame in the stack.
[ "Return", "True", "if", "frame", "is", "below", "the", "starting", "frame", "in", "the", "stack", "." ]
def stop_here(self, frame): "Return True if frame is below the starting frame in the stack." # (CT) stopframe may now also be None, see dispatch_call. # (CT) the former test for None is therefore removed from here. if self.skip and \ self.is_skipped_module(frame.f_globals.get('__name__')): return False if frame is self.stopframe: if self.stoplineno == -1: return False return frame.f_lineno >= self.stoplineno if not self.stopframe: return True return False
[ "def", "stop_here", "(", "self", ",", "frame", ")", ":", "# (CT) stopframe may now also be None, see dispatch_call.", "# (CT) the former test for None is therefore removed from here.", "if", "self", ".", "skip", "and", "self", ".", "is_skipped_module", "(", "frame", ".", "f_globals", ".", "get", "(", "'__name__'", ")", ")", ":", "return", "False", "if", "frame", "is", "self", ".", "stopframe", ":", "if", "self", ".", "stoplineno", "==", "-", "1", ":", "return", "False", "return", "frame", ".", "f_lineno", ">=", "self", ".", "stoplineno", "if", "not", "self", ".", "stopframe", ":", "return", "True", "return", "False" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/bdb.py#L198-L211

9miao/CrossApp @ 1f5375e061bf69841eb19728598f5ae3f508d620
tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py (python)
SourceLocation.line(self)
Summary: Get the line represented by this source location.
[ "Get", "the", "line", "represented", "by", "this", "source", "location", "." ]
def line(self): """Get the line represented by this source location.""" return self._get_instantiation()[1]
[ "def", "line", "(", "self", ")", ":", "return", "self", ".", "_get_instantiation", "(", ")", "[", "1", "]" ]
https://github.com/9miao/CrossApp/blob/1f5375e061bf69841eb19728598f5ae3f508d620/tools/bindings-generator/backup/clang-llvm-3.3-pybinding/cindex.py#L203-L205

Xilinx/Vitis-AI @ fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/distributions/python/ops/onehot_categorical.py (python)
OneHotCategorical.__init__(self, logits=None, probs=None, dtype=dtypes.int32, validate_args=False, allow_nan_stats=True, name="OneHotCategorical")
Summary: Initialize OneHotCategorical distributions using class log-probabilities.
[ "Initialize", "OneHotCategorical", "distributions", "using", "class", "log", "-", "probabilities", "." ]
def __init__( self, logits=None, probs=None, dtype=dtypes.int32, validate_args=False, allow_nan_stats=True, name="OneHotCategorical"): """Initialize OneHotCategorical distributions using class log-probabilities. Args: logits: An N-D `Tensor`, `N >= 1`, representing the log probabilities of a set of Categorical distributions. The first `N - 1` dimensions index into a batch of independent distributions and the last dimension represents a vector of logits for each class. Only one of `logits` or `probs` should be passed in. probs: An N-D `Tensor`, `N >= 1`, representing the probabilities of a set of Categorical distributions. The first `N - 1` dimensions index into a batch of independent distributions and the last dimension represents a vector of probabilities for each class. Only one of `logits` or `probs` should be passed in. dtype: The type of the event samples (default: int32). validate_args: Python `bool`, default `False`. When `True` distribution parameters are checked for validity despite possibly degrading runtime performance. When `False` invalid inputs may silently render incorrect outputs. allow_nan_stats: Python `bool`, default `True`. When `True`, statistics (e.g., mean, mode, variance) use the value "`NaN`" to indicate the result is undefined. When `False`, an exception is raised if one or more of the statistic's batch members are undefined. name: Python `str` name prefixed to Ops created by this class. """ parameters = dict(locals()) with ops.name_scope(name, values=[logits, probs]) as name: self._logits, self._probs = distribution_util.get_logits_and_probs( name=name, logits=logits, probs=probs, validate_args=validate_args, multidimensional=True) logits_shape_static = self._logits.get_shape().with_rank_at_least(1) if logits_shape_static.ndims is not None: self._batch_rank = ops.convert_to_tensor( logits_shape_static.ndims - 1, dtype=dtypes.int32, name="batch_rank") else: with ops.name_scope(name="batch_rank"): self._batch_rank = array_ops.rank(self._logits) - 1 with ops.name_scope(name="event_size"): self._event_size = array_ops.shape(self._logits)[-1] super(OneHotCategorical, self).__init__( dtype=dtype, reparameterization_type=distribution.NOT_REPARAMETERIZED, validate_args=validate_args, allow_nan_stats=allow_nan_stats, parameters=parameters, graph_parents=[self._logits, self._probs], name=name)
[ "def", "__init__", "(", "self", ",", "logits", "=", "None", ",", "probs", "=", "None", ",", "dtype", "=", "dtypes", ".", "int32", ",", "validate_args", "=", "False", ",", "allow_nan_stats", "=", "True", ",", "name", "=", "\"OneHotCategorical\"", ")", ":", "parameters", "=", "dict", "(", "locals", "(", ")", ")", "with", "ops", ".", "name_scope", "(", "name", ",", "values", "=", "[", "logits", ",", "probs", "]", ")", "as", "name", ":", "self", ".", "_logits", ",", "self", ".", "_probs", "=", "distribution_util", ".", "get_logits_and_probs", "(", "name", "=", "name", ",", "logits", "=", "logits", ",", "probs", "=", "probs", ",", "validate_args", "=", "validate_args", ",", "multidimensional", "=", "True", ")", "logits_shape_static", "=", "self", ".", "_logits", ".", "get_shape", "(", ")", ".", "with_rank_at_least", "(", "1", ")", "if", "logits_shape_static", ".", "ndims", "is", "not", "None", ":", "self", ".", "_batch_rank", "=", "ops", ".", "convert_to_tensor", "(", "logits_shape_static", ".", "ndims", "-", "1", ",", "dtype", "=", "dtypes", ".", "int32", ",", "name", "=", "\"batch_rank\"", ")", "else", ":", "with", "ops", ".", "name_scope", "(", "name", "=", "\"batch_rank\"", ")", ":", "self", ".", "_batch_rank", "=", "array_ops", ".", "rank", "(", "self", ".", "_logits", ")", "-", "1", "with", "ops", ".", "name_scope", "(", "name", "=", "\"event_size\"", ")", ":", "self", ".", "_event_size", "=", "array_ops", ".", "shape", "(", "self", ".", "_logits", ")", "[", "-", "1", "]", "super", "(", "OneHotCategorical", ",", "self", ")", ".", "__init__", "(", "dtype", "=", "dtype", ",", "reparameterization_type", "=", "distribution", ".", "NOT_REPARAMETERIZED", ",", "validate_args", "=", "validate_args", ",", "allow_nan_stats", "=", "allow_nan_stats", ",", "parameters", "=", "parameters", ",", "graph_parents", "=", "[", "self", ".", "_logits", ",", "self", ".", "_probs", "]", ",", "name", "=", "name", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/distributions/python/ops/onehot_categorical.py#L95-L154
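A hedged construction sketch (assumes the tf.contrib-era export path; shapes per the docstring, with the last logits dimension indexing classes):

from tensorflow.contrib.distributions import OneHotCategorical

# A batch of one distribution over three classes, given by logits.
dist = OneHotCategorical(logits=[[-1.0, 0.0, 2.0]])
sample = dist.sample()  # a one-hot vector with the default int32 dtype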

BitMEX/api-connectors @ 37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812
auto-generated/python/swagger_client/api/user_api.py (python)
UserApi.user_cancel_withdrawal(self, token, **kwargs)
Summary: Cancel a withdrawal.
[ "Cancel", "a", "withdrawal", ".", "#", "noqa", ":", "E501" ]
def user_cancel_withdrawal(self, token, **kwargs): # noqa: E501 """Cancel a withdrawal. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.user_cancel_withdrawal(token, async_req=True) >>> result = thread.get() :param async_req bool :param str token: (required) :return: Transaction If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.user_cancel_withdrawal_with_http_info(token, **kwargs) # noqa: E501 else: (data) = self.user_cancel_withdrawal_with_http_info(token, **kwargs) # noqa: E501 return data
[ "def", "user_cancel_withdrawal", "(", "self", ",", "token", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "return", "self", ".", "user_cancel_withdrawal_with_http_info", "(", "token", ",", "*", "*", "kwargs", ")", "# noqa: E501", "else", ":", "(", "data", ")", "=", "self", ".", "user_cancel_withdrawal_with_http_info", "(", "token", ",", "*", "*", "kwargs", ")", "# noqa: E501", "return", "data" ]
https://github.com/BitMEX/api-connectors/blob/37a3a5b806ad5d0e0fc975ab86d9ed43c3bcd812/auto-generated/python/swagger_client/api/user_api.py#L36-L55
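The docstring's asynchronous pattern, spelled out (api stands for an instantiated UserApi; the token value is illustrative):

thread = api.user_cancel_withdrawal("withdrawal-token", async_req=True)
transaction = thread.get()  # blocks until the HTTP call completes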

lzhang10/maxent @ 3560c94b737d4272ed86de529e50d823200e6d8e
python/maxent/cmaxent.py (python)
MaxentModel.save(self, model, binary=False)
Summary: Save current model to a file.
[ "r", "save", "(", "self", "model", "binary", "=", "False", ")" ]
def save(self, model, binary=False): r""" save(self, model, binary=False) Save current model to a file. Parameters: model The filename of the model to save. binary If true, the file is saved in binary format, which is usually smaller (if compiled with libz) and much faster to load. """ return _cmaxent.MaxentModel_save(self, model, binary)
[ "def", "save", "(", "self", ",", "model", ",", "binary", "=", "False", ")", ":", "return", "_cmaxent", ".", "MaxentModel_save", "(", "self", ",", "model", ",", "binary", ")" ]
https://github.com/lzhang10/maxent/blob/3560c94b737d4272ed86de529e50d823200e6d8e/python/maxent/cmaxent.py#L96-L107

catboost/catboost @ 167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/cython/Cython/Plex/Regexps.py (python)
chars_to_ranges(s)
Summary: Return a list of character codes consisting of pairs [code1a, code1b, code2a, code2b, ...] which cover all the characters in |s|.
[ "Return", "a", "list", "of", "character", "codes", "consisting", "of", "pairs", "[", "code1a", "code1b", "code2a", "code2b", "...", "]", "which", "cover", "all", "the", "characters", "in", "|s|", "." ]
def chars_to_ranges(s): """ Return a list of character codes consisting of pairs [code1a, code1b, code2a, code2b,...] which cover all the characters in |s|. """ char_list = list(s) char_list.sort() i = 0 n = len(char_list) result = [] while i < n: code1 = ord(char_list[i]) code2 = code1 + 1 i += 1 while i < n and code2 >= ord(char_list[i]): code2 += 1 i += 1 result.append(code1) result.append(code2) return result
[ "def", "chars_to_ranges", "(", "s", ")", ":", "char_list", "=", "list", "(", "s", ")", "char_list", ".", "sort", "(", ")", "i", "=", "0", "n", "=", "len", "(", "char_list", ")", "result", "=", "[", "]", "while", "i", "<", "n", ":", "code1", "=", "ord", "(", "char_list", "[", "i", "]", ")", "code2", "=", "code1", "+", "1", "i", "+=", "1", "while", "i", "<", "n", "and", "code2", ">=", "ord", "(", "char_list", "[", "i", "]", ")", ":", "code2", "+=", "1", "i", "+=", "1", "result", ".", "append", "(", "code1", ")", "result", ".", "append", "(", "code2", ")", "return", "result" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/cython/Cython/Plex/Regexps.py#L34-L54
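A worked example: runs of consecutive characters collapse into one half-open pair, so 'a', 'b', 'c' become 97..100 and the isolated 'x' becomes 120..121:

print(chars_to_ranges("abcx"))  # [97, 100, 120, 121]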

Polidea/SiriusObfuscator @ b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py (python)
SBCommandInterpreter.HasAliases(self)
Summary: HasAliases(self) -> bool
[ "HasAliases", "(", "self", ")", "-", ">", "bool" ]
def HasAliases(self): """HasAliases(self) -> bool""" return _lldb.SBCommandInterpreter_HasAliases(self)
[ "def", "HasAliases", "(", "self", ")", ":", "return", "_lldb", ".", "SBCommandInterpreter_HasAliases", "(", "self", ")" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L2201-L2203

apple/turicreate @ cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2.py (python)
xmlNode.xpathNextAncestor(self, ctxt)
Summary: Traversal function for the "ancestor" direction; the ancestor axis contains the ancestors of the context node (its parent, the parent's parent, and so on), ordered in reverse document order, so the parent is the first node on the axis.
[ "Traversal", "function", "for", "the", "ancestor", "direction", "the", "ancestor", "axis", "contains", "the", "ancestors", "of", "the", "context", "node", ";", "the", "ancestors", "of", "the", "context", "node", "consist", "of", "the", "parent", "of", "context", "node", "and", "the", "parent", "s", "parent", "and", "so", "on", ";", "the", "nodes", "are", "ordered", "in", "reverse", "document", "order", ";", "thus", "the", "parent", "is", "the", "first", "node", "on", "the", "axis", "and", "the", "parent", "s", "parent", "is", "the", "second", "node", "on", "the", "axis" ]
def xpathNextAncestor(self, ctxt): """Traversal function for the "ancestor" direction the ancestor axis contains the ancestors of the context node; the ancestors of the context node consist of the parent of context node and the parent's parent and so on; the nodes are ordered in reverse document order; thus the parent is the first node on the axis, and the parent's parent is the second node on the axis """ if ctxt is None: ctxt__o = None else: ctxt__o = ctxt._o ret = libxml2mod.xmlXPathNextAncestor(ctxt__o, self._o) if ret is None:raise xpathError('xmlXPathNextAncestor() failed') __tmp = xmlNode(_obj=ret) return __tmp
[ "def", "xpathNextAncestor", "(", "self", ",", "ctxt", ")", ":", "if", "ctxt", "is", "None", ":", "ctxt__o", "=", "None", "else", ":", "ctxt__o", "=", "ctxt", ".", "_o", "ret", "=", "libxml2mod", ".", "xmlXPathNextAncestor", "(", "ctxt__o", ",", "self", ".", "_o", ")", "if", "ret", "is", "None", ":", "raise", "xpathError", "(", "'xmlXPathNextAncestor() failed'", ")", "__tmp", "=", "xmlNode", "(", "_obj", "=", "ret", ")", "return", "__tmp" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L3749-L3762

InsightSoftwareConsortium/ITK @ 87acfce9a93d928311c38bc371b666b515b9f19d
Modules/ThirdParty/pygccxml/src/pygccxml/parser/directory_cache.py (python)
filename_repository_t.__init__(self, sha1_sigs)
Summary: Constructor.
[ "Constructor", "." ]
def __init__(self, sha1_sigs): """Constructor. """ # Flag that determines whether the signature is a sha1 digest or # the modification time # (this flag is passed to the filename_repository_t class) self._sha1_sigs = sha1_sigs # ID lookup table (key: filename / value: id_) self.__id_lut = {} # Entry dictionary (key: id_ / value: filename_entry_t) # This dictionary contains the actual data. # It must always hold that each entry in __entries has a corresponding # entry in __id_lut (i.e. the keys in __id_lut must be the names # stored in __entries) self.__entries = {} # A counter for new ids self.__next_id = 1
[ "def", "__init__", "(", "self", ",", "sha1_sigs", ")", ":", "# Flag that determines whether the signature is a sha1 digest or", "# the modification time", "# (this flag is passed to the filename_repository_t class)", "self", ".", "_sha1_sigs", "=", "sha1_sigs", "# ID lookup table (key: filename / value: id_)", "self", ".", "__id_lut", "=", "{", "}", "# Entry dictionary (key: id_ / value: filename_entry_t)", "# This dictionary contains the actual data.", "# It must always hold that each entry in __entries has a corresponding", "# entry in __id_lut (i.e. the keys in __id_lut must be the names", "# stored in __entries)", "self", ".", "__entries", "=", "{", "}", "# A counter for new ids", "self", ".", "__next_id", "=", "1" ]
https://github.com/InsightSoftwareConsortium/ITK/blob/87acfce9a93d928311c38bc371b666b515b9f19d/Modules/ThirdParty/pygccxml/src/pygccxml/parser/directory_cache.py#L444-L464

aws/lumberyard @ f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tkinter/__init__.py (python)
Misc.deletecommand(self, name)
Summary: Internal function. Delete the Tcl command provided in NAME.
[ "Internal", "function", "." ]
def deletecommand(self, name): """Internal function. Delete the Tcl command provided in NAME.""" #print '- Tkinter: deleted command', name self.tk.deletecommand(name) try: self._tclCommands.remove(name) except ValueError: pass
[ "def", "deletecommand", "(", "self", ",", "name", ")", ":", "#print '- Tkinter: deleted command', name", "self", ".", "tk", ".", "deletecommand", "(", "name", ")", "try", ":", "self", ".", "_tclCommands", ".", "remove", "(", "name", ")", "except", "ValueError", ":", "pass" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/tkinter/__init__.py#L594-L603

syoyo/tinygltf @ e7f1ff5c59d3ca2489923beb239bdf93d863498f
deps/cpplint.py (python)
FindStartOfExpressionInLine(line, endpos, stack)
Summary: Find position at the matching start of current expression.
[ "Find", "position", "at", "the", "matching", "start", "of", "current", "expression", "." ]
def FindStartOfExpressionInLine(line, endpos, stack): """Find position at the matching start of current expression. This is almost the reverse of FindEndOfExpressionInLine, but note that the input position and returned position differs by 1. Args: line: a CleansedLines line. endpos: start searching at this position. stack: nesting stack at endpos. Returns: On finding matching start: (index at matching start, None) On finding an unclosed expression: (-1, None) Otherwise: (-1, new stack at beginning of this line) """ i = endpos while i >= 0: char = line[i] if char in ')]}': # Found end of expression, push to expression stack stack.append(char) elif char == '>': # Found potential end of template argument list. # # Ignore it if it's a "->" or ">=" or "operator>" if (i > 0 and (line[i - 1] == '-' or Match(r'\s>=\s', line[i - 1:]) or Search(r'\boperator\s*$', line[0:i]))): i -= 1 else: stack.append('>') elif char == '<': # Found potential start of template argument list if i > 0 and line[i - 1] == '<': # Left shift operator i -= 1 else: # If there is a matching '>', we can pop the expression stack. # Otherwise, ignore this '<' since it must be an operator. if stack and stack[-1] == '>': stack.pop() if not stack: return (i, None) elif char in '([{': # Found start of expression. # # If there are any unmatched '>' on the stack, they must be # operators. Remove those. while stack and stack[-1] == '>': stack.pop() if not stack: return (-1, None) if ((char == '(' and stack[-1] == ')') or (char == '[' and stack[-1] == ']') or (char == '{' and stack[-1] == '}')): stack.pop() if not stack: return (i, None) else: # Mismatched parentheses return (-1, None) elif char == ';': # Found something that look like end of statements. If we are currently # expecting a '<', the matching '>' must have been an operator, since # template argument list should not contain statements. while stack and stack[-1] == '>': stack.pop() if not stack: return (-1, None) i -= 1 return (-1, stack)
[ "def", "FindStartOfExpressionInLine", "(", "line", ",", "endpos", ",", "stack", ")", ":", "i", "=", "endpos", "while", "i", ">=", "0", ":", "char", "=", "line", "[", "i", "]", "if", "char", "in", "')]}'", ":", "# Found end of expression, push to expression stack", "stack", ".", "append", "(", "char", ")", "elif", "char", "==", "'>'", ":", "# Found potential end of template argument list.", "#", "# Ignore it if it's a \"->\" or \">=\" or \"operator>\"", "if", "(", "i", ">", "0", "and", "(", "line", "[", "i", "-", "1", "]", "==", "'-'", "or", "Match", "(", "r'\\s>=\\s'", ",", "line", "[", "i", "-", "1", ":", "]", ")", "or", "Search", "(", "r'\\boperator\\s*$'", ",", "line", "[", "0", ":", "i", "]", ")", ")", ")", ":", "i", "-=", "1", "else", ":", "stack", ".", "append", "(", "'>'", ")", "elif", "char", "==", "'<'", ":", "# Found potential start of template argument list", "if", "i", ">", "0", "and", "line", "[", "i", "-", "1", "]", "==", "'<'", ":", "# Left shift operator", "i", "-=", "1", "else", ":", "# If there is a matching '>', we can pop the expression stack.", "# Otherwise, ignore this '<' since it must be an operator.", "if", "stack", "and", "stack", "[", "-", "1", "]", "==", "'>'", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "i", ",", "None", ")", "elif", "char", "in", "'([{'", ":", "# Found start of expression.", "#", "# If there are any unmatched '>' on the stack, they must be", "# operators. Remove those.", "while", "stack", "and", "stack", "[", "-", "1", "]", "==", "'>'", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "-", "1", ",", "None", ")", "if", "(", "(", "char", "==", "'('", "and", "stack", "[", "-", "1", "]", "==", "')'", ")", "or", "(", "char", "==", "'['", "and", "stack", "[", "-", "1", "]", "==", "']'", ")", "or", "(", "char", "==", "'{'", "and", "stack", "[", "-", "1", "]", "==", "'}'", ")", ")", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "i", ",", "None", ")", "else", ":", "# Mismatched parentheses", "return", "(", "-", "1", ",", "None", ")", "elif", "char", "==", "';'", ":", "# Found something that look like end of statements. If we are currently", "# expecting a '<', the matching '>' must have been an operator, since", "# template argument list should not contain statements.", "while", "stack", "and", "stack", "[", "-", "1", "]", "==", "'>'", ":", "stack", ".", "pop", "(", ")", "if", "not", "stack", ":", "return", "(", "-", "1", ",", "None", ")", "i", "-=", "1", "return", "(", "-", "1", ",", "stack", ")" ]
https://github.com/syoyo/tinygltf/blob/e7f1ff5c59d3ca2489923beb239bdf93d863498f/deps/cpplint.py#L1507-L1581

wxWidgets/wxPython-Classic @ 19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/stc.py (python)
StyledTextCtrl.AutoCompSelect(*args, **kwargs)
Summary: AutoCompSelect(self, String text) — select the item in the auto-completion list that starts with a string.
[ "AutoCompSelect", "(", "self", "String", "text", ")" ]
def AutoCompSelect(*args, **kwargs): """ AutoCompSelect(self, String text) Select the item in the auto-completion list that starts with a string. """ return _stc.StyledTextCtrl_AutoCompSelect(*args, **kwargs)
[ "def", "AutoCompSelect", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_AutoCompSelect", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/stc.py#L3095-L3101

krishauser/Klampt @ 972cc83ea5befac3f653c1ba20f80155768ad519
Python/klampt/src/robotsim.py (python)
Geometry3D.setPointCloud(self, arg2: "PointCloud")
Summary: Sets this Geometry3D to a PointCloud.
[ "r", "setPointCloud", "(", "Geometry3D", "self", "PointCloud", "arg2", ")" ]
def setPointCloud(self, arg2: "PointCloud") -> "void": r""" setPointCloud(Geometry3D self, PointCloud arg2) Sets this Geometry3D to a PointCloud. """ return _robotsim.Geometry3D_setPointCloud(self, arg2)
[ "def", "setPointCloud", "(", "self", ",", "arg2", ":", "\"PointCloud\"", ")", "->", "\"void\"", ":", "return", "_robotsim", ".", "Geometry3D_setPointCloud", "(", "self", ",", "arg2", ")" ]
https://github.com/krishauser/Klampt/blob/972cc83ea5befac3f653c1ba20f80155768ad519/Python/klampt/src/robotsim.py#L2186-L2194

aws/lumberyard @ f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/selectors.py (python)
_BaseSelectorImpl._key_from_fd(self, fd)
Summary: Return the key associated to a given file descriptor.
[ "Return", "the", "key", "associated", "to", "a", "given", "file", "descriptor", "." ]
def _key_from_fd(self, fd): """Return the key associated to a given file descriptor. Parameters: fd -- file descriptor Returns: corresponding key, or None if not found """ try: return self._fd_to_key[fd] except KeyError: return None
[ "def", "_key_from_fd", "(", "self", ",", "fd", ")", ":", "try", ":", "return", "self", ".", "_fd_to_key", "[", "fd", "]", "except", "KeyError", ":", "return", "None" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/selectors.py#L275-L287

FreeCAD/FreeCAD @ ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Arch/exportIFC.py (python)
getProfile(ifcfile, p)
Summary: Returns an IFC profile definition from a shape.
[ "returns", "an", "IFC", "profile", "definition", "from", "a", "shape" ]
def getProfile(ifcfile,p): """returns an IFC profile definition from a shape""" import Part import DraftGeomUtils profile = None if len(p.Edges) == 1: pxvc = ifcbin.createIfcDirection((1.0,0.0)) povc = ifcbin.createIfcCartesianPoint((0.0,0.0)) pt = ifcbin.createIfcAxis2Placement2D(povc,pxvc) if isinstance(p.Edges[0].Curve,Part.Circle): # extruded circle profile = ifcbin.createIfcCircleProfileDef("AREA",None,pt,p.Edges[0].Curve.Radius) elif isinstance(p.Edges[0].Curve,Part.Ellipse): # extruded ellipse profile = ifcbin.createIfcEllipseProfileDef("AREA",None,pt,p.Edges[0].Curve.MajorRadius,p.Edges[0].Curve.MinorRadius) elif (checkRectangle(p.Edges)): # arbitrarily use the first edge as the rectangle orientation d = vec(p.Edges[0]) d.normalize() pxvc = ifcbin.createIfcDirection(tuple(d)[:2]) povc = ifcbin.createIfcCartesianPoint((0.0,0.0)) # profile must be located at (0,0) because placement gets added later #povc = ifcbin.createIfcCartesianPoint(tuple(p.CenterOfMass[:2])) pt = ifcbin.createIfcAxis2Placement2D(povc,pxvc) #semiPerimeter = p.Length/2 #diff = math.sqrt(semiPerimeter**2 - 4*p.Area) #b = max(abs((semiPerimeter + diff)/2),abs((semiPerimeter - diff)/2)) #h = min(abs((semiPerimeter + diff)/2),abs((semiPerimeter - diff)/2)) b = p.Edges[0].Length h = p.Edges[1].Length profile = ifcbin.createIfcRectangleProfileDef("AREA",'rectangular',pt,b,h) elif (len(p.Faces) == 1) and (len(p.Wires) > 1): # face with holes f = p.Faces[0] if DraftGeomUtils.hasCurves(f.OuterWire): outerwire = createCurve(ifcfile,f.OuterWire) else: w = Part.Wire(Part.__sortEdges__(f.OuterWire.Edges)) pts = [ifcbin.createIfcCartesianPoint(tuple(v.Point)[:2]) for v in w.Vertexes+[w.Vertexes[0]]] outerwire = ifcbin.createIfcPolyline(pts) innerwires = [] for w in f.Wires: if w.hashCode() != f.OuterWire.hashCode(): if DraftGeomUtils.hasCurves(w): innerwires.append(createCurve(ifcfile,w)) else: w = Part.Wire(Part.__sortEdges__(w.Edges)) pts = [ifcbin.createIfcCartesianPoint(tuple(v.Point)[:2]) for v in w.Vertexes+[w.Vertexes[0]]] innerwires.append(ifcbin.createIfcPolyline(pts)) profile = ifcfile.createIfcArbitraryProfileDefWithVoids("AREA",None,outerwire,innerwires) else: if DraftGeomUtils.hasCurves(p): # extruded composite curve pol = createCurve(ifcfile,p) else: # extruded polyline w = Part.Wire(Part.__sortEdges__(p.Wires[0].Edges)) pts = [ifcbin.createIfcCartesianPoint(tuple(v.Point)[:2]) for v in w.Vertexes+[w.Vertexes[0]]] pol = ifcbin.createIfcPolyline(pts) profile = ifcfile.createIfcArbitraryClosedProfileDef("AREA",None,pol) return profile
[ "def", "getProfile", "(", "ifcfile", ",", "p", ")", ":", "import", "Part", "import", "DraftGeomUtils", "profile", "=", "None", "if", "len", "(", "p", ".", "Edges", ")", "==", "1", ":", "pxvc", "=", "ifcbin", ".", "createIfcDirection", "(", "(", "1.0", ",", "0.0", ")", ")", "povc", "=", "ifcbin", ".", "createIfcCartesianPoint", "(", "(", "0.0", ",", "0.0", ")", ")", "pt", "=", "ifcbin", ".", "createIfcAxis2Placement2D", "(", "povc", ",", "pxvc", ")", "if", "isinstance", "(", "p", ".", "Edges", "[", "0", "]", ".", "Curve", ",", "Part", ".", "Circle", ")", ":", "# extruded circle", "profile", "=", "ifcbin", ".", "createIfcCircleProfileDef", "(", "\"AREA\"", ",", "None", ",", "pt", ",", "p", ".", "Edges", "[", "0", "]", ".", "Curve", ".", "Radius", ")", "elif", "isinstance", "(", "p", ".", "Edges", "[", "0", "]", ".", "Curve", ",", "Part", ".", "Ellipse", ")", ":", "# extruded ellipse", "profile", "=", "ifcbin", ".", "createIfcEllipseProfileDef", "(", "\"AREA\"", ",", "None", ",", "pt", ",", "p", ".", "Edges", "[", "0", "]", ".", "Curve", ".", "MajorRadius", ",", "p", ".", "Edges", "[", "0", "]", ".", "Curve", ".", "MinorRadius", ")", "elif", "(", "checkRectangle", "(", "p", ".", "Edges", ")", ")", ":", "# arbitrarily use the first edge as the rectangle orientation", "d", "=", "vec", "(", "p", ".", "Edges", "[", "0", "]", ")", "d", ".", "normalize", "(", ")", "pxvc", "=", "ifcbin", ".", "createIfcDirection", "(", "tuple", "(", "d", ")", "[", ":", "2", "]", ")", "povc", "=", "ifcbin", ".", "createIfcCartesianPoint", "(", "(", "0.0", ",", "0.0", ")", ")", "# profile must be located at (0,0) because placement gets added later", "#povc = ifcbin.createIfcCartesianPoint(tuple(p.CenterOfMass[:2]))", "pt", "=", "ifcbin", ".", "createIfcAxis2Placement2D", "(", "povc", ",", "pxvc", ")", "#semiPerimeter = p.Length/2", "#diff = math.sqrt(semiPerimeter**2 - 4*p.Area)", "#b = max(abs((semiPerimeter + diff)/2),abs((semiPerimeter - diff)/2))", "#h = min(abs((semiPerimeter + diff)/2),abs((semiPerimeter - diff)/2))", "b", "=", "p", ".", "Edges", "[", "0", "]", ".", "Length", "h", "=", "p", ".", "Edges", "[", "1", "]", ".", "Length", "profile", "=", "ifcbin", ".", "createIfcRectangleProfileDef", "(", "\"AREA\"", ",", "'rectangular'", ",", "pt", ",", "b", ",", "h", ")", "elif", "(", "len", "(", "p", ".", "Faces", ")", "==", "1", ")", "and", "(", "len", "(", "p", ".", "Wires", ")", ">", "1", ")", ":", "# face with holes", "f", "=", "p", ".", "Faces", "[", "0", "]", "if", "DraftGeomUtils", ".", "hasCurves", "(", "f", ".", "OuterWire", ")", ":", "outerwire", "=", "createCurve", "(", "ifcfile", ",", "f", ".", "OuterWire", ")", "else", ":", "w", "=", "Part", ".", "Wire", "(", "Part", ".", "__sortEdges__", "(", "f", ".", "OuterWire", ".", "Edges", ")", ")", "pts", "=", "[", "ifcbin", ".", "createIfcCartesianPoint", "(", "tuple", "(", "v", ".", "Point", ")", "[", ":", "2", "]", ")", "for", "v", "in", "w", ".", "Vertexes", "+", "[", "w", ".", "Vertexes", "[", "0", "]", "]", "]", "outerwire", "=", "ifcbin", ".", "createIfcPolyline", "(", "pts", ")", "innerwires", "=", "[", "]", "for", "w", "in", "f", ".", "Wires", ":", "if", "w", ".", "hashCode", "(", ")", "!=", "f", ".", "OuterWire", ".", "hashCode", "(", ")", ":", "if", "DraftGeomUtils", ".", "hasCurves", "(", "w", ")", ":", "innerwires", ".", "append", "(", "createCurve", "(", "ifcfile", ",", "w", ")", ")", "else", ":", "w", "=", "Part", ".", "Wire", "(", "Part", ".", "__sortEdges__", "(", "w", ".", "Edges", ")", ")", "pts", "=", "[", "ifcbin", ".", 
"createIfcCartesianPoint", "(", "tuple", "(", "v", ".", "Point", ")", "[", ":", "2", "]", ")", "for", "v", "in", "w", ".", "Vertexes", "+", "[", "w", ".", "Vertexes", "[", "0", "]", "]", "]", "innerwires", ".", "append", "(", "ifcbin", ".", "createIfcPolyline", "(", "pts", ")", ")", "profile", "=", "ifcfile", ".", "createIfcArbitraryProfileDefWithVoids", "(", "\"AREA\"", ",", "None", ",", "outerwire", ",", "innerwires", ")", "else", ":", "if", "DraftGeomUtils", ".", "hasCurves", "(", "p", ")", ":", "# extruded composite curve", "pol", "=", "createCurve", "(", "ifcfile", ",", "p", ")", "else", ":", "# extruded polyline", "w", "=", "Part", ".", "Wire", "(", "Part", ".", "__sortEdges__", "(", "p", ".", "Wires", "[", "0", "]", ".", "Edges", ")", ")", "pts", "=", "[", "ifcbin", ".", "createIfcCartesianPoint", "(", "tuple", "(", "v", ".", "Point", ")", "[", ":", "2", "]", ")", "for", "v", "in", "w", ".", "Vertexes", "+", "[", "w", ".", "Vertexes", "[", "0", "]", "]", "]", "pol", "=", "ifcbin", ".", "createIfcPolyline", "(", "pts", ")", "profile", "=", "ifcfile", ".", "createIfcArbitraryClosedProfileDef", "(", "\"AREA\"", ",", "None", ",", "pol", ")", "return", "profile" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Arch/exportIFC.py#L1853-L1914
su2code/SU2
72b2fa977b64b9683a388920f05298a40d39e5c5
SU2_PY/SU2/io/config.py
python
Config.local_files
(self)
removes path prefix from all *_FILENAME params
removes path prefix from all *_FILENAME params
[ "removes", "path", "prefix", "from", "all", "*", "_FILENAME", "params" ]
def local_files(self): """ removes path prefix from all *_FILENAME params """ for key, value in self.items(): if key.split('_')[-1] == 'FILENAME': self[key] = os.path.basename(value)
[ "def", "local_files", "(", "self", ")", ":", "for", "key", ",", "value", "in", "self", ".", "items", "(", ")", ":", "if", "key", ".", "split", "(", "'_'", ")", "[", "-", "1", "]", "==", "'FILENAME'", ":", "self", "[", "key", "]", "=", "os", ".", "path", ".", "basename", "(", "value", ")" ]
https://github.com/su2code/SU2/blob/72b2fa977b64b9683a388920f05298a40d39e5c5/SU2_PY/SU2/io/config.py#L225-L230
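A minimal standalone sketch of the same basename-stripping idea, assuming a plain dict stands in for the Config object (key and path below are illustrative):

    import os

    def local_files(config):
        # strip directory prefixes from every *_FILENAME entry, in place
        for key, value in config.items():
            if key.split('_')[-1] == 'FILENAME':
                config[key] = os.path.basename(value)

    cfg = {'MESH_FILENAME': '/tmp/run/mesh.su2', 'MACH_NUMBER': '0.8'}
    local_files(cfg)
    print(cfg['MESH_FILENAME'])  # mesh.su2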
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/tornado/tornado-6/tornado/httputil.py
python
HTTPConnection.write
(self, chunk: bytes)
Writes a chunk of body data. Returns a future for flow control. .. versionchanged:: 6.0 The ``callback`` argument was removed.
Writes a chunk of body data.
[ "Writes", "a", "chunk", "of", "body", "data", "." ]
def write(self, chunk: bytes) -> "Future[None]": """Writes a chunk of body data. Returns a future for flow control. .. versionchanged:: 6.0 The ``callback`` argument was removed. """ raise NotImplementedError()
[ "def", "write", "(", "self", ",", "chunk", ":", "bytes", ")", "->", "\"Future[None]\"", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/tornado/tornado-6/tornado/httputil.py#L593-L602
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/sched.py
python
scheduler.empty
(self)
return not self._queue
Check whether the queue is empty.
Check whether the queue is empty.
[ "Check", "whether", "the", "queue", "is", "empty", "." ]
def empty(self): """Check whether the queue is empty.""" return not self._queue
[ "def", "empty", "(", "self", ")", ":", "return", "not", "self", ".", "_queue" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/sched.py#L76-L78
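A hedged usage sketch with the standard-library sched module (Python 3 signature shown; the record above is the 2.7 version):

    import sched
    import time

    s = sched.scheduler(time.time, time.sleep)
    print(s.empty())                        # True: nothing queued yet
    s.enter(5, 1, print, argument=('hi',))
    print(s.empty())                        # False: one pending event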
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py
python
TarInfo.fromtarfile
(cls, tarfile)
return obj._proc_member(tarfile)
Return the next TarInfo object from TarFile object tarfile.
Return the next TarInfo object from TarFile object
[ "Return", "the", "next", "TarInfo", "object", "from", "TarFile", "object" ]
def fromtarfile(cls, tarfile): """Return the next TarInfo object from TarFile object tarfile. """ buf = tarfile.fileobj.read(BLOCKSIZE) obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors) obj.offset = tarfile.fileobj.tell() - BLOCKSIZE return obj._proc_member(tarfile)
[ "def", "fromtarfile", "(", "cls", ",", "tarfile", ")", ":", "buf", "=", "tarfile", ".", "fileobj", ".", "read", "(", "BLOCKSIZE", ")", "obj", "=", "cls", ".", "frombuf", "(", "buf", ",", "tarfile", ".", "encoding", ",", "tarfile", ".", "errors", ")", "obj", ".", "offset", "=", "tarfile", ".", "fileobj", ".", "tell", "(", ")", "-", "BLOCKSIZE", "return", "obj", ".", "_proc_member", "(", "tarfile", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/distlib/_backport/tarfile.py#L2565-L2579
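fromtarfile() is normally called indirectly; a sketch with the standard-library tarfile module, assuming a file named 'example.tar' exists:

    import tarfile

    # each call to next() reads one 512-byte header block and goes
    # through TarInfo.fromtarfile under the hood
    with tarfile.open('example.tar') as tf:
        info = tf.next()
        while info is not None:
            print(info.name, info.size)
            info = tf.next()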
benoitsteiner/tensorflow-opencl
cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5
tensorflow/contrib/learn/python/learn/models.py
python
logistic_regression
(x, y, class_weight=None, init_mean=None, init_stddev=1.0)
Creates logistic regression TensorFlow subgraph. Args: x: tensor or placeholder for input features, shape should be [batch_size, n_features]. y: tensor or placeholder for labels (one-hot), shape should be [batch_size, n_classes]. class_weight: tensor, [n_classes], where for each class it has weight of the class. If not provided will check if graph contains tensor `class_weight:0`. If that is not provided either all ones are used. init_mean: the mean value to use for initialization. init_stddev: the standard deviation to use for initialization. Returns: Predictions and loss tensors. Side effects: The variables logistic_regression.weights and logistic_regression.bias are initialized as follows. If init_mean is not None, then initialization will be done using a random normal initializer with the given init_mean and init_stddev. (These may be set to 0.0 each if a zero initialization is desirable for convex use cases.) If init_mean is None, then the uniform_unit_scaling_initializer will be used.
Creates logistic regression TensorFlow subgraph.
[ "Creates", "logistic", "regression", "TensorFlow", "subgraph", "." ]
def logistic_regression(x, y, class_weight=None, init_mean=None, init_stddev=1.0): """Creates logistic regression TensorFlow subgraph. Args: x: tensor or placeholder for input features, shape should be [batch_size, n_features]. y: tensor or placeholder for labels (one-hot), shape should be [batch_size, n_classes]. class_weight: tensor, [n_classes], where for each class it has weight of the class. If not provided will check if graph contains tensor `class_weight:0`. If that is not provided either all ones are used. init_mean: the mean value to use for initialization. init_stddev: the standard deviation to use for initialization. Returns: Predictions and loss tensors. Side effects: The variables logistic_regression.weights and logistic_regression.bias are initialized as follows. If init_mean is not None, then initialization will be done using a random normal initializer with the given init_mean and init_stddev. (These may be set to 0.0 each if a zero initialization is desirable for convex use cases.) If init_mean is None, then the uniform_unit_scaling_initializer will be used. """ with vs.variable_scope('logistic_regression'): scope_name = vs.get_variable_scope().name summary.histogram('%s.x' % scope_name, x) summary.histogram('%s.y' % scope_name, y) dtype = x.dtype.base_dtype # Set up the requested initialization. if init_mean is None: weights = vs.get_variable( 'weights', [x.get_shape()[1], y.get_shape()[-1]], dtype=dtype) bias = vs.get_variable('bias', [y.get_shape()[-1]], dtype=dtype) else: weights = vs.get_variable( 'weights', [x.get_shape()[1], y.get_shape()[-1]], initializer=init_ops.random_normal_initializer( init_mean, init_stddev, dtype=dtype), dtype=dtype) bias = vs.get_variable( 'bias', [y.get_shape()[-1]], initializer=init_ops.random_normal_initializer( init_mean, init_stddev, dtype=dtype), dtype=dtype) summary.histogram('%s.weights' % scope_name, weights) summary.histogram('%s.bias' % scope_name, bias) # If no class weight provided, try to retrieve one from pre-defined # tensor name in the graph. if not class_weight: try: class_weight = ops.get_default_graph().get_tensor_by_name( 'class_weight:0') except KeyError: pass return losses_ops.softmax_classifier( x, y, weights, bias, class_weight=class_weight)
[ "def", "logistic_regression", "(", "x", ",", "y", ",", "class_weight", "=", "None", ",", "init_mean", "=", "None", ",", "init_stddev", "=", "1.0", ")", ":", "with", "vs", ".", "variable_scope", "(", "'logistic_regression'", ")", ":", "scope_name", "=", "vs", ".", "get_variable_scope", "(", ")", ".", "name", "summary", ".", "histogram", "(", "'%s.x'", "%", "scope_name", ",", "x", ")", "summary", ".", "histogram", "(", "'%s.y'", "%", "scope_name", ",", "y", ")", "dtype", "=", "x", ".", "dtype", ".", "base_dtype", "# Set up the requested initialization.", "if", "init_mean", "is", "None", ":", "weights", "=", "vs", ".", "get_variable", "(", "'weights'", ",", "[", "x", ".", "get_shape", "(", ")", "[", "1", "]", ",", "y", ".", "get_shape", "(", ")", "[", "-", "1", "]", "]", ",", "dtype", "=", "dtype", ")", "bias", "=", "vs", ".", "get_variable", "(", "'bias'", ",", "[", "y", ".", "get_shape", "(", ")", "[", "-", "1", "]", "]", ",", "dtype", "=", "dtype", ")", "else", ":", "weights", "=", "vs", ".", "get_variable", "(", "'weights'", ",", "[", "x", ".", "get_shape", "(", ")", "[", "1", "]", ",", "y", ".", "get_shape", "(", ")", "[", "-", "1", "]", "]", ",", "initializer", "=", "init_ops", ".", "random_normal_initializer", "(", "init_mean", ",", "init_stddev", ",", "dtype", "=", "dtype", ")", ",", "dtype", "=", "dtype", ")", "bias", "=", "vs", ".", "get_variable", "(", "'bias'", ",", "[", "y", ".", "get_shape", "(", ")", "[", "-", "1", "]", "]", ",", "initializer", "=", "init_ops", ".", "random_normal_initializer", "(", "init_mean", ",", "init_stddev", ",", "dtype", "=", "dtype", ")", ",", "dtype", "=", "dtype", ")", "summary", ".", "histogram", "(", "'%s.weights'", "%", "scope_name", ",", "weights", ")", "summary", ".", "histogram", "(", "'%s.bias'", "%", "scope_name", ",", "bias", ")", "# If no class weight provided, try to retrieve one from pre-defined", "# tensor name in the graph.", "if", "not", "class_weight", ":", "try", ":", "class_weight", "=", "ops", ".", "get_default_graph", "(", ")", ".", "get_tensor_by_name", "(", "'class_weight:0'", ")", "except", "KeyError", ":", "pass", "return", "losses_ops", ".", "softmax_classifier", "(", "x", ",", "y", ",", "weights", ",", "bias", ",", "class_weight", "=", "class_weight", ")" ]
https://github.com/benoitsteiner/tensorflow-opencl/blob/cb7cb40a57fde5cfd4731bc551e82a1e2fef43a5/tensorflow/contrib/learn/python/learn/models.py#L110-L173
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/rlcompleter.py
python
Completer.__init__
(self, namespace = None)
Create a new completer for the command line. Completer([namespace]) -> completer instance. If unspecified, the default namespace where completions are performed is __main__ (technically, __main__.__dict__). Namespaces should be given as dictionaries. Completer instances should be used as the completion mechanism of readline via the set_completer() call: readline.set_completer(Completer(my_namespace).complete)
Create a new completer for the command line.
[ "Create", "a", "new", "completer", "for", "the", "command", "line", "." ]
def __init__(self, namespace = None): """Create a new completer for the command line. Completer([namespace]) -> completer instance. If unspecified, the default namespace where completions are performed is __main__ (technically, __main__.__dict__). Namespaces should be given as dictionaries. Completer instances should be used as the completion mechanism of readline via the set_completer() call: readline.set_completer(Completer(my_namespace).complete) """ if namespace and not isinstance(namespace, dict): raise TypeError('namespace must be a dictionary') # Don't bind to namespace quite yet, but flag whether the user wants a # specific namespace or to use __main__.__dict__. This will allow us # to bind to __main__.__dict__ at completion time, not now. if namespace is None: self.use_main_ns = 1 else: self.use_main_ns = 0 self.namespace = namespace
[ "def", "__init__", "(", "self", ",", "namespace", "=", "None", ")", ":", "if", "namespace", "and", "not", "isinstance", "(", "namespace", ",", "dict", ")", ":", "raise", "TypeError", "(", "'namespace must be a dictionary'", ")", "# Don't bind to namespace quite yet, but flag whether the user wants a", "# specific namespace or to use __main__.__dict__. This will allow us", "# to bind to __main__.__dict__ at completion time, not now.", "if", "namespace", "is", "None", ":", "self", ".", "use_main_ns", "=", "1", "else", ":", "self", ".", "use_main_ns", "=", "0", "self", ".", "namespace", "=", "namespace" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/rlcompleter.py#L39-L64
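The docstring's own recipe, made runnable (requires the readline module, so POSIX platforms; the namespace is illustrative):

    import readline
    from rlcompleter import Completer

    my_namespace = {'answer': 42}   # illustrative namespace
    readline.set_completer(Completer(my_namespace).complete)
    readline.parse_and_bind('tab: complete')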
perilouswithadollarsign/cstrike15_src
f82112a2388b841d72cb62ca48ab1846dfcc11c8
thirdparty/protobuf-2.5.0/python/mox.py
python
ContainsKeyValue.equals
(self, rhs)
Check whether the given key/value pair is in the rhs dict. Returns: bool
Check whether the given key/value pair is in the rhs dict.
[ "Check", "whether", "the", "given", "key", "/", "value", "pair", "is", "in", "the", "rhs", "dict", "." ]
def equals(self, rhs): """Check whether the given key/value pair is in the rhs dict. Returns: bool """ try: return rhs[self._key] == self._value except Exception: return False
[ "def", "equals", "(", "self", ",", "rhs", ")", ":", "try", ":", "return", "rhs", "[", "self", ".", "_key", "]", "==", "self", ".", "_value", "except", "Exception", ":", "return", "False" ]
https://github.com/perilouswithadollarsign/cstrike15_src/blob/f82112a2388b841d72cb62ca48ab1846dfcc11c8/thirdparty/protobuf-2.5.0/python/mox.py#L989-L999
LisaAnne/lisa-caffe-public
49b8643ddef23a4f6120017968de30c45e693f59
scripts/cpp_lint.py
python
FileInfo.Split
(self)
return (project,) + os.path.splitext(rest)
Splits the file into the directory, basename, and extension. For 'chrome/browser/browser.cc', Split() would return ('chrome/browser', 'browser', '.cc') Returns: A tuple of (directory, basename, extension).
Splits the file into the directory, basename, and extension.
[ "Splits", "the", "file", "into", "the", "directory", "basename", "and", "extension", "." ]
def Split(self): """Splits the file into the directory, basename, and extension. For 'chrome/browser/browser.cc', Split() would return ('chrome/browser', 'browser', '.cc') Returns: A tuple of (directory, basename, extension). """ googlename = self.RepositoryName() project, rest = os.path.split(googlename) return (project,) + os.path.splitext(rest)
[ "def", "Split", "(", "self", ")", ":", "googlename", "=", "self", ".", "RepositoryName", "(", ")", "project", ",", "rest", "=", "os", ".", "path", ".", "split", "(", "googlename", ")", "return", "(", "project", ",", ")", "+", "os", ".", "path", ".", "splitext", "(", "rest", ")" ]
https://github.com/LisaAnne/lisa-caffe-public/blob/49b8643ddef23a4f6120017968de30c45e693f59/scripts/cpp_lint.py#L930-L942
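The same split logic on a plain path, without the RepositoryName() lookup (a sketch, not the linter's API):

    import os

    def split(path):
        # same shape as FileInfo.Split, minus the repository-name step
        project, rest = os.path.split(path)
        return (project,) + os.path.splitext(rest)

    print(split('chrome/browser/browser.cc'))  # ('chrome/browser', 'browser', '.cc')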
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
tools/mo/openvino/tools/mo/front/subgraph_matcher.py
python
SubgraphMatch.outputs_count
(self)
return len(self._output_nodes_map.keys())
Returns number of outputs for the matched sub-graph. Only unique output tensors are considered, thus if the same tensor is consumed by two or more nodes outside of the sub-graph it is counted only once. :return: Number of unique output tensors.
Returns number of outputs for the matched sub-graph. Only unique output tensors are considered, thus if the same tensor is consumed by two or more nodes outside of the sub-graph it is counted only once. :return: Number of unique output tensors.
[ "Returns", "number", "of", "outputs", "for", "the", "matched", "sub", "-", "graph", ".", "Only", "unique", "output", "tensors", "are", "considered", "thus", "if", "the", "same", "tensor", "is", "consumed", "by", "two", "or", "more", "nodes", "outside", "of", "the", "sub", "-", "graph", "it", "is", "counted", "only", "once", ".", ":", "return", ":", "Number", "of", "unique", "output", "tensors", "." ]
def outputs_count(self): """ Returns number of outputs for the matched sub-graph. Only unique output tensors are considered, thus if the same tensor is consumed by two or more nodes outside of the sub-graph it is counted only once. :return: Number of unique output tensors. """ return len(self._output_nodes_map.keys())
[ "def", "outputs_count", "(", "self", ")", ":", "return", "len", "(", "self", ".", "_output_nodes_map", ".", "keys", "(", ")", ")" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/front/subgraph_matcher.py#L73-L79
mickem/nscp
79f89fdbb6da63f91bc9dedb7aea202fe938f237
scripts/python/lib/google/protobuf/internal/python_message.py
python
_IsPresent
(item)
Given a (FieldDescriptor, value) tuple from _fields, return true if the value should be included in the list returned by ListFields().
Given a (FieldDescriptor, value) tuple from _fields, return true if the value should be included in the list returned by ListFields().
[ "Given", "a", "(", "FieldDescriptor", "value", ")", "tuple", "from", "_fields", "return", "true", "if", "the", "value", "should", "be", "included", "in", "the", "list", "returned", "by", "ListFields", "()", "." ]
def _IsPresent(item): """Given a (FieldDescriptor, value) tuple from _fields, return true if the value should be included in the list returned by ListFields().""" if item[0].label == _FieldDescriptor.LABEL_REPEATED: return bool(item[1]) elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: return item[1]._is_present_in_parent else: return True
[ "def", "_IsPresent", "(", "item", ")", ":", "if", "item", "[", "0", "]", ".", "label", "==", "_FieldDescriptor", ".", "LABEL_REPEATED", ":", "return", "bool", "(", "item", "[", "1", "]", ")", "elif", "item", "[", "0", "]", ".", "cpp_type", "==", "_FieldDescriptor", ".", "CPPTYPE_MESSAGE", ":", "return", "item", "[", "1", "]", ".", "_is_present_in_parent", "else", ":", "return", "True" ]
https://github.com/mickem/nscp/blob/79f89fdbb6da63f91bc9dedb7aea202fe938f237/scripts/python/lib/google/protobuf/internal/python_message.py#L538-L547
ziquan111/RobustPCLReconstruction
35b9518dbf9ad3f06109cc0e3aaacafdb5c86e36
py/sophus/quaternion.py
python
Quaternion.__truediv__
(self, scalar)
return Quaternion(self.real / scalar, self.vec / scalar)
scalar division
scalar division
[ "scalar", "division" ]
def __truediv__(self, scalar): """ scalar division """ return Quaternion(self.real / scalar, self.vec / scalar)
[ "def", "__truediv__", "(", "self", ",", "scalar", ")", ":", "return", "Quaternion", "(", "self", ".", "real", "/", "scalar", ",", "self", ".", "vec", "/", "scalar", ")" ]
https://github.com/ziquan111/RobustPCLReconstruction/blob/35b9518dbf9ad3f06109cc0e3aaacafdb5c86e36/py/sophus/quaternion.py#L32-L34
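A self-contained sketch of the same scalar-division idea, using a plain tuple-backed class rather than the sympy-based quaternion in the source:

    class Quaternion:
        def __init__(self, real, vec):
            self.real, self.vec = real, vec   # vec is a 3-tuple

        def __truediv__(self, scalar):
            # scalar division: divide every component by the scalar
            return Quaternion(self.real / scalar,
                              tuple(v / scalar for v in self.vec))

    q = Quaternion(2.0, (4.0, 6.0, 8.0))
    half = q / 2
    print(half.real, half.vec)   # 1.0 (2.0, 3.0, 4.0)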
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/ops/check_ops.py
python
assert_rank
(x, rank, data=None, summarize=None, message=None, name=None)
return assert_op
Assert `x` has rank equal to `rank`. Example of adding a dependency to an operation: ```python with tf.control_dependencies([tf.assert_rank(x, 2)]): output = tf.reduce_sum(x) ``` Args: x: Numeric `Tensor`. rank: Scalar integer `Tensor`. data: The tensors to print out if the condition is False. Defaults to error message and first few entries of `x`. summarize: Print this many entries of each tensor. message: A string to prefix to the default message. name: A name for this operation (optional). Defaults to "assert_rank". Returns: Op raising `InvalidArgumentError` unless `x` has specified rank. If static checks determine `x` has correct rank, a `no_op` is returned. Raises: ValueError: If static checks determine `x` has wrong rank.
Assert `x` has rank equal to `rank`.
[ "Assert", "x", "has", "rank", "equal", "to", "rank", "." ]
def assert_rank(x, rank, data=None, summarize=None, message=None, name=None): """Assert `x` has rank equal to `rank`. Example of adding a dependency to an operation: ```python with tf.control_dependencies([tf.assert_rank(x, 2)]): output = tf.reduce_sum(x) ``` Args: x: Numeric `Tensor`. rank: Scalar integer `Tensor`. data: The tensors to print out if the condition is False. Defaults to error message and first few entries of `x`. summarize: Print this many entries of each tensor. message: A string to prefix to the default message. name: A name for this operation (optional). Defaults to "assert_rank". Returns: Op raising `InvalidArgumentError` unless `x` has specified rank. If static checks determine `x` has correct rank, a `no_op` is returned. Raises: ValueError: If static checks determine `x` has wrong rank. """ with ops.name_scope(name, 'assert_rank', (x, rank) + tuple(data or [])): x = ops.convert_to_tensor(x, name='x') rank = ops.convert_to_tensor(rank, name='rank') message = message or '' static_condition = lambda actual_rank, given_rank: actual_rank == given_rank dynamic_condition = math_ops.equal if data is None: data = [ message, 'Tensor %s must have rank' % x.name, rank, 'Received shape: ', array_ops.shape(x) ] try: assert_op = _assert_rank_condition(x, rank, static_condition, dynamic_condition, data, summarize) except ValueError as e: if e.args[0] == 'Static rank condition failed': raise ValueError( '%s. Tensor %s must have rank %d. Received rank %d, shape %s' % (message, x.name, e.args[2], e.args[1], x.get_shape())) else: raise return assert_op
[ "def", "assert_rank", "(", "x", ",", "rank", ",", "data", "=", "None", ",", "summarize", "=", "None", ",", "message", "=", "None", ",", "name", "=", "None", ")", ":", "with", "ops", ".", "name_scope", "(", "name", ",", "'assert_rank'", ",", "(", "x", ",", "rank", ")", "+", "tuple", "(", "data", "or", "[", "]", ")", ")", ":", "x", "=", "ops", ".", "convert_to_tensor", "(", "x", ",", "name", "=", "'x'", ")", "rank", "=", "ops", ".", "convert_to_tensor", "(", "rank", ",", "name", "=", "'rank'", ")", "message", "=", "message", "or", "''", "static_condition", "=", "lambda", "actual_rank", ",", "given_rank", ":", "actual_rank", "==", "given_rank", "dynamic_condition", "=", "math_ops", ".", "equal", "if", "data", "is", "None", ":", "data", "=", "[", "message", ",", "'Tensor %s must have rank'", "%", "x", ".", "name", ",", "rank", ",", "'Received shape: '", ",", "array_ops", ".", "shape", "(", "x", ")", "]", "try", ":", "assert_op", "=", "_assert_rank_condition", "(", "x", ",", "rank", ",", "static_condition", ",", "dynamic_condition", ",", "data", ",", "summarize", ")", "except", "ValueError", "as", "e", ":", "if", "e", ".", "args", "[", "0", "]", "==", "'Static rank condition failed'", ":", "raise", "ValueError", "(", "'%s. Tensor %s must have rank %d. Received rank %d, shape %s'", "%", "(", "message", ",", "x", ".", "name", ",", "e", ".", "args", "[", "2", "]", ",", "e", ".", "args", "[", "1", "]", ",", "x", ".", "get_shape", "(", ")", ")", ")", "else", ":", "raise", "return", "assert_op" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/check_ops.py#L574-L627
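A minimal pure-Python sketch of the static branch of this check, using NumPy instead of TensorFlow (assert_rank_static is a hypothetical name; it mirrors only the ValueError path, not the graph-mode op):

    import numpy as np

    def assert_rank_static(x, rank, message=''):
        # NumPy arrays always have a fully known shape, so only the
        # static check is meaningful here
        actual = np.asarray(x).ndim
        if actual != rank:
            raise ValueError('%s Tensor must have rank %d. Received rank %d'
                             % (message, rank, actual))

    assert_rank_static(np.zeros((2, 3)), 2)    # passes silently
    # assert_rank_static(np.zeros(3), 2)       # would raise ValueError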
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
wx/lib/shell.py
python
PyShellOutput.AddText
(self, text, style=None)
write text to output window
write text to output window
[ "write", "text", "to", "output", "window" ]
def AddText(self, text, style=None): """write text to output window""" # a trick needed to defer default from compile-time to execute-time if style ==None: style =self.out_style if 0 and __debug__: sys.__stdout__.write(text) # handle entities for (symbol, eref) in self.erefs: text = text.replace(symbol, eref) # replace newlines text = text.replace("\n", style[2]) # add to contents self.text =self.text +style[0] +text +style[1] if not self.in_batch: self.UpdWindow() else: self.dirty =1 if self.html_debug: # html debug output needn't be too large self.view.SetValue(self.text[-4096:])
[ "def", "AddText", "(", "self", ",", "text", ",", "style", "=", "None", ")", ":", "# a trick needed to defer default from compile-time to execute-time", "if", "style", "==", "None", ":", "style", "=", "self", ".", "out_style", "if", "0", "and", "__debug__", ":", "sys", ".", "__stdout__", ".", "write", "(", "text", ")", "# handle entities", "for", "(", "symbol", ",", "eref", ")", "in", "self", ".", "erefs", ":", "text", "=", "text", ".", "replace", "(", "symbol", ",", "eref", ")", "# replace newlines", "text", "=", "text", ".", "replace", "(", "\"\\n\"", ",", "style", "[", "2", "]", ")", "# add to contents", "self", ".", "text", "=", "self", ".", "text", "+", "style", "[", "0", "]", "+", "text", "+", "style", "[", "1", "]", "if", "not", "self", ".", "in_batch", ":", "self", ".", "UpdWindow", "(", ")", "else", ":", "self", ".", "dirty", "=", "1", "if", "self", ".", "html_debug", ":", "# html debug output needn't be too large", "self", ".", "view", ".", "SetValue", "(", "self", ".", "text", "[", "-", "4096", ":", "]", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/wx/lib/shell.py#L213-L229
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/scipy/stats/stats.py
python
pearsonr
(x, y)
return r, prob
Calculates a Pearson correlation coefficient and the p-value for testing non-correlation. The Pearson correlation coefficient measures the linear relationship between two datasets. Strictly speaking, Pearson's correlation requires that each dataset be normally distributed, and not necessarily zero-mean. Like other correlation coefficients, this one varies between -1 and +1 with 0 implying no correlation. Correlations of -1 or +1 imply an exact linear relationship. Positive correlations imply that as x increases, so does y. Negative correlations imply that as x increases, y decreases. The p-value roughly indicates the probability of an uncorrelated system producing datasets that have a Pearson correlation at least as extreme as the one computed from these datasets. The p-values are not entirely reliable but are probably reasonable for datasets larger than 500 or so. Parameters ---------- x : (N,) array_like Input y : (N,) array_like Input Returns ------- r : float Pearson's correlation coefficient p-value : float 2-tailed p-value References ---------- http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation
Calculates a Pearson correlation coefficient and the p-value for testing non-correlation.
[ "Calculates", "a", "Pearson", "correlation", "coefficient", "and", "the", "p", "-", "value", "for", "testing", "non", "-", "correlation", "." ]
def pearsonr(x, y): """ Calculates a Pearson correlation coefficient and the p-value for testing non-correlation. The Pearson correlation coefficient measures the linear relationship between two datasets. Strictly speaking, Pearson's correlation requires that each dataset be normally distributed, and not necessarily zero-mean. Like other correlation coefficients, this one varies between -1 and +1 with 0 implying no correlation. Correlations of -1 or +1 imply an exact linear relationship. Positive correlations imply that as x increases, so does y. Negative correlations imply that as x increases, y decreases. The p-value roughly indicates the probability of an uncorrelated system producing datasets that have a Pearson correlation at least as extreme as the one computed from these datasets. The p-values are not entirely reliable but are probably reasonable for datasets larger than 500 or so. Parameters ---------- x : (N,) array_like Input y : (N,) array_like Input Returns ------- r : float Pearson's correlation coefficient p-value : float 2-tailed p-value References ---------- http://www.statsoft.com/textbook/glosp.html#Pearson%20Correlation """ # x and y should have same length. x = np.asarray(x) y = np.asarray(y) n = len(x) mx = x.mean() my = y.mean() xm, ym = x - mx, y - my r_num = np.add.reduce(xm * ym) r_den = np.sqrt(_sum_of_squares(xm) * _sum_of_squares(ym)) r = r_num / r_den # Presumably, if abs(r) > 1, then it is only some small artifact of floating # point arithmetic. r = max(min(r, 1.0), -1.0) df = n - 2 if abs(r) == 1.0: prob = 0.0 else: t_squared = r**2 * (df / ((1.0 - r) * (1.0 + r))) prob = _betai(0.5*df, 0.5, df/(df+t_squared)) return r, prob
[ "def", "pearsonr", "(", "x", ",", "y", ")", ":", "# x and y should have same length.", "x", "=", "np", ".", "asarray", "(", "x", ")", "y", "=", "np", ".", "asarray", "(", "y", ")", "n", "=", "len", "(", "x", ")", "mx", "=", "x", ".", "mean", "(", ")", "my", "=", "y", ".", "mean", "(", ")", "xm", ",", "ym", "=", "x", "-", "mx", ",", "y", "-", "my", "r_num", "=", "np", ".", "add", ".", "reduce", "(", "xm", "*", "ym", ")", "r_den", "=", "np", ".", "sqrt", "(", "_sum_of_squares", "(", "xm", ")", "*", "_sum_of_squares", "(", "ym", ")", ")", "r", "=", "r_num", "/", "r_den", "# Presumably, if abs(r) > 1, then it is only some small artifact of floating", "# point arithmetic.", "r", "=", "max", "(", "min", "(", "r", ",", "1.0", ")", ",", "-", "1.0", ")", "df", "=", "n", "-", "2", "if", "abs", "(", "r", ")", "==", "1.0", ":", "prob", "=", "0.0", "else", ":", "t_squared", "=", "r", "**", "2", "*", "(", "df", "/", "(", "(", "1.0", "-", "r", ")", "*", "(", "1.0", "+", "r", ")", ")", ")", "prob", "=", "_betai", "(", "0.5", "*", "df", ",", "0.5", ",", "df", "/", "(", "df", "+", "t_squared", ")", ")", "return", "r", ",", "prob" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/scipy/stats/stats.py#L2983-L3041
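A quick sanity check of the statistic against a hand-rolled NumPy computation (assumes SciPy is installed; the data is illustrative):

    import numpy as np
    from scipy.stats import pearsonr

    x = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    y = np.array([1.2, 1.9, 3.1, 4.2, 4.8])
    r, p = pearsonr(x, y)

    # the coefficient is just the normalized covariance
    xm, ym = x - x.mean(), y - y.mean()
    r_manual = (xm * ym).sum() / np.sqrt((xm**2).sum() * (ym**2).sum())
    print(np.isclose(r, r_manual))   # True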
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
build/android/buildbot/bb_device_steps.py
python
GenerateJavaCoverageReport
(options)
return coverage_html
Generates an HTML coverage report using EMMA and uploads it.
Generates an HTML coverage report using EMMA and uploads it.
[ "Generates", "an", "HTML", "coverage", "report", "using", "EMMA", "and", "uploads", "it", "." ]
def GenerateJavaCoverageReport(options): """Generates an HTML coverage report using EMMA and uploads it.""" bb_annotations.PrintNamedStep('java_coverage_report') coverage_html = os.path.join(options.coverage_dir, 'coverage_html') RunCmd(['build/android/generate_emma_html.py', '--coverage-dir', options.coverage_dir, '--metadata-dir', os.path.join(CHROME_OUT_DIR, options.target), '--cleanup', '--output', os.path.join(coverage_html, 'index.html')]) return coverage_html
[ "def", "GenerateJavaCoverageReport", "(", "options", ")", ":", "bb_annotations", ".", "PrintNamedStep", "(", "'java_coverage_report'", ")", "coverage_html", "=", "os", ".", "path", ".", "join", "(", "options", ".", "coverage_dir", ",", "'coverage_html'", ")", "RunCmd", "(", "[", "'build/android/generate_emma_html.py'", ",", "'--coverage-dir'", ",", "options", ".", "coverage_dir", ",", "'--metadata-dir'", ",", "os", ".", "path", ".", "join", "(", "CHROME_OUT_DIR", ",", "options", ".", "target", ")", ",", "'--cleanup'", ",", "'--output'", ",", "os", ".", "path", ".", "join", "(", "coverage_html", ",", "'index.html'", ")", "]", ")", "return", "coverage_html" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/build/android/buildbot/bb_device_steps.py#L511-L521
fatih/subvim
241b6d170597857105da219c9b7d36059e9f11fb
vim/base/YouCompleteMe/third_party/bottle/bottle.py
python
BaseRequest.script_name
(self)
return '/' + script_name + '/' if script_name else '/'
The initial portion of the URL's `path` that was removed by a higher level (server or routing middleware) before the application was called. This script path is returned with leading and trailing slashes.
The initial portion of the URL's `path` that was removed by a higher level (server or routing middleware) before the application was called. This script path is returned with leading and trailing slashes.
[ "The", "initial", "portion", "of", "the", "URL", "s", "path", "that", "was", "removed", "by", "a", "higher", "level", "(", "server", "or", "routing", "middleware", ")", "before", "the", "application", "was", "called", ".", "This", "script", "path", "is", "returned", "with", "leading", "and", "trailing", "slashes", "." ]
def script_name(self): ''' The initial portion of the URL's `path` that was removed by a higher level (server or routing middleware) before the application was called. This script path is returned with leading and trailing slashes. ''' script_name = self.environ.get('SCRIPT_NAME', '').strip('/') return '/' + script_name + '/' if script_name else '/'
[ "def", "script_name", "(", "self", ")", ":", "script_name", "=", "self", ".", "environ", ".", "get", "(", "'SCRIPT_NAME'", ",", "''", ")", ".", "strip", "(", "'/'", ")", "return", "'/'", "+", "script_name", "+", "'/'", "if", "script_name", "else", "'/'" ]
https://github.com/fatih/subvim/blob/241b6d170597857105da219c9b7d36059e9f11fb/vim/base/YouCompleteMe/third_party/bottle/bottle.py#L1248-L1254
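The normalization rule is easy to exercise in isolation; a sketch with a hand-built WSGI environ dict:

    def script_name(environ):
        # bottle's rule: strip slashes, then re-wrap if anything is left
        name = environ.get('SCRIPT_NAME', '').strip('/')
        return '/' + name + '/' if name else '/'

    print(script_name({'SCRIPT_NAME': '/app/'}))   # /app/
    print(script_name({}))                         # /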
bulletphysics/bullet3
f0f2a952e146f016096db6f85cf0c44ed75b0b9a
examples/pybullet/gym/pybullet_envs/minitaur/agents/ppo/utility.py
python
available_gpus
()
return [x.name for x in local_device_protos if x.device_type == 'GPU']
List of GPU device names detected by TensorFlow.
List of GPU device names detected by TensorFlow.
[ "List", "of", "GPU", "device", "names", "detected", "by", "TensorFlow", "." ]
def available_gpus(): """List of GPU device names detected by TensorFlow.""" local_device_protos = device_lib.list_local_devices() return [x.name for x in local_device_protos if x.device_type == 'GPU']
[ "def", "available_gpus", "(", ")", ":", "local_device_protos", "=", "device_lib", ".", "list_local_devices", "(", ")", "return", "[", "x", ".", "name", "for", "x", "in", "local_device_protos", "if", "x", ".", "device_type", "==", "'GPU'", "]" ]
https://github.com/bulletphysics/bullet3/blob/f0f2a952e146f016096db6f85cf0c44ed75b0b9a/examples/pybullet/gym/pybullet_envs/minitaur/agents/ppo/utility.py#L152-L155
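A hedged usage sketch, assuming the available_gpus() above is importable and TensorFlow is installed:

    gpus = available_gpus()
    if not gpus:
        print('no GPU detected, falling back to CPU')
    else:
        print('placing ops on', gpus[0])   # e.g. '/device:GPU:0'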
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py
python
ExceptionAppend
(e, msg)
Append a message to the given exception's message.
Append a message to the given exception's message.
[ "Append", "a", "message", "to", "the", "given", "exception", "s", "message", "." ]
def ExceptionAppend(e, msg): """Append a message to the given exception's message.""" if not e.args: e.args = (msg,) elif len(e.args) == 1: e.args = (str(e.args[0]) + ' ' + msg,) else: e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]
[ "def", "ExceptionAppend", "(", "e", ",", "msg", ")", ":", "if", "not", "e", ".", "args", ":", "e", ".", "args", "=", "(", "msg", ",", ")", "elif", "len", "(", "e", ".", "args", ")", "==", "1", ":", "e", ".", "args", "=", "(", "str", "(", "e", ".", "args", "[", "0", "]", ")", "+", "' '", "+", "msg", ",", ")", "else", ":", "e", ".", "args", "=", "(", "str", "(", "e", ".", "args", "[", "0", "]", ")", "+", "' '", "+", "msg", ",", ")", "+", "e", ".", "args", "[", "1", ":", "]" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/common.py#L43-L50
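A behavior sketch showing how the message is folded into e.args (the function is repeated so the snippet is self-contained):

    def ExceptionAppend(e, msg):
        """Append a message to the given exception's message."""
        if not e.args:
            e.args = (msg,)
        elif len(e.args) == 1:
            e.args = (str(e.args[0]) + ' ' + msg,)
        else:
            e.args = (str(e.args[0]) + ' ' + msg,) + e.args[1:]

    try:
        raise ValueError('bad input')
    except ValueError as e:
        ExceptionAppend(e, 'while parsing config')
        print(e)   # bad input while parsing config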
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/ops/math_grad.py
python
_SparseSegmentSqrtNGrad
(op, grad)
return (math_ops.sparse_segment_sqrt_n_grad(grad, op.inputs[1], op.inputs[2], dim0), None, None)
Gradient for SparseSegmentSqrtN.
Gradient for SparseSegmentSqrtN.
[ "Gradient", "for", "SparseSegmentSqrtN", "." ]
def _SparseSegmentSqrtNGrad(op, grad): """Gradient for SparseSegmentSqrtN.""" dim0 = array_ops.shape(op.inputs[0])[0] return (math_ops.sparse_segment_sqrt_n_grad(grad, op.inputs[1], op.inputs[2], dim0), None, None)
[ "def", "_SparseSegmentSqrtNGrad", "(", "op", ",", "grad", ")", ":", "dim0", "=", "array_ops", ".", "shape", "(", "op", ".", "inputs", "[", "0", "]", ")", "[", "0", "]", "return", "(", "math_ops", ".", "sparse_segment_sqrt_n_grad", "(", "grad", ",", "op", ".", "inputs", "[", "1", "]", ",", "op", ".", "inputs", "[", "2", "]", ",", "dim0", ")", ",", "None", ",", "None", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/math_grad.py#L186-L190
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/protorpc/protorpc/webapp/forms.py
python
FormsHandler.__init__
(self, registry_path=DEFAULT_REGISTRY_PATH)
Constructor. When configuring a FormsHandler to use with a webapp application do not pass the request handler class in directly. Instead use new_factory to ensure that the FormsHandler is created with the correct registry path for each request. Args: registry_path: Absolute path on server where the ProtoRPC RegistryService is located.
Constructor.
[ "Constructor", "." ]
def __init__(self, registry_path=DEFAULT_REGISTRY_PATH): """Constructor. When configuring a FormsHandler to use with a webapp application do not pass the request handler class in directly. Instead use new_factory to ensure that the FormsHandler is created with the correct registry path for each request. Args: registry_path: Absolute path on server where the ProtoRPC RegistryService is located. """ assert registry_path self.__registry_path = registry_path
[ "def", "__init__", "(", "self", ",", "registry_path", "=", "DEFAULT_REGISTRY_PATH", ")", ":", "assert", "registry_path", "self", ".", "__registry_path", "=", "registry_path" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/protorpc/protorpc/webapp/forms.py#L100-L113
FreeCAD/FreeCAD
ba42231b9c6889b89e064d6d563448ed81e376ec
src/Mod/Draft/draftguitools/gui_fillets.py
python
Fillet.set_chamfer
(self)
Execute as a callback when the chamfer checkbox changes.
Execute as a callback when the chamfer checkbox changes.
[ "Execute", "as", "a", "callback", "when", "the", "chamfer", "checkbox", "changes", "." ]
def set_chamfer(self): """Execute as a callback when the chamfer checkbox changes.""" self.chamfer = self.ui.check_chamfer.isChecked() _msg(translate("draft","Chamfer mode:") + " " + str(self.chamfer))
[ "def", "set_chamfer", "(", "self", ")", ":", "self", ".", "chamfer", "=", "self", ".", "ui", ".", "check_chamfer", ".", "isChecked", "(", ")", "_msg", "(", "translate", "(", "\"draft\"", ",", "\"Chamfer mode:\"", ")", "+", "\" \"", "+", "str", "(", "self", ".", "chamfer", ")", ")" ]
https://github.com/FreeCAD/FreeCAD/blob/ba42231b9c6889b89e064d6d563448ed81e376ec/src/Mod/Draft/draftguitools/gui_fillets.py#L133-L136
hakuna-m/wubiuefi
caec1af0a09c78fd5a345180ada1fe45e0c63493
src/pypack/modulegraph/pkg_resources.py
python
ResourceManager.resource_listdir
(self, package_or_requirement, resource_name)
return get_provider(package_or_requirement).resource_listdir( resource_name )
List the contents of the named resource directory
List the contents of the named resource directory
[ "List", "the", "contents", "of", "the", "named", "resource", "directory" ]
def resource_listdir(self, package_or_requirement, resource_name): """List the contents of the named resource directory""" return get_provider(package_or_requirement).resource_listdir( resource_name )
[ "def", "resource_listdir", "(", "self", ",", "package_or_requirement", ",", "resource_name", ")", ":", "return", "get_provider", "(", "package_or_requirement", ")", ".", "resource_listdir", "(", "resource_name", ")" ]
https://github.com/hakuna-m/wubiuefi/blob/caec1af0a09c78fd5a345180ada1fe45e0c63493/src/pypack/modulegraph/pkg_resources.py#L752-L756
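A hedged usage sketch; the package and directory names are hypothetical:

    import pkg_resources

    # list data files bundled inside the (hypothetical) 'mypkg' package
    for name in pkg_resources.resource_listdir('mypkg', 'data'):
        print(name)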
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/Jinja2/py3/jinja2/nodes.py
python
Expr.can_assign
(self)
return False
Check if it's possible to assign something to this node.
Check if it's possible to assign something to this node.
[ "Check", "if", "it", "s", "possible", "to", "assign", "something", "to", "this", "node", "." ]
def can_assign(self) -> bool: """Check if it's possible to assign something to this node.""" return False
[ "def", "can_assign", "(", "self", ")", "->", "bool", ":", "return", "False" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/Jinja2/py3/jinja2/nodes.py#L483-L485
domino-team/openwrt-cc
8b181297c34d14d3ca521cc9f31430d561dbc688
package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/generator/msvs.py
python
_GenerateProject
(project, options, version, generator_flags)
Generates a vcproj file. Arguments: project: the MSVSProject object. options: global generator options. version: the MSVSVersion object. generator_flags: dict of generator-specific flags. Returns: A list of source files that cannot be found on disk.
Generates a vcproj file.
[ "Generates", "a", "vcproj", "file", "." ]
def _GenerateProject(project, options, version, generator_flags): """Generates a vcproj file. Arguments: project: the MSVSProject object. options: global generator options. version: the MSVSVersion object. generator_flags: dict of generator-specific flags. Returns: A list of source files that cannot be found on disk. """ default_config = _GetDefaultConfiguration(project.spec) # Skip emitting anything if told to with msvs_existing_vcproj option. if default_config.get('msvs_existing_vcproj'): return [] if version.UsesVcxproj(): return _GenerateMSBuildProject(project, options, version, generator_flags) else: return _GenerateMSVSProject(project, options, version, generator_flags)
[ "def", "_GenerateProject", "(", "project", ",", "options", ",", "version", ",", "generator_flags", ")", ":", "default_config", "=", "_GetDefaultConfiguration", "(", "project", ".", "spec", ")", "# Skip emitting anything if told to with msvs_existing_vcproj option.", "if", "default_config", ".", "get", "(", "'msvs_existing_vcproj'", ")", ":", "return", "[", "]", "if", "version", ".", "UsesVcxproj", "(", ")", ":", "return", "_GenerateMSBuildProject", "(", "project", ",", "options", ",", "version", ",", "generator_flags", ")", "else", ":", "return", "_GenerateMSVSProject", "(", "project", ",", "options", ",", "version", ",", "generator_flags", ")" ]
https://github.com/domino-team/openwrt-cc/blob/8b181297c34d14d3ca521cc9f31430d561dbc688/package/gli-pub/openwrt-node-packages-master/node/node-v6.9.1/tools/gyp/pylib/gyp/generator/msvs.py#L907-L927
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
src/python/turicreate/toolkits/recommender/util.py
python
_Recommender.__prepare_dataset_parameter
(self, dataset)
return dataset
Processes the dataset parameter for type correctness. Returns it as an SFrame.
Processes the dataset parameter for type correctness. Returns it as an SFrame.
[ "Processes", "the", "dataset", "parameter", "for", "type", "correctness", ".", "Returns", "it", "as", "an", "SFrame", "." ]
def __prepare_dataset_parameter(self, dataset): """ Processes the dataset parameter for type correctness. Returns it as an SFrame. """ # Translate the dataset argument into the proper type if not isinstance(dataset, _SFrame): def raise_dataset_type_exception(): raise TypeError( "The dataset parameter must be either an SFrame, " "or a dictionary of (str : list) or (str : value)." ) if type(dataset) is dict: if not all(type(k) is str for k in _six.iterkeys(dataset)): raise_dataset_type_exception() if all( type(v) in (list, tuple, _array.array) for v in _six.itervalues(dataset) ): dataset = _SFrame(dataset) else: dataset = _SFrame({k: [v] for k, v in _six.iteritems(dataset)}) else: raise_dataset_type_exception() return dataset
[ "def", "__prepare_dataset_parameter", "(", "self", ",", "dataset", ")", ":", "# Translate the dataset argument into the proper type", "if", "not", "isinstance", "(", "dataset", ",", "_SFrame", ")", ":", "def", "raise_dataset_type_exception", "(", ")", ":", "raise", "TypeError", "(", "\"The dataset parameter must be either an SFrame, \"", "\"or a dictionary of (str : list) or (str : value).\"", ")", "if", "type", "(", "dataset", ")", "is", "dict", ":", "if", "not", "all", "(", "type", "(", "k", ")", "is", "str", "for", "k", "in", "_six", ".", "iterkeys", "(", "dataset", ")", ")", ":", "raise_dataset_type_exception", "(", ")", "if", "all", "(", "type", "(", "v", ")", "in", "(", "list", ",", "tuple", ",", "_array", ".", "array", ")", "for", "v", "in", "_six", ".", "itervalues", "(", "dataset", ")", ")", ":", "dataset", "=", "_SFrame", "(", "dataset", ")", "else", ":", "dataset", "=", "_SFrame", "(", "{", "k", ":", "[", "v", "]", "for", "k", ",", "v", "in", "_six", ".", "iteritems", "(", "dataset", ")", "}", ")", "else", ":", "raise_dataset_type_exception", "(", ")", "return", "dataset" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/src/python/turicreate/toolkits/recommender/util.py#L829-L858
fatih/subvim
241b6d170597857105da219c9b7d36059e9f11fb
vim/base/YouCompleteMe/third_party/waitress/waitress/parser.py
python
HTTPRequestParser.__init__
(self, adj)
adj is an Adjustments object.
adj is an Adjustments object.
[ "adj", "is", "an", "Adjustments", "object", "." ]
def __init__(self, adj): """ adj is an Adjustments object. """ # headers is a mapping containing keys translated to uppercase # with dashes turned into underscores. self.headers = {} self.adj = adj
[ "def", "__init__", "(", "self", ",", "adj", ")", ":", "# headers is a mapping containing keys translated to uppercase", "# with dashes turned into underscores.", "self", ".", "headers", "=", "{", "}", "self", ".", "adj", "=", "adj" ]
https://github.com/fatih/subvim/blob/241b6d170597857105da219c9b7d36059e9f11fb/vim/base/YouCompleteMe/third_party/waitress/waitress/parser.py#L68-L75
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/layers/python/layers/feature_column_ops.py
python
joint_weighted_sum_from_feature_columns
(columns_to_tensors, feature_columns, num_outputs, weight_collections=None, trainable=True, scope=None)
A restricted linear prediction builder based on FeatureColumns. As long as all feature columns are unweighted sparse columns this computes the prediction of a linear model which stores all weights in a single variable. Args: columns_to_tensors: A mapping from feature column to tensors. 'string' key means a base feature (not-transformed). It can have FeatureColumn as a key too. That means that FeatureColumn is already transformed by input pipeline. For example, `inflow` may have handled transformations. feature_columns: A set containing all the feature columns. All items in the set should be instances of classes derived from FeatureColumn. num_outputs: An integer specifying number of outputs. Default value is 1. weight_collections: List of graph collections to which weights are added. trainable: If `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable). scope: Optional scope for variable_scope. Returns: A tuple containing: * A Tensor which represents predictions of a linear model. * A list of Variables storing the weights. * A Variable which is used for bias. Raises: ValueError: if FeatureColumn cannot be used for linear predictions.
A restricted linear prediction builder based on FeatureColumns.
[ "A", "restricted", "linear", "prediction", "builder", "based", "on", "FeatureColumns", "." ]
def joint_weighted_sum_from_feature_columns(columns_to_tensors, feature_columns, num_outputs, weight_collections=None, trainable=True, scope=None): """A restricted linear prediction builder based on FeatureColumns. As long as all feature columns are unweighted sparse columns this computes the prediction of a linear model which stores all weights in a single variable. Args: columns_to_tensors: A mapping from feature column to tensors. 'string' key means a base feature (not-transformed). It can have FeatureColumn as a key too. That means that FeatureColumn is already transformed by input pipeline. For example, `inflow` may have handled transformations. feature_columns: A set containing all the feature columns. All items in the set should be instances of classes derived from FeatureColumn. num_outputs: An integer specifying number of outputs. Default value is 1. weight_collections: List of graph collections to which weights are added. trainable: If `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable). scope: Optional scope for variable_scope. Returns: A tuple containing: * A Tensor which represents predictions of a linear model. * A list of Variables storing the weights. * A Variable which is used for bias. Raises: ValueError: if FeatureColumn cannot be used for linear predictions. """ columns_to_tensors = columns_to_tensors.copy() check_feature_columns(feature_columns) with variable_scope.variable_scope( scope, default_name='joint_weighted_sum_from_feature_columns', values=columns_to_tensors.values()): transformer = _Transformer(columns_to_tensors) embedding_lookup_arguments = [] for column in sorted(set(feature_columns), key=lambda x: x.key): transformed_tensor = transformer.transform(column) try: embedding_lookup_arguments.append( column._wide_embedding_lookup_arguments(transformed_tensor)) # pylint: disable=protected-access except NotImplementedError: raise NotImplementedError('Real-valued columns are not supported. ' 'Use weighted_sum_from_feature_columns ' 'instead, or bucketize these columns.') variable, predictions_no_bias = _create_joint_embedding_lookup( columns_to_tensors, embedding_lookup_arguments, num_outputs, trainable, weight_collections) bias = contrib_variables.model_variable( 'bias_weight', shape=[num_outputs], initializer=init_ops.zeros_initializer(), trainable=trainable, collections=_add_variable_collection(weight_collections)) _log_variable(bias) predictions = nn_ops.bias_add(predictions_no_bias, bias) return predictions, variable, bias
[ "def", "joint_weighted_sum_from_feature_columns", "(", "columns_to_tensors", ",", "feature_columns", ",", "num_outputs", ",", "weight_collections", "=", "None", ",", "trainable", "=", "True", ",", "scope", "=", "None", ")", ":", "columns_to_tensors", "=", "columns_to_tensors", ".", "copy", "(", ")", "check_feature_columns", "(", "feature_columns", ")", "with", "variable_scope", ".", "variable_scope", "(", "scope", ",", "default_name", "=", "'joint_weighted_sum_from_feature_columns'", ",", "values", "=", "columns_to_tensors", ".", "values", "(", ")", ")", ":", "transformer", "=", "_Transformer", "(", "columns_to_tensors", ")", "embedding_lookup_arguments", "=", "[", "]", "for", "column", "in", "sorted", "(", "set", "(", "feature_columns", ")", ",", "key", "=", "lambda", "x", ":", "x", ".", "key", ")", ":", "transformed_tensor", "=", "transformer", ".", "transform", "(", "column", ")", "try", ":", "embedding_lookup_arguments", ".", "append", "(", "column", ".", "_wide_embedding_lookup_arguments", "(", "transformed_tensor", ")", ")", "# pylint: disable=protected-access", "except", "NotImplementedError", ":", "raise", "NotImplementedError", "(", "'Real-valued columns are not supported. '", "'Use weighted_sum_from_feature_columns '", "'instead, or bucketize these columns.'", ")", "variable", ",", "predictions_no_bias", "=", "_create_joint_embedding_lookup", "(", "columns_to_tensors", ",", "embedding_lookup_arguments", ",", "num_outputs", ",", "trainable", ",", "weight_collections", ")", "bias", "=", "contrib_variables", ".", "model_variable", "(", "'bias_weight'", ",", "shape", "=", "[", "num_outputs", "]", ",", "initializer", "=", "init_ops", ".", "zeros_initializer", "(", ")", ",", "trainable", "=", "trainable", ",", "collections", "=", "_add_variable_collection", "(", "weight_collections", ")", ")", "_log_variable", "(", "bias", ")", "predictions", "=", "nn_ops", ".", "bias_add", "(", "predictions_no_bias", ",", "bias", ")", "return", "predictions", ",", "variable", ",", "bias" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/layers/python/layers/feature_column_ops.py#L351-L419
arangodb/arangodb
0d658689c7d1b721b314fa3ca27d38303e1570c8
3rdParty/V8/v7.9.317/tools/stats-viewer.py
python
CounterCollection.CountersInUse
(self)
return self.data.IntAt(12)
Return the number of counters in active use.
Return the number of counters in active use.
[ "Return", "the", "number", "of", "counters", "in", "active", "use", "." ]
def CountersInUse(self): """Return the number of counters in active use.""" return self.data.IntAt(12)
[ "def", "CountersInUse", "(", "self", ")", ":", "return", "self", ".", "data", ".", "IntAt", "(", "12", ")" ]
https://github.com/arangodb/arangodb/blob/0d658689c7d1b721b314fa3ca27d38303e1570c8/3rdParty/V8/v7.9.317/tools/stats-viewer.py#L373-L375
epam/Indigo
30e40b4b1eb9bae0207435a26cfcb81ddcc42be1
api/python/indigo/__init__.py
python
IndigoObject.iterateReactants
(self)
return self.dispatcher.IndigoObject( self.dispatcher, self.dispatcher._checkResult( Indigo._lib.indigoIterateReactants(self.id) ), )
Reaction method iterates reactants Returns: IndigoObject: reactant iterator
Reaction method iterates reactants
[ "Reaction", "method", "iterates", "reactants" ]
def iterateReactants(self): """Reaction method iterates reactants Returns: IndigoObject: reactant iterator """ self.dispatcher._setSessionId() return self.dispatcher.IndigoObject( self.dispatcher, self.dispatcher._checkResult( Indigo._lib.indigoIterateReactants(self.id) ), )
[ "def", "iterateReactants", "(", "self", ")", ":", "self", ".", "dispatcher", ".", "_setSessionId", "(", ")", "return", "self", ".", "dispatcher", ".", "IndigoObject", "(", "self", ".", "dispatcher", ",", "self", ".", "dispatcher", ".", "_checkResult", "(", "Indigo", ".", "_lib", ".", "indigoIterateReactants", "(", "self", ".", "id", ")", ")", ",", ")" ]
https://github.com/epam/Indigo/blob/30e40b4b1eb9bae0207435a26cfcb81ddcc42be1/api/python/indigo/__init__.py#L482-L494
ChromiumWebApps/chromium
c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7
third_party/protobuf/python/google/protobuf/descriptor_database.py
python
DescriptorDatabase.Add
(self, file_desc_proto)
Adds the FileDescriptorProto and its types to this database. Args: file_desc_proto: The FileDescriptorProto to add.
Adds the FileDescriptorProto and its types to this database.
[ "Adds", "the", "FileDescriptorProto", "and", "its", "types", "to", "this", "database", "." ]
def Add(self, file_desc_proto): """Adds the FileDescriptorProto and its types to this database. Args: file_desc_proto: The FileDescriptorProto to add. """ self._file_desc_protos_by_file[file_desc_proto.name] = file_desc_proto package = file_desc_proto.package for message in file_desc_proto.message_type: self._file_desc_protos_by_symbol.update( (name, file_desc_proto) for name in _ExtractSymbols(message, package)) for enum in file_desc_proto.enum_type: self._file_desc_protos_by_symbol[ '.'.join((package, enum.name))] = file_desc_proto
[ "def", "Add", "(", "self", ",", "file_desc_proto", ")", ":", "self", ".", "_file_desc_protos_by_file", "[", "file_desc_proto", ".", "name", "]", "=", "file_desc_proto", "package", "=", "file_desc_proto", ".", "package", "for", "message", "in", "file_desc_proto", ".", "message_type", ":", "self", ".", "_file_desc_protos_by_symbol", ".", "update", "(", "(", "name", ",", "file_desc_proto", ")", "for", "name", "in", "_ExtractSymbols", "(", "message", ",", "package", ")", ")", "for", "enum", "in", "file_desc_proto", ".", "enum_type", ":", "self", ".", "_file_desc_protos_by_symbol", "[", "'.'", ".", "join", "(", "(", "package", ",", "enum", ".", "name", ")", ")", "]", "=", "file_desc_proto" ]
https://github.com/ChromiumWebApps/chromium/blob/c7361d39be8abd1574e6ce8957c8dbddd4c6ccf7/third_party/protobuf/python/google/protobuf/descriptor_database.py#L43-L57
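A usage sketch against the real protobuf API; the file name, package, and message are illustrative:

    from google.protobuf import descriptor_pb2
    from google.protobuf import descriptor_database

    db = descriptor_database.DescriptorDatabase()

    fdp = descriptor_pb2.FileDescriptorProto()
    fdp.name = 'example.proto'       # illustrative file
    fdp.package = 'demo'
    fdp.message_type.add(name='Thing')

    db.Add(fdp)
    print(db.FindFileContainingSymbol('demo.Thing').name)   # example.proto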
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/cloudsearch2/domain.py
python
Domain.delete
(self)
return self.layer1.delete_domain(self.name)
Delete this domain and all index data associated with it.
Delete this domain and all index data associated with it.
[ "Delete", "this", "domain", "and", "all", "index", "data", "associated", "with", "it", "." ]
def delete(self): """ Delete this domain and all index data associated with it. """ return self.layer1.delete_domain(self.name)
[ "def", "delete", "(", "self", ")", ":", "return", "self", ".", "layer1", ".", "delete_domain", "(", "self", ".", "name", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/cloudsearch2/domain.py#L171-L175
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/urllib3/util/response.py
python
is_response_to_head
(response)
return method.upper() == "HEAD"
Checks whether the request of a response has been a HEAD-request. Handles the quirks of AppEngine. :param http.client.HTTPResponse response: Response to check if the originating request used 'HEAD' as a method.
Checks whether the request of a response has been a HEAD-request. Handles the quirks of AppEngine.
[ "Checks", "whether", "the", "request", "of", "a", "response", "has", "been", "a", "HEAD", "-", "request", ".", "Handles", "the", "quirks", "of", "AppEngine", "." ]
def is_response_to_head(response): """ Checks whether the request of a response has been a HEAD-request. Handles the quirks of AppEngine. :param http.client.HTTPResponse response: Response to check if the originating request used 'HEAD' as a method. """ # FIXME: Can we do this somehow without accessing private httplib _method? method = response._method if isinstance(method, int): # Platform-specific: Appengine return method == 3 return method.upper() == "HEAD"
[ "def", "is_response_to_head", "(", "response", ")", ":", "# FIXME: Can we do this somehow without accessing private httplib _method?", "method", "=", "response", ".", "_method", "if", "isinstance", "(", "method", ",", "int", ")", ":", "# Platform-specific: Appengine", "return", "method", "==", "3", "return", "method", ".", "upper", "(", ")", "==", "\"HEAD\"" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/pip/_vendor/urllib3/util/response.py#L94-L107
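Since the check reads only the private `_method` attribute, a stand-in object is enough to exercise all three branches; the import path mirrors the vendored module above and is an assumption about the installed urllib3 version:

    from types import SimpleNamespace
    from urllib3.util.response import is_response_to_head

    print(is_response_to_head(SimpleNamespace(_method="HEAD")))  # True
    print(is_response_to_head(SimpleNamespace(_method="GET")))   # False
    print(is_response_to_head(SimpleNamespace(_method=3)))       # True (AppEngine int code)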
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/grid.py
python
Grid.IsInSelection
(*args, **kwargs)
return _grid.Grid_IsInSelection(*args, **kwargs)
IsInSelection(self, int row, int col) -> bool
IsInSelection(self, int row, int col) -> bool
[ "IsInSelection", "(", "self", "int", "row", "int", "col", ")", "-", ">", "bool" ]
def IsInSelection(*args, **kwargs): """IsInSelection(self, int row, int col) -> bool""" return _grid.Grid_IsInSelection(*args, **kwargs)
[ "def", "IsInSelection", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_grid", ".", "Grid_IsInSelection", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/grid.py#L2053-L2055
facebookincubator/fizz
bd0ba1b80f72023cb7ede671a4caa85f6664d3f6
build/fbcode_builder/getdeps/builder.py
python
BuilderBase._build
(self, install_dirs, reconfigure)
Perform the build. install_dirs contains the list of installation directories for the dependencies of this project. reconfigure will be set to true if the fetcher determined that the sources have changed in such a way that the build system needs to regenerate its rules.
Perform the build. install_dirs contains the list of installation directories for the dependencies of this project. reconfigure will be set to true if the fetcher determined that the sources have changed in such a way that the build system needs to regenerate its rules.
[ "Perform", "the", "build", ".", "install_dirs", "contains", "the", "list", "of", "installation", "directories", "for", "the", "dependencies", "of", "this", "project", ".", "reconfigure", "will", "be", "set", "to", "true", "if", "the", "fetcher", "determined", "that", "the", "sources", "have", "changed", "in", "such", "a", "way", "that", "the", "build", "system", "needs", "to", "regenerate", "its", "rules", "." ]
def _build(self, install_dirs, reconfigure): """Perform the build. install_dirs contains the list of installation directories for the dependencies of this project. reconfigure will be set to true if the fetcher determined that the sources have changed in such a way that the build system needs to regenerate its rules.""" pass
[ "def", "_build", "(", "self", ",", "install_dirs", ",", "reconfigure", ")", ":", "pass" ]
https://github.com/facebookincubator/fizz/blob/bd0ba1b80f72023cb7ede671a4caa85f6664d3f6/build/fbcode_builder/getdeps/builder.py#L116-L123
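A hypothetical subclass sketch showing how the hook is typically filled in; `BuilderBase`'s constructor arguments are project-specific and omitted here:

    class EchoBuilder(BuilderBase):
        # toy builder that only reports what it was asked to do
        def _build(self, install_dirs, reconfigure):
            for d in install_dirs:
                print("dependency installed at:", d)
            if reconfigure:
                print("sources changed; a real builder would regenerate its rules here")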
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py2/numpy/core/_internal.py
python
_newnames
(datatype, order)
Given a datatype and an order object, return a new names tuple, with the order indicated
Given a datatype and an order object, return a new names tuple, with the order indicated
[ "Given", "a", "datatype", "and", "an", "order", "object", "return", "a", "new", "names", "tuple", "with", "the", "order", "indicated" ]
def _newnames(datatype, order): """ Given a datatype and an order object, return a new names tuple, with the order indicated """ oldnames = datatype.names nameslist = list(oldnames) if isinstance(order, (str, unicode)): order = [order] seen = set() if isinstance(order, (list, tuple)): for name in order: try: nameslist.remove(name) except ValueError: if name in seen: raise ValueError("duplicate field name: %s" % (name,)) else: raise ValueError("unknown field name: %s" % (name,)) seen.add(name) return tuple(list(order) + nameslist) raise ValueError("unsupported order value: %s" % (order,))
[ "def", "_newnames", "(", "datatype", ",", "order", ")", ":", "oldnames", "=", "datatype", ".", "names", "nameslist", "=", "list", "(", "oldnames", ")", "if", "isinstance", "(", "order", ",", "(", "str", ",", "unicode", ")", ")", ":", "order", "=", "[", "order", "]", "seen", "=", "set", "(", ")", "if", "isinstance", "(", "order", ",", "(", "list", ",", "tuple", ")", ")", ":", "for", "name", "in", "order", ":", "try", ":", "nameslist", ".", "remove", "(", "name", ")", "except", "ValueError", ":", "if", "name", "in", "seen", ":", "raise", "ValueError", "(", "\"duplicate field name: %s\"", "%", "(", "name", ",", ")", ")", "else", ":", "raise", "ValueError", "(", "\"unknown field name: %s\"", "%", "(", "name", ",", ")", ")", "seen", ".", "add", "(", "name", ")", "return", "tuple", "(", "list", "(", "order", ")", "+", "nameslist", ")", "raise", "ValueError", "(", "\"unsupported order value: %s\"", "%", "(", "order", ",", ")", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/core/_internal.py#L363-L384
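The reordering is easy to reproduce without calling the private helper: for `order=['b']` the named fields move to the front and the rest keep their original order:

    import numpy as np

    dt = np.dtype([('a', int), ('b', float), ('c', bool)])
    order = ['b']
    newnames = tuple(order + [n for n in dt.names if n not in order])
    print(newnames)  # ('b', 'a', 'c') -- the tuple _newnames(dt, ['b']) would return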
SequoiaDB/SequoiaDB
2894ed7e5bd6fe57330afc900cf76d0ff0df9f64
tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py
python
uCSIsCatP
(code)
return ret
Check whether the character is part of P UCS Category
Check whether the character is part of P UCS Category
[ "Check", "whether", "the", "character", "is", "part", "of", "P", "UCS", "Category" ]
def uCSIsCatP(code): """Check whether the character is part of P UCS Category """ ret = libxml2mod.xmlUCSIsCatP(code) return ret
[ "def", "uCSIsCatP", "(", "code", ")", ":", "ret", "=", "libxml2mod", ".", "xmlUCSIsCatP", "(", "code", ")", "return", "ret" ]
https://github.com/SequoiaDB/SequoiaDB/blob/2894ed7e5bd6fe57330afc900cf76d0ff0df9f64/tools/server/php_linux/libxml2/lib/python2.4/site-packages/libxml2.py#L2291-L2294
NVIDIA/TensorRT
42805f078052daad1a98bc5965974fcffaad0960
tools/pytorch-quantization/pytorch_quantization/tensor_quant.py
python
ScaledQuantDescriptor.dict
(self)
return obj_dict
Serialize to dict The built-in __dict__ method returns all the attributes, including those that have default values and the protected "_" prefix. This method only returns those that have non-default values and no "_" in the key. Constructing an instance from the dict returned by this method should give exactly the same instance.
Serialize to dict
[ "Serialize", "to", "dict" ]
def dict(self): """Serialize to dict The built-in __dict__ method returns all the attributes, including those that have default values and the protected "_" prefix. This method only returns those that have non-default values and no "_" in the key. Constructing an instance from the dict returned by this method should give exactly the same instance. """ obj_dict = {} obj_dict['num_bits'] = self._num_bits obj_dict['name'] = self._name if not self._fake_quant: obj_dict['fake_quant'] = self._fake_quant if self._axis is not None: obj_dict['axis'] = self._axis if self._amax is not None: obj_dict['amax'] = self._amax.tolist() if self._scale_amax is not None: obj_dict['scale_amax'] = self._scale_amax if self._learn_amax: obj_dict['learn_amax'] = self._learn_amax if self._unsigned: obj_dict['unsigned'] = self._unsigned return obj_dict
[ "def", "dict", "(", "self", ")", ":", "obj_dict", "=", "{", "}", "obj_dict", "[", "'num_bits'", "]", "=", "self", ".", "_num_bits", "obj_dict", "[", "'name'", "]", "=", "self", ".", "_name", "if", "not", "self", ".", "_fake_quant", ":", "obj_dict", "[", "'fake_quant'", "]", "=", "self", ".", "_fake_quant", "if", "self", ".", "_axis", "is", "not", "None", ":", "obj_dict", "[", "'axis'", "]", "=", "self", ".", "_axis", "if", "self", ".", "_amax", "is", "not", "None", ":", "obj_dict", "[", "'amax'", "]", "=", "self", ".", "_amax", ".", "tolist", "(", ")", "if", "self", ".", "_scale_amax", "is", "not", "None", ":", "obj_dict", "[", "'scale_amax'", "]", "=", "self", ".", "_scale_amax", "if", "self", ".", "_learn_amax", ":", "obj_dict", "[", "'learn_amax'", "]", "=", "self", ".", "_learn_amax", "if", "self", ".", "_unsigned", ":", "obj_dict", "[", "'unsigned'", "]", "=", "self", ".", "_unsigned", "return", "obj_dict" ]
https://github.com/NVIDIA/TensorRT/blob/42805f078052daad1a98bc5965974fcffaad0960/tools/pytorch-quantization/pytorch_quantization/tensor_quant.py#L171-L195
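A usage sketch, assuming `pytorch_quantization` is installed; `QuantDescriptor` is the public alias of `ScaledQuantDescriptor`, and the exact keys in the output depend on which fields keep their defaults:

    from pytorch_quantization.tensor_quant import QuantDescriptor

    desc = QuantDescriptor(num_bits=8, axis=(0,))
    print(desc.dict())  # e.g. {'num_bits': 8, 'name': None, 'axis': (0,)}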
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/pandas/py3/pandas/io/html.py
python
_HtmlFrameParser._parse_tfoot_tr
(self, table)
Return the list of tfoot row elements from the parsed table element. Parameters ---------- table : a table element that contains row elements. Returns ------- list of node-like These are the <tr> row elements of a table.
Return the list of tfoot row elements from the parsed table element.
[ "Return", "the", "list", "of", "tfoot", "row", "elements", "from", "the", "parsed", "table", "element", "." ]
def _parse_tfoot_tr(self, table): """ Return the list of tfoot row elements from the parsed table element. Parameters ---------- table : a table element that contains row elements. Returns ------- list of node-like These are the <tr> row elements of a table. """ raise AbstractMethodError(self)
[ "def", "_parse_tfoot_tr", "(", "self", ",", "table", ")", ":", "raise", "AbstractMethodError", "(", "self", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/pandas/py3/pandas/io/html.py#L311-L324
jiangxiluning/FOTS.PyTorch
b1851c170b4f1ad18406766352cb5171648ce603
FOTS/data_loader/datautils.py
python
polygon_area
(poly)
return np.sum(edge) / 2.
compute area of a polygon :param poly: :return:
compute area of a polygon :param poly: :return:
[ "compute", "area", "of", "a", "polygon", ":", "param", "poly", ":", ":", "return", ":" ]
def polygon_area(poly): ''' compute area of a polygon :param poly: :return: ''' edge = [ (poly[1][0] - poly[0][0]) * (poly[1][1] + poly[0][1]), (poly[2][0] - poly[1][0]) * (poly[2][1] + poly[1][1]), (poly[3][0] - poly[2][0]) * (poly[3][1] + poly[2][1]), (poly[0][0] - poly[3][0]) * (poly[0][1] + poly[3][1]) ] return np.sum(edge) / 2.
[ "def", "polygon_area", "(", "poly", ")", ":", "edge", "=", "[", "(", "poly", "[", "1", "]", "[", "0", "]", "-", "poly", "[", "0", "]", "[", "0", "]", ")", "*", "(", "poly", "[", "1", "]", "[", "1", "]", "+", "poly", "[", "0", "]", "[", "1", "]", ")", ",", "(", "poly", "[", "2", "]", "[", "0", "]", "-", "poly", "[", "1", "]", "[", "0", "]", ")", "*", "(", "poly", "[", "2", "]", "[", "1", "]", "+", "poly", "[", "1", "]", "[", "1", "]", ")", ",", "(", "poly", "[", "3", "]", "[", "0", "]", "-", "poly", "[", "2", "]", "[", "0", "]", ")", "*", "(", "poly", "[", "3", "]", "[", "1", "]", "+", "poly", "[", "2", "]", "[", "1", "]", ")", ",", "(", "poly", "[", "0", "]", "[", "0", "]", "-", "poly", "[", "3", "]", "[", "0", "]", ")", "*", "(", "poly", "[", "0", "]", "[", "1", "]", "+", "poly", "[", "3", "]", "[", "1", "]", ")", "]", "return", "np", ".", "sum", "(", "edge", ")", "/", "2." ]
https://github.com/jiangxiluning/FOTS.PyTorch/blob/b1851c170b4f1ad18406766352cb5171648ce603/FOTS/data_loader/datautils.py#L51-L63
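The return value is the signed shoelace area, so orientation shows up in the sign; inlining the same four-edge sum for a unit square makes this concrete without importing the project:

    import numpy as np

    poly = [[0, 0], [1, 0], [1, 1], [0, 1]]  # counter-clockwise unit square
    edge = [(poly[(i + 1) % 4][0] - poly[i][0]) *
            (poly[(i + 1) % 4][1] + poly[i][1]) for i in range(4)]
    print(np.sum(edge) / 2.)  # -1.0: magnitude is the area, the sign encodes winding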
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/polynomial/chebyshev.py
python
chebdiv
(c1, c2)
Divide one Chebyshev series by another. Returns the quotient-with-remainder of two Chebyshev series `c1` / `c2`. The arguments are sequences of coefficients from lowest order "term" to highest, e.g., [1,2,3] represents the series ``T_0 + 2*T_1 + 3*T_2``. Parameters ---------- c1, c2 : array_like 1-D arrays of Chebyshev series coefficients ordered from low to high. Returns ------- [quo, rem] : ndarrays Of Chebyshev series coefficients representing the quotient and remainder. See Also -------- chebadd, chebsub, chebmulx, chebmul, chebpow Notes ----- In general, the (polynomial) division of one C-series by another results in quotient and remainder terms that are not in the Chebyshev polynomial basis set. Thus, to express these results as C-series, it is typically necessary to "reproject" the results onto said basis set, which typically produces "unintuitive" (but correct) results; see Examples section below. Examples -------- >>> from numpy.polynomial import chebyshev as C >>> c1 = (1,2,3) >>> c2 = (3,2,1) >>> C.chebdiv(c1,c2) # quotient "intuitive," remainder not (array([3.]), array([-8., -4.])) >>> c2 = (0,1,2,3) >>> C.chebdiv(c2,c1) # neither "intuitive" (array([0., 2.]), array([-2., -4.]))
Divide one Chebyshev series by another.
[ "Divide", "one", "Chebyshev", "series", "by", "another", "." ]
def chebdiv(c1, c2): """ Divide one Chebyshev series by another. Returns the quotient-with-remainder of two Chebyshev series `c1` / `c2`. The arguments are sequences of coefficients from lowest order "term" to highest, e.g., [1,2,3] represents the series ``T_0 + 2*T_1 + 3*T_2``. Parameters ---------- c1, c2 : array_like 1-D arrays of Chebyshev series coefficients ordered from low to high. Returns ------- [quo, rem] : ndarrays Of Chebyshev series coefficients representing the quotient and remainder. See Also -------- chebadd, chebsub, chebmulx, chebmul, chebpow Notes ----- In general, the (polynomial) division of one C-series by another results in quotient and remainder terms that are not in the Chebyshev polynomial basis set. Thus, to express these results as C-series, it is typically necessary to "reproject" the results onto said basis set, which typically produces "unintuitive" (but correct) results; see Examples section below. Examples -------- >>> from numpy.polynomial import chebyshev as C >>> c1 = (1,2,3) >>> c2 = (3,2,1) >>> C.chebdiv(c1,c2) # quotient "intuitive," remainder not (array([3.]), array([-8., -4.])) >>> c2 = (0,1,2,3) >>> C.chebdiv(c2,c1) # neither "intuitive" (array([0., 2.]), array([-2., -4.])) """ # c1, c2 are trimmed copies [c1, c2] = pu.as_series([c1, c2]) if c2[-1] == 0: raise ZeroDivisionError() # note: this is more efficient than `pu._div(chebmul, c1, c2)` lc1 = len(c1) lc2 = len(c2) if lc1 < lc2: return c1[:1]*0, c1 elif lc2 == 1: return c1/c2[-1], c1[:1]*0 else: z1 = _cseries_to_zseries(c1) z2 = _cseries_to_zseries(c2) quo, rem = _zseries_div(z1, z2) quo = pu.trimseq(_zseries_to_cseries(quo)) rem = pu.trimseq(_zseries_to_cseries(rem)) return quo, rem
[ "def", "chebdiv", "(", "c1", ",", "c2", ")", ":", "# c1, c2 are trimmed copies", "[", "c1", ",", "c2", "]", "=", "pu", ".", "as_series", "(", "[", "c1", ",", "c2", "]", ")", "if", "c2", "[", "-", "1", "]", "==", "0", ":", "raise", "ZeroDivisionError", "(", ")", "# note: this is more efficient than `pu._div(chebmul, c1, c2)`", "lc1", "=", "len", "(", "c1", ")", "lc2", "=", "len", "(", "c2", ")", "if", "lc1", "<", "lc2", ":", "return", "c1", "[", ":", "1", "]", "*", "0", ",", "c1", "elif", "lc2", "==", "1", ":", "return", "c1", "/", "c2", "[", "-", "1", "]", ",", "c1", "[", ":", "1", "]", "*", "0", "else", ":", "z1", "=", "_cseries_to_zseries", "(", "c1", ")", "z2", "=", "_cseries_to_zseries", "(", "c2", ")", "quo", ",", "rem", "=", "_zseries_div", "(", "z1", ",", "z2", ")", "quo", "=", "pu", ".", "trimseq", "(", "_zseries_to_cseries", "(", "quo", ")", ")", "rem", "=", "pu", ".", "trimseq", "(", "_zseries_to_cseries", "(", "rem", ")", ")", "return", "quo", ",", "rem" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/python/windows/Lib/numpy/polynomial/chebyshev.py#L727-L791
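The quotient/remainder pair can be sanity-checked against the division identity c1 == quo*c2 + rem using the public numpy API:

    import numpy as np
    from numpy.polynomial import chebyshev as C

    c1, c2 = (1, 2, 3), (3, 2, 1)
    quo, rem = C.chebdiv(c1, c2)
    print(np.allclose(C.chebadd(C.chebmul(quo, c2), rem), c1))  # True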
cksystemsgroup/scalloc
049857919b5fa1d539c9e4206e353daca2e87394
tools/cpplint.py
python
CheckForNewlineAtEOF
(filename, lines, error)
Logs an error if there is no newline char at the end of the file. Args: filename: The name of the current file. lines: An array of strings, each representing a line of the file. error: The function to call with any errors found.
Logs an error if there is no newline char at the end of the file.
[ "Logs", "an", "error", "if", "there", "is", "no", "newline", "char", "at", "the", "end", "of", "the", "file", "." ]
def CheckForNewlineAtEOF(filename, lines, error): """Logs an error if there is no newline char at the end of the file. Args: filename: The name of the current file. lines: An array of strings, each representing a line of the file. error: The function to call with any errors found. """ # The array lines() was created by adding two newlines to the # original file (go figure), then splitting on \n. # To verify that the file ends in \n, we just have to make sure the # last-but-two element of lines() exists and is empty. if len(lines) < 3 or lines[-2]: error(filename, len(lines) - 2, 'whitespace/ending_newline', 5, 'Could not find a newline character at the end of the file.')
[ "def", "CheckForNewlineAtEOF", "(", "filename", ",", "lines", ",", "error", ")", ":", "# The array lines() was created by adding two newlines to the", "# original file (go figure), then splitting on \\n.", "# To verify that the file ends in \\n, we just have to make sure the", "# last-but-two element of lines() exists and is empty.", "if", "len", "(", "lines", ")", "<", "3", "or", "lines", "[", "-", "2", "]", ":", "error", "(", "filename", ",", "len", "(", "lines", ")", "-", "2", ",", "'whitespace/ending_newline'", ",", "5", ",", "'Could not find a newline character at the end of the file.'", ")" ]
https://github.com/cksystemsgroup/scalloc/blob/049857919b5fa1d539c9e4206e353daca2e87394/tools/cpplint.py#L1497-L1512
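A small driver sketch; the marker padding imitates how cpplint wraps file contents before running per-file checks, which may vary between cpplint versions:

    def report(filename, linenum, category, confidence, message):
        print('%s:%d: %s [%s] [%d]' % (filename, linenum, message, category, confidence))

    source = 'int main() {}'  # file text with no trailing newline
    padded = ['// marker'] + source.split('\n') + ['// marker']
    CheckForNewlineAtEOF('demo.cc', padded, report)  # reports whitespace/ending_newline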
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/signal/ltisys.py
python
StateSpace.to_zpk
(self, **kwargs)
return ZerosPolesGain(*ss2zpk(self._A, self._B, self._C, self._D, **kwargs), **self._dt_dict)
Convert system representation to `ZerosPolesGain`. Parameters ---------- kwargs : dict, optional Additional keywords passed to `ss2zpk` Returns ------- sys : instance of `ZerosPolesGain` Zeros, poles, gain representation of the current system
Convert system representation to `ZerosPolesGain`.
[ "Convert", "system", "representation", "to", "ZerosPolesGain", "." ]
def to_zpk(self, **kwargs): """ Convert system representation to `ZerosPolesGain`. Parameters ---------- kwargs : dict, optional Additional keywords passed to `ss2zpk` Returns ------- sys : instance of `ZerosPolesGain` Zeros, poles, gain representation of the current system """ return ZerosPolesGain(*ss2zpk(self._A, self._B, self._C, self._D, **kwargs), **self._dt_dict)
[ "def", "to_zpk", "(", "self", ",", "*", "*", "kwargs", ")", ":", "return", "ZerosPolesGain", "(", "*", "ss2zpk", "(", "self", ".", "_A", ",", "self", ".", "_B", ",", "self", ".", "_C", ",", "self", ".", "_D", ",", "*", "*", "kwargs", ")", ",", "*", "*", "self", ".", "_dt_dict", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/signal/ltisys.py#L1581-L1597
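Usage through the public scipy API; the two-state system below is constructed to have poles at -1 and -2:

    from scipy import signal

    sys_ss = signal.StateSpace([[0, 1], [-2, -3]], [[0], [1]], [[1, 0]], [[0]])
    sys_zpk = sys_ss.to_zpk()
    print(sys_zpk.poles)  # [-2. -1.] up to ordering; no zeros for this system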
windystrife/UnrealEngine_NVIDIAGameWorks
b50e6338a7c5b26374d66306ebc7807541ff815e
Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/distutils/dist.py
python
Distribution.print_command_list
(self, commands, header, max_length)
Print a subset of the list of all commands -- used by 'print_commands()'.
Print a subset of the list of all commands -- used by 'print_commands()'.
[ "Print", "a", "subset", "of", "the", "list", "of", "all", "commands", "--", "used", "by", "print_commands", "()", "." ]
def print_command_list(self, commands, header, max_length): """Print a subset of the list of all commands -- used by 'print_commands()'. """ print(header + ":") for cmd in commands: klass = self.cmdclass.get(cmd) if not klass: klass = self.get_command_class(cmd) try: description = klass.description except AttributeError: description = "(no description available)" print(" %-*s %s" % (max_length, cmd, description))
[ "def", "print_command_list", "(", "self", ",", "commands", ",", "header", ",", "max_length", ")", ":", "print", "(", "header", "+", "\":\"", ")", "for", "cmd", "in", "commands", ":", "klass", "=", "self", ".", "cmdclass", ".", "get", "(", "cmd", ")", "if", "not", "klass", ":", "klass", "=", "self", ".", "get_command_class", "(", "cmd", ")", "try", ":", "description", "=", "klass", ".", "description", "except", "AttributeError", ":", "description", "=", "\"(no description available)\"", "print", "(", "\" %-*s %s\"", "%", "(", "max_length", ",", "cmd", ",", "description", ")", ")" ]
https://github.com/windystrife/UnrealEngine_NVIDIAGameWorks/blob/b50e6338a7c5b26374d66306ebc7807541ff815e/Engine/Extras/ThirdPartyNotUE/emsdk/Win64/python/2.7.5.3_64bit/Lib/distutils/dist.py#L697-L712
SFTtech/openage
d6a08c53c48dc1e157807471df92197f6ca9e04d
openage/convert/processor/conversion/aoc/tech_subprocessor.py
python
AoCTechSubprocessor.upgrade_unit_effect
(converter_group, effect)
return patches
Creates the patches for upgrading entities in a line.
Creates the patches for upgrading entities in a line.
[ "Creates", "the", "patches", "for", "upgrading", "entities", "in", "a", "line", "." ]
def upgrade_unit_effect(converter_group, effect): """ Creates the patches for upgrading entities in a line. """ patches = [] tech_id = converter_group.get_id() dataset = converter_group.data tech_lookup_dict = internal_name_lookups.get_tech_lookups(dataset.game_version) upgrade_source_id = effect["attr_a"].get_value() upgrade_target_id = effect["attr_b"].get_value() if upgrade_source_id not in dataset.unit_ref.keys() or\ upgrade_target_id not in dataset.unit_ref.keys(): # Skip annexes or transform units return patches line = dataset.unit_ref[upgrade_source_id] upgrade_source_pos = line.get_unit_position(upgrade_source_id) try: upgrade_target_pos = line.get_unit_position(upgrade_target_id) except KeyError: # TODO: Implement branching line upgrades warn(f"Could not create upgrade from unit {upgrade_source_id} to {upgrade_target_id}") return patches if isinstance(line, GenieBuildingLineGroup): # Building upgrades always reference the head unit # so we use the decremented target id instead upgrade_source_pos = upgrade_target_pos - 1 elif upgrade_target_pos - upgrade_source_pos != 1: # Skip effects that upgrade entities not next to each other in # the line. return patches upgrade_source = line.line[upgrade_source_pos] upgrade_target = line.line[upgrade_target_pos] tech_name = tech_lookup_dict[tech_id][0] diff = upgrade_source.diff(upgrade_target) patches.extend(AoCUpgradeAbilitySubprocessor.death_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.despawn_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.idle_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.live_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.los_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.named_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.resistance_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.selectable_ability(converter_group, line, tech_name, diff)) patches.extend(AoCUpgradeAbilitySubprocessor.turn_ability(converter_group, line, tech_name, diff)) if line.is_projectile_shooter(): patches.extend(AoCUpgradeAbilitySubprocessor.shoot_projectile_ability(converter_group, line, tech_name, upgrade_source, upgrade_target, 7, diff)) elif line.is_melee() or line.is_ranged(): if line.has_command(7): # Attack patches.extend(AoCUpgradeAbilitySubprocessor.apply_discrete_effect_ability(converter_group, line, tech_name, 7, line.is_ranged(), diff)) if isinstance(line, GenieUnitLineGroup): patches.extend(AoCUpgradeAbilitySubprocessor.move_ability(converter_group, line, tech_name, diff)) if isinstance(line, GenieBuildingLineGroup): patches.extend(AoCUpgradeAbilitySubprocessor.attribute_change_tracker_ability(converter_group, line, tech_name, diff)) return patches
[ "def", "upgrade_unit_effect", "(", "converter_group", ",", "effect", ")", ":", "patches", "=", "[", "]", "tech_id", "=", "converter_group", ".", "get_id", "(", ")", "dataset", "=", "converter_group", ".", "data", "tech_lookup_dict", "=", "internal_name_lookups", ".", "get_tech_lookups", "(", "dataset", ".", "game_version", ")", "upgrade_source_id", "=", "effect", "[", "\"attr_a\"", "]", ".", "get_value", "(", ")", "upgrade_target_id", "=", "effect", "[", "\"attr_b\"", "]", ".", "get_value", "(", ")", "if", "upgrade_source_id", "not", "in", "dataset", ".", "unit_ref", ".", "keys", "(", ")", "or", "upgrade_target_id", "not", "in", "dataset", ".", "unit_ref", ".", "keys", "(", ")", ":", "# Skip annexes or transform units", "return", "patches", "line", "=", "dataset", ".", "unit_ref", "[", "upgrade_source_id", "]", "upgrade_source_pos", "=", "line", ".", "get_unit_position", "(", "upgrade_source_id", ")", "try", ":", "upgrade_target_pos", "=", "line", ".", "get_unit_position", "(", "upgrade_target_id", ")", "except", "KeyError", ":", "# TODO: Implement branching line upgrades", "warn", "(", "f\"Could not create upgrade from unit {upgrade_source_id} to {upgrade_target_id}\"", ")", "return", "patches", "if", "isinstance", "(", "line", ",", "GenieBuildingLineGroup", ")", ":", "# Building upgrades always reference the head unit", "# so we use the decremented target id instead", "upgrade_source_pos", "=", "upgrade_target_pos", "-", "1", "elif", "upgrade_target_pos", "-", "upgrade_source_pos", "!=", "1", ":", "# Skip effects that upgrades entities not next to each other in", "# the line.", "return", "patches", "upgrade_source", "=", "line", ".", "line", "[", "upgrade_source_pos", "]", "upgrade_target", "=", "line", ".", "line", "[", "upgrade_target_pos", "]", "tech_name", "=", "tech_lookup_dict", "[", "tech_id", "]", "[", "0", "]", "diff", "=", "upgrade_source", ".", "diff", "(", "upgrade_target", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "death_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "despawn_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "idle_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "live_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "los_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "named_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "resistance_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "selectable_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "turn_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "if", "line", ".", "is_projectile_shooter", "(", ")", ":", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", 
"shoot_projectile_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "upgrade_source", ",", "upgrade_target", ",", "7", ",", "diff", ")", ")", "elif", "line", ".", "is_melee", "(", ")", "or", "line", ".", "is_ranged", "(", ")", ":", "if", "line", ".", "has_command", "(", "7", ")", ":", "# Attack", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "apply_discrete_effect_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "7", ",", "line", ".", "is_ranged", "(", ")", ",", "diff", ")", ")", "if", "isinstance", "(", "line", ",", "GenieUnitLineGroup", ")", ":", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "move_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "if", "isinstance", "(", "line", ",", "GenieBuildingLineGroup", ")", ":", "patches", ".", "extend", "(", "AoCUpgradeAbilitySubprocessor", ".", "attribute_change_tracker_ability", "(", "converter_group", ",", "line", ",", "tech_name", ",", "diff", ")", ")", "return", "patches" ]
https://github.com/SFTtech/openage/blob/d6a08c53c48dc1e157807471df92197f6ca9e04d/openage/convert/processor/conversion/aoc/tech_subprocessor.py#L279-L356
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_misc.py
python
TextDataObject.__init__
(self, *args, **kwargs)
__init__(self, String text=EmptyString) -> TextDataObject Constructor, may be used to initialise the text (otherwise `SetText` should be used later).
__init__(self, String text=EmptyString) -> TextDataObject
[ "__init__", "(", "self", "String", "text", "=", "EmptyString", ")", "-", ">", "TextDataObject" ]
def __init__(self, *args, **kwargs): """ __init__(self, String text=EmptyString) -> TextDataObject Constructor, may be used to initialise the text (otherwise `SetText` should be used later). """ _misc_.TextDataObject_swiginit(self,_misc_.new_TextDataObject(*args, **kwargs))
[ "def", "__init__", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "_misc_", ".", "TextDataObject_swiginit", "(", "self", ",", "_misc_", ".", "new_TextDataObject", "(", "*", "args", ",", "*", "*", "kwargs", ")", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_misc.py#L5181-L5188
ceph/ceph
959663007321a369c83218414a29bd9dbc8bda3a
src/pybind/mgr/dashboard/rest_client.py
python
RestClient._handle_response_status_code
(status_code: int)
return status_code
Method to be overridden by subclasses that need specific handling.
Method to be overridden by subclasses that need specific handling.
[ "Method", "to", "be", "overridden", "by", "subclasses", "that", "need", "specific", "handling", "." ]
def _handle_response_status_code(status_code: int) -> int: """ Method to be overridden by subclasses that need specific handling. """ return status_code
[ "def", "_handle_response_status_code", "(", "status_code", ":", "int", ")", "->", "int", ":", "return", "status_code" ]
https://github.com/ceph/ceph/blob/959663007321a369c83218414a29bd9dbc8bda3a/src/pybind/mgr/dashboard/rest_client.py#L513-L517
SoarGroup/Soar
a1c5e249499137a27da60533c72969eef3b8ab6b
scons/scons-local-4.1.0/SCons/Node/FS.py
python
Base.__init__
(self, name, directory, fs)
Initialize a generic Node.FS.Base object. Call the superclass initialization, take care of setting up our relative and absolute paths, identify our parent directory, and indicate that this node should use signatures.
Initialize a generic Node.FS.Base object.
[ "Initialize", "a", "generic", "Node", ".", "FS", ".", "Base", "object", "." ]
def __init__(self, name, directory, fs): """Initialize a generic Node.FS.Base object. Call the superclass initialization, take care of setting up our relative and absolute paths, identify our parent directory, and indicate that this node should use signatures.""" if SCons.Debug.track_instances: logInstanceCreation(self, 'Node.FS.Base') SCons.Node.Node.__init__(self) # Filenames and paths are probably reused and are intern'ed to save some memory. # Filename with extension as it was specified when the object was # created; to obtain filesystem path, use Python str() function self.name = SCons.Util.silent_intern(name) self.fs = fs #: Reference to parent Node.FS object assert directory, "A directory must be provided" self._abspath = None self._labspath = None self._path = None self._tpath = None self._path_elements = None self.dir = directory self.cwd = None # will hold the SConscript directory for target nodes self.duplicate = directory.duplicate self.changed_since_last_build = 2 self._func_sconsign = 0 self._func_exists = 2 self._func_rexists = 2 self._func_get_contents = 0 self._func_target_from_source = 1 self.store_info = 1
[ "def", "__init__", "(", "self", ",", "name", ",", "directory", ",", "fs", ")", ":", "if", "SCons", ".", "Debug", ".", "track_instances", ":", "logInstanceCreation", "(", "self", ",", "'Node.FS.Base'", ")", "SCons", ".", "Node", ".", "Node", ".", "__init__", "(", "self", ")", "# Filenames and paths are probably reused and are intern'ed to save some memory.", "# Filename with extension as it was specified when the object was", "# created; to obtain filesystem path, use Python str() function", "self", ".", "name", "=", "SCons", ".", "Util", ".", "silent_intern", "(", "name", ")", "self", ".", "fs", "=", "fs", "#: Reference to parent Node.FS object", "assert", "directory", ",", "\"A directory must be provided\"", "self", ".", "_abspath", "=", "None", "self", ".", "_labspath", "=", "None", "self", ".", "_path", "=", "None", "self", ".", "_tpath", "=", "None", "self", ".", "_path_elements", "=", "None", "self", ".", "dir", "=", "directory", "self", ".", "cwd", "=", "None", "# will hold the SConscript directory for target nodes", "self", ".", "duplicate", "=", "directory", ".", "duplicate", "self", ".", "changed_since_last_build", "=", "2", "self", ".", "_func_sconsign", "=", "0", "self", ".", "_func_exists", "=", "2", "self", ".", "_func_rexists", "=", "2", "self", ".", "_func_get_contents", "=", "0", "self", ".", "_func_target_from_source", "=", "1", "self", ".", "store_info", "=", "1" ]
https://github.com/SoarGroup/Soar/blob/a1c5e249499137a27da60533c72969eef3b8ab6b/scons/scons-local-4.1.0/SCons/Node/FS.py#L566-L600
Polidea/SiriusObfuscator
b0e590d8130e97856afe578869b83a209e2b19be
SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py
python
SBData.GetSignedInt64
(self, *args)
return _lldb.SBData_GetSignedInt64(self, *args)
GetSignedInt64(self, SBError error, offset_t offset) -> int64_t
GetSignedInt64(self, SBError error, offset_t offset) -> int64_t
[ "GetSignedInt64", "(", "self", "SBError", "error", "offset_t", "offset", ")", "-", ">", "int64_t" ]
def GetSignedInt64(self, *args): """GetSignedInt64(self, SBError error, offset_t offset) -> int64_t""" return _lldb.SBData_GetSignedInt64(self, *args)
[ "def", "GetSignedInt64", "(", "self", ",", "*", "args", ")", ":", "return", "_lldb", ".", "SBData_GetSignedInt64", "(", "self", ",", "*", "args", ")" ]
https://github.com/Polidea/SiriusObfuscator/blob/b0e590d8130e97856afe578869b83a209e2b19be/SymbolExtractorAndRenamer/lldb/scripts/Python/static-binding/lldb.py#L2736-L2738
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/distributions/special_math.py
python
log_cdf_laplace
(x, name="log_cdf_laplace")
Log Laplace distribution function. This function calculates `Log[L(x)]`, where `L(x)` is the cumulative distribution function of the Laplace distribution, i.e. ```L(x) := 0.5 * int_{-infty}^x e^{-|t|} dt``` For numerical accuracy, `L(x)` is computed in different ways depending on `x`, ``` x <= 0: Log[L(x)] = Log[0.5] + x, which is exact 0 < x: Log[L(x)] = Log[1 - 0.5 * e^{-x}], which is exact ``` Args: x: `Tensor` of type `float32`, `float64`. name: Python string. A name for the operation (default="log_cdf_laplace"). Returns: `Tensor` with `dtype=x.dtype`. Raises: TypeError: if `x.dtype` is not handled.
Log Laplace distribution function.
[ "Log", "Laplace", "distribution", "function", "." ]
def log_cdf_laplace(x, name="log_cdf_laplace"): """Log Laplace distribution function. This function calculates `Log[L(x)]`, where `L(x)` is the cumulative distribution function of the Laplace distribution, i.e. ```L(x) := 0.5 * int_{-infty}^x e^{-|t|} dt``` For numerical accuracy, `L(x)` is computed in different ways depending on `x`, ``` x <= 0: Log[L(x)] = Log[0.5] + x, which is exact 0 < x: Log[L(x)] = Log[1 - 0.5 * e^{-x}], which is exact ``` Args: x: `Tensor` of type `float32`, `float64`. name: Python string. A name for the operation (default="log_ndtr"). Returns: `Tensor` with `dtype=x.dtype`. Raises: TypeError: if `x.dtype` is not handled. """ with ops.name_scope(name, values=[x]): x = ops.convert_to_tensor(x, name="x") # For x < 0, L(x) = 0.5 * exp{x} exactly, so Log[L(x)] = log(0.5) + x. lower_solution = -np.log(2.) + x # safe_exp_neg_x = exp{-x} for x > 0, but is # bounded above by 1, which avoids # log[1 - 1] = -inf for x = log(1/2), AND # exp{-x} --> inf, for x << -1 safe_exp_neg_x = math_ops.exp(-math_ops.abs(x)) # log1p(z) = log(1 + z) approx z for |z| << 1. This approxmation is used # internally by log1p, rather than being done explicitly here. upper_solution = math_ops.log1p(-0.5 * safe_exp_neg_x) return array_ops.where_v2(x < 0., lower_solution, upper_solution)
[ "def", "log_cdf_laplace", "(", "x", ",", "name", "=", "\"log_cdf_laplace\"", ")", ":", "with", "ops", ".", "name_scope", "(", "name", ",", "values", "=", "[", "x", "]", ")", ":", "x", "=", "ops", ".", "convert_to_tensor", "(", "x", ",", "name", "=", "\"x\"", ")", "# For x < 0, L(x) = 0.5 * exp{x} exactly, so Log[L(x)] = log(0.5) + x.", "lower_solution", "=", "-", "np", ".", "log", "(", "2.", ")", "+", "x", "# safe_exp_neg_x = exp{-x} for x > 0, but is", "# bounded above by 1, which avoids", "# log[1 - 1] = -inf for x = log(1/2), AND", "# exp{-x} --> inf, for x << -1", "safe_exp_neg_x", "=", "math_ops", ".", "exp", "(", "-", "math_ops", ".", "abs", "(", "x", ")", ")", "# log1p(z) = log(1 + z) approx z for |z| << 1. This approxmation is used", "# internally by log1p, rather than being done explicitly here.", "upper_solution", "=", "math_ops", ".", "log1p", "(", "-", "0.5", "*", "safe_exp_neg_x", ")", "return", "array_ops", ".", "where_v2", "(", "x", "<", "0.", ",", "lower_solution", ",", "upper_solution", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/distributions/special_math.py#L441-L486
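The piecewise scheme is easy to mirror in plain numpy, which also shows why it stays finite deep in the left tail where a naive log(cdf) would underflow to -inf:

    import numpy as np

    def log_cdf_laplace_np(x):
        x = np.asarray(x, dtype=np.float64)
        lower = np.log(0.5) + x                      # exact for x <= 0
        upper = np.log1p(-0.5 * np.exp(-np.abs(x)))  # exact for x > 0
        return np.where(x < 0., lower, upper)

    print(log_cdf_laplace_np([-40.0, 0.0, 40.0]))  # finite everywhere: ~[-40.69, -0.693, -2.1e-18]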
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/lib2to3/fixer_base.py
python
BaseFix.finish_tree
(self, tree, filename)
Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from.
Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up.
[ "Some", "fixers", "need", "to", "maintain", "tree", "-", "wide", "state", ".", "This", "method", "is", "called", "once", "at", "the", "conclusion", "of", "tree", "fix", "-", "up", "." ]
def finish_tree(self, tree, filename): """Some fixers need to maintain tree-wide state. This method is called once, at the conclusion of tree fix-up. tree - the root node of the tree to be processed. filename - the name of the file the tree came from. """ pass
[ "def", "finish_tree", "(", "self", ",", "tree", ",", "filename", ")", ":", "pass" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/lib2to3/fixer_base.py#L159-L166
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
Sizer._ReplaceSizer
(*args, **kwargs)
return _core_.Sizer__ReplaceSizer(*args, **kwargs)
_ReplaceSizer(self, Sizer oldsz, Sizer newsz, bool recursive=False) -> bool
_ReplaceSizer(self, Sizer oldsz, Sizer newsz, bool recursive=False) -> bool
[ "_ReplaceSizer", "(", "self", "Sizer", "oldsz", "Sizer", "newsz", "bool", "recursive", "=", "False", ")", "-", ">", "bool" ]
def _ReplaceSizer(*args, **kwargs): """_ReplaceSizer(self, Sizer oldsz, Sizer newsz, bool recursive=False) -> bool""" return _core_.Sizer__ReplaceSizer(*args, **kwargs)
[ "def", "_ReplaceSizer", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Sizer__ReplaceSizer", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L14574-L14576
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/backend.py
python
manual_variable_initialization
(value)
Sets the manual variable initialization flag. This boolean flag determines whether variables should be initialized as they are instantiated (default), or if the user should handle the initialization (e.g. via `tf.compat.v1.initialize_all_variables()`). Arguments: value: Python boolean.
Sets the manual variable initialization flag.
[ "Sets", "the", "manual", "variable", "initialization", "flag", "." ]
def manual_variable_initialization(value): """Sets the manual variable initialization flag. This boolean flag determines whether variables should be initialized as they are instantiated (default), or if the user should handle the initialization (e.g. via `tf.compat.v1.initialize_all_variables()`). Arguments: value: Python boolean. """ global _MANUAL_VAR_INIT _MANUAL_VAR_INIT = value
[ "def", "manual_variable_initialization", "(", "value", ")", ":", "global", "_MANUAL_VAR_INIT", "_MANUAL_VAR_INIT", "=", "value" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/keras/backend.py#L248-L261
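The flag is also reachable through the public backend namespace; a minimal sketch:

    import tensorflow as tf

    tf.keras.backend.manual_variable_initialization(True)
    # build the model, then run initialization yourself, e.g. in TF1-style code:
    # sess.run(tf.compat.v1.global_variables_initializer())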
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
scripts/abins/spowdersemiempiricalcalculator.py
python
SPowderSemiEmpiricalCalculator.calculate_isotropic_dw
(self, *, angle: float)
return self._isotropic_dw(frequencies=self._bin_centres, q2=q2, a_trace=average_a_traces[:, np.newaxis], temperature=self._temperature)
Compute Debye-Waller factor in isotropic approximation for current system Returns an N_atoms x N_frequencies array.
Compute Debye-Waller factor in isotropic approximation for current system
[ "Compute", "Debye", "-", "Waller", "factor", "in", "isotropic", "approximation", "for", "current", "system" ]
def calculate_isotropic_dw(self, *, angle: float) -> np.ndarray: """Compute Debye-Waller factor in isotropic approximation for current system Returns an N_atoms x N_frequencies array. """ q2 = self._instrument.calculate_q_powder(input_data=self._bin_centres, angle=angle) average_a_traces = np.sum([self._powder_data.get_a_traces(k_index) * kpoint.weight for k_index, kpoint in enumerate(self._abins_data.get_kpoints_data())], axis=0) return self._isotropic_dw(frequencies=self._bin_centres, q2=q2, a_trace=average_a_traces[:, np.newaxis], temperature=self._temperature)
[ "def", "calculate_isotropic_dw", "(", "self", ",", "*", ",", "angle", ":", "float", ")", "->", "np", ".", "ndarray", ":", "q2", "=", "self", ".", "_instrument", ".", "calculate_q_powder", "(", "input_data", "=", "self", ".", "_bin_centres", ",", "angle", "=", "angle", ")", "average_a_traces", "=", "np", ".", "sum", "(", "[", "self", ".", "_powder_data", ".", "get_a_traces", "(", "k_index", ")", "*", "kpoint", ".", "weight", "for", "k_index", ",", "kpoint", "in", "enumerate", "(", "self", ".", "_abins_data", ".", "get_kpoints_data", "(", ")", ")", "]", ",", "axis", "=", "0", ")", "return", "self", ".", "_isotropic_dw", "(", "frequencies", "=", "self", ".", "_bin_centres", ",", "q2", "=", "q2", ",", "a_trace", "=", "average_a_traces", "[", ":", ",", "np", ".", "newaxis", "]", ",", "temperature", "=", "self", ".", "_temperature", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/scripts/abins/spowdersemiempiricalcalculator.py#L370-L382
plumonito/dtslam
5994bb9cf7a11981b830370db206bceb654c085d
3rdparty/opencv-git/3rdparty/jinja2/compiler.py
python
CodeGenerator.push_scope
(self, frame, extra_vars=())
return aliases
This function returns all the shadowed variables in a dict in the form name: alias and will write the required assignments into the current scope. No indentation takes place. This also predefines locally declared variables from the loop body because under some circumstances it may be the case that `extra_vars` is passed to `Frame.find_shadowed`.
This function returns all the shadowed variables in a dict in the form name: alias and will write the required assignments into the current scope. No indentation takes place.
[ "This", "function", "returns", "all", "the", "shadowed", "variables", "in", "a", "dict", "in", "the", "form", "name", ":", "alias", "and", "will", "write", "the", "required", "assignments", "into", "the", "current", "scope", ".", "No", "indentation", "takes", "place", "." ]
def push_scope(self, frame, extra_vars=()): """This function returns all the shadowed variables in a dict in the form name: alias and will write the required assignments into the current scope. No indentation takes place. This also predefines locally declared variables from the loop body because under some circumstances it may be the case that `extra_vars` is passed to `Frame.find_shadowed`. """ aliases = {} for name in frame.find_shadowed(extra_vars): aliases[name] = ident = self.temporary_identifier() self.writeline('%s = l_%s' % (ident, name)) to_declare = set() for name in frame.identifiers.declared_locally: if name not in aliases: to_declare.add('l_' + name) if to_declare: self.writeline(' = '.join(to_declare) + ' = missing') return aliases
[ "def", "push_scope", "(", "self", ",", "frame", ",", "extra_vars", "=", "(", ")", ")", ":", "aliases", "=", "{", "}", "for", "name", "in", "frame", ".", "find_shadowed", "(", "extra_vars", ")", ":", "aliases", "[", "name", "]", "=", "ident", "=", "self", ".", "temporary_identifier", "(", ")", "self", ".", "writeline", "(", "'%s = l_%s'", "%", "(", "ident", ",", "name", ")", ")", "to_declare", "=", "set", "(", ")", "for", "name", "in", "frame", ".", "identifiers", ".", "declared_locally", ":", "if", "name", "not", "in", "aliases", ":", "to_declare", ".", "add", "(", "'l_'", "+", "name", ")", "if", "to_declare", ":", "self", ".", "writeline", "(", "' = '", ".", "join", "(", "to_declare", ")", "+", "' = missing'", ")", "return", "aliases" ]
https://github.com/plumonito/dtslam/blob/5994bb9cf7a11981b830370db206bceb654c085d/3rdparty/opencv-git/3rdparty/jinja2/compiler.py#L602-L622
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_gdi.py
python
BufferedDC.__init__
(self, *args)
__init__(self, DC dc, Bitmap buffer=NullBitmap, int style=BUFFER_CLIENT_AREA) -> BufferedDC __init__(self, DC dc, Size area, int style=BUFFER_CLIENT_AREA) -> BufferedDC Constructs a buffered DC.
__init__(self, DC dc, Bitmap buffer=NullBitmap, int style=BUFFER_CLIENT_AREA) -> BufferedDC __init__(self, DC dc, Size area, int style=BUFFER_CLIENT_AREA) -> BufferedDC
[ "__init__", "(", "self", "DC", "dc", "Bitmap", "buffer", "=", "NullBitmap", "int", "style", "=", "BUFFER_CLIENT_AREA", ")", "-", ">", "BufferedDC", "__init__", "(", "self", "DC", "dc", "Size", "area", "int", "style", "=", "BUFFER_CLIENT_AREA", ")", "-", ">", "BufferedDC" ]
def __init__(self, *args): """ __init__(self, DC dc, Bitmap buffer=NullBitmap, int style=BUFFER_CLIENT_AREA) -> BufferedDC __init__(self, DC dc, Size area, int style=BUFFER_CLIENT_AREA) -> BufferedDC Constructs a buffered DC. """ _gdi_.BufferedDC_swiginit(self,_gdi_.new_BufferedDC(*args)) # save a ref so the other dc will not be deleted before self self.__dc = args[0] # also save a ref to the bitmap if len(args) > 1: self.__bmp = args[1]
[ "def", "__init__", "(", "self", ",", "*", "args", ")", ":", "_gdi_", ".", "BufferedDC_swiginit", "(", "self", ",", "_gdi_", ".", "new_BufferedDC", "(", "*", "args", ")", ")", "# save a ref so the other dc will not be deleted before self", "self", ".", "__dc", "=", "args", "[", "0", "]", "# also save a ref to the bitmap", "if", "len", "(", "args", ")", ">", "1", ":", "self", ".", "__bmp", "=", "args", "[", "1", "]" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_gdi.py#L5315-L5326
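A sketch of the second overload inside a window's drawing path; it assumes `self` is a `wx.Window` in a running wx app, and the buffer is flushed to the wrapped DC when the BufferedDC is destroyed:

    import wx

    def draw_buffered(self):
        client_dc = wx.ClientDC(self)
        dc = wx.BufferedDC(client_dc, self.GetClientSize())  # the DC + area overload
        dc.Clear()
        dc.DrawText("buffered drawing", 10, 10)
        # contents are blitted to client_dc when `dc` goes out of scope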
cocos-creator/engine-native
984c4c9f5838253313b44ccd429bd8fac4ec8a6a
tools/bindings-generator/clang/cindex.py
python
File.time
(self)
return conf.lib.clang_getFileTime(self)
Return the last modification time of the file.
Return the last modification time of the file.
[ "Return", "the", "last", "modification", "time", "of", "the", "file", "." ]
def time(self): """Return the last modification time of the file.""" return conf.lib.clang_getFileTime(self)
[ "def", "time", "(", "self", ")", ":", "return", "conf", ".", "lib", ".", "clang_getFileTime", "(", "self", ")" ]
https://github.com/cocos-creator/engine-native/blob/984c4c9f5838253313b44ccd429bd8fac4ec8a6a/tools/bindings-generator/clang/cindex.py#L3106-L3108
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_controls.py
python
TreeCtrl.GetSelections
(*args, **kwargs)
return _controls_.TreeCtrl_GetSelections(*args, **kwargs)
GetSelections(self) -> PyObject
GetSelections(self) -> PyObject
[ "GetSelections", "(", "self", ")", "-", ">", "PyObject" ]
def GetSelections(*args, **kwargs): """GetSelections(self) -> PyObject""" return _controls_.TreeCtrl_GetSelections(*args, **kwargs)
[ "def", "GetSelections", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "TreeCtrl_GetSelections", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_controls.py#L5367-L5369
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py
python
MsvsSettings._GetAdditionalManifestFiles
(self, config, gyp_to_build_path)
return [os.path.normpath( gyp_to_build_path(self.ConvertVSMacros(f, config=config))) for f in files]
Gets additional manifest files that are added to the default one generated by the linker.
Gets additional manifest files that are added to the default one generated by the linker.
[ "Gets", "additional", "manifest", "files", "that", "are", "added", "to", "the", "default", "one", "generated", "by", "the", "linker", "." ]
def _GetAdditionalManifestFiles(self, config, gyp_to_build_path): """Gets additional manifest files that are added to the default one generated by the linker.""" files = self._Setting(('VCManifestTool', 'AdditionalManifestFiles'), config, default=[]) if isinstance(files, str): files = files.split(';') return [os.path.normpath( gyp_to_build_path(self.ConvertVSMacros(f, config=config))) for f in files]
[ "def", "_GetAdditionalManifestFiles", "(", "self", ",", "config", ",", "gyp_to_build_path", ")", ":", "files", "=", "self", ".", "_Setting", "(", "(", "'VCManifestTool'", ",", "'AdditionalManifestFiles'", ")", ",", "config", ",", "default", "=", "[", "]", ")", "if", "isinstance", "(", "files", ",", "str", ")", ":", "files", "=", "files", ".", "split", "(", "';'", ")", "return", "[", "os", ".", "path", ".", "normpath", "(", "gyp_to_build_path", "(", "self", ".", "ConvertVSMacros", "(", "f", ",", "config", "=", "config", ")", ")", ")", "for", "f", "in", "files", "]" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/deps/npm/node_modules/node-gyp/gyp/pylib/gyp/msvs_emulation.py#L757-L766
google/syzygy
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
third_party/numpy/files/numpy/polynomial/chebyshev.py
python
chebmul
(c1, c2)
return pu.trimseq(ret)
Multiply one Chebyshev series by another. Returns the product of two Chebyshev series `c1` * `c2`. The arguments are sequences of coefficients, from lowest order "term" to highest, e.g., [1,2,3] represents the series ``T_0 + 2*T_1 + 3*T_2``. Parameters ---------- c1, c2 : array_like 1-d arrays of Chebyshev series coefficients ordered from low to high. Returns ------- out : ndarray Of Chebyshev series coefficients representing their product. See Also -------- chebadd, chebsub, chebdiv, chebpow Notes ----- In general, the (polynomial) product of two C-series results in terms that are not in the Chebyshev polynomial basis set. Thus, to express the product as a C-series, it is typically necessary to "re-project" the product onto said basis set, which typically produces "un-intuitive" (but correct) results; see Examples section below. Examples -------- >>> from numpy.polynomial import chebyshev as C >>> c1 = (1,2,3) >>> c2 = (3,2,1) >>> C.chebmul(c1,c2) # multiplication requires "reprojection" array([ 6.5, 12. , 12. , 4. , 1.5])
Multiply one Chebyshev series by another.
[ "Multiply", "one", "Chebyshev", "series", "by", "another", "." ]
def chebmul(c1, c2): """ Multiply one Chebyshev series by another. Returns the product of two Chebyshev series `c1` * `c2`. The arguments are sequences of coefficients, from lowest order "term" to highest, e.g., [1,2,3] represents the series ``T_0 + 2*T_1 + 3*T_2``. Parameters ---------- c1, c2 : array_like 1-d arrays of Chebyshev series coefficients ordered from low to high. Returns ------- out : ndarray Of Chebyshev series coefficients representing their product. See Also -------- chebadd, chebsub, chebdiv, chebpow Notes ----- In general, the (polynomial) product of two C-series results in terms that are not in the Chebyshev polynomial basis set. Thus, to express the product as a C-series, it is typically necessary to "re-project" the product onto said basis set, which typically produces "un-intuitive" (but correct) results; see Examples section below. Examples -------- >>> from numpy.polynomial import chebyshev as C >>> c1 = (1,2,3) >>> c2 = (3,2,1) >>> C.chebmul(c1,c2) # multiplication requires "reprojection" array([ 6.5, 12. , 12. , 4. , 1.5]) """ # c1, c2 are trimmed copies [c1, c2] = pu.as_series([c1, c2]) z1 = _cseries_to_zseries(c1) z2 = _cseries_to_zseries(c2) prd = _zseries_mul(z1, z2) ret = _zseries_to_cseries(prd) return pu.trimseq(ret)
[ "def", "chebmul", "(", "c1", ",", "c2", ")", ":", "# c1, c2 are trimmed copies", "[", "c1", ",", "c2", "]", "=", "pu", ".", "as_series", "(", "[", "c1", ",", "c2", "]", ")", "z1", "=", "_cseries_to_zseries", "(", "c1", ")", "z2", "=", "_cseries_to_zseries", "(", "c2", ")", "prd", "=", "_zseries_mul", "(", "z1", ",", "z2", ")", "ret", "=", "_zseries_to_cseries", "(", "prd", ")", "return", "pu", ".", "trimseq", "(", "ret", ")" ]
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/polynomial/chebyshev.py#L676-L722
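The reprojected coefficients can be verified pointwise with chebval: the product series must evaluate to the product of the two evaluations:

    import numpy as np
    from numpy.polynomial import chebyshev as C

    c1, c2 = (1, 2, 3), (3, 2, 1)
    prod = C.chebmul(c1, c2)
    x = np.linspace(-1, 1, 7)
    print(np.allclose(C.chebval(x, prod), C.chebval(x, c1) * C.chebval(x, c2)))  # True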
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/setuptools/wheel.py
python
Wheel.install_as_egg
(self, destination_eggdir)
Install wheel as an egg directory.
Install wheel as an egg directory.
[ "Install", "wheel", "as", "an", "egg", "directory", "." ]
def install_as_egg(self, destination_eggdir): '''Install wheel as an egg directory.''' with zipfile.ZipFile(self.filename) as zf: self._install_as_egg(destination_eggdir, zf)
[ "def", "install_as_egg", "(", "self", ",", "destination_eggdir", ")", ":", "with", "zipfile", ".", "ZipFile", "(", "self", ".", "filename", ")", "as", "zf", ":", "self", ".", "_install_as_egg", "(", "destination_eggdir", ",", "zf", ")" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/setuptools/wheel.py#L98-L101
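A usage sketch with a hypothetical wheel file; the filename has to match setuptools' name-version-pyver-abi-platform pattern or the `Wheel` constructor rejects it:

    from setuptools.wheel import Wheel

    whl = Wheel('example_pkg-1.0-py3-none-any.whl')        # hypothetical path
    whl.install_as_egg('build/example_pkg-1.0-py3.8.egg')  # unpacks into an egg directory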
LiXizhi/NPLRuntime
a42720e5fe9a6960e0a9ce40bbbcd809192906be
Client/trunk/externals/assimp-4.0.0/port/PyAssimp/scripts/transformations.py
python
Arcball.setaxes
(self, *axes)
Set axes to constrain rotations.
Set axes to constrain rotations.
[ "Set", "axes", "to", "constrain", "rotations", "." ]
def setaxes(self, *axes): """Set axes to constrain rotations.""" if axes is None: self._axes = None else: self._axes = [unit_vector(axis) for axis in axes]
[ "def", "setaxes", "(", "self", ",", "*", "axes", ")", ":", "if", "axes", "is", "None", ":", "self", ".", "_axes", "=", "None", "else", ":", "self", ".", "_axes", "=", "[", "unit_vector", "(", "axis", ")", "for", "axis", "in", "axes", "]" ]
https://github.com/LiXizhi/NPLRuntime/blob/a42720e5fe9a6960e0a9ce40bbbcd809192906be/Client/trunk/externals/assimp-4.0.0/port/PyAssimp/scripts/transformations.py#L1420-L1425
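A usage sketch, assuming the surrounding `transformations` module is importable; the axis vectors are illustrative. Because of the star-args signature, the `axes is None` branch above is effectively unreachable: even a bare `setaxes()` call binds `axes` to an empty tuple.
from transformations import Arcball

ball = Arcball()
# Constrain subsequent drags to the x and y axes (unit_vector normalizes).
ball.setaxes([1, 0, 0], [0, 1, 0])
ball.setconstrain(True)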
baidu/AnyQ
d94d450d2aaa5f7ed73424b10aa4539835b97527
tools/simnet/preprocess/workflow.py
python
WorkFlow.read_pairwise
(self, filelist)
return dataset
Load pairwise data
Load pairwise data
[ "Load", "pairwise", "data" ]
def read_pairwise(self, filelist): """ Load pairwise data """ inputdata = [] for file in filelist: with open(file) as f: for line in f: tpl = line.rstrip('\n').split('\t') if 0 in map(lambda t:len(t), tpl): continue inputdata.append(tpl) dataset = [] qid = 0 prev_query = '' sorted_data = sorted(inputdata, key=lambda t:t[1]) for (label, query, title) in sorted_data: if query != prev_query: qid += 1 prev_query = query oo = op_out.OperationOut() oo.set_pairwise_data(qid=qid, label=label, query=query, \ title=title, src_sep=self.src_sep) dataset.append(oo) return dataset
[ "def", "read_pairwise", "(", "self", ",", "filelist", ")", ":", "inputdata", "=", "[", "]", "for", "file", "in", "filelist", ":", "with", "open", "(", "file", ")", "as", "f", ":", "for", "line", "in", "f", ":", "tpl", "=", "line", ".", "rstrip", "(", "'\\n'", ")", ".", "split", "(", "'\\t'", ")", "if", "0", "in", "map", "(", "lambda", "t", ":", "len", "(", "t", ")", ",", "tpl", ")", ":", "continue", "inputdata", ".", "append", "(", "tpl", ")", "dataset", "=", "[", "]", "qid", "=", "0", "prev_query", "=", "''", "sorted_data", "=", "sorted", "(", "inputdata", ",", "key", "=", "lambda", "t", ":", "t", "[", "1", "]", ")", "for", "(", "label", ",", "query", ",", "title", ")", "in", "sorted_data", ":", "if", "query", "!=", "prev_query", ":", "qid", "+=", "1", "prev_query", "=", "query", "oo", "=", "op_out", ".", "OperationOut", "(", ")", "oo", ".", "set_pairwise_data", "(", "qid", "=", "qid", ",", "label", "=", "label", ",", "query", "=", "query", ",", "title", "=", "title", ",", "src_sep", "=", "self", ".", "src_sep", ")", "dataset", ".", "append", "(", "oo", ")", "return", "dataset" ]
https://github.com/baidu/AnyQ/blob/d94d450d2aaa5f7ed73424b10aa4539835b97527/tools/simnet/preprocess/workflow.py#L80-L104
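A standalone sketch of the qid-assignment logic above (the `op_out.OperationOut` wrapper is omitted); input rows follow the `label<TAB>query<TAB>title` layout the reader expects.
rows = [('1', 'q2', 't1'), ('0', 'q1', 't2'), ('1', 'q1', 't3')]
qid, prev_query, grouped = 0, '', []
for label, query, title in sorted(rows, key=lambda t: t[1]):
    if query != prev_query:        # rows are sorted by query, so each new
        qid += 1                   # query value starts a new group id
        prev_query = query
    grouped.append((qid, label, query, title))
print(grouped)  # both q1 rows share qid 1, the q2 row gets qid 2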
bayandin/chromedriver
d40a2092b50f2fca817221eeb5ea093e0e642c10
log_replay/client_replay.py
python
_CountChar
(line, opening_char, closing_char)
return total
Count (number of opening_char) - (number of closing_char) in |line|. Used to check for the end of JSON parameters. Ignores characters inside of non-escaped quotes. Args: line: line to count characters in opening_char: "+1" character, { or [ closing_char: "-1" character, ] or } Returns: (number of opening_char) - (number of closing_char)
Count (number of opening_char) - (number of closing_char) in |line|.
[ "Count", "(", "number", "of", "opening_char", ")", "-", "(", "number", "of", "closing_char", ")", "in", "|line|", "." ]
def _CountChar(line, opening_char, closing_char): """Count (number of opening_char) - (number of closing_char) in |line|. Used to check for the end of JSON parameters. Ignores characters inside of non-escaped quotes. Args: line: line to count characters in opening_char: "+1" character, { or [ closing_char: "-1" character, ] or } Returns: (number of opening_char) - (number of closing_char) """ in_quote = False total = 0 for i, c in enumerate(line): if not in_quote and c is opening_char: total += 1 if not in_quote and c is closing_char: total -= 1 if c == '"' and (i == 0 or line[i-1] != "\\"): in_quote = not in_quote return total
[ "def", "_CountChar", "(", "line", ",", "opening_char", ",", "closing_char", ")", ":", "in_quote", "=", "False", "total", "=", "0", "for", "i", ",", "c", "in", "enumerate", "(", "line", ")", ":", "if", "not", "in_quote", "and", "c", "is", "opening_char", ":", "total", "+=", "1", "if", "not", "in_quote", "and", "c", "is", "closing_char", ":", "total", "-=", "1", "if", "c", "==", "'\"'", "and", "(", "i", "==", "0", "or", "line", "[", "i", "-", "1", "]", "!=", "\"\\\\\"", ")", ":", "in_quote", "=", "not", "in_quote", "return", "total" ]
https://github.com/bayandin/chromedriver/blob/d40a2092b50f2fca817221eeb5ea093e0e642c10/log_replay/client_replay.py#L231-L254
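A behavior sketch, assuming `_CountChar` as defined above is in scope; braces inside non-escaped quotes are ignored, so the nested `{ignored}` in the quoted value does not affect the balance.
line = '{"params": "{ignored}", "id": [1'
print(_CountChar(line, '{', '}'))  # 1: only the outer opening brace counts
print(_CountChar(line, '[', ']'))  # 1: the list is still open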
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/webapp2/webapp2.py
python
ResponseHeaders.__str__
(self)
return '\r\n'.join(['%s: %s' % v for v in self.items()] + ['', ''])
Returns the formatted headers ready for HTTP transmission.
Returns the formatted headers ready for HTTP transmission.
[ "Returns", "the", "formatted", "headers", "ready", "for", "HTTP", "transmission", "." ]
def __str__(self): """Returns the formatted headers ready for HTTP transmission.""" return '\r\n'.join(['%s: %s' % v for v in self.items()] + ['', ''])
[ "def", "__str__", "(", "self", ")", ":", "return", "'\\r\\n'", ".", "join", "(", "[", "'%s: %s'", "%", "v", "for", "v", "in", "self", ".", "items", "(", ")", "]", "+", "[", "''", ",", "''", "]", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/webapp2/webapp2.py#L333-L335
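An equivalent-formatting sketch using a plain list of header pairs, since the join above depends only on `items()`; note the terminating blank line (`\r\n\r\n`) that separates headers from the body.
items = [('Content-Type', 'text/html'), ('Content-Length', '12')]
wire = '\r\n'.join(['%s: %s' % v for v in items] + ['', ''])
print(repr(wire))  # 'Content-Type: text/html\r\nContent-Length: 12\r\n\r\n'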
scummvm/scummvm
9c039d027e7ffb9d83ae2e274147e2daf8d57ce2
devtools/agi-palex.py
python
printColor
(color, tabulate = True, printLastComma = True, newLine = True)
Prints a color with an optional leading tab, trailing comma, and newline
Prints a color with an optional leading tab, trailing comma, and newline
[ "Prints", "a", "color", "with", "an", "optional", "leading", "tab", "trailing", "comma", "and", "newline" ]
def printColor(color, tabulate = True, printLastComma = True, newLine = True): """Prints a color with an optional leading tab, trailing comma, and newline""" result = "" if tabulate: result += "\t" for component in color[:-1]: result += ((componentPrintFormat + ", ") % component) result += (componentPrintFormat % color[-1]) if printLastComma: result += "," if newLine: print result else: print result,
[ "def", "printColor", "(", "color", ",", "tabulate", "=", "True", ",", "printLastComma", "=", "True", ",", "newLine", "=", "True", ")", ":", "result", "=", "\"\"", "if", "tabulate", ":", "result", "+=", "\"\\t\"", "for", "component", "in", "color", "[", ":", "-", "1", "]", ":", "result", "+=", "(", "(", "componentPrintFormat", "+", "\", \"", ")", "%", "component", ")", "result", "+=", "(", "componentPrintFormat", "%", "color", "[", "-", "1", "]", ")", "if", "printLastComma", ":", "result", "+=", "\",\"", "if", "newLine", ":", "print", "result", "else", ":", "print", "result", "," ]
https://github.com/scummvm/scummvm/blob/9c039d027e7ffb9d83ae2e274147e2daf8d57ce2/devtools/agi-palex.py#L49-L62
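A Python 2 usage sketch (the script above uses Python 2 `print` statements); `componentPrintFormat` is defined elsewhere in the script, so the value assumed here is illustrative.
componentPrintFormat = "0x%02X"       # assumed format; the script defines its own
printColor((0x00, 0x2A, 0x15))        # prints "\t0x00, 0x2A, 0x15,"
printColor((0x3F, 0x3F, 0x3F), printLastComma=False)  # last color in a table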
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/retrying.py
python
Retrying.fixed_sleep
(self, previous_attempt_number, delay_since_first_attempt_ms)
return self._wait_fixed
Sleep a fixed amount of time between each retry.
Sleep a fixed amount of time between each retry.
[ "Sleep", "a", "fixed", "amount", "of", "time", "between", "each", "retry", "." ]
def fixed_sleep(self, previous_attempt_number, delay_since_first_attempt_ms): """Sleep a fixed amount of time between each retry.""" return self._wait_fixed
[ "def", "fixed_sleep", "(", "self", ",", "previous_attempt_number", ",", "delay_since_first_attempt_ms", ")", ":", "return", "self", ".", "_wait_fixed" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/pip/_vendor/retrying.py#L305-L309
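`fixed_sleep` is selected by the `retrying` package's `wait_fixed` option; a usage sketch follows (the vendored import path mirrors the standalone `retrying` distribution).
from pip._vendor.retrying import retry  # upstream: from retrying import retry

@retry(wait_fixed=2000, stop_max_attempt_number=3)
def fetch():
    # Retried up to 3 times, sleeping a fixed 2000 ms between attempts.
    raise IOError('flaky resource')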
francinexue/xuefu
b6ff79747a42e020588c0c0a921048e08fe4680c
cnx/tickds.py
python
TickDataSeries.getExtraDataSeries
(self, name)
return self.__getOrCreateExtraDS(name)
Returns a :class:`pyalgotrade.dataseries.DataSeries` for an extra column.
Returns a :class:`pyalgotrade.dataseries.DataSeries` for an extra column.
[ "Returns", "a", ":", "class", ":", "pyalgotrade", ".", "dataseries", ".", "DataSeries", "for", "an", "extra", "column", "." ]
def getExtraDataSeries(self, name): """Returns a :class:`pyalgotrade.dataseries.DataSeries` for an extra column.""" return self.__getOrCreateExtraDS(name)
[ "def", "getExtraDataSeries", "(", "self", ",", "name", ")", ":", "return", "self", ".", "__getOrCreateExtraDS", "(", "name", ")" ]
https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/cnx/tickds.py#L164-L166
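A usage sketch; the series API mirrors `pyalgotrade.dataseries.DataSeries`, and both the `tick_ds` instance and the 'open_interest' column name are hypothetical.
oi = tick_ds.getExtraDataSeries('open_interest')  # hypothetical extra column
if len(oi):
    print(oi[-1])  # DataSeries supports len() and indexing of recent values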
tensorflow/tensorflow
419e3a6b650ea4bd1b0cba23c4348f8a69f3272e
tensorflow/python/data/ops/structured_function.py
python
_should_pack
(arg)
return isinstance(arg, list)
Determines whether the caller needs to pack the argument in a tuple. If a user-defined function returns a list of tensors, `nest.flatten()` and `ops.convert_to_tensor()` would conspire to attempt to stack those tensors into a single tensor because the tf.data version of `nest.flatten()` does not recurse into lists. Since it is more likely that the list arose from returning the result of an operation (such as `tf.numpy_function()`) that returns a list of not-necessarily-stackable tensors, we treat the returned value as a `tuple` instead. A user wishing to pack the return value into a single tensor can use an explicit `tf.stack()` before returning. Args: arg: argument to check Returns: Indication of whether the caller needs to pack the argument in a tuple.
Determines whether the caller needs to pack the argument in a tuple.
[ "Determines", "whether", "the", "caller", "needs", "to", "pack", "the", "argument", "in", "a", "tuple", "." ]
def _should_pack(arg): """Determines whether the caller needs to pack the argument in a tuple. If a user-defined function returns a list of tensors, `nest.flatten()` and `ops.convert_to_tensor()` would conspire to attempt to stack those tensors into a single tensor because the tf.data version of `nest.flatten()` does not recurse into lists. Since it is more likely that the list arose from returning the result of an operation (such as `tf.numpy_function()`) that returns a list of not-necessarily-stackable tensors, we treat the returned value as a `tuple` instead. A user wishing to pack the return value into a single tensor can use an explicit `tf.stack()` before returning. Args: arg: argument to check Returns: Indication of whether the caller needs to pack the argument in a tuple. """ return isinstance(arg, list)
[ "def", "_should_pack", "(", "arg", ")", ":", "return", "isinstance", "(", "arg", ",", "list", ")" ]
https://github.com/tensorflow/tensorflow/blob/419e3a6b650ea4bd1b0cba23c4348f8a69f3272e/tensorflow/python/data/ops/structured_function.py#L50-L68
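The list-vs-tuple convention this predicate implements is visible from public `tf.data` usage: a list returned from `map` becomes a multi-component element rather than one stacked tensor.
import tensorflow as tf

ds = tf.data.Dataset.range(3).map(lambda x: [x, x + 1])
print(ds.element_spec)   # a 2-tuple of scalar int64 TensorSpecs, not one tensor

ds_stacked = tf.data.Dataset.range(3).map(lambda x: tf.stack([x, x + 1]))
print(ds_stacked.element_spec)  # TensorSpec(shape=(2,), dtype=tf.int64, ...)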
llvm/llvm-project
ffa6262cb4e2a335d26416fad39a581b4f98c5f4
clang/tools/scan-build-py/lib/libscanbuild/report.py
python
document
(args)
return result
Generates cover report and returns the number of bugs/crashes.
Generates cover report and returns the number of bugs/crashes.
[ "Generates", "cover", "report", "and", "returns", "the", "number", "of", "bugs", "/", "crashes", "." ]
def document(args): """ Generates cover report and returns the number of bugs/crashes. """ html_reports_available = args.output_format in {'html', 'plist-html', 'sarif-html'} sarif_reports_available = args.output_format in {'sarif', 'sarif-html'} logging.debug('count crashes and bugs') crash_count = sum(1 for _ in read_crashes(args.output)) bug_counter = create_counters() for bug in read_bugs(args.output, html_reports_available): bug_counter(bug) result = crash_count + bug_counter.total if html_reports_available and result: use_cdb = os.path.exists(args.cdb) logging.debug('generate index.html file') # common prefix for source files to have shorter path prefix = commonprefix_from(args.cdb) if use_cdb else os.getcwd() # assemble the cover from multiple fragments fragments = [] try: if bug_counter.total: fragments.append(bug_summary(args.output, bug_counter)) fragments.append(bug_report(args.output, prefix)) if crash_count: fragments.append(crash_report(args.output, prefix)) assemble_cover(args, prefix, fragments) # copy additional files to the report copy_resource_files(args.output) if use_cdb: shutil.copy(args.cdb, args.output) finally: for fragment in fragments: os.remove(fragment) if sarif_reports_available: logging.debug('merging sarif files') merge_sarif_files(args.output) return result
[ "def", "document", "(", "args", ")", ":", "html_reports_available", "=", "args", ".", "output_format", "in", "{", "'html'", ",", "'plist-html'", ",", "'sarif-html'", "}", "sarif_reports_available", "=", "args", ".", "output_format", "in", "{", "'sarif'", ",", "'sarif-html'", "}", "logging", ".", "debug", "(", "'count crashes and bugs'", ")", "crash_count", "=", "sum", "(", "1", "for", "_", "in", "read_crashes", "(", "args", ".", "output", ")", ")", "bug_counter", "=", "create_counters", "(", ")", "for", "bug", "in", "read_bugs", "(", "args", ".", "output", ",", "html_reports_available", ")", ":", "bug_counter", "(", "bug", ")", "result", "=", "crash_count", "+", "bug_counter", ".", "total", "if", "html_reports_available", "and", "result", ":", "use_cdb", "=", "os", ".", "path", ".", "exists", "(", "args", ".", "cdb", ")", "logging", ".", "debug", "(", "'generate index.html file'", ")", "# common prefix for source files to have shorter path", "prefix", "=", "commonprefix_from", "(", "args", ".", "cdb", ")", "if", "use_cdb", "else", "os", ".", "getcwd", "(", ")", "# assemble the cover from multiple fragments", "fragments", "=", "[", "]", "try", ":", "if", "bug_counter", ".", "total", ":", "fragments", ".", "append", "(", "bug_summary", "(", "args", ".", "output", ",", "bug_counter", ")", ")", "fragments", ".", "append", "(", "bug_report", "(", "args", ".", "output", ",", "prefix", ")", ")", "if", "crash_count", ":", "fragments", ".", "append", "(", "crash_report", "(", "args", ".", "output", ",", "prefix", ")", ")", "assemble_cover", "(", "args", ",", "prefix", ",", "fragments", ")", "# copy additional files to the report", "copy_resource_files", "(", "args", ".", "output", ")", "if", "use_cdb", ":", "shutil", ".", "copy", "(", "args", ".", "cdb", ",", "args", ".", "output", ")", "finally", ":", "for", "fragment", "in", "fragments", ":", "os", ".", "remove", "(", "fragment", ")", "if", "sarif_reports_available", ":", "logging", ".", "debug", "(", "'merging sarif files'", ")", "merge_sarif_files", "(", "args", ".", "output", ")", "return", "result" ]
https://github.com/llvm/llvm-project/blob/ffa6262cb4e2a335d26416fad39a581b4f98c5f4/clang/tools/scan-build-py/lib/libscanbuild/report.py#L26-L66
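An invocation sketch with a hypothetical argparse-style namespace; the attribute names (`output`, `output_format`, `cdb`) are the ones the function body above reads.
from argparse import Namespace
from libscanbuild.report import document  # importable with scan-build-py on sys.path

args = Namespace(output='/tmp/scan-out',        # hypothetical report directory
                 output_format='html',
                 cdb='compile_commands.json')   # hypothetical compilation database
issue_count = document(args)  # crashes + bugs; nonzero means findings were reported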
OSGeo/gdal
3748fc4ba4fba727492774b2b908a2130c864a83
swig/python/osgeo/osr.py
python
GetCRSInfoListFromDatabase
(*args)
return _osr.GetCRSInfoListFromDatabase(*args)
r"""GetCRSInfoListFromDatabase(char const * authName)
r"""GetCRSInfoListFromDatabase(char const * authName)
[ "r", "GetCRSInfoListFromDatabase", "(", "char", "const", "*", "authName", ")" ]
def GetCRSInfoListFromDatabase(*args): r"""GetCRSInfoListFromDatabase(char const * authName)""" return _osr.GetCRSInfoListFromDatabase(*args)
[ "def", "GetCRSInfoListFromDatabase", "(", "*", "args", ")", ":", "return", "_osr", ".", "GetCRSInfoListFromDatabase", "(", "*", "args", ")" ]
https://github.com/OSGeo/gdal/blob/3748fc4ba4fba727492774b2b908a2130c864a83/swig/python/osgeo/osr.py#L1048-L1050
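A usage sketch against GDAL's public Python bindings; each returned record is an `osr.CRSInfo`, which (in GDAL 3.x, to the best of this sketch's assumptions) exposes fields such as `auth_name`, `code`, and `name`.
from osgeo import osr

infos = osr.GetCRSInfoListFromDatabase('EPSG')
print(len(infos))
print(infos[0].auth_name, infos[0].code, infos[0].name)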
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/x86/toolchain/lib/python2.7/plat-mac/lib-scriptpackages/CodeWarrior/Metrowerks_Shell_Suite.py
python
Metrowerks_Shell_Suite_Events.Precompile
(self, _object, _attributes={}, **_arguments)
Precompile: Precompile the specified file to the specified destination file Required argument: File to precompile Keyword argument Saving_As: Destination file for precompiled header Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors for the precompiled file
Precompile: Precompile the specified file to the specified destination file Required argument: File to precompile Keyword argument Saving_As: Destination file for precompiled header Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors for the precompiled file
[ "Precompile", ":", "Precompile", "the", "specified", "file", "to", "the", "specified", "destination", "file", "Required", "argument", ":", "File", "to", "precompile", "Keyword", "argument", "Saving_As", ":", "Destination", "file", "for", "precompiled", "header", "Keyword", "argument", "ExternalEditor", ":", "Should", "the", "contents", "of", "the", "message", "window", "be", "returned", "to", "the", "caller?", "Keyword", "argument", "_attributes", ":", "AppleEvent", "attribute", "dictionary", "Returns", ":", "Errors", "for", "the", "precompiled", "file" ]
def Precompile(self, _object, _attributes={}, **_arguments): """Precompile: Precompile the specified file to the specified destination file Required argument: File to precompile Keyword argument Saving_As: Destination file for precompiled header Keyword argument ExternalEditor: Should the contents of the message window be returned to the caller? Keyword argument _attributes: AppleEvent attribute dictionary Returns: Errors for the precompiled file """ _code = 'MMPR' _subcode = 'PreC' aetools.keysubst(_arguments, self._argmap_Precompile) _arguments['----'] = _object _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----']
[ "def", "Precompile", "(", "self", ",", "_object", ",", "_attributes", "=", "{", "}", ",", "*", "*", "_arguments", ")", ":", "_code", "=", "'MMPR'", "_subcode", "=", "'PreC'", "aetools", ".", "keysubst", "(", "_arguments", ",", "self", ".", "_argmap_Precompile", ")", "_arguments", "[", "'----'", "]", "=", "_object", "_reply", ",", "_arguments", ",", "_attributes", "=", "self", ".", "send", "(", "_code", ",", "_subcode", ",", "_arguments", ",", "_attributes", ")", "if", "_arguments", ".", "get", "(", "'errn'", ",", "0", ")", ":", "raise", "aetools", ".", "Error", ",", "aetools", ".", "decodeerror", "(", "_arguments", ")", "# XXXX Optionally decode result", "if", "_arguments", ".", "has_key", "(", "'----'", ")", ":", "return", "_arguments", "[", "'----'", "]" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/x86/toolchain/lib/python2.7/plat-mac/lib-scriptpackages/CodeWarrior/Metrowerks_Shell_Suite.py#L449-L470
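A Python 2 sketch for classic Mac OS, where the generated `CodeWarrior` script package drives a running IDE over Apple Events; the client class name and both HFS paths are assumptions.
import CodeWarrior  # generated lib-scriptpackages module, Python 2 / classic Mac OS

cw = CodeWarrior.CodeWarrior()  # assumed talker class; connects to the running IDE
errors = cw.Precompile('HD:Src:prefix.h', Saving_As='HD:Src:prefix.mch')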