Dataset columns (name, dtype, length range or distinct values):

    nwo                  stringlengths   5 - 86
    sha                  stringlengths   40 - 40
    path                 stringlengths   4 - 189
    language             stringclasses   1 value
    identifier           stringlengths   1 - 94
    parameters           stringlengths   2 - 4.03k
    argument_list        stringclasses   1 value
    return_statement     stringlengths   0 - 11.5k
    docstring            stringlengths   1 - 33.2k
    docstring_summary    stringlengths   0 - 5.15k
    docstring_tokens     sequence        -
    function             stringlengths   34 - 151k
    function_tokens      sequence        -
    url                  stringlengths   90 - 278
stan-dev/math
5fd79f89933269a4ca4d8dd1fde2a36d53d4768c
lib/cpplint_1.4.5/cpplint.py
python
_AddFilters
(filters)
Adds more filter overrides. Unlike _SetFilters, this function does not reset the current list of filters available. Args: filters: A string of comma-separated filters (eg "whitespace/indent"). Each filter should start with + or -; else we die.
Adds more filter overrides.
[ "Adds", "more", "filter", "overrides", "." ]
def _AddFilters(filters): """Adds more filter overrides. Unlike _SetFilters, this function does not reset the current list of filters available. Args: filters: A string of comma-separated filters (eg "whitespace/indent"). Each filter should start with + or -; else we die. """ _cpplint_state.AddFilters(filters)
[ "def", "_AddFilters", "(", "filters", ")", ":", "_cpplint_state", ".", "AddFilters", "(", "filters", ")" ]
https://github.com/stan-dev/math/blob/5fd79f89933269a4ca4d8dd1fde2a36d53d4768c/lib/cpplint_1.4.5/cpplint.py#L1221-L1231
cms-sw/cmssw
fd9de012d503d3405420bcbeec0ec879baa57cf2
Alignment/OfflineValidation/python/TkAlAllInOneTool/helperFunctions.py
python
replaceByMap
(target, the_map)
return result
This function replaces `.oO[key]Oo.` by `the_map[key]` in target. Arguments: - `target`: String which contains symbolic tags of the form `.oO[key]Oo.` - `the_map`: Dictionary which has to contain the `key`s in `target` as keys
This function replaces `.oO[key]Oo.` by `the_map[key]` in target.
[ "This", "function", "replaces", ".", "oO", "[", "key", "]", "Oo", ".", "by", "the_map", "[", "key", "]", "in", "target", "." ]
def replaceByMap(target, the_map): """This function replaces `.oO[key]Oo.` by `the_map[key]` in target. Arguments: - `target`: String which contains symbolic tags of the form `.oO[key]Oo.` - `the_map`: Dictionary which has to contain the `key`s in `target` as keys """ result = target for key in the_map: lifeSaver = 10e3 iteration = 0 while ".oO[" in result and "]Oo." in result: for key in the_map: try: result = result.replace(".oO["+key+"]Oo.",the_map[key]) except TypeError: #try a dict try: for keykey, value in the_map[key].items(): result = result.replace(".oO[" + key + "['" + keykey + "']]Oo.", value) result = result.replace(".oO[" + key + '["' + keykey + '"]]Oo.', value) except AttributeError: #try a list try: for index, value in enumerate(the_map[key]): result = result.replace(".oO[" + key + "[" + str(index) + "]]Oo.", value) except TypeError: raise TypeError("Something is wrong in replaceByMap! Need a string, dict, or list, but the_map(%s)=%s!"%(repr(key), repr(the_map[key]))) iteration += 1 if iteration > lifeSaver: problematicLines = "" for line in result.splitlines(): if ".oO[" in result and "]Oo." in line: problematicLines += "%s\n"%line msg = ("Oh Dear, there seems to be an endless loop in " "replaceByMap!!\n%s\n%s"%(problematicLines, the_map)) raise AllInOneError(msg) return result
[ "def", "replaceByMap", "(", "target", ",", "the_map", ")", ":", "result", "=", "target", "for", "key", "in", "the_map", ":", "lifeSaver", "=", "10e3", "iteration", "=", "0", "while", "\".oO[\"", "in", "result", "and", "\"]Oo.\"", "in", "result", ":", "for", "key", "in", "the_map", ":", "try", ":", "result", "=", "result", ".", "replace", "(", "\".oO[\"", "+", "key", "+", "\"]Oo.\"", ",", "the_map", "[", "key", "]", ")", "except", "TypeError", ":", "#try a dict", "try", ":", "for", "keykey", ",", "value", "in", "the_map", "[", "key", "]", ".", "items", "(", ")", ":", "result", "=", "result", ".", "replace", "(", "\".oO[\"", "+", "key", "+", "\"['\"", "+", "keykey", "+", "\"']]Oo.\"", ",", "value", ")", "result", "=", "result", ".", "replace", "(", "\".oO[\"", "+", "key", "+", "'[\"'", "+", "keykey", "+", "'\"]]Oo.'", ",", "value", ")", "except", "AttributeError", ":", "#try a list", "try", ":", "for", "index", ",", "value", "in", "enumerate", "(", "the_map", "[", "key", "]", ")", ":", "result", "=", "result", ".", "replace", "(", "\".oO[\"", "+", "key", "+", "\"[\"", "+", "str", "(", "index", ")", "+", "\"]]Oo.\"", ",", "value", ")", "except", "TypeError", ":", "raise", "TypeError", "(", "\"Something is wrong in replaceByMap! Need a string, dict, or list, but the_map(%s)=%s!\"", "%", "(", "repr", "(", "key", ")", ",", "repr", "(", "the_map", "[", "key", "]", ")", ")", ")", "iteration", "+=", "1", "if", "iteration", ">", "lifeSaver", ":", "problematicLines", "=", "\"\"", "for", "line", "in", "result", ".", "splitlines", "(", ")", ":", "if", "\".oO[\"", "in", "result", "and", "\"]Oo.\"", "in", "line", ":", "problematicLines", "+=", "\"%s\\n\"", "%", "line", "msg", "=", "(", "\"Oh Dear, there seems to be an endless loop in \"", "\"replaceByMap!!\\n%s\\n%s\"", "%", "(", "problematicLines", ",", "the_map", ")", ")", "raise", "AllInOneError", "(", "msg", ")", "return", "result" ]
https://github.com/cms-sw/cmssw/blob/fd9de012d503d3405420bcbeec0ec879baa57cf2/Alignment/OfflineValidation/python/TkAlAllInOneTool/helperFunctions.py#L12-L48
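A minimal standalone sketch of the `.oO[key]Oo.` substitution described above, covering only the plain-string case (the template and values below are invented; the real replaceByMap also handles dict- and list-valued entries and raises AllInOneError on endless loops):

def replace_by_map_simple(target, the_map):
    # Substitute every ".oO[key]Oo." tag whose value is a plain string.
    result = target
    while ".oO[" in result and "]Oo." in result:
        before = result
        for key, value in the_map.items():
            result = result.replace(".oO[" + key + "]Oo.", value)
        if result == before:
            # Nothing we know how to substitute is left; stop instead of looping forever.
            break
    return result

template = "dataset = .oO[dataset]Oo., tag = .oO[GlobalTag]Oo."
print(replace_by_map_simple(template, {"dataset": "/RelVal/Run3", "GlobalTag": "auto:run3_data"}))
# dataset = /RelVal/Run3, tag = auto:run3_data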
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/gsutil/third_party/boto/boto/codedeploy/layer1.py
python
CodeDeployConnection.create_deployment_config
(self, deployment_config_name, minimum_healthy_hosts=None)
return self.make_request(action='CreateDeploymentConfig', body=json.dumps(params))
Creates a new deployment configuration. :type deployment_config_name: string :param deployment_config_name: The name of the deployment configuration to create. :type minimum_healthy_hosts: dict :param minimum_healthy_hosts: The minimum number of healthy instances that should be available at any time during the deployment. There are two parameters expected in the input: type and value. The type parameter takes either of the following values: + HOST_COUNT: The value parameter represents the minimum number of healthy instances, as an absolute value. + FLEET_PERCENT: The value parameter represents the minimum number of healthy instances, as a percentage of the total number of instances in the deployment. If you specify FLEET_PERCENT, then at the start of the deployment AWS CodeDeploy converts the percentage to the equivalent number of instances and rounds fractional instances up. The value parameter takes an integer. For example, to set a minimum of 95% healthy instances, specify a type of FLEET_PERCENT and a value of 95.
Creates a new deployment configuration.
[ "Creates", "a", "new", "deployment", "configuration", "." ]
def create_deployment_config(self, deployment_config_name, minimum_healthy_hosts=None): """ Creates a new deployment configuration. :type deployment_config_name: string :param deployment_config_name: The name of the deployment configuration to create. :type minimum_healthy_hosts: dict :param minimum_healthy_hosts: The minimum number of healthy instances that should be available at any time during the deployment. There are two parameters expected in the input: type and value. The type parameter takes either of the following values: + HOST_COUNT: The value parameter represents the minimum number of healthy instances, as an absolute value. + FLEET_PERCENT: The value parameter represents the minimum number of healthy instances, as a percentage of the total number of instances in the deployment. If you specify FLEET_PERCENT, then at the start of the deployment AWS CodeDeploy converts the percentage to the equivalent number of instances and rounds fractional instances up. The value parameter takes an integer. For example, to set a minimum of 95% healthy instances, specify a type of FLEET_PERCENT and a value of 95. """ params = {'deploymentConfigName': deployment_config_name, } if minimum_healthy_hosts is not None: params['minimumHealthyHosts'] = minimum_healthy_hosts return self.make_request(action='CreateDeploymentConfig', body=json.dumps(params))
[ "def", "create_deployment_config", "(", "self", ",", "deployment_config_name", ",", "minimum_healthy_hosts", "=", "None", ")", ":", "params", "=", "{", "'deploymentConfigName'", ":", "deployment_config_name", ",", "}", "if", "minimum_healthy_hosts", "is", "not", "None", ":", "params", "[", "'minimumHealthyHosts'", "]", "=", "minimum_healthy_hosts", "return", "self", ".", "make_request", "(", "action", "=", "'CreateDeploymentConfig'", ",", "body", "=", "json", ".", "dumps", "(", "params", ")", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/gsutil/third_party/boto/boto/codedeploy/layer1.py#L262-L297
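A hedged sketch of how the `minimum_healthy_hosts` argument described above is shaped and passed (the connection setup and the config name are assumptions, not shown in the record):

# Assumes `conn` is an existing boto CodeDeployConnection; the config name is invented.
# Per the docstring, the dict carries a type (HOST_COUNT or FLEET_PERCENT) and an
# integer value; here: keep at least 95% of instances healthy during a deployment.
minimum_healthy_hosts = {'type': 'FLEET_PERCENT', 'value': 95}
conn.create_deployment_config('example-deployment-config',
                              minimum_healthy_hosts=minimum_healthy_hosts)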
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py2/scipy/optimize/_trustregion_ncg.py
python
_minimize_trust_ncg
(fun, x0, args=(), jac=None, hess=None, hessp=None, **trust_region_options)
return _minimize_trust_region(fun, x0, args=args, jac=jac, hess=hess, hessp=hessp, subproblem=CGSteihaugSubproblem, **trust_region_options)
Minimization of scalar function of one or more variables using the Newton conjugate gradient trust-region algorithm. Options ------- initial_trust_radius : float Initial trust-region radius. max_trust_radius : float Maximum value of the trust-region radius. No steps that are longer than this value will be proposed. eta : float Trust region related acceptance stringency for proposed steps. gtol : float Gradient norm must be less than `gtol` before successful termination.
Minimization of scalar function of one or more variables using the Newton conjugate gradient trust-region algorithm.
[ "Minimization", "of", "scalar", "function", "of", "one", "or", "more", "variables", "using", "the", "Newton", "conjugate", "gradient", "trust", "-", "region", "algorithm", "." ]
def _minimize_trust_ncg(fun, x0, args=(), jac=None, hess=None, hessp=None, **trust_region_options): """ Minimization of scalar function of one or more variables using the Newton conjugate gradient trust-region algorithm. Options ------- initial_trust_radius : float Initial trust-region radius. max_trust_radius : float Maximum value of the trust-region radius. No steps that are longer than this value will be proposed. eta : float Trust region related acceptance stringency for proposed steps. gtol : float Gradient norm must be less than `gtol` before successful termination. """ if jac is None: raise ValueError('Jacobian is required for Newton-CG trust-region ' 'minimization') if hess is None and hessp is None: raise ValueError('Either the Hessian or the Hessian-vector product ' 'is required for Newton-CG trust-region minimization') return _minimize_trust_region(fun, x0, args=args, jac=jac, hess=hess, hessp=hessp, subproblem=CGSteihaugSubproblem, **trust_region_options)
[ "def", "_minimize_trust_ncg", "(", "fun", ",", "x0", ",", "args", "=", "(", ")", ",", "jac", "=", "None", ",", "hess", "=", "None", ",", "hessp", "=", "None", ",", "*", "*", "trust_region_options", ")", ":", "if", "jac", "is", "None", ":", "raise", "ValueError", "(", "'Jacobian is required for Newton-CG trust-region '", "'minimization'", ")", "if", "hess", "is", "None", "and", "hessp", "is", "None", ":", "raise", "ValueError", "(", "'Either the Hessian or the Hessian-vector product '", "'is required for Newton-CG trust-region minimization'", ")", "return", "_minimize_trust_region", "(", "fun", ",", "x0", ",", "args", "=", "args", ",", "jac", "=", "jac", ",", "hess", "=", "hess", ",", "hessp", "=", "hessp", ",", "subproblem", "=", "CGSteihaugSubproblem", ",", "*", "*", "trust_region_options", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py2/scipy/optimize/_trustregion_ncg.py#L13-L41
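This helper is normally reached through SciPy's public `minimize` entry point with `method='trust-ncg'`; a small usage sketch on the Rosenbrock test function (standard SciPy API; treating the public wrapper as the intended call path is an inference from the `_minimize_` naming):

import numpy as np
from scipy.optimize import minimize, rosen, rosen_der, rosen_hess

x0 = np.array([1.3, 0.7, 0.8, 1.9, 1.2])
# jac is required, and either hess or hessp must be supplied (see the checks above).
res = minimize(rosen, x0, method='trust-ncg', jac=rosen_der, hess=rosen_hess,
               options={'gtol': 1e-8})
print(res.x)  # converges to the all-ones minimizer of the Rosenbrock function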
PaddlePaddle/Anakin
5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730
tools/external_converter_v2/parser/caffe/caffe_layer_param_transmit.py
python
NotNeededInInference
(args)
Not need to parsing
Not need to parsing
[ "Not", "need", "to", "parsing" ]
def NotNeededInInference(args): """ Not need to parsing """ # args is tuple object node_io = args[0] layer = args[1] tensors = args[2] logger(verbose.INFO).feed("Layer type(", layer.name, " : ", layer.type, ") with ", \ len(tensors), " tensors not needed in inference.")
[ "def", "NotNeededInInference", "(", "args", ")", ":", "# args is tuple object", "node_io", "=", "args", "[", "0", "]", "layer", "=", "args", "[", "1", "]", "tensors", "=", "args", "[", "2", "]", "logger", "(", "verbose", ".", "INFO", ")", ".", "feed", "(", "\"Layer type(\"", ",", "layer", ".", "name", ",", "\" : \"", ",", "layer", ".", "type", ",", "\") with \"", ",", "len", "(", "tensors", ")", ",", "\" tensors not needed in inference.\"", ")" ]
https://github.com/PaddlePaddle/Anakin/blob/5fd68a6cc4c4620cd1a30794c1bf06eebd3f4730/tools/external_converter_v2/parser/caffe/caffe_layer_param_transmit.py#L63-L72
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/sandbox.py
python
AbstractSandbox.run
(self, func)
Run 'func' under os sandboxing
Run 'func' under os sandboxing
[ "Run", "func", "under", "os", "sandboxing" ]
def run(self, func): """Run 'func' under os sandboxing""" with self: return func()
[ "def", "run", "(", "self", ",", "func", ")", ":", "with", "self", ":", "return", "func", "(", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/site-packages/setuptools/sandbox.py#L286-L289
livecode/livecode
4606a10ea10b16d5071d0f9f263ccdd7ede8b31d
gyp/pylib/gyp/msvs_emulation.py
python
_GenericRetrieve
(root, default, path)
return _GenericRetrieve(root.get(path[0]), default, path[1:])
Given a list of dictionary keys |path| and a tree of dicts |root|, find value at path, or return |default| if any of the path doesn't exist.
Given a list of dictionary keys |path| and a tree of dicts |root|, find value at path, or return |default| if any of the path doesn't exist.
[ "Given", "a", "list", "of", "dictionary", "keys", "|path|", "and", "a", "tree", "of", "dicts", "|root|", "find", "value", "at", "path", "or", "return", "|default|", "if", "any", "of", "the", "path", "doesn", "t", "exist", "." ]
def _GenericRetrieve(root, default, path): """Given a list of dictionary keys |path| and a tree of dicts |root|, find value at path, or return |default| if any of the path doesn't exist.""" if not root: return default if not path: return root return _GenericRetrieve(root.get(path[0]), default, path[1:])
[ "def", "_GenericRetrieve", "(", "root", ",", "default", ",", "path", ")", ":", "if", "not", "root", ":", "return", "default", "if", "not", "path", ":", "return", "root", "return", "_GenericRetrieve", "(", "root", ".", "get", "(", "path", "[", "0", "]", ")", ",", "default", ",", "path", "[", "1", ":", "]", ")" ]
https://github.com/livecode/livecode/blob/4606a10ea10b16d5071d0f9f263ccdd7ede8b31d/gyp/pylib/gyp/msvs_emulation.py#L69-L76
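A quick standalone check of the retrieval behavior (the helper is copied from the record above so the snippet runs on its own; the sample data is invented):

def _GenericRetrieve(root, default, path):
    if not root:
        return default
    if not path:
        return root
    return _GenericRetrieve(root.get(path[0]), default, path[1:])

settings = {'VCCLCompilerTool': {'Optimization': '2'}}
print(_GenericRetrieve(settings, None, ['VCCLCompilerTool', 'Optimization']))    # '2'
print(_GenericRetrieve(settings, 'fallback', ['VCCLCompilerTool', 'Missing']))   # 'fallback'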
ricardoquesada/Spidermonkey
4a75ea2543408bd1b2c515aa95901523eeef7858
build/upload.py
python
DoSSHCommand
(command, user, host, port=None, ssh_key=None)
return cmd.stdout.read().strip()
Execute command on user@host using ssh. Optionally use port and ssh_key, if provided.
Execute command on user
[ "Execute", "command", "on", "user" ]
def DoSSHCommand(command, user, host, port=None, ssh_key=None): """Execute command on user@host using ssh. Optionally use port and ssh_key, if provided.""" cmdline = ["ssh"] AppendOptionalArgsToSSHCommandline(cmdline, port, ssh_key) cmdline.extend(["%s@%s" % (user, host), command]) cmd = Popen(cmdline, stdout=PIPE) retcode = cmd.wait() if retcode != 0: raise Exception("Command %s returned non-zero exit code: %i" % \ (cmdline, retcode)) return cmd.stdout.read().strip()
[ "def", "DoSSHCommand", "(", "command", ",", "user", ",", "host", ",", "port", "=", "None", ",", "ssh_key", "=", "None", ")", ":", "cmdline", "=", "[", "\"ssh\"", "]", "AppendOptionalArgsToSSHCommandline", "(", "cmdline", ",", "port", ",", "ssh_key", ")", "cmdline", ".", "extend", "(", "[", "\"%s@%s\"", "%", "(", "user", ",", "host", ")", ",", "command", "]", ")", "cmd", "=", "Popen", "(", "cmdline", ",", "stdout", "=", "PIPE", ")", "retcode", "=", "cmd", ".", "wait", "(", ")", "if", "retcode", "!=", "0", ":", "raise", "Exception", "(", "\"Command %s returned non-zero exit code: %i\"", "%", "(", "cmdline", ",", "retcode", ")", ")", "return", "cmd", ".", "stdout", ".", "read", "(", ")", ".", "strip", "(", ")" ]
https://github.com/ricardoquesada/Spidermonkey/blob/4a75ea2543408bd1b2c515aa95901523eeef7858/build/upload.py#L79-L90
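A rough standalone equivalent of the same idea using subprocess.run (a sketch, not the project's code; the -p/-i handling stands in for AppendOptionalArgsToSSHCommandline, whose implementation isn't shown in the record):

import subprocess

def do_ssh_command(command, user, host, port=None, ssh_key=None):
    # Build "ssh [-p PORT] [-i KEY] user@host command" and return its stripped stdout;
    # check=True raises CalledProcessError on a non-zero exit, mirroring the Exception above.
    cmdline = ["ssh"]
    if port is not None:
        cmdline.extend(["-p", str(port)])
    if ssh_key is not None:
        cmdline.extend(["-i", ssh_key])
    cmdline.extend(["%s@%s" % (user, host), command])
    result = subprocess.run(cmdline, stdout=subprocess.PIPE, check=True)
    return result.stdout.strip()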
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
lts/tools/inspector_protocol/jinja2/idtracking.py
python
FrameSymbolVisitor.visit_OverlayScope
(self, node, **kwargs)
Do not visit into overlay scopes.
Do not visit into overlay scopes.
[ "Do", "not", "visit", "into", "overlay", "scopes", "." ]
def visit_OverlayScope(self, node, **kwargs): """Do not visit into overlay scopes."""
[ "def", "visit_OverlayScope", "(", "self", ",", "node", ",", "*", "*", "kwargs", ")", ":" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/lts/tools/inspector_protocol/jinja2/idtracking.py#L285-L286
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/command/easy_install.py
python
update_dist_caches
(dist_path, fix_zipimporter_caches)
Fix any globally cached `dist_path` related data `dist_path` should be a path of a newly installed egg distribution (zipped or unzipped). sys.path_importer_cache contains finder objects that have been cached when importing data from the original distribution. Any such finders need to be cleared since the replacement distribution might be packaged differently, e.g. a zipped egg distribution might get replaced with an unzipped egg folder or vice versa. Having the old finders cached may then cause Python to attempt loading modules from the replacement distribution using an incorrect loader. zipimport.zipimporter objects are Python loaders charged with importing data packaged inside zip archives. If stale loaders referencing the original distribution, are left behind, they can fail to load modules from the replacement distribution. E.g. if an old zipimport.zipimporter instance is used to load data from a new zipped egg archive, it may cause the operation to attempt to locate the requested data in the wrong location - one indicated by the original distribution's zip archive directory information. Such an operation may then fail outright, e.g. report having read a 'bad local file header', or even worse, it may fail silently & return invalid data. zipimport._zip_directory_cache contains cached zip archive directory information for all existing zipimport.zipimporter instances and all such instances connected to the same archive share the same cached directory information. If asked, and the underlying Python implementation allows it, we can fix all existing zipimport.zipimporter instances instead of having to track them down and remove them one by one, by updating their shared cached zip archive directory information. This, of course, assumes that the replacement distribution is packaged as a zipped egg. If not asked to fix existing zipimport.zipimporter instances, we still do our best to clear any remaining zipimport.zipimporter related cached data that might somehow later get used when attempting to load data from the new distribution and thus cause such load operations to fail. Note that when tracking down such remaining stale data, we can not catch every conceivable usage from here, and we clear only those that we know of and have found to cause problems if left alive. Any remaining caches should be updated by whomever is in charge of maintaining them, i.e. they should be ready to handle us replacing their zip archives with new distributions at runtime.
Fix any globally cached `dist_path` related data
[ "Fix", "any", "globally", "cached", "dist_path", "related", "data" ]
def update_dist_caches(dist_path, fix_zipimporter_caches): """ Fix any globally cached `dist_path` related data `dist_path` should be a path of a newly installed egg distribution (zipped or unzipped). sys.path_importer_cache contains finder objects that have been cached when importing data from the original distribution. Any such finders need to be cleared since the replacement distribution might be packaged differently, e.g. a zipped egg distribution might get replaced with an unzipped egg folder or vice versa. Having the old finders cached may then cause Python to attempt loading modules from the replacement distribution using an incorrect loader. zipimport.zipimporter objects are Python loaders charged with importing data packaged inside zip archives. If stale loaders referencing the original distribution, are left behind, they can fail to load modules from the replacement distribution. E.g. if an old zipimport.zipimporter instance is used to load data from a new zipped egg archive, it may cause the operation to attempt to locate the requested data in the wrong location - one indicated by the original distribution's zip archive directory information. Such an operation may then fail outright, e.g. report having read a 'bad local file header', or even worse, it may fail silently & return invalid data. zipimport._zip_directory_cache contains cached zip archive directory information for all existing zipimport.zipimporter instances and all such instances connected to the same archive share the same cached directory information. If asked, and the underlying Python implementation allows it, we can fix all existing zipimport.zipimporter instances instead of having to track them down and remove them one by one, by updating their shared cached zip archive directory information. This, of course, assumes that the replacement distribution is packaged as a zipped egg. If not asked to fix existing zipimport.zipimporter instances, we still do our best to clear any remaining zipimport.zipimporter related cached data that might somehow later get used when attempting to load data from the new distribution and thus cause such load operations to fail. Note that when tracking down such remaining stale data, we can not catch every conceivable usage from here, and we clear only those that we know of and have found to cause problems if left alive. Any remaining caches should be updated by whomever is in charge of maintaining them, i.e. they should be ready to handle us replacing their zip archives with new distributions at runtime. """ # There are several other known sources of stale zipimport.zipimporter # instances that we do not clear here, but might if ever given a reason to # do so: # * Global setuptools pkg_resources.working_set (a.k.a. 'master working # set') may contain distributions which may in turn contain their # zipimport.zipimporter loaders. # * Several zipimport.zipimporter loaders held by local variables further # up the function call stack when running the setuptools installation. # * Already loaded modules may have their __loader__ attribute set to the # exact loader instance used when importing them. Python 3.4 docs state # that this information is intended mostly for introspection and so is # not expected to cause us problems. 
normalized_path = normalize_path(dist_path) _uncache(normalized_path, sys.path_importer_cache) if fix_zipimporter_caches: _replace_zip_directory_cache_data(normalized_path) else: # Here, even though we do not want to fix existing and now stale # zipimporter cache information, we still want to remove it. Related to # Python's zip archive directory information cache, we clear each of # its stale entries in two phases: # 1. Clear the entry so attempting to access zip archive information # via any existing stale zipimport.zipimporter instances fails. # 2. Remove the entry from the cache so any newly constructed # zipimport.zipimporter instances do not end up using old stale # zip archive directory information. # This whole stale data removal step does not seem strictly necessary, # but has been left in because it was done before we started replacing # the zip archive directory information cache content if possible, and # there are no relevant unit tests that we can depend on to tell us if # this is really needed. _remove_and_clear_zip_directory_cache_data(normalized_path)
[ "def", "update_dist_caches", "(", "dist_path", ",", "fix_zipimporter_caches", ")", ":", "# There are several other known sources of stale zipimport.zipimporter", "# instances that we do not clear here, but might if ever given a reason to", "# do so:", "# * Global setuptools pkg_resources.working_set (a.k.a. 'master working", "# set') may contain distributions which may in turn contain their", "# zipimport.zipimporter loaders.", "# * Several zipimport.zipimporter loaders held by local variables further", "# up the function call stack when running the setuptools installation.", "# * Already loaded modules may have their __loader__ attribute set to the", "# exact loader instance used when importing them. Python 3.4 docs state", "# that this information is intended mostly for introspection and so is", "# not expected to cause us problems.", "normalized_path", "=", "normalize_path", "(", "dist_path", ")", "_uncache", "(", "normalized_path", ",", "sys", ".", "path_importer_cache", ")", "if", "fix_zipimporter_caches", ":", "_replace_zip_directory_cache_data", "(", "normalized_path", ")", "else", ":", "# Here, even though we do not want to fix existing and now stale", "# zipimporter cache information, we still want to remove it. Related to", "# Python's zip archive directory information cache, we clear each of", "# its stale entries in two phases:", "# 1. Clear the entry so attempting to access zip archive information", "# via any existing stale zipimport.zipimporter instances fails.", "# 2. Remove the entry from the cache so any newly constructed", "# zipimport.zipimporter instances do not end up using old stale", "# zip archive directory information.", "# This whole stale data removal step does not seem strictly necessary,", "# but has been left in because it was done before we started replacing", "# the zip archive directory information cache content if possible, and", "# there are no relevant unit tests that we can depend on to tell us if", "# this is really needed.", "_remove_and_clear_zip_directory_cache_data", "(", "normalized_path", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/linux_x64/lib/python3.7/site-packages/setuptools/command/easy_install.py#L1742-L1821
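The unconditional first step, dropping stale sys.path_importer_cache finders for the egg path, can be illustrated with standard-library calls only; this is a simplified sketch of that idea, not setuptools' private _uncache helper, whose exact matching rules aren't reproduced here:

import os
import sys

def clear_importer_cache_entries(dist_path):
    # Remove any cached finder whose key is the egg path itself or lies underneath it,
    # so the next import builds a fresh finder against the replacement distribution.
    normalized = os.path.normcase(os.path.realpath(dist_path))
    for cached_path in list(sys.path_importer_cache):
        candidate = os.path.normcase(os.path.realpath(cached_path))
        if candidate == normalized or candidate.startswith(normalized + os.sep):
            del sys.path_importer_cache[cached_path]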
ianmaclarty/amulet
3d1363e0a0dde5e4c346409cefab66c2dc91b237
third_party/freetype-2.5.5/src/tools/docmaker/content.py
python
ContentProcessor.__init__
( self )
Initialize a block content processor.
Initialize a block content processor.
[ "Initialize", "a", "block", "content", "processor", "." ]
def __init__( self ): """Initialize a block content processor.""" self.reset() self.sections = {} # dictionary of documentation sections self.section = None # current documentation section self.chapters = [] # list of chapters self.headers = {}
[ "def", "__init__", "(", "self", ")", ":", "self", ".", "reset", "(", ")", "self", ".", "sections", "=", "{", "}", "# dictionary of documentation sections", "self", ".", "section", "=", "None", "# current documentation section", "self", ".", "chapters", "=", "[", "]", "# list of chapters", "self", ".", "headers", "=", "{", "}" ]
https://github.com/ianmaclarty/amulet/blob/3d1363e0a0dde5e4c346409cefab66c2dc91b237/third_party/freetype-2.5.5/src/tools/docmaker/content.py#L385-L394
Alexhuszagh/rust-lexical
01fcdcf8efc8850edb35d8fc65fd5f31bd0981a0
scripts/timings.py
python
filename
(basename, args)
return name
Get a resilient name for the benchmark data.
Get a resilient name for the benchmark data.
[ "Get", "a", "resilient", "name", "for", "the", "benchmark", "data", "." ]
def filename(basename, args): '''Get a resilient name for the benchmark data.''' name = basename if args.no_default_features: name = f'{name}_nodefault' if args.features: name = f'{name}_features={args.features}' return name
[ "def", "filename", "(", "basename", ",", "args", ")", ":", "name", "=", "basename", "if", "args", ".", "no_default_features", ":", "name", "=", "f'{name}_nodefault'", "if", "args", ".", "features", ":", "name", "=", "f'{name}_features={args.features}'", "return", "name" ]
https://github.com/Alexhuszagh/rust-lexical/blob/01fcdcf8efc8850edb35d8fc65fd5f31bd0981a0/scripts/timings.py#L84-L92
OpenNI/OpenNI
1e9524ffd759841789dadb4ca19fb5d4ac5820e7
Platform/Linux/CreateRedist/Redist_OpenNi.py
python
check_sample
(sample_dir)
return rc
Checks if a sample is a tool or should be skipped, returns: 0 - Regular, 1 - Skip, 2 - Tool
Checks if a sample is a tool or should be skipped, returns: 0 - Regular, 1 - Skip, 2 - Tool
[ "Checks", "if", "a", "sample", "is", "a", "tool", "or", "should", "be", "skipped", "returns", ":", "0", "-", "Regular", "1", "-", "Skip", "2", "-", "Tool" ]
def check_sample(sample_dir): "Checks if a sample is a tool or should be skipped, returns: 0 - Regular, 1 - Skip, 2 - Tool" rc = 0 if os.path.exists(sample_dir + "/.redist"): redistFile = open(sample_dir + "/.redist") else: rc=0 return rc redist_lines =redistFile.readlines() skip_re = re.compile("^SKIP=([^\|]*\|)*(" + PLATFORM + "|ALL)(\|[^\|]*)*$") tool_re = re.compile("^TOOL=([^\|]*\|)*(" + PLATFORM + "|ALL)(\|[^\|]*)*$") for line in redist_lines: if skip_re.search(line): rc = 1 redistFile.close() return rc if tool_re.search(line): rc = 2 redistFile.close() return rc redistFile.close() return rc
[ "def", "check_sample", "(", "sample_dir", ")", ":", "rc", "=", "0", "if", "os", ".", "path", ".", "exists", "(", "sample_dir", "+", "\"/.redist\"", ")", ":", "redistFile", "=", "open", "(", "sample_dir", "+", "\"/.redist\"", ")", "else", ":", "rc", "=", "0", "return", "rc", "redist_lines", "=", "redistFile", ".", "readlines", "(", ")", "skip_re", "=", "re", ".", "compile", "(", "\"^SKIP=([^\\|]*\\|)*(\"", "+", "PLATFORM", "+", "\"|ALL)(\\|[^\\|]*)*$\"", ")", "tool_re", "=", "re", ".", "compile", "(", "\"^TOOL=([^\\|]*\\|)*(\"", "+", "PLATFORM", "+", "\"|ALL)(\\|[^\\|]*)*$\"", ")", "for", "line", "in", "redist_lines", ":", "if", "skip_re", ".", "search", "(", "line", ")", ":", "rc", "=", "1", "redistFile", ".", "close", "(", ")", "return", "rc", "if", "tool_re", ".", "search", "(", "line", ")", ":", "rc", "=", "2", "redistFile", ".", "close", "(", ")", "return", "rc", "redistFile", ".", "close", "(", ")", "return", "rc" ]
https://github.com/OpenNI/OpenNI/blob/1e9524ffd759841789dadb4ca19fb5d4ac5820e7/Platform/Linux/CreateRedist/Redist_OpenNi.py#L82-L103
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/inspect.py
python
BoundArguments.apply_defaults
(self)
Set default values for missing arguments. For variable-positional arguments (*args) the default is an empty tuple. For variable-keyword arguments (**kwargs) the default is an empty dict.
Set default values for missing arguments.
[ "Set", "default", "values", "for", "missing", "arguments", "." ]
def apply_defaults(self): """Set default values for missing arguments. For variable-positional arguments (*args) the default is an empty tuple. For variable-keyword arguments (**kwargs) the default is an empty dict. """ arguments = self.arguments new_arguments = [] for name, param in self._signature.parameters.items(): try: new_arguments.append((name, arguments[name])) except KeyError: if param.default is not _empty: val = param.default elif param.kind is _VAR_POSITIONAL: val = () elif param.kind is _VAR_KEYWORD: val = {} else: # This BoundArguments was likely produced by # Signature.bind_partial(). continue new_arguments.append((name, val)) self.arguments = dict(new_arguments)
[ "def", "apply_defaults", "(", "self", ")", ":", "arguments", "=", "self", ".", "arguments", "new_arguments", "=", "[", "]", "for", "name", ",", "param", "in", "self", ".", "_signature", ".", "parameters", ".", "items", "(", ")", ":", "try", ":", "new_arguments", ".", "append", "(", "(", "name", ",", "arguments", "[", "name", "]", ")", ")", "except", "KeyError", ":", "if", "param", ".", "default", "is", "not", "_empty", ":", "val", "=", "param", ".", "default", "elif", "param", ".", "kind", "is", "_VAR_POSITIONAL", ":", "val", "=", "(", ")", "elif", "param", ".", "kind", "is", "_VAR_KEYWORD", ":", "val", "=", "{", "}", "else", ":", "# This BoundArguments was likely produced by", "# Signature.bind_partial().", "continue", "new_arguments", ".", "append", "(", "(", "name", ",", "val", ")", ")", "self", ".", "arguments", "=", "dict", "(", "new_arguments", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/inspect.py#L2701-L2727
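A short usage example of the public route to this method, inspect.signature(...).bind(...) followed by apply_defaults() (standard library; the example function is made up):

import inspect

def greet(name, greeting="hello", *args, **kwargs):
    return "%s, %s" % (greeting, name)

bound = inspect.signature(greet).bind("world")
bound.apply_defaults()
print(bound.arguments)
# name='world', greeting='hello', with args=() and kwargs={} filled in by apply_defaults()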
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/experimental/statistical_analysis/results_stats.py
python
IsScipyMannTestOneSided
()
return scipy_version[0] < 1 and scipy_version[1] < 17
Checks if Scipy version is < 0.17.0. This is the version where stats.mannwhitneyu(...) is changed from returning a one-sided to returning a two-sided p-value.
Checks if Scipy version is < 0.17.0.
[ "Checks", "if", "Scipy", "version", "is", "<", "0", ".", "17", ".", "0", "." ]
def IsScipyMannTestOneSided(): """Checks if Scipy version is < 0.17.0. This is the version where stats.mannwhitneyu(...) is changed from returning a one-sided to returning a two-sided p-value. """ scipy_version = [int(num) for num in scipy.version.version.split('.')] return scipy_version[0] < 1 and scipy_version[1] < 17
[ "def", "IsScipyMannTestOneSided", "(", ")", ":", "scipy_version", "=", "[", "int", "(", "num", ")", "for", "num", "in", "scipy", ".", "version", ".", "version", ".", "split", "(", "'.'", ")", "]", "return", "scipy_version", "[", "0", "]", "<", "1", "and", "scipy_version", "[", "1", "]", "<", "17" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/experimental/statistical_analysis/results_stats.py#L46-L53
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
UpdateUIEvent.GetText
(*args, **kwargs)
return _core_.UpdateUIEvent_GetText(*args, **kwargs)
GetText(self) -> String Returns the text that should be set for the UI element.
GetText(self) -> String
[ "GetText", "(", "self", ")", "-", ">", "String" ]
def GetText(*args, **kwargs): """ GetText(self) -> String Returns the text that should be set for the UI element. """ return _core_.UpdateUIEvent_GetText(*args, **kwargs)
[ "def", "GetText", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "UpdateUIEvent_GetText", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L6773-L6779
y123456yz/reading-and-annotate-mongodb-3.6
93280293672ca7586dc24af18132aa61e4ed7fcf
mongo/buildscripts/resmokelib/parser.py
python
validate_options
(parser, options, args)
Do preliminary validation on the options and error on any invalid options.
Do preliminary validation on the options and error on any invalid options.
[ "Do", "preliminary", "validation", "on", "the", "options", "and", "error", "on", "any", "invalid", "options", "." ]
def validate_options(parser, options, args): """ Do preliminary validation on the options and error on any invalid options. """ if options.shell_port is not None and options.shell_conn_string is not None: parser.error("Cannot specify both `shellPort` and `shellConnString`") if options.executor_file: parser.error("--executor is superseded by --suites; specify --suites={} {} to run the" "test(s) under those suite configuration(s)" .format(options.executor_file, " ".join(args)))
[ "def", "validate_options", "(", "parser", ",", "options", ",", "args", ")", ":", "if", "options", ".", "shell_port", "is", "not", "None", "and", "options", ".", "shell_conn_string", "is", "not", "None", ":", "parser", ".", "error", "(", "\"Cannot specify both `shellPort` and `shellConnString`\"", ")", "if", "options", ".", "executor_file", ":", "parser", ".", "error", "(", "\"--executor is superseded by --suites; specify --suites={} {} to run the\"", "\"test(s) under those suite configuration(s)\"", ".", "format", "(", "options", ".", "executor_file", ",", "\" \"", ".", "join", "(", "args", ")", ")", ")" ]
https://github.com/y123456yz/reading-and-annotate-mongodb-3.6/blob/93280293672ca7586dc24af18132aa61e4ed7fcf/mongo/buildscripts/resmokelib/parser.py#L292-L303
hpi-xnor/BMXNet
ed0b201da6667887222b8e4b5f997c4f6b61943d
example/cnn_text_classification/data_helpers.py
python
load_data_and_labels
()
return [x_text, y]
Loads MR polarity data from files, splits the data into words and generates labels. Returns split sentences and labels.
Loads MR polarity data from files, splits the data into words and generates labels. Returns split sentences and labels.
[ "Loads", "MR", "polarity", "data", "from", "files", "splits", "the", "data", "into", "words", "and", "generates", "labels", ".", "Returns", "split", "sentences", "and", "labels", "." ]
def load_data_and_labels(): """ Loads MR polarity data from files, splits the data into words and generates labels. Returns split sentences and labels. """ # Load data from files pos_path = "./data/rt-polaritydata/rt-polarity.pos" neg_path = "./data/rt-polaritydata/rt-polarity.neg" if not os.path.exists(pos_path): os.system("git clone https://github.com/dennybritz/cnn-text-classification-tf.git") os.system('mv cnn-text-classification-tf/data .') os.system('rm -rf cnn-text-classification-tf') positive_examples = list(open(pos_path).readlines()) positive_examples = [s.strip() for s in positive_examples] negative_examples = list(open(neg_path).readlines()) negative_examples = [s.strip() for s in negative_examples] # Split by words x_text = positive_examples + negative_examples x_text = [clean_str(sent) for sent in x_text] x_text = [s.split(" ") for s in x_text] # Generate labels positive_labels = [1 for _ in positive_examples] negative_labels = [0 for _ in negative_examples] y = np.concatenate([positive_labels, negative_labels], 0) return [x_text, y]
[ "def", "load_data_and_labels", "(", ")", ":", "# Load data from files", "pos_path", "=", "\"./data/rt-polaritydata/rt-polarity.pos\"", "neg_path", "=", "\"./data/rt-polaritydata/rt-polarity.neg\"", "if", "not", "os", ".", "path", ".", "exists", "(", "pos_path", ")", ":", "os", ".", "system", "(", "\"git clone https://github.com/dennybritz/cnn-text-classification-tf.git\"", ")", "os", ".", "system", "(", "'mv cnn-text-classification-tf/data .'", ")", "os", ".", "system", "(", "'rm -rf cnn-text-classification-tf'", ")", "positive_examples", "=", "list", "(", "open", "(", "pos_path", ")", ".", "readlines", "(", ")", ")", "positive_examples", "=", "[", "s", ".", "strip", "(", ")", "for", "s", "in", "positive_examples", "]", "negative_examples", "=", "list", "(", "open", "(", "neg_path", ")", ".", "readlines", "(", ")", ")", "negative_examples", "=", "[", "s", ".", "strip", "(", ")", "for", "s", "in", "negative_examples", "]", "# Split by words", "x_text", "=", "positive_examples", "+", "negative_examples", "x_text", "=", "[", "clean_str", "(", "sent", ")", "for", "sent", "in", "x_text", "]", "x_text", "=", "[", "s", ".", "split", "(", "\" \"", ")", "for", "s", "in", "x_text", "]", "# Generate labels", "positive_labels", "=", "[", "1", "for", "_", "in", "positive_examples", "]", "negative_labels", "=", "[", "0", "for", "_", "in", "negative_examples", "]", "y", "=", "np", ".", "concatenate", "(", "[", "positive_labels", ",", "negative_labels", "]", ",", "0", ")", "return", "[", "x_text", ",", "y", "]" ]
https://github.com/hpi-xnor/BMXNet/blob/ed0b201da6667887222b8e4b5f997c4f6b61943d/example/cnn_text_classification/data_helpers.py#L46-L70
taichi-dev/taichi
973c04d6ba40f34e9e3bd5a28ae0ee0802f136a6
python/taichi/lang/common_ops.py
python
TaichiOperations.atomic_sub
(self, other)
return ops.atomic_sub(self, other)
Return the new expression of computing atomic sub between self and a given operand. Args: other (Any): Given operand. Returns: :class:`~taichi.lang.expr.Expr`: The computing expression of atomic sub.
Return the new expression of computing atomic sub between self and a given operand.
[ "Return", "the", "new", "expression", "of", "computing", "atomic", "sub", "between", "self", "and", "a", "given", "operand", "." ]
def atomic_sub(self, other): """Return the new expression of computing atomic sub between self and a given operand. Args: other (Any): Given operand. Returns: :class:`~taichi.lang.expr.Expr`: The computing expression of atomic sub.""" return ops.atomic_sub(self, other)
[ "def", "atomic_sub", "(", "self", ",", "other", ")", ":", "return", "ops", ".", "atomic_sub", "(", "self", ",", "other", ")" ]
https://github.com/taichi-dev/taichi/blob/973c04d6ba40f34e9e3bd5a28ae0ee0802f136a6/python/taichi/lang/common_ops.py#L138-L146
garyexplains/examples
2e2f7c1b990da00f07fc9f3e30086c93c81756a4
Raspberry Pi Pico/MicroPython/primes.py
python
is_prime
(n: int)
return True
Primality test using 6k+-1 optimization.
Primality test using 6k+-1 optimization.
[ "Primality", "test", "using", "6k", "+", "-", "1", "optimization", "." ]
def is_prime(n: int) -> bool: """Primality test using 6k+-1 optimization.""" if n <= 3: return n > 1 if n % 2 == 0 or n % 3 == 0: return False i = 5 while i ** 2 <= n: if n % i == 0 or n % (i + 2) == 0: return False i += 6 return True
[ "def", "is_prime", "(", "n", ":", "int", ")", "->", "bool", ":", "if", "n", "<=", "3", ":", "return", "n", ">", "1", "if", "n", "%", "2", "==", "0", "or", "n", "%", "3", "==", "0", ":", "return", "False", "i", "=", "5", "while", "i", "**", "2", "<=", "n", ":", "if", "n", "%", "i", "==", "0", "or", "n", "%", "(", "i", "+", "2", ")", "==", "0", ":", "return", "False", "i", "+=", "6", "return", "True" ]
https://github.com/garyexplains/examples/blob/2e2f7c1b990da00f07fc9f3e30086c93c81756a4/Raspberry Pi Pico/MicroPython/primes.py#L33-L44
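A quick standalone check of the 6k+-1 test (the function body is reproduced from the record above so the snippet runs on its own):

def is_prime(n: int) -> bool:
    """Primality test using 6k+-1 optimization."""
    if n <= 3:
        return n > 1
    if n % 2 == 0 or n % 3 == 0:
        return False
    i = 5
    while i ** 2 <= n:
        if n % i == 0 or n % (i + 2) == 0:
            return False
        i += 6
    return True

print([n for n in range(2, 30) if is_prime(n)])
# [2, 3, 5, 7, 11, 13, 17, 19, 23, 29]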
potassco/clingo
e0c91d8f95cc28de1c480a871f9c97c30de83d40
libpyclingo/clingo/ast.py
python
TheoryAtom
(location: Location, term: AST, elements: Sequence[AST], guard: Optional[AST])
return AST(p_ast[0])
Construct an AST node of type `ASTType.TheoryAtom`.
Construct an AST node of type `ASTType.TheoryAtom`.
[ "Construct", "an", "AST", "node", "of", "type", "ASTType", ".", "TheoryAtom", "." ]
def TheoryAtom(location: Location, term: AST, elements: Sequence[AST], guard: Optional[AST]) -> AST: ''' Construct an AST node of type `ASTType.TheoryAtom`. ''' p_ast = _ffi.new('clingo_ast_t**') c_location = _c_location(location) _handle_error(_lib.clingo_ast_build( _lib.clingo_ast_type_theory_atom, p_ast, c_location[0], term._rep, _ffi.new('clingo_ast_t*[]', [ x._rep for x in elements ]), _ffi.cast('size_t', len(elements)), _ffi.NULL if guard is None else guard._rep)) return AST(p_ast[0])
[ "def", "TheoryAtom", "(", "location", ":", "Location", ",", "term", ":", "AST", ",", "elements", ":", "Sequence", "[", "AST", "]", ",", "guard", ":", "Optional", "[", "AST", "]", ")", "->", "AST", ":", "p_ast", "=", "_ffi", ".", "new", "(", "'clingo_ast_t**'", ")", "c_location", "=", "_c_location", "(", "location", ")", "_handle_error", "(", "_lib", ".", "clingo_ast_build", "(", "_lib", ".", "clingo_ast_type_theory_atom", ",", "p_ast", ",", "c_location", "[", "0", "]", ",", "term", ".", "_rep", ",", "_ffi", ".", "new", "(", "'clingo_ast_t*[]'", ",", "[", "x", ".", "_rep", "for", "x", "in", "elements", "]", ")", ",", "_ffi", ".", "cast", "(", "'size_t'", ",", "len", "(", "elements", ")", ")", ",", "_ffi", ".", "NULL", "if", "guard", "is", "None", "else", "guard", ".", "_rep", ")", ")", "return", "AST", "(", "p_ast", "[", "0", "]", ")" ]
https://github.com/potassco/clingo/blob/e0c91d8f95cc28de1c480a871f9c97c30de83d40/libpyclingo/clingo/ast.py#L1603-L1616
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
TextAreaBase.SetDefaultStyle
(*args, **kwargs)
return _core_.TextAreaBase_SetDefaultStyle(*args, **kwargs)
SetDefaultStyle(self, wxTextAttr style) -> bool
SetDefaultStyle(self, wxTextAttr style) -> bool
[ "SetDefaultStyle", "(", "self", "wxTextAttr", "style", ")", "-", ">", "bool" ]
def SetDefaultStyle(*args, **kwargs): """SetDefaultStyle(self, wxTextAttr style) -> bool""" return _core_.TextAreaBase_SetDefaultStyle(*args, **kwargs)
[ "def", "SetDefaultStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "TextAreaBase_SetDefaultStyle", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L13428-L13430
albertz/openlierox
d316c14a8eb57848ef56e9bfa7b23a56f694a51b
tools/DedicatedServerVideo/gdata/youtube/__init__.py
python
YouTubeSubscriptionEntry.GetSubscriptionType
(self)
Retrieve the type of this subscription. Returns: A string that is either 'channel, 'query' or 'favorites'
Retrieve the type of this subscription.
[ "Retrieve", "the", "type", "of", "this", "subscription", "." ]
def GetSubscriptionType(self): """Retrieve the type of this subscription. Returns: A string that is either 'channel, 'query' or 'favorites' """ for category in self.category: if category.scheme == YOUTUBE_SUBSCRIPTION_TYPE_SCHEME: return category.term
[ "def", "GetSubscriptionType", "(", "self", ")", ":", "for", "category", "in", "self", ".", "category", ":", "if", "category", ".", "scheme", "==", "YOUTUBE_SUBSCRIPTION_TYPE_SCHEME", ":", "return", "category", ".", "term" ]
https://github.com/albertz/openlierox/blob/d316c14a8eb57848ef56e9bfa7b23a56f694a51b/tools/DedicatedServerVideo/gdata/youtube/__init__.py#L308-L316
jackaudio/jack2
21b293dbc37d42446141a08922cdec0d2550c6a0
waflib/Build.py
python
BuildContext.compile
(self)
Run the build by creating an instance of :py:class:`waflib.Runner.Parallel` The cache file is written when at least a task was executed. :raises: :py:class:`waflib.Errors.BuildError` in case the build fails
Run the build by creating an instance of :py:class:`waflib.Runner.Parallel` The cache file is written when at least a task was executed.
[ "Run", "the", "build", "by", "creating", "an", "instance", "of", ":", "py", ":", "class", ":", "waflib", ".", "Runner", ".", "Parallel", "The", "cache", "file", "is", "written", "when", "at", "least", "a", "task", "was", "executed", "." ]
def compile(self): """ Run the build by creating an instance of :py:class:`waflib.Runner.Parallel` The cache file is written when at least a task was executed. :raises: :py:class:`waflib.Errors.BuildError` in case the build fails """ Logs.debug('build: compile()') # delegate the producer-consumer logic to another object to reduce the complexity self.producer = Runner.Parallel(self, self.jobs) self.producer.biter = self.get_build_iterator() try: self.producer.start() except KeyboardInterrupt: if self.is_dirty(): self.store() raise else: if self.is_dirty(): self.store() if self.producer.error: raise Errors.BuildError(self.producer.error)
[ "def", "compile", "(", "self", ")", ":", "Logs", ".", "debug", "(", "'build: compile()'", ")", "# delegate the producer-consumer logic to another object to reduce the complexity", "self", ".", "producer", "=", "Runner", ".", "Parallel", "(", "self", ",", "self", ".", "jobs", ")", "self", ".", "producer", ".", "biter", "=", "self", ".", "get_build_iterator", "(", ")", "try", ":", "self", ".", "producer", ".", "start", "(", ")", "except", "KeyboardInterrupt", ":", "if", "self", ".", "is_dirty", "(", ")", ":", "self", ".", "store", "(", ")", "raise", "else", ":", "if", "self", ".", "is_dirty", "(", ")", ":", "self", ".", "store", "(", ")", "if", "self", ".", "producer", ".", "error", ":", "raise", "Errors", ".", "BuildError", "(", "self", ".", "producer", ".", "error", ")" ]
https://github.com/jackaudio/jack2/blob/21b293dbc37d42446141a08922cdec0d2550c6a0/waflib/Build.py#L332-L355
miyosuda/TensorFlowAndroidDemo
35903e0221aa5f109ea2dbef27f20b52e317f42d
jni-build/jni/include/tensorflow/contrib/distributions/python/ops/inverse_gamma.py
python
InverseGamma.allow_nan_stats
(self)
return self._allow_nan_stats
Boolean describing behavior when a stat is undefined for batch member.
Boolean describing behavior when a stat is undefined for batch member.
[ "Boolean", "describing", "behavior", "when", "a", "stat", "is", "undefined", "for", "batch", "member", "." ]
def allow_nan_stats(self): """Boolean describing behavior when a stat is undefined for batch member.""" return self._allow_nan_stats
[ "def", "allow_nan_stats", "(", "self", ")", ":", "return", "self", ".", "_allow_nan_stats" ]
https://github.com/miyosuda/TensorFlowAndroidDemo/blob/35903e0221aa5f109ea2dbef27f20b52e317f42d/jni-build/jni/include/tensorflow/contrib/distributions/python/ops/inverse_gamma.py#L108-L110
openvinotoolkit/openvino
dedcbeafa8b84cccdc55ca64b8da516682b381c7
tools/mo/openvino/tools/mo/front/softsign_replacer.py
python
SoftSign.replace_op
(self, graph: Graph, node: Node)
return [div_node.id]
Replace Softsign according to formula feature/(abs(feature)+1)
Replace Softsign according to formula feature/(abs(feature)+1)
[ "Replace", "Softsign", "according", "to", "formula", "feature", "/", "(", "abs", "(", "feature", ")", "+", "1", ")" ]
def replace_op(self, graph: Graph, node: Node): """ Replace Softsign according to formula feature/(abs(feature)+1) """ abs_node = Abs(graph, {'name': "abs_" + node.id}).create_node() abs_node.in_port(0).connect(node.in_port(0).get_source()) add_node = create_op_node_with_second_input(graph, Add, np.ones([1], dtype=np.float32), {"name": node.id + "_plus_1"}) add_node.in_port(0).connect(abs_node.out_port(0)) div_node = Div(graph, {"name": "div_" + node.id}).create_node() div_node.in_port(0).connect(node.in_port(0).get_source()) div_node.in_port(1).connect(add_node.out_port(0)) return [div_node.id]
[ "def", "replace_op", "(", "self", ",", "graph", ":", "Graph", ",", "node", ":", "Node", ")", ":", "abs_node", "=", "Abs", "(", "graph", ",", "{", "'name'", ":", "\"abs_\"", "+", "node", ".", "id", "}", ")", ".", "create_node", "(", ")", "abs_node", ".", "in_port", "(", "0", ")", ".", "connect", "(", "node", ".", "in_port", "(", "0", ")", ".", "get_source", "(", ")", ")", "add_node", "=", "create_op_node_with_second_input", "(", "graph", ",", "Add", ",", "np", ".", "ones", "(", "[", "1", "]", ",", "dtype", "=", "np", ".", "float32", ")", ",", "{", "\"name\"", ":", "node", ".", "id", "+", "\"_plus_1\"", "}", ")", "add_node", ".", "in_port", "(", "0", ")", ".", "connect", "(", "abs_node", ".", "out_port", "(", "0", ")", ")", "div_node", "=", "Div", "(", "graph", ",", "{", "\"name\"", ":", "\"div_\"", "+", "node", ".", "id", "}", ")", ".", "create_node", "(", ")", "div_node", ".", "in_port", "(", "0", ")", ".", "connect", "(", "node", ".", "in_port", "(", "0", ")", ".", "get_source", "(", ")", ")", "div_node", ".", "in_port", "(", "1", ")", ".", "connect", "(", "add_node", ".", "out_port", "(", "0", ")", ")", "return", "[", "div_node", ".", "id", "]" ]
https://github.com/openvinotoolkit/openvino/blob/dedcbeafa8b84cccdc55ca64b8da516682b381c7/tools/mo/openvino/tools/mo/front/softsign_replacer.py#L17-L29
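The formula the rewrite builds out of Abs, Add(+1) and Div nodes, feature/(abs(feature)+1), can be sanity-checked numerically; a standalone NumPy sketch (not the OpenVINO graph code):

import numpy as np

def softsign_reference(x):
    # feature / (abs(feature) + 1), i.e. the graph produced above collapsed to one expression
    return x / (np.abs(x) + 1.0)

x = np.array([-2.0, -0.5, 0.0, 0.5, 2.0], dtype=np.float32)
print(softsign_reference(x))
# approximately [-0.667, -0.333, 0.0, 0.333, 0.667]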
cocos-creator/engine-native
984c4c9f5838253313b44ccd429bd8fac4ec8a6a
download-deps.py
python
delete_folder_except
(folder_path, excepts)
Delete a folder excepts some files/subfolders, `excepts` doesn't recursively which means it can not include `subfoler/file1`. `excepts` is an array.
Delete a folder excepts some files/subfolders, `excepts` doesn't recursively which means it can not include `subfoler/file1`. `excepts` is an array.
[ "Delete", "a", "folder", "excepts", "some", "files", "/", "subfolders", "excepts", "doesn", "t", "recursively", "which", "means", "it", "can", "not", "include", "subfoler", "/", "file1", ".", "excepts", "is", "an", "array", "." ]
def delete_folder_except(folder_path, excepts): """ Delete a folder excepts some files/subfolders, `excepts` doesn't recursively which means it can not include `subfoler/file1`. `excepts` is an array. """ for file in os.listdir(folder_path): if (file in excepts): continue full_path = os.path.join(folder_path, file) if os.path.isdir(full_path): shutil.rmtree(full_path) else: os.remove(full_path)
[ "def", "delete_folder_except", "(", "folder_path", ",", "excepts", ")", ":", "for", "file", "in", "os", ".", "listdir", "(", "folder_path", ")", ":", "if", "(", "file", "in", "excepts", ")", ":", "continue", "full_path", "=", "os", ".", "path", ".", "join", "(", "folder_path", ",", "file", ")", "if", "os", ".", "path", ".", "isdir", "(", "full_path", ")", ":", "shutil", ".", "rmtree", "(", "full_path", ")", "else", ":", "os", ".", "remove", "(", "full_path", ")" ]
https://github.com/cocos-creator/engine-native/blob/984c4c9f5838253313b44ccd429bd8fac4ec8a6a/download-deps.py#L55-L68
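An illustrative call (paths invented, and assuming delete_folder_except from the record above is in scope), highlighting the non-recursive behaviour the docstring warns about:

# Keeps only the top-level entries named exactly ".git" and "README.md" inside
# "external/deps" and deletes everything else there. An entry such as
# "docs/intro.md" cannot be excluded, because excepts is matched against
# top-level names only.
delete_folder_except("external/deps", [".git", "README.md"])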
sdhash/sdhash
b9eff63e4e5867e910f41fd69032bbb1c94a2a5e
external/tools/build/v2/build/property.py
python
split_conditional
(property)
return None
If 'property' is conditional property, returns condition and the property, e.g <variant>debug,<toolset>gcc:<inlining>full will become <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns empty string.
If 'property' is conditional property, returns condition and the property, e.g <variant>debug,<toolset>gcc:<inlining>full will become <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns empty string.
[ "If", "property", "is", "conditional", "property", "returns", "condition", "and", "the", "property", "e", ".", "g", "<variant", ">", "debug", "<toolset", ">", "gcc", ":", "<inlining", ">", "full", "will", "become", "<variant", ">", "debug", "<toolset", ">", "gcc", "<inlining", ">", "full", ".", "Otherwise", "returns", "empty", "string", "." ]
def split_conditional (property): """ If 'property' is conditional property, returns condition and the property, e.g <variant>debug,<toolset>gcc:<inlining>full will become <variant>debug,<toolset>gcc <inlining>full. Otherwise, returns empty string. """ m = __re_split_conditional.match (property) if m: return (m.group (1), '<' + m.group (2)) return None
[ "def", "split_conditional", "(", "property", ")", ":", "m", "=", "__re_split_conditional", ".", "match", "(", "property", ")", "if", "m", ":", "return", "(", "m", ".", "group", "(", "1", ")", ",", "'<'", "+", "m", ".", "group", "(", "2", ")", ")", "return", "None" ]
https://github.com/sdhash/sdhash/blob/b9eff63e4e5867e910f41fd69032bbb1c94a2a5e/external/tools/build/v2/build/property.py#L262-L274
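A standalone sketch of the splitting behaviour on the docstring's own example; the regex below is an illustrative stand-in for __re_split_conditional, which is defined elsewhere in property.py and not shown in the record:

import re

# Illustrative pattern: everything before the last ":<" is the condition,
# the rest (with "<" restored) is the property.
_re_split_conditional = re.compile(r'(.+):<(.+)')

def split_conditional(prop):
    m = _re_split_conditional.match(prop)
    if m:
        return (m.group(1), '<' + m.group(2))
    return None

print(split_conditional('<variant>debug,<toolset>gcc:<inlining>full'))
# ('<variant>debug,<toolset>gcc', '<inlining>full')
print(split_conditional('<inlining>full'))  # None, i.e. not a conditional property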
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
TextEntryBase.ChangeValue
(*args, **kwargs)
return _core_.TextEntryBase_ChangeValue(*args, **kwargs)
ChangeValue(self, String value) Set the value in the text entry field. Does not generate a text change event.
ChangeValue(self, String value)
[ "ChangeValue", "(", "self", "String", "value", ")" ]
def ChangeValue(*args, **kwargs): """ ChangeValue(self, String value) Set the value in the text entry field. Does not generate a text change event. """ return _core_.TextEntryBase_ChangeValue(*args, **kwargs)
[ "def", "ChangeValue", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "TextEntryBase_ChangeValue", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L13081-L13087
PlatformLab/RAMCloud
b1866af19124325a6dfd8cbc267e2e3ef1f965d1
scripts/recoverymetrics.py
python
FRAC
(total)
return realFrac
Returns a function that shows the average percentage of the values from the total given.
Returns a function that shows the average percentage of the values from the total given.
[ "Returns", "a", "function", "that", "shows", "the", "average", "percentage", "of", "the", "values", "from", "the", "total", "given", "." ]
def FRAC(total): """Returns a function that shows the average percentage of the values from the total given.""" def realFrac(values, unit): r = toString(sum(values) / len(values) / total * 100) r += '%' if max(values) > min(values): r += ' avg' return [r] return realFrac
[ "def", "FRAC", "(", "total", ")", ":", "def", "realFrac", "(", "values", ",", "unit", ")", ":", "r", "=", "toString", "(", "sum", "(", "values", ")", "/", "len", "(", "values", ")", "/", "total", "*", "100", ")", "r", "+=", "'%'", "if", "max", "(", "values", ")", ">", "min", "(", "values", ")", ":", "r", "+=", "' avg'", "return", "[", "r", "]", "return", "realFrac" ]
https://github.com/PlatformLab/RAMCloud/blob/b1866af19124325a6dfd8cbc267e2e3ef1f965d1/scripts/recoverymetrics.py#L208-L217
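`FRAC` is a closure factory used when rendering the recovery-metrics report. The helper `toString` it calls is defined elsewhere in the same script, so this sketch substitutes a plain format call (an assumption) to show how the returned formatter behaves:

```python
def frac_of(total):
    """Sketch of FRAC: formats values as an average percentage of a fixed total."""
    def real_frac(values, unit):
        # 'unit' is accepted for interface compatibility but unused, as in the original.
        r = '{:.1f}'.format(sum(values) / len(values) / total * 100)
        r += '%'
        if max(values) > min(values):
            r += ' avg'
        return [r]
    return real_frac

fmt = frac_of(200.0)
print(fmt([40.0, 60.0], 'ms'))   # ['25.0% avg']  (values differ, so ' avg' is appended)
print(fmt([50.0, 50.0], 'ms'))   # ['25.0%']
```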
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/html.py
python
HtmlPrintout.SetHtmlText
(*args, **kwargs)
return _html.HtmlPrintout_SetHtmlText(*args, **kwargs)
SetHtmlText(self, String html, String basepath=EmptyString, bool isdir=True)
SetHtmlText(self, String html, String basepath=EmptyString, bool isdir=True)
[ "SetHtmlText", "(", "self", "String", "html", "String", "basepath", "=", "EmptyString", "bool", "isdir", "=", "True", ")" ]
def SetHtmlText(*args, **kwargs): """SetHtmlText(self, String html, String basepath=EmptyString, bool isdir=True)""" return _html.HtmlPrintout_SetHtmlText(*args, **kwargs)
[ "def", "SetHtmlText", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_html", ".", "HtmlPrintout_SetHtmlText", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/html.py#L1276-L1278
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/py/py/_code/code.py
python
TracebackEntry.ishidden
(self)
return True if the current frame has a var __tracebackhide__ resolving to True mostly for internal use
return True if the current frame has a var __tracebackhide__ resolving to True
[ "return", "True", "if", "the", "current", "frame", "has", "a", "var", "__tracebackhide__", "resolving", "to", "True" ]
def ishidden(self): """ return True if the current frame has a var __tracebackhide__ resolving to True mostly for internal use """ try: return self.frame.f_locals['__tracebackhide__'] except KeyError: try: return self.frame.f_globals['__tracebackhide__'] except KeyError: return False
[ "def", "ishidden", "(", "self", ")", ":", "try", ":", "return", "self", ".", "frame", ".", "f_locals", "[", "'__tracebackhide__'", "]", "except", "KeyError", ":", "try", ":", "return", "self", ".", "frame", ".", "f_globals", "[", "'__tracebackhide__'", "]", "except", "KeyError", ":", "return", "False" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/py/py/_code/code.py#L218-L230
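`ishidden` backs the `__tracebackhide__` convention used by the py library and pytest. A minimal illustration of how a test helper opts its own frame out of failure tracebacks:

```python
def assert_positive(x):
    # Tools that honour the __tracebackhide__ convention (py, pytest) skip this
    # frame when rendering a failing traceback.
    __tracebackhide__ = True
    assert x > 0, "expected a positive value, got %r" % (x,)

def test_value():
    assert_positive(-1)   # the reported failure points here, not inside the helper
```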
rapidsai/cudf
d5b2448fc69f17509304d594f029d0df56984962
python/cudf/cudf/core/tools/datetimes.py
python
DateOffset._from_freqstr
(cls: Type[_T], freqstr: str)
return cls(**{cls._CODES_TO_UNITS[freq_part]: int(numeric_part)})
Parse a string and return a DateOffset object expects strings of the form 3D, 25W, 10ms, 42ns, etc.
Parse a string and return a DateOffset object expects strings of the form 3D, 25W, 10ms, 42ns, etc.
[ "Parse", "a", "string", "and", "return", "a", "DateOffset", "object", "expects", "strings", "of", "the", "form", "3D", "25W", "10ms", "42ns", "etc", "." ]
def _from_freqstr(cls: Type[_T], freqstr: str) -> _T: """ Parse a string and return a DateOffset object expects strings of the form 3D, 25W, 10ms, 42ns, etc. """ match = cls._FREQSTR_REGEX.match(freqstr) if match is None: raise ValueError(f"Invalid frequency string: {freqstr}") numeric_part = match.group(1) if numeric_part == "": numeric_part = "1" freq_part = match.group(2) if freq_part not in cls._CODES_TO_UNITS: raise ValueError(f"Cannot interpret frequency str: {freqstr}") return cls(**{cls._CODES_TO_UNITS[freq_part]: int(numeric_part)})
[ "def", "_from_freqstr", "(", "cls", ":", "Type", "[", "_T", "]", ",", "freqstr", ":", "str", ")", "->", "_T", ":", "match", "=", "cls", ".", "_FREQSTR_REGEX", ".", "match", "(", "freqstr", ")", "if", "match", "is", "None", ":", "raise", "ValueError", "(", "f\"Invalid frequency string: {freqstr}\"", ")", "numeric_part", "=", "match", ".", "group", "(", "1", ")", "if", "numeric_part", "==", "\"\"", ":", "numeric_part", "=", "\"1\"", "freq_part", "=", "match", ".", "group", "(", "2", ")", "if", "freq_part", "not", "in", "cls", ".", "_CODES_TO_UNITS", ":", "raise", "ValueError", "(", "f\"Cannot interpret frequency str: {freqstr}\"", ")", "return", "cls", "(", "*", "*", "{", "cls", ".", "_CODES_TO_UNITS", "[", "freq_part", "]", ":", "int", "(", "numeric_part", ")", "}", ")" ]
https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/tools/datetimes.py#L644-L662
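The classmethod leans on two private class attributes, `_FREQSTR_REGEX` and `_CODES_TO_UNITS`, that are not included in this record. A standalone sketch of the same parsing pattern, with assumed stand-ins for both:

```python
import re

# Assumed stand-ins for cudf's private class attributes.
_FREQSTR_REGEX = re.compile(r'([0-9]*)([a-zA-Z]+)')
_CODES_TO_UNITS = {'D': 'days', 'W': 'weeks', 'ms': 'milliseconds', 'ns': 'nanoseconds'}

def parse_freqstr(freqstr):
    match = _FREQSTR_REGEX.match(freqstr)
    if match is None:
        raise ValueError("Invalid frequency string: %s" % freqstr)
    numeric_part = match.group(1) or "1"   # '10ms' -> '10', 'ms' -> default '1'
    freq_part = match.group(2)
    if freq_part not in _CODES_TO_UNITS:
        raise ValueError("Cannot interpret frequency str: %s" % freqstr)
    return {_CODES_TO_UNITS[freq_part]: int(numeric_part)}

print(parse_freqstr("3D"))    # {'days': 3}
print(parse_freqstr("25W"))   # {'weeks': 25}
```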
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/dis.py
python
dis
(x=None)
Disassemble classes, methods, functions, or code. With no argument, disassemble the last traceback.
Disassemble classes, methods, functions, or code.
[ "Disassemble", "classes", "methods", "functions", "or", "code", "." ]
def dis(x=None): """Disassemble classes, methods, functions, or code. With no argument, disassemble the last traceback. """ if x is None: distb() return if isinstance(x, types.InstanceType): x = x.__class__ if hasattr(x, 'im_func'): x = x.im_func if hasattr(x, 'func_code'): x = x.func_code if hasattr(x, '__dict__'): items = x.__dict__.items() items.sort() for name, x1 in items: if isinstance(x1, _have_code): print "Disassembly of %s:" % name try: dis(x1) except TypeError, msg: print "Sorry:", msg print elif hasattr(x, 'co_code'): disassemble(x) elif isinstance(x, str): disassemble_string(x) else: raise TypeError, \ "don't know how to disassemble %s objects" % \ type(x).__name__
[ "def", "dis", "(", "x", "=", "None", ")", ":", "if", "x", "is", "None", ":", "distb", "(", ")", "return", "if", "isinstance", "(", "x", ",", "types", ".", "InstanceType", ")", ":", "x", "=", "x", ".", "__class__", "if", "hasattr", "(", "x", ",", "'im_func'", ")", ":", "x", "=", "x", ".", "im_func", "if", "hasattr", "(", "x", ",", "'func_code'", ")", ":", "x", "=", "x", ".", "func_code", "if", "hasattr", "(", "x", ",", "'__dict__'", ")", ":", "items", "=", "x", ".", "__dict__", ".", "items", "(", ")", "items", ".", "sort", "(", ")", "for", "name", ",", "x1", "in", "items", ":", "if", "isinstance", "(", "x1", ",", "_have_code", ")", ":", "print", "\"Disassembly of %s:\"", "%", "name", "try", ":", "dis", "(", "x1", ")", "except", "TypeError", ",", "msg", ":", "print", "\"Sorry:\"", ",", "msg", "print", "elif", "hasattr", "(", "x", ",", "'co_code'", ")", ":", "disassemble", "(", "x", ")", "elif", "isinstance", "(", "x", ",", "str", ")", ":", "disassemble_string", "(", "x", ")", "else", ":", "raise", "TypeError", ",", "\"don't know how to disassemble %s objects\"", "%", "type", "(", "x", ")", ".", "__name__" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/dis.py#L16-L49
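This is the Python 2 version of the standard-library entry point; typical usage is the same on current Python:

```python
import dis

def add(a, b):
    return a + b

dis.dis(add)   # disassemble a single function
# dis.dis()    # with no argument, disassemble the last traceback
```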
msracver/Deep-Image-Analogy
632b9287b42552e32dad64922967c8c9ec7fc4d3
scripts/cpp_lint.py
python
_SetFilters
(filters)
Sets the module's error-message filters. These filters are applied when deciding whether to emit a given error message. Args: filters: A string of comma-separated filters (eg "whitespace/indent"). Each filter should start with + or -; else we die.
Sets the module's error-message filters.
[ "Sets", "the", "module", "s", "error", "-", "message", "filters", "." ]
def _SetFilters(filters): """Sets the module's error-message filters. These filters are applied when deciding whether to emit a given error message. Args: filters: A string of comma-separated filters (eg "whitespace/indent"). Each filter should start with + or -; else we die. """ _cpplint_state.SetFilters(filters)
[ "def", "_SetFilters", "(", "filters", ")", ":", "_cpplint_state", ".", "SetFilters", "(", "filters", ")" ]
https://github.com/msracver/Deep-Image-Analogy/blob/632b9287b42552e32dad64922967c8c9ec7fc4d3/scripts/cpp_lint.py#L797-L807
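A brief usage sketch inside cpplint's module scope; `whitespace/indent` is the category named in the docstring, and every entry must start with '+' or '-' or the tool aborts:

```python
# Suppress indentation whitespace warnings, keep include-order checks enabled.
_SetFilters("-whitespace/indent,+build/include_order")
```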
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py3/numpy/distutils/ccompiler.py
python
_needs_build
(obj, cc_args, extra_postargs, pp_opts)
return False
Check if an objects needs to be rebuild based on its dependencies Parameters ---------- obj : str object file Returns ------- bool
Check if an objects needs to be rebuild based on its dependencies
[ "Check", "if", "an", "objects", "needs", "to", "be", "rebuild", "based", "on", "its", "dependencies" ]
def _needs_build(obj, cc_args, extra_postargs, pp_opts): """ Check if an objects needs to be rebuild based on its dependencies Parameters ---------- obj : str object file Returns ------- bool """ # defined in unixcompiler.py dep_file = obj + '.d' if not os.path.exists(dep_file): return True # dep_file is a makefile containing 'object: dependencies' # formatted like posix shell (spaces escaped, \ line continuations) # the last line contains the compiler commandline arguments as some # projects may compile an extension multiple times with different # arguments with open(dep_file, "r") as f: lines = f.readlines() cmdline =_commandline_dep_string(cc_args, extra_postargs, pp_opts) last_cmdline = lines[-1] if last_cmdline != cmdline: return True contents = ''.join(lines[:-1]) deps = [x for x in shlex.split(contents, posix=True) if x != "\n" and not x.endswith(":")] try: t_obj = os.stat(obj).st_mtime # check if any of the dependencies is newer than the object # the dependencies includes the source used to create the object for f in deps: if os.stat(f).st_mtime > t_obj: return True except OSError: # no object counts as newer (shouldn't happen if dep_file exists) return True return False
[ "def", "_needs_build", "(", "obj", ",", "cc_args", ",", "extra_postargs", ",", "pp_opts", ")", ":", "# defined in unixcompiler.py", "dep_file", "=", "obj", "+", "'.d'", "if", "not", "os", ".", "path", ".", "exists", "(", "dep_file", ")", ":", "return", "True", "# dep_file is a makefile containing 'object: dependencies'", "# formatted like posix shell (spaces escaped, \\ line continuations)", "# the last line contains the compiler commandline arguments as some", "# projects may compile an extension multiple times with different", "# arguments", "with", "open", "(", "dep_file", ",", "\"r\"", ")", "as", "f", ":", "lines", "=", "f", ".", "readlines", "(", ")", "cmdline", "=", "_commandline_dep_string", "(", "cc_args", ",", "extra_postargs", ",", "pp_opts", ")", "last_cmdline", "=", "lines", "[", "-", "1", "]", "if", "last_cmdline", "!=", "cmdline", ":", "return", "True", "contents", "=", "''", ".", "join", "(", "lines", "[", ":", "-", "1", "]", ")", "deps", "=", "[", "x", "for", "x", "in", "shlex", ".", "split", "(", "contents", ",", "posix", "=", "True", ")", "if", "x", "!=", "\"\\n\"", "and", "not", "x", ".", "endswith", "(", "\":\"", ")", "]", "try", ":", "t_obj", "=", "os", ".", "stat", "(", "obj", ")", ".", "st_mtime", "# check if any of the dependencies is newer than the object", "# the dependencies includes the source used to create the object", "for", "f", "in", "deps", ":", "if", "os", ".", "stat", "(", "f", ")", ".", "st_mtime", ">", "t_obj", ":", "return", "True", "except", "OSError", ":", "# no object counts as newer (shouldn't happen if dep_file exists)", "return", "True", "return", "False" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py3/numpy/distutils/ccompiler.py#L37-L84
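The essential technique, comparing the object file's mtime against every dependency recorded in the compiler-emitted `.d` makefile, can be sketched on its own. The command-line comparison the original also performs is omitted here:

```python
import os
import shlex

def deps_newer_than(obj_path):
    """Simplified sketch: True if obj_path should be rebuilt based on its .d file."""
    dep_file = obj_path + '.d'
    if not os.path.exists(dep_file):
        return True
    with open(dep_file) as f:
        lines = f.readlines()
    # Makefile-style 'target: dep1 dep2 \' content with shell escaping; the last
    # line stores the recorded compiler command line and is skipped here.
    contents = ''.join(lines[:-1])
    deps = [tok for tok in shlex.split(contents, posix=True)
            if tok != '\n' and not tok.endswith(':')]
    try:
        t_obj = os.stat(obj_path).st_mtime
        return any(os.stat(dep).st_mtime > t_obj for dep in deps)
    except OSError:
        # A missing dependency counts as needing a rebuild.
        return True
```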
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/distributions/python/ops/wishart.py
python
_WishartLinearOperator.scale_operator
(self)
return self._scale_operator
Wishart distribution scale matrix as an Linear Operator.
Wishart distribution scale matrix as an Linear Operator.
[ "Wishart", "distribution", "scale", "matrix", "as", "an", "Linear", "Operator", "." ]
def scale_operator(self): """Wishart distribution scale matrix as an Linear Operator.""" return self._scale_operator
[ "def", "scale_operator", "(", "self", ")", ":", "return", "self", ".", "_scale_operator" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/contrib/distributions/python/ops/wishart.py#L194-L196
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/stc.py
python
StyledTextCtrl.DocLineFromVisible
(*args, **kwargs)
return _stc.StyledTextCtrl_DocLineFromVisible(*args, **kwargs)
DocLineFromVisible(self, int lineDisplay) -> int Find the document line of a display line taking hidden lines into account.
DocLineFromVisible(self, int lineDisplay) -> int
[ "DocLineFromVisible", "(", "self", "int", "lineDisplay", ")", "-", ">", "int" ]
def DocLineFromVisible(*args, **kwargs): """ DocLineFromVisible(self, int lineDisplay) -> int Find the document line of a display line taking hidden lines into account. """ return _stc.StyledTextCtrl_DocLineFromVisible(*args, **kwargs)
[ "def", "DocLineFromVisible", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_stc", ".", "StyledTextCtrl_DocLineFromVisible", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/stc.py#L3880-L3886
eclipse/sumo
7132a9b8b6eea734bdec38479026b4d8c4336d03
tools/contributed/sumopy/coremodules/network/networkxtools.py
python
Primary.get_priority
(self, is_opp=False)
Returns priority of road. To be overridden.
Returns priority of road.
[ "Returns", "priority", "of", "road", "." ]
def get_priority(self, is_opp=False): """ Returns priority of road. To be overridden. """ if is_opp: lanes = self._lanes_opp else: lanes = self._lanes speed_max = self.get_speed_max() n_lane = len(lanes) # is_residential =... if n_lane == 0: return 0 elif n_lane <= 2: if speed_max <= 51.0/3.6: return 7 elif speed_max <= 111.0/3.6: return 8 else: return 9 elif n_lane == 3: if speed_max <= 51.0/3.6: return 7 elif speed_max <= 91.0/3.6: return 8 elif speed_max <= 111.0/3.6: return 9 else: return 10 elif n_lane > 3: return 10
[ "def", "get_priority", "(", "self", ",", "is_opp", "=", "False", ")", ":", "if", "is_opp", ":", "lanes", "=", "self", ".", "_lanes_opp", "else", ":", "lanes", "=", "self", ".", "_lanes", "speed_max", "=", "self", ".", "get_speed_max", "(", ")", "n_lane", "=", "len", "(", "lanes", ")", "# is_residential =...", "if", "n_lane", "==", "0", ":", "return", "0", "elif", "n_lane", "<=", "2", ":", "if", "speed_max", "<=", "51.0", "/", "3.6", ":", "return", "7", "elif", "speed_max", "<=", "111.0", "/", "3.6", ":", "return", "8", "else", ":", "return", "9", "elif", "n_lane", "==", "3", ":", "if", "speed_max", "<=", "51.0", "/", "3.6", ":", "return", "7", "elif", "speed_max", "<=", "91.0", "/", "3.6", ":", "return", "8", "elif", "speed_max", "<=", "111.0", "/", "3.6", ":", "return", "9", "else", ":", "return", "10", "elif", "n_lane", ">", "3", ":", "return", "10" ]
https://github.com/eclipse/sumo/blob/7132a9b8b6eea734bdec38479026b4d8c4336d03/tools/contributed/sumopy/coremodules/network/networkxtools.py#L1717-L1753
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mailbox.py
python
_singlefileMailbox.add
(self, message)
return self._next_key - 1
Add message and return assigned key.
Add message and return assigned key.
[ "Add", "message", "and", "return", "assigned", "key", "." ]
def add(self, message): """Add message and return assigned key.""" self._lookup() self._toc[self._next_key] = self._append_message(message) self._next_key += 1 # _append_message appends the message to the mailbox file. We # don't need a full rewrite + rename, sync is enough. self._pending_sync = True return self._next_key - 1
[ "def", "add", "(", "self", ",", "message", ")", ":", "self", ".", "_lookup", "(", ")", "self", ".", "_toc", "[", "self", ".", "_next_key", "]", "=", "self", ".", "_append_message", "(", "message", ")", "self", ".", "_next_key", "+=", "1", "# _append_message appends the message to the mailbox file. We", "# don't need a full rewrite + rename, sync is enough.", "self", ".", "_pending_sync", "=", "True", "return", "self", ".", "_next_key", "-", "1" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi-v7a/toolchain/lib/python2.7/mailbox.py#L584-L592
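`_singlefileMailbox.add` is reached through concrete classes such as `mailbox.mbox`; a small usage example (the path is illustrative):

```python
import mailbox
from email.message import Message

msg = Message()
msg['Subject'] = 'hello'
msg.set_payload('body text')

mb = mailbox.mbox('/tmp/example.mbox')
try:
    key = mb.add(msg)             # key assigned to the newly appended message
    print(mb[key]['Subject'])     # 'hello'
finally:
    mb.close()
```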
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/numpy/py2/numpy/core/function_base.py
python
geomspace
(start, stop, num=50, endpoint=True, dtype=None, axis=0)
return result.astype(dtype, copy=False)
Return numbers spaced evenly on a log scale (a geometric progression). This is similar to `logspace`, but with endpoints specified directly. Each output sample is a constant multiple of the previous. .. versionchanged:: 1.16.0 Non-scalar `start` and `stop` are now supported. Parameters ---------- start : array_like The starting value of the sequence. stop : array_like The final value of the sequence, unless `endpoint` is False. In that case, ``num + 1`` values are spaced over the interval in log-space, of which all but the last (a sequence of length `num`) are returned. num : integer, optional Number of samples to generate. Default is 50. endpoint : boolean, optional If true, `stop` is the last sample. Otherwise, it is not included. Default is True. dtype : dtype The type of the output array. If `dtype` is not given, infer the data type from the other input arguments. axis : int, optional The axis in the result to store the samples. Relevant only if start or stop are array-like. By default (0), the samples will be along a new axis inserted at the beginning. Use -1 to get an axis at the end. .. versionadded:: 1.16.0 Returns ------- samples : ndarray `num` samples, equally spaced on a log scale. See Also -------- logspace : Similar to geomspace, but with endpoints specified using log and base. linspace : Similar to geomspace, but with arithmetic instead of geometric progression. arange : Similar to linspace, with the step size specified instead of the number of samples. Notes ----- If the inputs or dtype are complex, the output will follow a logarithmic spiral in the complex plane. (There are an infinite number of spirals passing through two points; the output will follow the shortest such path.) Examples -------- >>> np.geomspace(1, 1000, num=4) array([ 1., 10., 100., 1000.]) >>> np.geomspace(1, 1000, num=3, endpoint=False) array([ 1., 10., 100.]) >>> np.geomspace(1, 1000, num=4, endpoint=False) array([ 1. , 5.62341325, 31.6227766 , 177.827941 ]) >>> np.geomspace(1, 256, num=9) array([ 1., 2., 4., 8., 16., 32., 64., 128., 256.]) Note that the above may not produce exact integers: >>> np.geomspace(1, 256, num=9, dtype=int) array([ 1, 2, 4, 7, 16, 32, 63, 127, 256]) >>> np.around(np.geomspace(1, 256, num=9)).astype(int) array([ 1, 2, 4, 8, 16, 32, 64, 128, 256]) Negative, decreasing, and complex inputs are allowed: >>> np.geomspace(1000, 1, num=4) array([ 1000., 100., 10., 1.]) >>> np.geomspace(-1000, -1, num=4) array([-1000., -100., -10., -1.]) >>> np.geomspace(1j, 1000j, num=4) # Straight line array([ 0. +1.j, 0. +10.j, 0. +100.j, 0.+1000.j]) >>> np.geomspace(-1+0j, 1+0j, num=5) # Circle array([-1.00000000+0.j , -0.70710678+0.70710678j, 0.00000000+1.j , 0.70710678+0.70710678j, 1.00000000+0.j ]) Graphical illustration of ``endpoint`` parameter: >>> import matplotlib.pyplot as plt >>> N = 10 >>> y = np.zeros(N) >>> plt.semilogx(np.geomspace(1, 1000, N, endpoint=True), y + 1, 'o') >>> plt.semilogx(np.geomspace(1, 1000, N, endpoint=False), y + 2, 'o') >>> plt.axis([0.5, 2000, 0, 3]) >>> plt.grid(True, color='0.7', linestyle='-', which='both', axis='both') >>> plt.show()
Return numbers spaced evenly on a log scale (a geometric progression).
[ "Return", "numbers", "spaced", "evenly", "on", "a", "log", "scale", "(", "a", "geometric", "progression", ")", "." ]
def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0): """ Return numbers spaced evenly on a log scale (a geometric progression). This is similar to `logspace`, but with endpoints specified directly. Each output sample is a constant multiple of the previous. .. versionchanged:: 1.16.0 Non-scalar `start` and `stop` are now supported. Parameters ---------- start : array_like The starting value of the sequence. stop : array_like The final value of the sequence, unless `endpoint` is False. In that case, ``num + 1`` values are spaced over the interval in log-space, of which all but the last (a sequence of length `num`) are returned. num : integer, optional Number of samples to generate. Default is 50. endpoint : boolean, optional If true, `stop` is the last sample. Otherwise, it is not included. Default is True. dtype : dtype The type of the output array. If `dtype` is not given, infer the data type from the other input arguments. axis : int, optional The axis in the result to store the samples. Relevant only if start or stop are array-like. By default (0), the samples will be along a new axis inserted at the beginning. Use -1 to get an axis at the end. .. versionadded:: 1.16.0 Returns ------- samples : ndarray `num` samples, equally spaced on a log scale. See Also -------- logspace : Similar to geomspace, but with endpoints specified using log and base. linspace : Similar to geomspace, but with arithmetic instead of geometric progression. arange : Similar to linspace, with the step size specified instead of the number of samples. Notes ----- If the inputs or dtype are complex, the output will follow a logarithmic spiral in the complex plane. (There are an infinite number of spirals passing through two points; the output will follow the shortest such path.) Examples -------- >>> np.geomspace(1, 1000, num=4) array([ 1., 10., 100., 1000.]) >>> np.geomspace(1, 1000, num=3, endpoint=False) array([ 1., 10., 100.]) >>> np.geomspace(1, 1000, num=4, endpoint=False) array([ 1. , 5.62341325, 31.6227766 , 177.827941 ]) >>> np.geomspace(1, 256, num=9) array([ 1., 2., 4., 8., 16., 32., 64., 128., 256.]) Note that the above may not produce exact integers: >>> np.geomspace(1, 256, num=9, dtype=int) array([ 1, 2, 4, 7, 16, 32, 63, 127, 256]) >>> np.around(np.geomspace(1, 256, num=9)).astype(int) array([ 1, 2, 4, 8, 16, 32, 64, 128, 256]) Negative, decreasing, and complex inputs are allowed: >>> np.geomspace(1000, 1, num=4) array([ 1000., 100., 10., 1.]) >>> np.geomspace(-1000, -1, num=4) array([-1000., -100., -10., -1.]) >>> np.geomspace(1j, 1000j, num=4) # Straight line array([ 0. +1.j, 0. +10.j, 0. +100.j, 0.+1000.j]) >>> np.geomspace(-1+0j, 1+0j, num=5) # Circle array([-1.00000000+0.j , -0.70710678+0.70710678j, 0.00000000+1.j , 0.70710678+0.70710678j, 1.00000000+0.j ]) Graphical illustration of ``endpoint`` parameter: >>> import matplotlib.pyplot as plt >>> N = 10 >>> y = np.zeros(N) >>> plt.semilogx(np.geomspace(1, 1000, N, endpoint=True), y + 1, 'o') >>> plt.semilogx(np.geomspace(1, 1000, N, endpoint=False), y + 2, 'o') >>> plt.axis([0.5, 2000, 0, 3]) >>> plt.grid(True, color='0.7', linestyle='-', which='both', axis='both') >>> plt.show() """ start = asanyarray(start) stop = asanyarray(stop) if _nx.any(start == 0) or _nx.any(stop == 0): raise ValueError('Geometric sequence cannot include zero') dt = result_type(start, stop, float(num), _nx.zeros((), dtype)) if dtype is None: dtype = dt else: # complex to dtype('complex128'), for instance dtype = _nx.dtype(dtype) # Promote both arguments to the same dtype in case, for instance, one is # complex and another is negative and log would produce NaN otherwise. # Copy since we may change things in-place further down. start = start.astype(dt, copy=True) stop = stop.astype(dt, copy=True) out_sign = _nx.ones(_nx.broadcast(start, stop).shape, dt) # Avoid negligible real or imaginary parts in output by rotating to # positive real, calculating, then undoing rotation if _nx.issubdtype(dt, _nx.complexfloating): all_imag = (start.real == 0.) & (stop.real == 0.) if _nx.any(all_imag): start[all_imag] = start[all_imag].imag stop[all_imag] = stop[all_imag].imag out_sign[all_imag] = 1j both_negative = (_nx.sign(start) == -1) & (_nx.sign(stop) == -1) if _nx.any(both_negative): _nx.negative(start, out=start, where=both_negative) _nx.negative(stop, out=stop, where=both_negative) _nx.negative(out_sign, out=out_sign, where=both_negative) log_start = _nx.log10(start) log_stop = _nx.log10(stop) result = out_sign * logspace(log_start, log_stop, num=num, endpoint=endpoint, base=10.0, dtype=dtype) if axis != 0: result = _nx.moveaxis(result, 0, axis) return result.astype(dtype, copy=False)
[ "def", "geomspace", "(", "start", ",", "stop", ",", "num", "=", "50", ",", "endpoint", "=", "True", ",", "dtype", "=", "None", ",", "axis", "=", "0", ")", ":", "start", "=", "asanyarray", "(", "start", ")", "stop", "=", "asanyarray", "(", "stop", ")", "if", "_nx", ".", "any", "(", "start", "==", "0", ")", "or", "_nx", ".", "any", "(", "stop", "==", "0", ")", ":", "raise", "ValueError", "(", "'Geometric sequence cannot include zero'", ")", "dt", "=", "result_type", "(", "start", ",", "stop", ",", "float", "(", "num", ")", ",", "_nx", ".", "zeros", "(", "(", ")", ",", "dtype", ")", ")", "if", "dtype", "is", "None", ":", "dtype", "=", "dt", "else", ":", "# complex to dtype('complex128'), for instance", "dtype", "=", "_nx", ".", "dtype", "(", "dtype", ")", "# Promote both arguments to the same dtype in case, for instance, one is", "# complex and another is negative and log would produce NaN otherwise.", "# Copy since we may change things in-place further down.", "start", "=", "start", ".", "astype", "(", "dt", ",", "copy", "=", "True", ")", "stop", "=", "stop", ".", "astype", "(", "dt", ",", "copy", "=", "True", ")", "out_sign", "=", "_nx", ".", "ones", "(", "_nx", ".", "broadcast", "(", "start", ",", "stop", ")", ".", "shape", ",", "dt", ")", "# Avoid negligible real or imaginary parts in output by rotating to", "# positive real, calculating, then undoing rotation", "if", "_nx", ".", "issubdtype", "(", "dt", ",", "_nx", ".", "complexfloating", ")", ":", "all_imag", "=", "(", "start", ".", "real", "==", "0.", ")", "&", "(", "stop", ".", "real", "==", "0.", ")", "if", "_nx", ".", "any", "(", "all_imag", ")", ":", "start", "[", "all_imag", "]", "=", "start", "[", "all_imag", "]", ".", "imag", "stop", "[", "all_imag", "]", "=", "stop", "[", "all_imag", "]", ".", "imag", "out_sign", "[", "all_imag", "]", "=", "1j", "both_negative", "=", "(", "_nx", ".", "sign", "(", "start", ")", "==", "-", "1", ")", "&", "(", "_nx", ".", "sign", "(", "stop", ")", "==", "-", "1", ")", "if", "_nx", ".", "any", "(", "both_negative", ")", ":", "_nx", ".", "negative", "(", "start", ",", "out", "=", "start", ",", "where", "=", "both_negative", ")", "_nx", ".", "negative", "(", "stop", ",", "out", "=", "stop", ",", "where", "=", "both_negative", ")", "_nx", ".", "negative", "(", "out_sign", ",", "out", "=", "out_sign", ",", "where", "=", "both_negative", ")", "log_start", "=", "_nx", ".", "log10", "(", "start", ")", "log_stop", "=", "_nx", ".", "log10", "(", "stop", ")", "result", "=", "out_sign", "*", "logspace", "(", "log_start", ",", "log_stop", ",", "num", "=", "num", ",", "endpoint", "=", "endpoint", ",", "base", "=", "10.0", ",", "dtype", "=", "dtype", ")", "if", "axis", "!=", "0", ":", "result", "=", "_nx", ".", "moveaxis", "(", "result", ",", "0", ",", "axis", ")", "return", "result", ".", "astype", "(", "dtype", ",", "copy", "=", "False", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/numpy/py2/numpy/core/function_base.py#L289-L427
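The docstring already carries runnable examples; the shortest of them, directly usable:

```python
import numpy as np

print(np.geomspace(1, 1000, num=4))
# -> 1., 10., 100., 1000.
print(np.geomspace(1, 1000, num=4, endpoint=False))
# -> 1., 5.62341325, 31.6227766, 177.827941   (stop excluded)
```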
wlanjie/AndroidFFmpeg
7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf
tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/plat-mac/lib-scriptpackages/Terminal/Standard_Suite.py
python
Standard_Suite_Events.quit
(self, _object, _attributes={}, **_arguments)
quit: Quit an application. Required argument: the object for the command Keyword argument saving: Specifies whether changes should be saved before quitting. Keyword argument _attributes: AppleEvent attribute dictionary
quit: Quit an application. Required argument: the object for the command Keyword argument saving: Specifies whether changes should be saved before quitting. Keyword argument _attributes: AppleEvent attribute dictionary
[ "quit", ":", "Quit", "an", "application", ".", "Required", "argument", ":", "the", "object", "for", "the", "command", "Keyword", "argument", "saving", ":", "Specifies", "whether", "changes", "should", "be", "saved", "before", "quitting", ".", "Keyword", "argument", "_attributes", ":", "AppleEvent", "attribute", "dictionary" ]
def quit(self, _object, _attributes={}, **_arguments): """quit: Quit an application. Required argument: the object for the command Keyword argument saving: Specifies whether changes should be saved before quitting. Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'aevt' _subcode = 'quit' aetools.keysubst(_arguments, self._argmap_quit) _arguments['----'] = _object aetools.enumsubst(_arguments, 'savo', _Enum_savo) _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----']
[ "def", "quit", "(", "self", ",", "_object", ",", "_attributes", "=", "{", "}", ",", "*", "*", "_arguments", ")", ":", "_code", "=", "'aevt'", "_subcode", "=", "'quit'", "aetools", ".", "keysubst", "(", "_arguments", ",", "self", ".", "_argmap_quit", ")", "_arguments", "[", "'----'", "]", "=", "_object", "aetools", ".", "enumsubst", "(", "_arguments", ",", "'savo'", ",", "_Enum_savo", ")", "_reply", ",", "_arguments", ",", "_attributes", "=", "self", ".", "send", "(", "_code", ",", "_subcode", ",", "_arguments", ",", "_attributes", ")", "if", "_arguments", ".", "get", "(", "'errn'", ",", "0", ")", ":", "raise", "aetools", ".", "Error", ",", "aetools", ".", "decodeerror", "(", "_arguments", ")", "# XXXX Optionally decode result", "if", "_arguments", ".", "has_key", "(", "'----'", ")", ":", "return", "_arguments", "[", "'----'", "]" ]
https://github.com/wlanjie/AndroidFFmpeg/blob/7baf9122f4b8e1c74e7baf4be5c422c7a5ba5aaf/tools/fdk-aac-build/armeabi/toolchain/lib/python2.7/plat-mac/lib-scriptpackages/Terminal/Standard_Suite.py#L258-L278
astra-toolbox/astra-toolbox
1e7ec8af702e595b76654f2e500f4c00344b273f
python/astra/data2d.py
python
info
()
return d.info()
Print info on 2D objects in memory.
Print info on 2D objects in memory.
[ "Print", "info", "on", "2D", "objects", "in", "memory", "." ]
def info(): """Print info on 2D objects in memory.""" return d.info()
[ "def", "info", "(", ")", ":", "return", "d", ".", "info", "(", ")" ]
https://github.com/astra-toolbox/astra-toolbox/blob/1e7ec8af702e595b76654f2e500f4c00344b273f/python/astra/data2d.py#L136-L138
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/animate.py
python
Animation.GetTransparentColour
(*args, **kwargs)
return _animate.Animation_GetTransparentColour(*args, **kwargs)
GetTransparentColour(self, int frame) -> Colour
GetTransparentColour(self, int frame) -> Colour
[ "GetTransparentColour", "(", "self", "int", "frame", ")", "-", ">", "Colour" ]
def GetTransparentColour(*args, **kwargs): """GetTransparentColour(self, int frame) -> Colour""" return _animate.Animation_GetTransparentColour(*args, **kwargs)
[ "def", "GetTransparentColour", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_animate", ".", "Animation_GetTransparentColour", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/animate.py#L134-L136
AcademySoftwareFoundation/OpenColorIO
73508eb5230374df8d96147a0627c015d359a641
share/nuke/ocionuke/cdl.py
python
export_multiple_to_ccc
(filename = None)
Exported all selected OCIOCDLTransform nodes to a ColorCorrectionCollection XML file (.ccc)
Exported all selected OCIOCDLTransform nodes to a ColorCorrectionCollection XML file (.ccc)
[ "Exported", "all", "selected", "OCIOCDLTransform", "nodes", "to", "a", "ColorCorrectionCollection", "XML", "file", "(", ".", "ccc", ")" ]
def export_multiple_to_ccc(filename = None): """Exported all selected OCIOCDLTransform nodes to a ColorCorrectionCollection XML file (.ccc) """ if filename is None: filename = nuke.getFilename("Color Correction XML file", pattern = "*.cc *.ccc") if filename is None: # User clicked cancel return allcc = [] for node in nuke.selectedNodes("OCIOCDLTransform"): allcc.append(_node_to_cdltransform(node)) xml = _cdltransforms_to_xml(allcc) print "Writing %r, contents:\n%s" % (filename, xml) open(filename, "w").write(xml)
[ "def", "export_multiple_to_ccc", "(", "filename", "=", "None", ")", ":", "if", "filename", "is", "None", ":", "filename", "=", "nuke", ".", "getFilename", "(", "\"Color Correction XML file\"", ",", "pattern", "=", "\"*.cc *.ccc\"", ")", "if", "filename", "is", "None", ":", "# User clicked cancel", "return", "allcc", "=", "[", "]", "for", "node", "in", "nuke", ".", "selectedNodes", "(", "\"OCIOCDLTransform\"", ")", ":", "allcc", ".", "append", "(", "_node_to_cdltransform", "(", "node", ")", ")", "xml", "=", "_cdltransforms_to_xml", "(", "allcc", ")", "print", "\"Writing %r, contents:\\n%s\"", "%", "(", "filename", ",", "xml", ")", "open", "(", "filename", ",", "\"w\"", ")", ".", "write", "(", "xml", ")" ]
https://github.com/AcademySoftwareFoundation/OpenColorIO/blob/73508eb5230374df8d96147a0627c015d359a641/share/nuke/ocionuke/cdl.py#L218-L235
chanyn/3Dpose_ssl
585696676279683a279b1ecca136c0e0d02aef2a
caffe-3dssl/tools/extra/parse_log.py
python
write_csv
(output_filename, dict_list, delimiter, verbose=False)
Write a CSV file
Write a CSV file
[ "Write", "a", "CSV", "file" ]
def write_csv(output_filename, dict_list, delimiter, verbose=False): """Write a CSV file """ if not dict_list: if verbose: print('Not writing %s; no lines to write' % output_filename) return dialect = csv.excel dialect.delimiter = delimiter with open(output_filename, 'w') as f: dict_writer = csv.DictWriter(f, fieldnames=dict_list[0].keys(), dialect=dialect) dict_writer.writeheader() dict_writer.writerows(dict_list) if verbose: print 'Wrote %s' % output_filename
[ "def", "write_csv", "(", "output_filename", ",", "dict_list", ",", "delimiter", ",", "verbose", "=", "False", ")", ":", "if", "not", "dict_list", ":", "if", "verbose", ":", "print", "(", "'Not writing %s; no lines to write'", "%", "output_filename", ")", "return", "dialect", "=", "csv", ".", "excel", "dialect", ".", "delimiter", "=", "delimiter", "with", "open", "(", "output_filename", ",", "'w'", ")", "as", "f", ":", "dict_writer", "=", "csv", ".", "DictWriter", "(", "f", ",", "fieldnames", "=", "dict_list", "[", "0", "]", ".", "keys", "(", ")", ",", "dialect", "=", "dialect", ")", "dict_writer", ".", "writeheader", "(", ")", "dict_writer", ".", "writerows", "(", "dict_list", ")", "if", "verbose", ":", "print", "'Wrote %s'", "%", "output_filename" ]
https://github.com/chanyn/3Dpose_ssl/blob/585696676279683a279b1ecca136c0e0d02aef2a/caffe-3dssl/tools/extra/parse_log.py#L145-L163
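A minimal call; the field names are illustrative stand-ins for the dicts the Caffe log parser produces, and the header row is taken from the first dict's keys:

```python
rows = [
    {'NumIters': 0,   'Seconds': 0.1, 'TrainingLoss': 2.30},
    {'NumIters': 100, 'Seconds': 5.4, 'TrainingLoss': 1.87},
]
# Writes a header line followed by one comma-separated line per dict.
write_csv('train.csv', rows, delimiter=',', verbose=True)
```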
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/python/ops/variables.py
python
Variable.scatter_sub
(self, sparse_delta, use_locking=False)
return state_ops.scatter_sub( self._variable, sparse_delta.indices, sparse_delta.values, use_locking=use_locking)
Subtracts `IndexedSlices` from this variable. This is essentially a shortcut for `scatter_sub(self, sparse_delta.indices, sparse_delta.values)`. Args: sparse_delta: `IndexedSlices` to be subtracted from this variable. use_locking: If `True`, use locking during the operation. Returns: A `Tensor` that will hold the new value of this variable after the scattered subtraction has completed. Raises: ValueError: if `sparse_delta` is not an `IndexedSlices`.
Subtracts `IndexedSlices` from this variable.
[ "Subtracts", "IndexedSlices", "from", "this", "variable", "." ]
def scatter_sub(self, sparse_delta, use_locking=False): """Subtracts `IndexedSlices` from this variable. This is essentially a shortcut for `scatter_sub(self, sparse_delta.indices, sparse_delta.values)`. Args: sparse_delta: `IndexedSlices` to be subtracted from this variable. use_locking: If `True`, use locking during the operation. Returns: A `Tensor` that will hold the new value of this variable after the scattered subtraction has completed. Raises: ValueError: if `sparse_delta` is not an `IndexedSlices`. """ if not isinstance(sparse_delta, ops.IndexedSlices): raise ValueError("sparse_delta is not IndexedSlices: %s" % sparse_delta) return state_ops.scatter_sub( self._variable, sparse_delta.indices, sparse_delta.values, use_locking=use_locking)
[ "def", "scatter_sub", "(", "self", ",", "sparse_delta", ",", "use_locking", "=", "False", ")", ":", "if", "not", "isinstance", "(", "sparse_delta", ",", "ops", ".", "IndexedSlices", ")", ":", "raise", "ValueError", "(", "\"sparse_delta is not IndexedSlices: %s\"", "%", "sparse_delta", ")", "return", "state_ops", ".", "scatter_sub", "(", "self", ".", "_variable", ",", "sparse_delta", ".", "indices", ",", "sparse_delta", ".", "values", ",", "use_locking", "=", "use_locking", ")" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/python/ops/variables.py#L559-L582
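A sketch of how the method is typically driven, assuming the TF 1.x graph/session API this fork is built on:

```python
import tensorflow as tf

v = tf.Variable([1.0, 2.0, 3.0, 4.0])
delta = tf.IndexedSlices(values=tf.constant([10.0, 20.0]),
                         indices=tf.constant([0, 2]))
update = v.scatter_sub(delta)   # subtract 10 at index 0 and 20 at index 2

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(update))     # approximately [-9., 2., -17., 4.]
```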
openalpr/openalpr
736ab0e608cf9b20d92f36a873bb1152240daa98
src/bindings/python/openalpr/openalpr.py
python
Alpr.set_prewarp
(self, prewarp)
Updates the prewarp configuration used to skew images in OpenALPR before processing. :param prewarp: A unicode/ascii string (Python 2/3) or bytes array (Python 3) :return: None
Updates the prewarp configuration used to skew images in OpenALPR before processing.
[ "Updates", "the", "prewarp", "configuration", "used", "to", "skew", "images", "in", "OpenALPR", "before", "processing", "." ]
def set_prewarp(self, prewarp): """ Updates the prewarp configuration used to skew images in OpenALPR before processing. :param prewarp: A unicode/ascii string (Python 2/3) or bytes array (Python 3) :return: None """ prewarp = _convert_to_charp(prewarp) self._set_prewarp_func(self.alpr_pointer, prewarp)
[ "def", "set_prewarp", "(", "self", ",", "prewarp", ")", ":", "prewarp", "=", "_convert_to_charp", "(", "prewarp", ")", "self", ".", "_set_prewarp_func", "(", "self", ".", "alpr_pointer", ",", "prewarp", ")" ]
https://github.com/openalpr/openalpr/blob/736ab0e608cf9b20d92f36a873bb1152240daa98/src/bindings/python/openalpr/openalpr.py#L225-L234
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/distutils/ccompiler.py
python
CCompiler_spawn
(self, cmd, display=None)
Execute a command in a sub-process. Parameters ---------- cmd : str The command to execute. display : str or sequence of str, optional The text to add to the log file kept by `numpy.distutils`. If not given, `display` is equal to `cmd`. Returns ------- None Raises ------ DistutilsExecError If the command failed, i.e. the exit status was not 0.
Execute a command in a sub-process.
[ "Execute", "a", "command", "in", "a", "sub", "-", "process", "." ]
def CCompiler_spawn(self, cmd, display=None): """ Execute a command in a sub-process. Parameters ---------- cmd : str The command to execute. display : str or sequence of str, optional The text to add to the log file kept by `numpy.distutils`. If not given, `display` is equal to `cmd`. Returns ------- None Raises ------ DistutilsExecError If the command failed, i.e. the exit status was not 0. """ if display is None: display = cmd if is_sequence(display): display = ' '.join(list(display)) log.info(display) s, o = exec_command(cmd) if s: if is_sequence(cmd): cmd = ' '.join(list(cmd)) try: print(o) except UnicodeError: # When installing through pip, `o` can contain non-ascii chars pass if re.search('Too many open files', o): msg = '\nTry rerunning setup command until build succeeds.' else: msg = '' raise DistutilsExecError('Command "%s" failed with exit status %d%s' % (cmd, s, msg))
[ "def", "CCompiler_spawn", "(", "self", ",", "cmd", ",", "display", "=", "None", ")", ":", "if", "display", "is", "None", ":", "display", "=", "cmd", "if", "is_sequence", "(", "display", ")", ":", "display", "=", "' '", ".", "join", "(", "list", "(", "display", ")", ")", "log", ".", "info", "(", "display", ")", "s", ",", "o", "=", "exec_command", "(", "cmd", ")", "if", "s", ":", "if", "is_sequence", "(", "cmd", ")", ":", "cmd", "=", "' '", ".", "join", "(", "list", "(", "cmd", ")", ")", "try", ":", "print", "(", "o", ")", "except", "UnicodeError", ":", "# When installing through pip, `o` can contain non-ascii chars", "pass", "if", "re", ".", "search", "(", "'Too many open files'", ",", "o", ")", ":", "msg", "=", "'\\nTry rerunning setup command until build succeeds.'", "else", ":", "msg", "=", "''", "raise", "DistutilsExecError", "(", "'Command \"%s\" failed with exit status %d%s'", "%", "(", "cmd", ",", "s", ",", "msg", ")", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/third_party/lib_x86_64/python2.7/dist-packages/numpy/distutils/ccompiler.py#L32-L72
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/python/training/server_lib.py
python
ClusterSpec.as_cluster_def
(self)
return self._cluster_def
Returns a `tf.train.ClusterDef` protocol buffer based on this cluster.
Returns a `tf.train.ClusterDef` protocol buffer based on this cluster.
[ "Returns", "a", "tf", ".", "train", ".", "ClusterDef", "protocol", "buffer", "based", "on", "this", "cluster", "." ]
def as_cluster_def(self): """Returns a `tf.train.ClusterDef` protocol buffer based on this cluster.""" return self._cluster_def
[ "def", "as_cluster_def", "(", "self", ")", ":", "return", "self", ".", "_cluster_def" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/python/training/server_lib.py#L344-L346
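Typical construction of the ClusterSpec that backs this accessor (host names are illustrative):

```python
import tensorflow as tf

cluster = tf.train.ClusterSpec({
    "worker": ["worker0.example.com:2222", "worker1.example.com:2222"],
    "ps": ["ps0.example.com:2222"],
})
cluster_def = cluster.as_cluster_def()   # a tf.train.ClusterDef protocol buffer
print(cluster_def)
```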
microsoft/EdgeML
ef9f8a77f096acbdeb941014791f8eda1c1bc35b
tf/edgeml_tf/trainer/emirnnTrainer.py
python
EMI_Driver.assignToGraph
(self, initVarList)
This method should deal with restoring the entire graph now
This method should deal with restoring the entire graph now
[ "This", "method", "should", "deal", "with", "restoring", "the", "entire", "graph", "now" ]
def assignToGraph(self, initVarList): ''' This method should deal with restoring the entire graph now''' raise NotImplementedError()
[ "def", "assignToGraph", "(", "self", ",", "initVarList", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/microsoft/EdgeML/blob/ef9f8a77f096acbdeb941014791f8eda1c1bc35b/tf/edgeml_tf/trainer/emirnnTrainer.py#L341-L345
xapian/xapian
2b803ea5e3904a6e0cd7d111b2ff38a704c21041
xapian-maintainer-tools/buildbot/scripts/get_tarballs.py
python
get_archive_links
(url, archives)
return links
Get the links to the archive files.
Get the links to the archive files.
[ "Get", "the", "links", "to", "the", "archive", "files", "." ]
def get_archive_links(url, archives): """Get the links to the archive files. """ print("Getting links from '%s'" % url) fd = u.urlopen(url) html = fd.read() fd.close() max_revision, links = parsehtml(html, archives) return links
[ "def", "get_archive_links", "(", "url", ",", "archives", ")", ":", "print", "(", "\"Getting links from '%s'\"", "%", "url", ")", "fd", "=", "u", ".", "urlopen", "(", "url", ")", "html", "=", "fd", ".", "read", "(", ")", "fd", ".", "close", "(", ")", "max_revision", ",", "links", "=", "parsehtml", "(", "html", ",", "archives", ")", "return", "links" ]
https://github.com/xapian/xapian/blob/2b803ea5e3904a6e0cd7d111b2ff38a704c21041/xapian-maintainer-tools/buildbot/scripts/get_tarballs.py#L63-L72
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/platform.py
python
node
()
return uname().node
Returns the computer's network name (which may not be fully qualified) An empty string is returned if the value cannot be determined.
Returns the computer's network name (which may not be fully qualified)
[ "Returns", "the", "computer", "s", "network", "name", "(", "which", "may", "not", "be", "fully", "qualified", ")" ]
def node(): """ Returns the computer's network name (which may not be fully qualified) An empty string is returned if the value cannot be determined. """ return uname().node
[ "def", "node", "(", ")", ":", "return", "uname", "(", ")", ".", "node" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/platform.py#L1070-L1078
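Usage is a one-liner:

```python
import platform

print(platform.node())   # e.g. 'build-host-01'; an empty string if it cannot be determined
```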
deepmind/open_spiel
4ca53bea32bb2875c7385d215424048ae92f78c8
open_spiel/python/algorithms/policy_utils.py
python
policy_to_dict
(player_policy, game, all_states=None, state_to_information_state=None)
return tabular_policy
Converts a Policy instance into a tabular policy represented as a dict. This is compatible with the C++ TabularExploitability code (i.e. pyspiel.exploitability, pyspiel.TabularBestResponse, etc.). While you do not have to pass the all_states and state_to_information_state arguments, creating them outside of this funciton will speed your code up dramatically. Args: player_policy: The policy you want to convert to a dict. game: The game the policy is for. all_states: The result of calling get_all_states.get_all_states. Can be cached for improved performance. state_to_information_state: A dict mapping str(state) to state.information_state for every state in the game. Can be cached for improved performance. Returns: A dictionary version of player_policy that can be passed to the C++ TabularBestResponse, Exploitability, and BestResponse functions/classes.
Converts a Policy instance into a tabular policy represented as a dict.
[ "Converts", "a", "Policy", "instance", "into", "a", "tabular", "policy", "represented", "as", "a", "dict", "." ]
def policy_to_dict(player_policy, game, all_states=None, state_to_information_state=None): """Converts a Policy instance into a tabular policy represented as a dict. This is compatible with the C++ TabularExploitability code (i.e. pyspiel.exploitability, pyspiel.TabularBestResponse, etc.). While you do not have to pass the all_states and state_to_information_state arguments, creating them outside of this funciton will speed your code up dramatically. Args: player_policy: The policy you want to convert to a dict. game: The game the policy is for. all_states: The result of calling get_all_states.get_all_states. Can be cached for improved performance. state_to_information_state: A dict mapping str(state) to state.information_state for every state in the game. Can be cached for improved performance. Returns: A dictionary version of player_policy that can be passed to the C++ TabularBestResponse, Exploitability, and BestResponse functions/classes. """ if all_states is None: all_states = get_all_states.get_all_states( game, depth_limit=-1, include_terminals=False, include_chance_states=False) state_to_information_state = { state: all_states[state].information_state_string() for state in all_states } tabular_policy = dict() for state in all_states: information_state = state_to_information_state[state] tabular_policy[information_state] = list( player_policy.action_probabilities(all_states[state]).items()) return tabular_policy
[ "def", "policy_to_dict", "(", "player_policy", ",", "game", ",", "all_states", "=", "None", ",", "state_to_information_state", "=", "None", ")", ":", "if", "all_states", "is", "None", ":", "all_states", "=", "get_all_states", ".", "get_all_states", "(", "game", ",", "depth_limit", "=", "-", "1", ",", "include_terminals", "=", "False", ",", "include_chance_states", "=", "False", ")", "state_to_information_state", "=", "{", "state", ":", "all_states", "[", "state", "]", ".", "information_state_string", "(", ")", "for", "state", "in", "all_states", "}", "tabular_policy", "=", "dict", "(", ")", "for", "state", "in", "all_states", ":", "information_state", "=", "state_to_information_state", "[", "state", "]", "tabular_policy", "[", "information_state", "]", "=", "list", "(", "player_policy", ".", "action_probabilities", "(", "all_states", "[", "state", "]", ")", ".", "items", "(", ")", ")", "return", "tabular_policy" ]
https://github.com/deepmind/open_spiel/blob/4ca53bea32bb2875c7385d215424048ae92f78c8/open_spiel/python/algorithms/policy_utils.py#L20-L61
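A hedged end-to-end sketch on a small game; it assumes the usual open_spiel companions of this module (`pyspiel` and `open_spiel.python.policy`):

```python
import pyspiel
from open_spiel.python import policy
from open_spiel.python.algorithms import policy_utils

game = pyspiel.load_game("kuhn_poker")
uniform = policy.TabularPolicy(game)              # uniform random by default
tabular = policy_utils.policy_to_dict(uniform, game)

# Keys are information-state strings; values are (action, probability) pairs.
some_infostate = next(iter(tabular))
print(some_infostate, tabular[some_infostate])
```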
baidu-research/tensorflow-allreduce
66d5b855e90b0949e9fa5cca5599fd729a70e874
tensorflow/contrib/tpu/python/tpu/tpu.py
python
outside_all_rewrites
()
Experimental API to 'break out' of a tpu.rewrite() (or shard(), etc.).
Experimental API to 'break out' of a tpu.rewrite() (or shard(), etc.).
[ "Experimental", "API", "to", "break", "out", "of", "a", "tpu", ".", "rewrite", "()", "(", "or", "shard", "()", "etc", ".", ")", "." ]
def outside_all_rewrites(): """Experimental API to 'break out' of a tpu.rewrite() (or shard(), etc.).""" with ops.control_dependencies(None): yield
[ "def", "outside_all_rewrites", "(", ")", ":", "with", "ops", ".", "control_dependencies", "(", "None", ")", ":", "yield" ]
https://github.com/baidu-research/tensorflow-allreduce/blob/66d5b855e90b0949e9fa5cca5599fd729a70e874/tensorflow/contrib/tpu/python/tpu/tpu.py#L98-L101
trilinos/Trilinos
6168be6dd51e35e1cd681e9c4b24433e709df140
packages/seacas/scripts/exomerge3.py
python
ExodusModel._get_displacement_field_prefix
(self)
return prefix
Return the prefix of the displacement field. If no displacement field exists, this will return the default prefix.
Return the prefix of the displacement field.
[ "Return", "the", "prefix", "of", "the", "displacement", "field", "." ]
def _get_displacement_field_prefix(self): """ Return the prefix of the displacement field. If no displacement field exists, this will return the default prefix. """ prefix = 'disp' for node_field_name in self.get_node_field_names(): # if it doesn't end in "_x", it's not a prefix if len(node_field_name) < 3 or node_field_name[-2] != '_': continue # check against acceptable names this_prefix = node_field_name[:-2] if self._is_displacement_field_prefix(this_prefix): prefix = this_prefix return prefix
[ "def", "_get_displacement_field_prefix", "(", "self", ")", ":", "prefix", "=", "'disp'", "for", "node_field_name", "in", "self", ".", "get_node_field_names", "(", ")", ":", "# if it doesn't end in \"_x\", it's not a prefix", "if", "len", "(", "node_field_name", ")", "<", "3", "or", "node_field_name", "[", "-", "2", "]", "!=", "'_'", ":", "continue", "# check against acceptable names", "this_prefix", "=", "node_field_name", "[", ":", "-", "2", "]", "if", "self", ".", "_is_displacement_field_prefix", "(", "this_prefix", ")", ":", "prefix", "=", "this_prefix", "return", "prefix" ]
https://github.com/trilinos/Trilinos/blob/6168be6dd51e35e1cd681e9c4b24433e709df140/packages/seacas/scripts/exomerge3.py#L1275-L1291
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/gtk/_core.py
python
FileSystemHandler.GetLeftLocation
(*args, **kwargs)
return _core_.FileSystemHandler_GetLeftLocation(*args, **kwargs)
GetLeftLocation(String location) -> String
GetLeftLocation(String location) -> String
[ "GetLeftLocation", "(", "String", "location", ")", "-", ">", "String" ]
def GetLeftLocation(*args, **kwargs): """GetLeftLocation(String location) -> String""" return _core_.FileSystemHandler_GetLeftLocation(*args, **kwargs)
[ "def", "GetLeftLocation", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "FileSystemHandler_GetLeftLocation", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/gtk/_core.py#L2365-L2367
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_misc.py
python
StandardPaths.GetExecutablePath
(*args, **kwargs)
return _misc_.StandardPaths_GetExecutablePath(*args, **kwargs)
GetExecutablePath(self) -> String Return the path (directory+filename) of the running executable or an empty string if it couldn't be determined. The path is returned as an absolute path whenever possible.
GetExecutablePath(self) -> String
[ "GetExecutablePath", "(", "self", ")", "-", ">", "String" ]
def GetExecutablePath(*args, **kwargs): """ GetExecutablePath(self) -> String Return the path (directory+filename) of the running executable or an empty string if it couldn't be determined. The path is returned as an absolute path whenever possible. """ return _misc_.StandardPaths_GetExecutablePath(*args, **kwargs)
[ "def", "GetExecutablePath", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_misc_", ".", "StandardPaths_GetExecutablePath", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_misc.py#L6299-L6307
apple/turicreate
cce55aa5311300e3ce6af93cb45ba791fd1bdf49
deps/src/libxml2-2.9.1/python/libxml2.py
python
parserCtxt.parserHandleReference
(self)
TODO: Remove, now deprecated ... the test is done directly in the content parsing routines. [67] Reference ::= EntityRef | CharRef [68] EntityRef ::= '&' Name ';' [ WFC: Entity Declared ] the Name given in the entity reference must match that in an entity declaration, except that well-formed documents need not declare any of the following entities: amp, lt, gt, apos, quot. [ WFC: Parsed Entity ] An entity reference must not contain the name of an unparsed entity [66] CharRef ::= '&#' [0-9]+ ';' | '&#x' [0-9a-fA-F]+ ';' A PEReference may have been detected in the current input stream the handling is done accordingly to http://www.w3.org/TR/REC-xml#entproc
TODO: Remove, now deprecated ... the test is done directly in the content parsing routines. [67] Reference ::= EntityRef | CharRef [68] EntityRef ::= '&' Name ';' [ WFC: Entity Declared ] the Name given in the entity reference must match that in an entity declaration, except that well-formed documents need not declare any of the following entities: amp, lt, gt, apos, quot. [ WFC: Parsed Entity ] An entity reference must not contain the name of an unparsed entity [66] CharRef ::= '&#' [0-9]+ ';' | '&#x' [0-9a-fA-F]+ ';' A PEReference may have been detected in the current input stream the handling is done accordingly to http://www.w3.org/TR/REC-xml#entproc
[ "TODO", ":", "Remove", "now", "deprecated", "...", "the", "test", "is", "done", "directly", "in", "the", "content", "parsing", "routines", ".", "[", "67", "]", "Reference", "::", "=", "EntityRef", "|", "CharRef", "[", "68", "]", "EntityRef", "::", "=", "&", "Name", ";", "[", "WFC", ":", "Entity", "Declared", "]", "the", "Name", "given", "in", "the", "entity", "reference", "must", "match", "that", "in", "an", "entity", "declaration", "except", "that", "well", "-", "formed", "documents", "need", "not", "declare", "any", "of", "the", "following", "entities", ":", "amp", "lt", "gt", "apos", "quot", ".", "[", "WFC", ":", "Parsed", "Entity", "]", "An", "entity", "reference", "must", "not", "contain", "the", "name", "of", "an", "unparsed", "entity", "[", "66", "]", "CharRef", "::", "=", "&#", "[", "0", "-", "9", "]", "+", ";", "|", "&#x", "[", "0", "-", "9a", "-", "fA", "-", "F", "]", "+", ";", "A", "PEReference", "may", "have", "been", "detected", "in", "the", "current", "input", "stream", "the", "handling", "is", "done", "accordingly", "to", "http", ":", "//", "www", ".", "w3", ".", "org", "/", "TR", "/", "REC", "-", "xml#entproc" ]
def parserHandleReference(self): """TODO: Remove, now deprecated ... the test is done directly in the content parsing routines. [67] Reference ::= EntityRef | CharRef [68] EntityRef ::= '&' Name ';' [ WFC: Entity Declared ] the Name given in the entity reference must match that in an entity declaration, except that well-formed documents need not declare any of the following entities: amp, lt, gt, apos, quot. [ WFC: Parsed Entity ] An entity reference must not contain the name of an unparsed entity [66] CharRef ::= '&#' [0-9]+ ';' | '&#x' [0-9a-fA-F]+ ';' A PEReference may have been detected in the current input stream the handling is done accordingly to http://www.w3.org/TR/REC-xml#entproc """ libxml2mod.xmlParserHandleReference(self._o)
[ "def", "parserHandleReference", "(", "self", ")", ":", "libxml2mod", ".", "xmlParserHandleReference", "(", "self", ".", "_o", ")" ]
https://github.com/apple/turicreate/blob/cce55aa5311300e3ce6af93cb45ba791fd1bdf49/deps/src/libxml2-2.9.1/python/libxml2.py#L5507-L5520
ApolloAuto/apollo-platform
86d9dc6743b496ead18d597748ebabd34a513289
ros/ros_comm/rosmaster/src/rosmaster/master_api.py
python
ROSMasterHandler._notify
(self, registrations, task, key, value, node_apis)
Generic implementation of callback notification @param registrations: Registrations @type registrations: L{Registrations} @param task: task to queue @type task: fn @param key: registration key @type key: str @param value: value to pass to task @type value: Any
Generic implementation of callback notification
[ "Generic", "implementation", "of", "callback", "notification" ]
def _notify(self, registrations, task, key, value, node_apis): """ Generic implementation of callback notification @param registrations: Registrations @type registrations: L{Registrations} @param task: task to queue @type task: fn @param key: registration key @type key: str @param value: value to pass to task @type value: Any """ # cache thread_pool for thread safety thread_pool = self.thread_pool if not thread_pool: return try: for node_api in node_apis: # use the api as a marker so that we limit one thread per subscriber thread_pool.queue_task(node_api, task, (node_api, key, value)) except KeyError: _logger.warn('subscriber data stale (key [%s], listener [%s]): node API unknown'%(key, s))
[ "def", "_notify", "(", "self", ",", "registrations", ",", "task", ",", "key", ",", "value", ",", "node_apis", ")", ":", "# cache thread_pool for thread safety", "thread_pool", "=", "self", ".", "thread_pool", "if", "not", "thread_pool", ":", "return", "try", ":", "for", "node_api", "in", "node_apis", ":", "# use the api as a marker so that we limit one thread per subscriber", "thread_pool", ".", "queue_task", "(", "node_api", ",", "task", ",", "(", "node_api", ",", "key", ",", "value", ")", ")", "except", "KeyError", ":", "_logger", ".", "warn", "(", "'subscriber data stale (key [%s], listener [%s]): node API unknown'", "%", "(", "key", ",", "s", ")", ")" ]
https://github.com/ApolloAuto/apollo-platform/blob/86d9dc6743b496ead18d597748ebabd34a513289/ros/ros_comm/rosmaster/src/rosmaster/master_api.py#L515-L537
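The `_notify` body above queues one task per subscriber XML-RPC API on a shared thread pool; note that its `except KeyError` branch formats an undefined name `s`, so the warning itself would fail with a NameError if it were ever reached. A minimal standalone sketch of the same fan-out pattern, using Python's stdlib executor instead of rosmaster's thread pool and logging `node_api` instead (all names below are illustrative, not part of rosmaster):

    from concurrent.futures import ThreadPoolExecutor
    import logging

    logging.basicConfig(level=logging.INFO)
    _logger = logging.getLogger("notify")
    pool = ThreadPoolExecutor(max_workers=4)

    def notify_task(node_api, key, value):
        # stand-in for the real publisher/service update sent to the node's API
        _logger.info("notify %s: %s=%r", node_api, key, value)

    def notify(node_apis, task, key, value):
        for node_api in node_apis:
            try:
                # one queued task per subscriber API, mirroring queue_task(node_api, ...)
                pool.submit(task, node_api, key, value)
            except KeyError:
                _logger.warning("subscriber data stale (key [%s], listener [%s])", key, node_api)

    notify(["http://host:1234/"], notify_task, "/chatter", ["http://pub:5678/"])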
Xilinx/Vitis-AI
fc74d404563d9951b57245443c73bef389f3657f
tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_ops.py
python
to_bfloat16
(x, name="ToBFloat16")
return cast(x, dtypes.bfloat16, name=name)
Casts a tensor to type `bfloat16`. Args: x: A `Tensor` or `SparseTensor` or `IndexedSlices`. name: A name for the operation (optional). Returns: A `Tensor` or `SparseTensor` or `IndexedSlices` with same shape as `x` with type `bfloat16`. Raises: TypeError: If `x` cannot be cast to the `bfloat16`.
Casts a tensor to type `bfloat16`.
[ "Casts", "a", "tensor", "to", "type", "bfloat16", "." ]
def to_bfloat16(x, name="ToBFloat16"): """Casts a tensor to type `bfloat16`. Args: x: A `Tensor` or `SparseTensor` or `IndexedSlices`. name: A name for the operation (optional). Returns: A `Tensor` or `SparseTensor` or `IndexedSlices` with same shape as `x` with type `bfloat16`. Raises: TypeError: If `x` cannot be cast to the `bfloat16`. """ return cast(x, dtypes.bfloat16, name=name)
[ "def", "to_bfloat16", "(", "x", ",", "name", "=", "\"ToBFloat16\"", ")", ":", "return", "cast", "(", "x", ",", "dtypes", ".", "bfloat16", ",", "name", "=", "name", ")" ]
https://github.com/Xilinx/Vitis-AI/blob/fc74d404563d9951b57245443c73bef389f3657f/tools/Vitis-AI-Quantizer/vai_q_tensorflow1.x/tensorflow/python/ops/math_ops.py#L821-L835
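`to_bfloat16` is a thin wrapper over `cast`, so the equivalent call through the public API is `tf.cast` with the `bfloat16` dtype. A small usage sketch (written against a current TensorFlow, where the deprecated `to_bfloat16` alias may no longer be exposed):

    import tensorflow as tf

    x = tf.constant([1.0, 2.5, -3.0])
    y = tf.cast(x, tf.bfloat16, name="ToBFloat16")   # same effect as to_bfloat16(x)
    print(y.dtype)                                   # <dtype: 'bfloat16'>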
neoml-lib/neoml
a0d370fba05269a1b2258cef126f77bbd2054a3e
NeoML/Python/neoml/Blob.py
python
Blob.asarray
(self, copy=False)
return np.array(cpu_blob._internal, copy=False)
Returns the contents of the blob as a multi-dimensional array, keeping only those dimensions that are more than 1 element long. If all dimensions are 1, the blob will be a one-element array. :param copy: if `True`, the data will be copied. If `False`, the array may share the memory buffer with the blob if possible and only provide more convenient access to the same data. Not copying may be impossible if the blob is in GPU memory. :type copy: bool, default=False
Returns the contents of the blob as a multi-dimensional array, keeping only those dimensions that are more than 1 element long. If all dimensions are 1, the blob will be a one-element array.
[ "Returns", "the", "contents", "of", "the", "blob", "as", "a", "multi", "-", "dimensional", "array", "keeping", "only", "those", "dimensions", "that", "are", "more", "than", "1", "element", "long", ".", "If", "all", "dimensions", "are", "1", "the", "blob", "will", "be", "a", "one", "-", "element", "array", "." ]
def asarray(self, copy=False): """Returns the contents of the blob as a multi-dimensional array, keeping only those dimensions that are more than 1 element long. If all dimensions are 1, the blob will be a one-element array. :param copy: if `True`, the data will be copied. If `False`, the array may share the memory buffer with the blob if possible and only provide more convenient access to the same data. Not copying may be impossible if the blob is in GPU memory. :type copy: bool, default=False """ if type(self.math_engine) is MathEngine.CpuMathEngine: return np.array(self._internal, copy=copy) cpu_blob = self.copy(MathEngine.default_math_engine()) return np.array(cpu_blob._internal, copy=False)
[ "def", "asarray", "(", "self", ",", "copy", "=", "False", ")", ":", "if", "type", "(", "self", ".", "math_engine", ")", "is", "MathEngine", ".", "CpuMathEngine", ":", "return", "np", ".", "array", "(", "self", ".", "_internal", ",", "copy", "=", "copy", ")", "cpu_blob", "=", "self", ".", "copy", "(", "MathEngine", ".", "default_math_engine", "(", ")", ")", "return", "np", ".", "array", "(", "cpu_blob", ".", "_internal", ",", "copy", "=", "False", ")" ]
https://github.com/neoml-lib/neoml/blob/a0d370fba05269a1b2258cef126f77bbd2054a3e/NeoML/Python/neoml/Blob.py#L109-L123
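The `copy` flag follows the NumPy convention the method delegates to: without a copy, the returned array may alias the blob's CPU buffer. The snippet below only illustrates that aliasing behaviour with plain NumPy; it does not touch the NeoML API itself:

    import numpy as np

    backing = np.arange(6, dtype=np.float32)
    view = np.array(backing, copy=False)   # shares memory, like asarray(copy=False) on a CPU blob
    clone = np.array(backing, copy=True)   # independent buffer, like asarray(copy=True)

    backing[0] = 42.0
    print(view[0], clone[0])               # 42.0 0.0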
idaholab/moose
9eeebc65e098b4c30f8205fb41591fd5b61eb6ff
python/peacock/base/Preferences.py
python
Preferences.addInt
(self, name, caption, default, min_val, max_val, tooltip)
Convenience function to add an integer preference.
Convenience function to add an integer preference.
[ "Convenience", "function", "to", "add", "an", "integer", "preference", "." ]
def addInt(self, name, caption, default, min_val, max_val, tooltip): """ Convenience function to add an integer preference. """ self.addWidget(IntPreferenceWidget(name, caption, default, min_val, max_val, tooltip, self._key))
[ "def", "addInt", "(", "self", ",", "name", ",", "caption", ",", "default", ",", "min_val", ",", "max_val", ",", "tooltip", ")", ":", "self", ".", "addWidget", "(", "IntPreferenceWidget", "(", "name", ",", "caption", ",", "default", ",", "min_val", ",", "max_val", ",", "tooltip", ",", "self", ".", "_key", ")", ")" ]
https://github.com/idaholab/moose/blob/9eeebc65e098b4c30f8205fb41591fd5b61eb6ff/python/peacock/base/Preferences.py#L195-L199
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/catapult/third_party/mapreduce/mapreduce/hooks.py
python
Hooks.enqueue_done_task
(self, task, queue_name)
Enqueues a task that is triggered when the mapreduce completes. This hook will be called within a transaction scope. Hook should add task transactionally. Args: task: A taskqueue.Task that must be queued in order for the client to be notified when the mapreduce is complete. queue_name: The queue where the task should be run e.g. "default". Raises: NotImplementedError: to indicate that the default mapreduce notification strategy should be used.
Enqueues a task that is triggered when the mapreduce completes.
[ "Enqueues", "a", "task", "that", "is", "triggered", "when", "the", "mapreduce", "completes", "." ]
def enqueue_done_task(self, task, queue_name): """Enqueues a task that is triggered when the mapreduce completes. This hook will be called within a transaction scope. Hook should add task transactionally. Args: task: A taskqueue.Task that must be queued in order for the client to be notified when the mapreduce is complete. queue_name: The queue where the task should be run e.g. "default". Raises: NotImplementedError: to indicate that the default mapreduce notification strategy should be used. """ raise NotImplementedError()
[ "def", "enqueue_done_task", "(", "self", ",", "task", ",", "queue_name", ")", ":", "raise", "NotImplementedError", "(", ")" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/catapult/third_party/mapreduce/mapreduce/hooks.py#L70-L85
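Since the base implementation raises NotImplementedError, clients opt in by subclassing and adding the task inside the surrounding transaction. A hedged sketch of such an override, assuming the classic App Engine taskqueue API and that this module is importable as `mapreduce.hooks`:

    from mapreduce import hooks

    class TransactionalDoneHooks(hooks.Hooks):
        def enqueue_done_task(self, task, queue_name):
            # Task.add joins the ambient datastore transaction when transactional=True,
            # satisfying the "hook should add task transactionally" contract.
            task.add(queue_name=queue_name, transactional=True)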
rapidsai/cudf
d5b2448fc69f17509304d594f029d0df56984962
python/cudf/cudf/core/series.py
python
_append_new_row_inplace
(col: ColumnLike, value: ScalarLike)
Append a scalar `value` to the end of `col` inplace. Cast to common type if possible
Append a scalar `value` to the end of `col` inplace. Cast to common type if possible
[ "Append", "a", "scalar", "value", "to", "the", "end", "of", "col", "inplace", ".", "Cast", "to", "common", "type", "if", "possible" ]
def _append_new_row_inplace(col: ColumnLike, value: ScalarLike): """Append a scalar `value` to the end of `col` inplace. Cast to common type if possible """ to_type = find_common_type([type(value), col.dtype]) val_col = as_column(value, dtype=to_type) old_col = col.astype(to_type) col._mimic_inplace(concat_columns([old_col, val_col]), inplace=True)
[ "def", "_append_new_row_inplace", "(", "col", ":", "ColumnLike", ",", "value", ":", "ScalarLike", ")", ":", "to_type", "=", "find_common_type", "(", "[", "type", "(", "value", ")", ",", "col", ".", "dtype", "]", ")", "val_col", "=", "as_column", "(", "value", ",", "dtype", "=", "to_type", ")", "old_col", "=", "col", ".", "astype", "(", "to_type", ")", "col", ".", "_mimic_inplace", "(", "concat_columns", "(", "[", "old_col", ",", "val_col", "]", ")", ",", "inplace", "=", "True", ")" ]
https://github.com/rapidsai/cudf/blob/d5b2448fc69f17509304d594f029d0df56984962/python/cudf/cudf/core/series.py#L86-L94
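`_append_new_row_inplace` is internal; through the public API the same append-with-type-promotion can be expressed with `cudf.concat`. A sketch, assuming a CUDA-capable environment:

    import cudf

    s = cudf.Series([1, 2, 3])                                   # int64
    s = cudf.concat([s, cudf.Series([4.5])], ignore_index=True)  # promoted to a common type
    print(s.dtype)                                               # float64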
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python3/src/Lib/lib2to3/fixer_base.py
python
BaseFix.new_name
(self, template="xxx_todo_changeme")
return name
Return a string suitable for use as an identifier The new name is guaranteed not to conflict with other identifiers.
Return a string suitable for use as an identifier
[ "Return", "a", "string", "suitable", "for", "use", "as", "an", "identifier" ]
def new_name(self, template="xxx_todo_changeme"): """Return a string suitable for use as an identifier The new name is guaranteed not to conflict with other identifiers. """ name = template while name in self.used_names: name = template + str(next(self.numbers)) self.used_names.add(name) return name
[ "def", "new_name", "(", "self", ",", "template", "=", "\"xxx_todo_changeme\"", ")", ":", "name", "=", "template", "while", "name", "in", "self", ".", "used_names", ":", "name", "=", "template", "+", "str", "(", "next", "(", "self", ".", "numbers", ")", ")", "self", ".", "used_names", ".", "add", "(", "name", ")", "return", "name" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python3/src/Lib/lib2to3/fixer_base.py#L105-L114
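The uniqueness scheme is simply "append the next counter value until the name is unused". A self-contained sketch of the same logic outside lib2to3, with the fixer's `used_names`/`numbers` attributes mimicked by module-level state:

    import itertools

    used_names = {"xxx_todo_changeme", "xxx_todo_changeme1"}
    numbers = itertools.count(1)

    def new_name(template="xxx_todo_changeme"):
        name = template
        while name in used_names:
            name = template + str(next(numbers))   # try template1, template2, ...
        used_names.add(name)
        return name

    print(new_name())   # xxx_todo_changeme2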
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_controls.py
python
ListBox.HitTest
(*args, **kwargs)
return _controls_.ListBox_HitTest(*args, **kwargs)
HitTest(self, Point pt) -> int Test where the given (in client coords) point lies
HitTest(self, Point pt) -> int
[ "HitTest", "(", "self", "Point", "pt", ")", "-", ">", "int" ]
def HitTest(*args, **kwargs): """ HitTest(self, Point pt) -> int Test where the given (in client coords) point lies """ return _controls_.ListBox_HitTest(*args, **kwargs)
[ "def", "HitTest", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_controls_", ".", "ListBox_HitTest", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_controls.py#L1237-L1243
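Typical use is inside a mouse handler, converting a client-coordinate point to an item index (`wx.NOT_FOUND` when nothing is hit). A minimal wxPython sketch:

    import wx

    app = wx.App(False)
    frame = wx.Frame(None, title="HitTest demo")
    lb = wx.ListBox(frame, choices=["alpha", "beta", "gamma"])

    def on_click(event):
        idx = lb.HitTest(event.GetPosition())   # index under the pointer, or wx.NOT_FOUND
        if idx != wx.NOT_FOUND:
            print("clicked:", lb.GetString(idx))
        event.Skip()

    lb.Bind(wx.EVT_LEFT_DOWN, on_click)
    frame.Show()
    app.MainLoop()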
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/html.py
python
HtmlLinkInfo.GetEvent
(*args, **kwargs)
return _html.HtmlLinkInfo_GetEvent(*args, **kwargs)
GetEvent(self) -> MouseEvent
GetEvent(self) -> MouseEvent
[ "GetEvent", "(", "self", ")", "-", ">", "MouseEvent" ]
def GetEvent(*args, **kwargs): """GetEvent(self) -> MouseEvent""" return _html.HtmlLinkInfo_GetEvent(*args, **kwargs)
[ "def", "GetEvent", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_html", ".", "HtmlLinkInfo_GetEvent", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/html.py#L110-L112
ComputationalRadiationPhysics/picongpu
59e9b53605f9a5c1bf271eeb055bc74370a99052
lib/python/picongpu/plugins/data/phase_space.py
python
PhaseSpaceData.get_iterations
(self, ps, species, species_filter='all', file_ext="h5")
return iterations
Return an array of iterations with available data. Parameters ---------- ps : string phase space selection in order: spatial, momentum component, e.g. 'ypy' or 'ypx' species : string short name of the particle species, e.g. 'e' for electrons (defined in ``speciesDefinition.param``) species_filter: string name of the particle species filter, default is 'all' (defined in ``particleFilters.param``) file_ext: string filename extension for openPMD backend default is 'h5' for the HDF5 backend Returns ------- An array with unsigned integers.
Return an array of iterations with available data.
[ "Return", "an", "array", "of", "iterations", "with", "available", "data", "." ]
def get_iterations(self, ps, species, species_filter='all', file_ext="h5"): """ Return an array of iterations with available data. Parameters ---------- ps : string phase space selection in order: spatial, momentum component, e.g. 'ypy' or 'ypx' species : string short name of the particle species, e.g. 'e' for electrons (defined in ``speciesDefinition.param``) species_filter: string name of the particle species filter, default is 'all' (defined in ``particleFilters.param``) file_ext: string filename extension for openPMD backend default is 'h5' for the HDF5 backend Returns ------- An array with unsigned integers. """ # get the regular expression matching all available files data_file_path = self.get_data_path(ps, species, species_filter, file_ext=file_ext) series = io.Series(data_file_path, io.Access.read_only) iterations = [key for key, _ in series.iterations.items()] return iterations
[ "def", "get_iterations", "(", "self", ",", "ps", ",", "species", ",", "species_filter", "=", "'all'", ",", "file_ext", "=", "\"h5\"", ")", ":", "# get the regular expression matching all available files", "data_file_path", "=", "self", ".", "get_data_path", "(", "ps", ",", "species", ",", "species_filter", ",", "file_ext", "=", "file_ext", ")", "series", "=", "io", ".", "Series", "(", "data_file_path", ",", "io", ".", "Access", ".", "read_only", ")", "iterations", "=", "[", "key", "for", "key", ",", "_", "in", "series", ".", "iterations", ".", "items", "(", ")", "]", "return", "iterations" ]
https://github.com/ComputationalRadiationPhysics/picongpu/blob/59e9b53605f9a5c1bf271eeb055bc74370a99052/lib/python/picongpu/plugins/data/phase_space.py#L129-L160
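Under the hood this is just openPMD-api's `Series.iterations` map. A direct sketch against openpmd_api; the file pattern below is hypothetical and stands in for whatever `get_data_path` would return:

    import openpmd_api as io

    series = io.Series("simOutput/phaseSpace/PhaseSpace_e_all_ypy_%T.h5",
                       io.Access.read_only)
    iterations = [step for step, _ in series.iterations.items()]
    print(iterations)    # e.g. [0, 100, 200, ...]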
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/windows/Lib/idlelib/pyshell.py
python
capture_warnings
(capture)
Replace warning.showwarning with idle_showwarning, or reverse.
Replace warning.showwarning with idle_showwarning, or reverse.
[ "Replace", "warning", ".", "showwarning", "with", "idle_showwarning", "or", "reverse", "." ]
def capture_warnings(capture): "Replace warning.showwarning with idle_showwarning, or reverse." global _warnings_showwarning if capture: if _warnings_showwarning is None: _warnings_showwarning = warnings.showwarning warnings.showwarning = idle_showwarning else: if _warnings_showwarning is not None: warnings.showwarning = _warnings_showwarning _warnings_showwarning = None
[ "def", "capture_warnings", "(", "capture", ")", ":", "global", "_warnings_showwarning", "if", "capture", ":", "if", "_warnings_showwarning", "is", "None", ":", "_warnings_showwarning", "=", "warnings", ".", "showwarning", "warnings", ".", "showwarning", "=", "idle_showwarning", "else", ":", "if", "_warnings_showwarning", "is", "not", "None", ":", "warnings", ".", "showwarning", "=", "_warnings_showwarning", "_warnings_showwarning", "=", "None" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/windows/Lib/idlelib/pyshell.py#L88-L99
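The same save-and-swap pattern works outside IDLE for routing `warnings.showwarning` to any sink. A standalone sketch:

    import warnings

    _saved_showwarning = None

    def custom_showwarning(message, category, filename, lineno, file=None, line=None):
        # same signature as warnings.showwarning; send warnings to stdout instead of stderr
        print(f"[captured] {category.__name__}: {message} ({filename}:{lineno})")

    def capture_warnings(capture):
        global _saved_showwarning
        if capture:
            if _saved_showwarning is None:
                _saved_showwarning = warnings.showwarning
                warnings.showwarning = custom_showwarning
        else:
            if _saved_showwarning is not None:
                warnings.showwarning = _saved_showwarning
                _saved_showwarning = None

    capture_warnings(True)
    warnings.warn("something looks off")
    capture_warnings(False)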
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/scipy/py3/scipy/io/matlab/miobase.py
python
read_dtype
(mat_stream, a_dtype)
return arr
Generic get of byte stream data of known type Parameters ---------- mat_stream : file_like object MATLAB (tm) mat file stream a_dtype : dtype dtype of array to read. `a_dtype` is assumed to be correct endianness. Returns ------- arr : ndarray Array of dtype `a_dtype` read from stream.
Generic get of byte stream data of known type
[ "Generic", "get", "of", "byte", "stream", "data", "of", "known", "type" ]
def read_dtype(mat_stream, a_dtype): """ Generic get of byte stream data of known type Parameters ---------- mat_stream : file_like object MATLAB (tm) mat file stream a_dtype : dtype dtype of array to read. `a_dtype` is assumed to be correct endianness. Returns ------- arr : ndarray Array of dtype `a_dtype` read from stream. """ num_bytes = a_dtype.itemsize arr = np.ndarray(shape=(), dtype=a_dtype, buffer=mat_stream.read(num_bytes), order='F') return arr
[ "def", "read_dtype", "(", "mat_stream", ",", "a_dtype", ")", ":", "num_bytes", "=", "a_dtype", ".", "itemsize", "arr", "=", "np", ".", "ndarray", "(", "shape", "=", "(", ")", ",", "dtype", "=", "a_dtype", ",", "buffer", "=", "mat_stream", ".", "read", "(", "num_bytes", ")", ",", "order", "=", "'F'", ")", "return", "arr" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/scipy/py3/scipy/io/matlab/miobase.py#L161-L184
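The helper simply wraps a freshly read byte buffer in a zero-dimensional ndarray of the requested dtype, so the same call works on any file-like object, e.g. an in-memory stream:

    import io
    import numpy as np

    a_dtype = np.dtype('<u4')                       # little-endian uint32, 4 bytes
    mat_stream = io.BytesIO((42).to_bytes(4, 'little'))

    arr = np.ndarray(shape=(), dtype=a_dtype,
                     buffer=mat_stream.read(a_dtype.itemsize), order='F')
    print(int(arr))                                 # 42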
francinexue/xuefu
b6ff79747a42e020588c0c0a921048e08fe4680c
ctpx/ctp2/ctptd.py
python
CtpTd.onRspTradingAccountPasswordUpdate
(self, TradingAccountPasswordUpdateField, RspInfoField, requestId, final)
资金账户口令更新请求响应
资金账户口令更新请求响应
[ "资金账户口令更新请求响应" ]
def onRspTradingAccountPasswordUpdate(self, TradingAccountPasswordUpdateField, RspInfoField, requestId, final): """资金账户口令更新请求响应""" pass
[ "def", "onRspTradingAccountPasswordUpdate", "(", "self", ",", "TradingAccountPasswordUpdateField", ",", "RspInfoField", ",", "requestId", ",", "final", ")", ":", "pass" ]
https://github.com/francinexue/xuefu/blob/b6ff79747a42e020588c0c0a921048e08fe4680c/ctpx/ctp2/ctptd.py#L89-L91
CRYTEK/CRYENGINE
232227c59a220cbbd311576f0fbeba7bb53b2a8c
Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/adapters.py
python
HTTPAdapter.init_poolmanager
(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs)
Initializes a urllib3 PoolManager. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param connections: The number of urllib3 connection pools to cache. :param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager.
Initializes a urllib3 PoolManager.
[ "Initializes", "a", "urllib3", "PoolManager", "." ]
def init_poolmanager(self, connections, maxsize, block=DEFAULT_POOLBLOCK, **pool_kwargs): """Initializes a urllib3 PoolManager. This method should not be called from user code, and is only exposed for use when subclassing the :class:`HTTPAdapter <requests.adapters.HTTPAdapter>`. :param connections: The number of urllib3 connection pools to cache. :param maxsize: The maximum number of connections to save in the pool. :param block: Block when no free connections are available. :param pool_kwargs: Extra keyword arguments used to initialize the Pool Manager. """ # save these values for pickling self._pool_connections = connections self._pool_maxsize = maxsize self._pool_block = block self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize, block=block, strict=True, **pool_kwargs)
[ "def", "init_poolmanager", "(", "self", ",", "connections", ",", "maxsize", ",", "block", "=", "DEFAULT_POOLBLOCK", ",", "*", "*", "pool_kwargs", ")", ":", "# save these values for pickling", "self", ".", "_pool_connections", "=", "connections", "self", ".", "_pool_maxsize", "=", "maxsize", "self", ".", "_pool_block", "=", "block", "self", ".", "poolmanager", "=", "PoolManager", "(", "num_pools", "=", "connections", ",", "maxsize", "=", "maxsize", ",", "block", "=", "block", ",", "strict", "=", "True", ",", "*", "*", "pool_kwargs", ")" ]
https://github.com/CRYTEK/CRYENGINE/blob/232227c59a220cbbd311576f0fbeba7bb53b2a8c/Editor/Python/windows/Lib/site-packages/pip/_vendor/requests/adapters.py#L116-L134
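`init_poolmanager` is normally reached through the adapter constructor; mounting an adapter with a larger pool is the usual way to exercise it from user code:

    import requests
    from requests.adapters import HTTPAdapter

    session = requests.Session()
    # pool_connections / pool_maxsize / pool_block are forwarded to init_poolmanager
    adapter = HTTPAdapter(pool_connections=20, pool_maxsize=50, pool_block=True)
    session.mount("https://", adapter)
    response = session.get("https://example.org/")
    print(response.status_code)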
hughperkins/tf-coriander
970d3df6c11400ad68405f22b0c42a52374e94ca
tensorflow/models/image/alexnet/alexnet_benchmark.py
python
run_benchmark
()
Run the benchmark on AlexNet.
Run the benchmark on AlexNet.
[ "Run", "the", "benchmark", "on", "AlexNet", "." ]
def run_benchmark(): """Run the benchmark on AlexNet.""" with tf.Graph().as_default(): # Generate some dummy images. image_size = 224 # Note that our padding definition is slightly different the cuda-convnet. # In order to force the model to start with the same activations sizes, # we add 3 to the image_size and employ VALID padding above. images = tf.Variable(tf.random_normal([FLAGS.batch_size, image_size, image_size, 3], dtype=tf.float32, stddev=1e-1)) # Build a Graph that computes the logits predictions from the # inference model. pool5, parameters = inference(images) # Build an initialization operation. init = tf.initialize_all_variables() # Start running operations on the Graph. config = tf.ConfigProto() config.gpu_options.allocator_type = 'BFC' sess = tf.Session(config=config) sess.run(init) # Run the forward benchmark. time_tensorflow_run(sess, pool5, "Forward") # Add a simple objective so we can calculate the backward pass. objective = tf.nn.l2_loss(pool5) # Compute the gradient with respect to all the parameters. grad = tf.gradients(objective, parameters) # Run the backward benchmark. time_tensorflow_run(sess, grad, "Forward-backward")
[ "def", "run_benchmark", "(", ")", ":", "with", "tf", ".", "Graph", "(", ")", ".", "as_default", "(", ")", ":", "# Generate some dummy images.", "image_size", "=", "224", "# Note that our padding definition is slightly different the cuda-convnet.", "# In order to force the model to start with the same activations sizes,", "# we add 3 to the image_size and employ VALID padding above.", "images", "=", "tf", ".", "Variable", "(", "tf", ".", "random_normal", "(", "[", "FLAGS", ".", "batch_size", ",", "image_size", ",", "image_size", ",", "3", "]", ",", "dtype", "=", "tf", ".", "float32", ",", "stddev", "=", "1e-1", ")", ")", "# Build a Graph that computes the logits predictions from the", "# inference model.", "pool5", ",", "parameters", "=", "inference", "(", "images", ")", "# Build an initialization operation.", "init", "=", "tf", ".", "initialize_all_variables", "(", ")", "# Start running operations on the Graph.", "config", "=", "tf", ".", "ConfigProto", "(", ")", "config", ".", "gpu_options", ".", "allocator_type", "=", "'BFC'", "sess", "=", "tf", ".", "Session", "(", "config", "=", "config", ")", "sess", ".", "run", "(", "init", ")", "# Run the forward benchmark.", "time_tensorflow_run", "(", "sess", ",", "pool5", ",", "\"Forward\"", ")", "# Add a simple objective so we can calculate the backward pass.", "objective", "=", "tf", ".", "nn", ".", "l2_loss", "(", "pool5", ")", "# Compute the gradient with respect to all the parameters.", "grad", "=", "tf", ".", "gradients", "(", "objective", ",", "parameters", ")", "# Run the backward benchmark.", "time_tensorflow_run", "(", "sess", ",", "grad", ",", "\"Forward-backward\"", ")" ]
https://github.com/hughperkins/tf-coriander/blob/970d3df6c11400ad68405f22b0c42a52374e94ca/tensorflow/models/image/alexnet/alexnet_benchmark.py#L188-L223
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/_core.py
python
PyApp.Yield
(*args, **kwargs)
return _core_.PyApp_Yield(*args, **kwargs)
Yield(self, bool onlyIfNeeded=False) -> bool Process all currently pending events right now, instead of waiting until return to the event loop. It is an error to call ``Yield`` recursively unless the value of ``onlyIfNeeded`` is True. :warning: This function is dangerous as it can lead to unexpected reentrancies (i.e. when called from an event handler it may result in calling the same event handler again), use with extreme care or, better, don't use at all! :see: `wx.Yield`, `wx.YieldIfNeeded`, `wx.SafeYield`
Yield(self, bool onlyIfNeeded=False) -> bool
[ "Yield", "(", "self", "bool", "onlyIfNeeded", "=", "False", ")", "-", ">", "bool" ]
def Yield(*args, **kwargs): """ Yield(self, bool onlyIfNeeded=False) -> bool Process all currently pending events right now, instead of waiting until return to the event loop. It is an error to call ``Yield`` recursively unless the value of ``onlyIfNeeded`` is True. :warning: This function is dangerous as it can lead to unexpected reentrancies (i.e. when called from an event handler it may result in calling the same event handler again), use with extreme care or, better, don't use at all! :see: `wx.Yield`, `wx.YieldIfNeeded`, `wx.SafeYield` """ return _core_.PyApp_Yield(*args, **kwargs)
[ "def", "Yield", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "PyApp_Yield", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/_core.py#L7900-L7916
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/tools/python/src/Lib/idlelib/PyShell.py
python
ModifiedInterpreter.showsyntaxerror
(self, filename=None)
Extend base class method: Add Colorizing Color the offending position instead of printing it and pointing at it with a caret.
Extend base class method: Add Colorizing
[ "Extend", "base", "class", "method", ":", "Add", "Colorizing" ]
def showsyntaxerror(self, filename=None): """Extend base class method: Add Colorizing Color the offending position instead of printing it and pointing at it with a caret. """ text = self.tkconsole.text stuff = self.unpackerror() if stuff: msg, lineno, offset, line = stuff if lineno == 1: pos = "iomark + %d chars" % (offset-1) else: pos = "iomark linestart + %d lines + %d chars" % \ (lineno-1, offset-1) text.tag_add("ERROR", pos) text.see(pos) char = text.get(pos) if char and char in IDENTCHARS: text.tag_add("ERROR", pos + " wordstart", pos) self.tkconsole.resetoutput() self.write("SyntaxError: %s\n" % str(msg)) else: self.tkconsole.resetoutput() InteractiveInterpreter.showsyntaxerror(self, filename) self.tkconsole.showprompt()
[ "def", "showsyntaxerror", "(", "self", ",", "filename", "=", "None", ")", ":", "text", "=", "self", ".", "tkconsole", ".", "text", "stuff", "=", "self", ".", "unpackerror", "(", ")", "if", "stuff", ":", "msg", ",", "lineno", ",", "offset", ",", "line", "=", "stuff", "if", "lineno", "==", "1", ":", "pos", "=", "\"iomark + %d chars\"", "%", "(", "offset", "-", "1", ")", "else", ":", "pos", "=", "\"iomark linestart + %d lines + %d chars\"", "%", "(", "lineno", "-", "1", ",", "offset", "-", "1", ")", "text", ".", "tag_add", "(", "\"ERROR\"", ",", "pos", ")", "text", ".", "see", "(", "pos", ")", "char", "=", "text", ".", "get", "(", "pos", ")", "if", "char", "and", "char", "in", "IDENTCHARS", ":", "text", ".", "tag_add", "(", "\"ERROR\"", ",", "pos", "+", "\" wordstart\"", ",", "pos", ")", "self", ".", "tkconsole", ".", "resetoutput", "(", ")", "self", ".", "write", "(", "\"SyntaxError: %s\\n\"", "%", "str", "(", "msg", ")", ")", "else", ":", "self", ".", "tkconsole", ".", "resetoutput", "(", ")", "InteractiveInterpreter", ".", "showsyntaxerror", "(", "self", ",", "filename", ")", "self", ".", "tkconsole", ".", "showprompt", "(", ")" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/tools/python/src/Lib/idlelib/PyShell.py#L709-L735
apache/incubator-mxnet
f03fb23f1d103fec9541b5ae59ee06b1734a51d9
python/mxnet/gluon/metric.py
python
EvalMetric.get_config
(self)
return config
Save configurations of metric. Can be recreated from configs with metric.create(``**config``)
Save configurations of metric. Can be recreated from configs with metric.create(``**config``)
[ "Save", "configurations", "of", "metric", ".", "Can", "be", "recreated", "from", "configs", "with", "metric", ".", "create", "(", "**", "config", ")" ]
def get_config(self): """Save configurations of metric. Can be recreated from configs with metric.create(``**config``) """ config = self._kwargs.copy() config.update({ 'metric': self.__class__.__name__, 'name': self.name, 'output_names': self.output_names, 'label_names': self.label_names}) return config
[ "def", "get_config", "(", "self", ")", ":", "config", "=", "self", ".", "_kwargs", ".", "copy", "(", ")", "config", ".", "update", "(", "{", "'metric'", ":", "self", ".", "__class__", ".", "__name__", ",", "'name'", ":", "self", ".", "name", ",", "'output_names'", ":", "self", ".", "output_names", ",", "'label_names'", ":", "self", ".", "label_names", "}", ")", "return", "config" ]
https://github.com/apache/incubator-mxnet/blob/f03fb23f1d103fec9541b5ae59ee06b1734a51d9/python/mxnet/gluon/metric.py#L99-L109
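A round-trip sketch, written against the Gluon metric module this file defines; the exact keys in the returned dict depend on the metric's constructor kwargs, and `metric.create(**config)` is the re-creation path the docstring itself names:

    from mxnet.gluon import metric

    acc = metric.Accuracy()
    cfg = acc.get_config()
    print(cfg["metric"], cfg["name"])   # Accuracy accuracy
    recreated = metric.create(**cfg)    # rebuild the metric from its saved config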
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_cocoa/propgrid.py
python
PGArrayStringEditorDialog.OnCustomNewAction
(*args, **kwargs)
return _propgrid.PGArrayStringEditorDialog_OnCustomNewAction(*args, **kwargs)
OnCustomNewAction(self, String resString) -> bool
OnCustomNewAction(self, String resString) -> bool
[ "OnCustomNewAction", "(", "self", "String", "resString", ")", "-", ">", "bool" ]
def OnCustomNewAction(*args, **kwargs): """OnCustomNewAction(self, String resString) -> bool""" return _propgrid.PGArrayStringEditorDialog_OnCustomNewAction(*args, **kwargs)
[ "def", "OnCustomNewAction", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PGArrayStringEditorDialog_OnCustomNewAction", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_cocoa/propgrid.py#L3253-L3255
PlatformLab/RAMCloud
b1866af19124325a6dfd8cbc267e2e3ef1f965d1
scripts/cluster.py
python
Cluster.kill_server
(self, locator)
Kill a running server. @param locator: service locator for the server that needs to be killed.
Kill a running server.
[ "Kill", "a", "running", "server", "." ]
def kill_server(self, locator): """Kill a running server. @param locator: service locator for the server that needs to be killed. """ path = '%s/logs/shm' % os.getcwd() files = sorted([f for f in os.listdir(path) if os.path.isfile( os.path.join(path, f) )]) for file in files: f = open('%s/logs/shm/%s' % (os.getcwd(), file),'r') service_locator = f.read() if (locator in service_locator): to_kill = '1' mhost = file subprocess.Popen(['ssh', mhost.split('_')[0], '%s/killserver' % config.hooks.get_remote_scripts_path(), to_kill, os.getcwd(), mhost]) f.close() try: os.remove('%s/logs/shm/%s' % (os.getcwd(), file)) except: pass else: f.close()
[ "def", "kill_server", "(", "self", ",", "locator", ")", ":", "path", "=", "'%s/logs/shm'", "%", "os", ".", "getcwd", "(", ")", "files", "=", "sorted", "(", "[", "f", "for", "f", "in", "os", ".", "listdir", "(", "path", ")", "if", "os", ".", "path", ".", "isfile", "(", "os", ".", "path", ".", "join", "(", "path", ",", "f", ")", ")", "]", ")", "for", "file", "in", "files", ":", "f", "=", "open", "(", "'%s/logs/shm/%s'", "%", "(", "os", ".", "getcwd", "(", ")", ",", "file", ")", ",", "'r'", ")", "service_locator", "=", "f", ".", "read", "(", ")", "if", "(", "locator", "in", "service_locator", ")", ":", "to_kill", "=", "'1'", "mhost", "=", "file", "subprocess", ".", "Popen", "(", "[", "'ssh'", ",", "mhost", ".", "split", "(", "'_'", ")", "[", "0", "]", ",", "'%s/killserver'", "%", "config", ".", "hooks", ".", "get_remote_scripts_path", "(", ")", ",", "to_kill", ",", "os", ".", "getcwd", "(", ")", ",", "mhost", "]", ")", "f", ".", "close", "(", ")", "try", ":", "os", ".", "remove", "(", "'%s/logs/shm/%s'", "%", "(", "os", ".", "getcwd", "(", ")", ",", "file", ")", ")", "except", ":", "pass", "else", ":", "f", ".", "close", "(", ")" ]
https://github.com/PlatformLab/RAMCloud/blob/b1866af19124325a6dfd8cbc267e2e3ef1f965d1/scripts/cluster.py#L383-L409
tfwu/FaceDetection-ConvNet-3D
f9251c48eb40c5aec8fba7455115c355466555be
python/build/lib.linux-x86_64-2.7/mxnet/executor_manager.py
python
DataParallelExecutorManager.aux_arrays
(self)
return self.execgrp.aux_arrays
shared aux states
shared aux states
[ "shared", "aux", "states" ]
def aux_arrays(self): """shared aux states""" # aux arrays are also shared by all executor groups return self.execgrp.aux_arrays
[ "def", "aux_arrays", "(", "self", ")", ":", "# aux arrays are also shared by all executor groups", "return", "self", ".", "execgrp", ".", "aux_arrays" ]
https://github.com/tfwu/FaceDetection-ConvNet-3D/blob/f9251c48eb40c5aec8fba7455115c355466555be/python/build/lib.linux-x86_64-2.7/mxnet/executor_manager.py#L359-L362
trailofbits/llvm-sanitizer-tutorial
d29dfeec7f51fbf234fd0080f28f2b30cd0b6e99
llvm/utils/llvm-build/llvmbuild/main.py
python
LLVMProjectInfo.get_required_libraries_for_component
(self, ci, traverse_groups = False)
get_required_libraries_for_component(component_info) -> iter Given a Library component info descriptor, return an iterator over all of the directly required libraries for linking with this component. If traverse_groups is True, then library and target groups will be traversed to include their required libraries.
get_required_libraries_for_component(component_info) -> iter
[ "get_required_libraries_for_component", "(", "component_info", ")", "-", ">", "iter" ]
def get_required_libraries_for_component(self, ci, traverse_groups = False): """ get_required_libraries_for_component(component_info) -> iter Given a Library component info descriptor, return an iterator over all of the directly required libraries for linking with this component. If traverse_groups is True, then library and target groups will be traversed to include their required libraries. """ assert ci.type_name in ('Library', 'OptionalLibrary', 'LibraryGroup', 'TargetGroup') for name in ci.required_libraries: # Get the dependency info. dep = self.component_info_map[name] # If it is a library, yield it. if dep.type_name == 'Library' or dep.type_name == 'OptionalLibrary': yield dep continue # Otherwise if it is a group, yield or traverse depending on what # was requested. if dep.type_name in ('LibraryGroup', 'TargetGroup'): if not traverse_groups: yield dep continue for res in self.get_required_libraries_for_component(dep, True): yield res
[ "def", "get_required_libraries_for_component", "(", "self", ",", "ci", ",", "traverse_groups", "=", "False", ")", ":", "assert", "ci", ".", "type_name", "in", "(", "'Library'", ",", "'OptionalLibrary'", ",", "'LibraryGroup'", ",", "'TargetGroup'", ")", "for", "name", "in", "ci", ".", "required_libraries", ":", "# Get the dependency info.", "dep", "=", "self", ".", "component_info_map", "[", "name", "]", "# If it is a library, yield it.", "if", "dep", ".", "type_name", "==", "'Library'", "or", "dep", ".", "type_name", "==", "'OptionalLibrary'", ":", "yield", "dep", "continue", "# Otherwise if it is a group, yield or traverse depending on what", "# was requested.", "if", "dep", ".", "type_name", "in", "(", "'LibraryGroup'", ",", "'TargetGroup'", ")", ":", "if", "not", "traverse_groups", ":", "yield", "dep", "continue", "for", "res", "in", "self", ".", "get_required_libraries_for_component", "(", "dep", ",", "True", ")", ":", "yield", "res" ]
https://github.com/trailofbits/llvm-sanitizer-tutorial/blob/d29dfeec7f51fbf234fd0080f28f2b30cd0b6e99/llvm/utils/llvm-build/llvmbuild/main.py#L424-L453
google/syzygy
8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5
third_party/numpy/files/numpy/ma/core.py
python
_DomainTan.__call__
(self, x)
return umath.less(umath.absolute(umath.cos(x)), self.eps)
Executes the call behavior.
Executes the call behavior.
[ "Executes", "the", "call", "behavior", "." ]
def __call__ (self, x): "Executes the call behavior." return umath.less(umath.absolute(umath.cos(x)), self.eps)
[ "def", "__call__", "(", "self", ",", "x", ")", ":", "return", "umath", ".", "less", "(", "umath", ".", "absolute", "(", "umath", ".", "cos", "(", "x", ")", ")", ",", "self", ".", "eps", ")" ]
https://github.com/google/syzygy/blob/8164b24ebde9c5649c9a09e88a7fc0b0fcbd1bc5/third_party/numpy/files/numpy/ma/core.py#L755-L757
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/build/waf-1.7.13/waflib/Runner.py
python
Parallel.refill_task_list
(self)
Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
[ "Put", "the", "next", "group", "of", "tasks", "to", "execute", "in", ":", "py", ":", "attr", ":", "waflib", ".", "Runner", ".", "Parallel", ".", "outstanding", "." ]
def refill_task_list(self): """ Put the next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`. """ while self.count > self.numjobs * GAP: self.get_out() while not self.outstanding: if self.count: self.get_out() elif self.frozen: try: cond = self.deadlock == self.processed except AttributeError: pass else: if cond: msg = 'check the build order for the tasks' for tsk in self.frozen: if not tsk.run_after: msg = 'check the methods runnable_status' break lst = [] for tsk in self.frozen: lst.append('%s\t-> %r' % (repr(tsk), [id(x) for x in tsk.run_after])) raise Errors.WafError('Deadlock detected: %s%s' % (msg, ''.join(lst))) self.deadlock = self.processed if self.frozen: self.add_outstanding_tasks(self.frozen, False) self.frozen = [] elif not self.count: self.add_outstanding_tasks(next(self.biter)) break
[ "def", "refill_task_list", "(", "self", ")", ":", "while", "self", ".", "count", ">", "self", ".", "numjobs", "*", "GAP", ":", "self", ".", "get_out", "(", ")", "while", "not", "self", ".", "outstanding", ":", "if", "self", ".", "count", ":", "self", ".", "get_out", "(", ")", "elif", "self", ".", "frozen", ":", "try", ":", "cond", "=", "self", ".", "deadlock", "==", "self", ".", "processed", "except", "AttributeError", ":", "pass", "else", ":", "if", "cond", ":", "msg", "=", "'check the build order for the tasks'", "for", "tsk", "in", "self", ".", "frozen", ":", "if", "not", "tsk", ".", "run_after", ":", "msg", "=", "'check the methods runnable_status'", "break", "lst", "=", "[", "]", "for", "tsk", "in", "self", ".", "frozen", ":", "lst", ".", "append", "(", "'%s\\t-> %r'", "%", "(", "repr", "(", "tsk", ")", ",", "[", "id", "(", "x", ")", "for", "x", "in", "tsk", ".", "run_after", "]", ")", ")", "raise", "Errors", ".", "WafError", "(", "'Deadlock detected: %s%s'", "%", "(", "msg", ",", "''", ".", "join", "(", "lst", ")", ")", ")", "self", ".", "deadlock", "=", "self", ".", "processed", "if", "self", ".", "frozen", ":", "self", ".", "add_outstanding_tasks", "(", "self", ".", "frozen", ",", "False", ")", "self", ".", "frozen", "=", "[", "]", "elif", "not", "self", ".", "count", ":", "self", ".", "add_outstanding_tasks", "(", "next", "(", "self", ".", "biter", ")", ")", "break" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/build/waf-1.7.13/waflib/Runner.py#L176-L209
apple/swift-lldb
d74be846ef3e62de946df343e8c234bde93a8912
scripts/Python/static-binding/lldb.py
python
SBProcessInfo.GroupIDIsValid
(self)
return _lldb.SBProcessInfo_GroupIDIsValid(self)
GroupIDIsValid(SBProcessInfo self) -> bool
GroupIDIsValid(SBProcessInfo self) -> bool
[ "GroupIDIsValid", "(", "SBProcessInfo", "self", ")", "-", ">", "bool" ]
def GroupIDIsValid(self): """GroupIDIsValid(SBProcessInfo self) -> bool""" return _lldb.SBProcessInfo_GroupIDIsValid(self)
[ "def", "GroupIDIsValid", "(", "self", ")", ":", "return", "_lldb", ".", "SBProcessInfo_GroupIDIsValid", "(", "self", ")" ]
https://github.com/apple/swift-lldb/blob/d74be846ef3e62de946df343e8c234bde93a8912/scripts/Python/static-binding/lldb.py#L9000-L9002
catboost/catboost
167f64f237114a4d10b2b4ee42adb4569137debe
contrib/python/protobuf/py3/google/protobuf/internal/python_message.py
python
_OneofListener.Modified
(self)
Also updates the state of the containing oneof in the parent message.
Also updates the state of the containing oneof in the parent message.
[ "Also", "updates", "the", "state", "of", "the", "containing", "oneof", "in", "the", "parent", "message", "." ]
def Modified(self): """Also updates the state of the containing oneof in the parent message.""" try: self._parent_message_weakref._UpdateOneofState(self._field) super(_OneofListener, self).Modified() except ReferenceError: pass
[ "def", "Modified", "(", "self", ")", ":", "try", ":", "self", ".", "_parent_message_weakref", ".", "_UpdateOneofState", "(", "self", ".", "_field", ")", "super", "(", "_OneofListener", ",", "self", ")", ".", "Modified", "(", ")", "except", "ReferenceError", ":", "pass" ]
https://github.com/catboost/catboost/blob/167f64f237114a4d10b2b4ee42adb4569137debe/contrib/python/protobuf/py3/google/protobuf/internal/python_message.py#L1535-L1541
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/mailbox.py
python
Maildir._lookup
(self, key)
Use TOC to return subpath for given key, or raise a KeyError.
Use TOC to return subpath for given key, or raise a KeyError.
[ "Use", "TOC", "to", "return", "subpath", "for", "given", "key", "or", "raise", "a", "KeyError", "." ]
def _lookup(self, key): """Use TOC to return subpath for given key, or raise a KeyError.""" try: if os.path.exists(os.path.join(self._path, self._toc[key])): return self._toc[key] except KeyError: pass self._refresh() try: return self._toc[key] except KeyError: raise KeyError('No message with key: %s' % key) from None
[ "def", "_lookup", "(", "self", ",", "key", ")", ":", "try", ":", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_path", ",", "self", ".", "_toc", "[", "key", "]", ")", ")", ":", "return", "self", ".", "_toc", "[", "key", "]", "except", "KeyError", ":", "pass", "self", ".", "_refresh", "(", ")", "try", ":", "return", "self", ".", "_toc", "[", "key", "]", "except", "KeyError", ":", "raise", "KeyError", "(", "'No message with key: %s'", "%", "key", ")", "from", "None" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Tools/Python/3.7.10/mac/Python.framework/Versions/3.7/lib/python3.7/mailbox.py#L547-L558
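`_lookup` backs the public key-based accessors, so the usual way to hit it is simply indexing a Maildir by the key returned from `add()`. A sketch; the mailbox path is hypothetical:

    import mailbox
    from email.message import EmailMessage

    msg = EmailMessage()
    msg["Subject"] = "hello"
    msg.set_content("body")

    md = mailbox.Maildir("/tmp/example_maildir", create=True)
    key = md.add(msg)
    print(md[key]["Subject"])     # __getitem__ resolves the subpath via _lookup()
    md.remove(key)                # a later md[key] would now raise KeyError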
lhmRyan/deep-supervised-hashing-DSH
631901f82e2ab031fbac33f914a5b08ef8e21d57
python/caffe/pycaffe.py
python
_Net_forward_all
(self, blobs=None, **kwargs)
return all_outs
Run net forward in batches. Parameters ---------- blobs : list of blobs to extract as in forward() kwargs : Keys are input blob names and values are blob ndarrays. Refer to forward(). Returns ------- all_outs : {blob name: list of blobs} dict.
Run net forward in batches.
[ "Run", "net", "forward", "in", "batches", "." ]
def _Net_forward_all(self, blobs=None, **kwargs): """ Run net forward in batches. Parameters ---------- blobs : list of blobs to extract as in forward() kwargs : Keys are input blob names and values are blob ndarrays. Refer to forward(). Returns ------- all_outs : {blob name: list of blobs} dict. """ # Collect outputs from batches all_outs = {out: [] for out in set(self.outputs + (blobs or []))} for batch in self._batch(kwargs): outs = self.forward(blobs=blobs, **batch) for out, out_blob in six.iteritems(outs): all_outs[out].extend(out_blob.copy()) # Package in ndarray. for out in all_outs: all_outs[out] = np.asarray(all_outs[out]) # Discard padding. pad = len(six.next(six.itervalues(all_outs))) - len(six.next(six.itervalues(kwargs))) if pad: for out in all_outs: all_outs[out] = all_outs[out][:-pad] return all_outs
[ "def", "_Net_forward_all", "(", "self", ",", "blobs", "=", "None", ",", "*", "*", "kwargs", ")", ":", "# Collect outputs from batches", "all_outs", "=", "{", "out", ":", "[", "]", "for", "out", "in", "set", "(", "self", ".", "outputs", "+", "(", "blobs", "or", "[", "]", ")", ")", "}", "for", "batch", "in", "self", ".", "_batch", "(", "kwargs", ")", ":", "outs", "=", "self", ".", "forward", "(", "blobs", "=", "blobs", ",", "*", "*", "batch", ")", "for", "out", ",", "out_blob", "in", "six", ".", "iteritems", "(", "outs", ")", ":", "all_outs", "[", "out", "]", ".", "extend", "(", "out_blob", ".", "copy", "(", ")", ")", "# Package in ndarray.", "for", "out", "in", "all_outs", ":", "all_outs", "[", "out", "]", "=", "np", ".", "asarray", "(", "all_outs", "[", "out", "]", ")", "# Discard padding.", "pad", "=", "len", "(", "six", ".", "next", "(", "six", ".", "itervalues", "(", "all_outs", ")", ")", ")", "-", "len", "(", "six", ".", "next", "(", "six", ".", "itervalues", "(", "kwargs", ")", ")", ")", "if", "pad", ":", "for", "out", "in", "all_outs", ":", "all_outs", "[", "out", "]", "=", "all_outs", "[", "out", "]", "[", ":", "-", "pad", "]", "return", "all_outs" ]
https://github.com/lhmRyan/deep-supervised-hashing-DSH/blob/631901f82e2ab031fbac33f914a5b08ef8e21d57/python/caffe/pycaffe.py#L161-L189
v8/v8
fee3bf095260bf657a3eea4d3d41f90c42c6c857
tools/sanitizers/sancov_merger.py
python
merge_two
(args)
Merge two sancov files. Called trough multiprocessing pool. The args are expected to unpack to: swarming_output_dir: Folder where to find the new file. coverage_dir: Folder where to find the existing file. f: File name of the file to be merged.
Merge two sancov files.
[ "Merge", "two", "sancov", "files", "." ]
def merge_two(args): """Merge two sancov files. Called trough multiprocessing pool. The args are expected to unpack to: swarming_output_dir: Folder where to find the new file. coverage_dir: Folder where to find the existing file. f: File name of the file to be merged. """ swarming_output_dir, coverage_dir, f = args input_file = os.path.join(swarming_output_dir, f) output_file = os.path.join(coverage_dir, f) process = subprocess.Popen( [SANCOV_TOOL, 'merge', input_file, output_file], stdout=subprocess.PIPE, stderr=subprocess.PIPE, ) output, _ = process.communicate() assert process.returncode == 0 with open(output_file, "wb") as f: f.write(output)
[ "def", "merge_two", "(", "args", ")", ":", "swarming_output_dir", ",", "coverage_dir", ",", "f", "=", "args", "input_file", "=", "os", ".", "path", ".", "join", "(", "swarming_output_dir", ",", "f", ")", "output_file", "=", "os", ".", "path", ".", "join", "(", "coverage_dir", ",", "f", ")", "process", "=", "subprocess", ".", "Popen", "(", "[", "SANCOV_TOOL", ",", "'merge'", ",", "input_file", ",", "output_file", "]", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", ")", "output", ",", "_", "=", "process", ".", "communicate", "(", ")", "assert", "process", ".", "returncode", "==", "0", "with", "open", "(", "output_file", ",", "\"wb\"", ")", "as", "f", ":", "f", ".", "write", "(", "output", ")" ]
https://github.com/v8/v8/blob/fee3bf095260bf657a3eea4d3d41f90c42c6c857/tools/sanitizers/sancov_merger.py#L160-L179
aws/lumberyard
f85344403c1c2e77ec8c75deb2c116e97b713217
dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/roc/hsadrv/driver.py
python
Program.finalize
(self, isa, callconv=0, options=None)
return CodeObject(code_object)
The program object is safe to be deleted after ``finalize``.
The program object is safe to be deleted after ``finalize``.
[ "The", "program", "object", "is", "safe", "to", "be", "deleted", "after", "finalize", "." ]
def finalize(self, isa, callconv=0, options=None): """ The program object is safe to be deleted after ``finalize``. """ code_object = drvapi.hsa_code_object_t() control_directives = drvapi.hsa_ext_control_directives_t() ctypes.memset(ctypes.byref(control_directives), 0, ctypes.sizeof(control_directives)) self._ftabl.hsa_ext_program_finalize(self._id, isa, callconv, control_directives, options, enums.HSA_CODE_OBJECT_TYPE_PROGRAM, ctypes.byref(code_object)) return CodeObject(code_object)
[ "def", "finalize", "(", "self", ",", "isa", ",", "callconv", "=", "0", ",", "options", "=", "None", ")", ":", "code_object", "=", "drvapi", ".", "hsa_code_object_t", "(", ")", "control_directives", "=", "drvapi", ".", "hsa_ext_control_directives_t", "(", ")", "ctypes", ".", "memset", "(", "ctypes", ".", "byref", "(", "control_directives", ")", ",", "0", ",", "ctypes", ".", "sizeof", "(", "control_directives", ")", ")", "self", ".", "_ftabl", ".", "hsa_ext_program_finalize", "(", "self", ".", "_id", ",", "isa", ",", "callconv", ",", "control_directives", ",", "options", ",", "enums", ".", "HSA_CODE_OBJECT_TYPE_PROGRAM", ",", "ctypes", ".", "byref", "(", "code_object", ")", ")", "return", "CodeObject", "(", "code_object", ")" ]
https://github.com/aws/lumberyard/blob/f85344403c1c2e77ec8c75deb2c116e97b713217/dev/Gems/CloudGemMetric/v1/AWS/common-code/Lib/numba/roc/hsadrv/driver.py#L994-L1009
Z3Prover/z3
d745d03afdfdf638d66093e2bfbacaf87187f35b
src/api/python/z3/z3.py
python
Statistics.__len__
(self)
return int(Z3_stats_size(self.ctx.ref(), self.stats))
Return the number of statistical counters. >>> x = Int('x') >>> s = Then('simplify', 'nlsat').solver() >>> s.add(x > 0) >>> s.check() sat >>> st = s.statistics() >>> len(st) 6
Return the number of statistical counters.
[ "Return", "the", "number", "of", "statistical", "counters", "." ]
def __len__(self): """Return the number of statistical counters. >>> x = Int('x') >>> s = Then('simplify', 'nlsat').solver() >>> s.add(x > 0) >>> s.check() sat >>> st = s.statistics() >>> len(st) 6 """ return int(Z3_stats_size(self.ctx.ref(), self.stats))
[ "def", "__len__", "(", "self", ")", ":", "return", "int", "(", "Z3_stats_size", "(", "self", ".", "ctx", ".", "ref", "(", ")", ",", "self", ".", "stats", ")", ")" ]
https://github.com/Z3Prover/z3/blob/d745d03afdfdf638d66093e2bfbacaf87187f35b/src/api/python/z3/z3.py#L6675-L6687
fabianschenk/RESLAM
2e71a578b6d1a1ad1fb018641218e1f41dd9e330
thirdparty/Sophus/py/sophus/se3.py
python
Se3.__mul__
(self, right)
left-multiplication either rotation concatenation or point-transform
left-multiplication either rotation concatenation or point-transform
[ "left", "-", "multiplication", "either", "rotation", "concatenation", "or", "point", "-", "transform" ]
def __mul__(self, right): """ left-multiplication either rotation concatenation or point-transform """ if isinstance(right, sympy.Matrix): assert right.shape == (3, 1), right.shape return self.so3 * right + self.t elif isinstance(right, Se3): r = self.so3 * right.so3 t = self.t + self.so3 * right.t return Se3(r, t) assert False, "unsupported type: {0}".format(type(right))
[ "def", "__mul__", "(", "self", ",", "right", ")", ":", "if", "isinstance", "(", "right", ",", "sympy", ".", "Matrix", ")", ":", "assert", "right", ".", "shape", "==", "(", "3", ",", "1", ")", ",", "right", ".", "shape", "return", "self", ".", "so3", "*", "right", "+", "self", ".", "t", "elif", "isinstance", "(", "right", ",", "Se3", ")", ":", "r", "=", "self", ".", "so3", "*", "right", ".", "so3", "t", "=", "self", ".", "t", "+", "self", ".", "so3", "*", "right", ".", "t", "return", "Se3", "(", "r", ",", "t", ")", "assert", "False", ",", "\"unsupported type: {0}\"", ".", "format", "(", "type", "(", "right", ")", ")" ]
https://github.com/fabianschenk/RESLAM/blob/2e71a578b6d1a1ad1fb018641218e1f41dd9e330/thirdparty/Sophus/py/sophus/se3.py#L65-L75
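The overload distinguishes "transform a point" from "compose two transforms", exactly as with homogeneous matrices. A NumPy sketch of the same semantics, independent of the sympy Se3/So3 classes used above:

    import numpy as np

    def se3(R, t):
        # 4x4 homogeneous transform from a 3x3 rotation and a translation vector
        T = np.eye(4)
        T[:3, :3] = R
        T[:3, 3] = t
        return T

    Rz = np.array([[0., -1., 0.],
                   [1.,  0., 0.],
                   [0.,  0., 1.]])          # 90 degrees about z
    T_ab = se3(Rz, [1., 0., 0.])
    T_bc = se3(np.eye(3), [0., 2., 0.])

    T_ac = T_ab @ T_bc                       # Se3 * Se3: concatenation
    p_b = np.array([1., 1., 0.])
    p_a = T_ab[:3, :3] @ p_b + T_ab[:3, 3]   # Se3 * point: rotate then translate
    print(T_ac[:3, 3], p_a)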
stepcode/stepcode
2a50010e6f6b8bd4843561e48fdb0fd4e8b87f39
src/exp2python/python/SCL/Part21.py
python
Parser.p_data_section
(self, p)
data_section : data_start entity_instance_list ENDSEC
data_section : data_start entity_instance_list ENDSEC
[ "data_section", ":", "data_start", "entity_instance_list", "ENDSEC" ]
def p_data_section(self, p): """data_section : data_start entity_instance_list ENDSEC""" p[0] = Section(p[2])
[ "def", "p_data_section", "(", "self", ",", "p", ")", ":", "p", "[", "0", "]", "=", "Section", "(", "p", "[", "2", "]", ")" ]
https://github.com/stepcode/stepcode/blob/2a50010e6f6b8bd4843561e48fdb0fd4e8b87f39/src/exp2python/python/SCL/Part21.py#L395-L397
giuspen/cherrytree
84712f206478fcf9acf30174009ad28c648c6344
pygtk2/modules/config.py
python
config_file_load
(dad)
Load the Preferences from Config File
Load the Preferences from Config File
[ "Load", "the", "Preferences", "from", "Config", "File" ]
def config_file_load(dad): """Load the Preferences from Config File""" dad.custom_kb_shortcuts = {} dad.custom_codexec_type = {} dad.custom_codexec_ext = {} dad.custom_codexec_term = None dad.latest_tag = ["", ""] if os.path.isfile(cons.CONFIG_PATH): cfg = ConfigParser.RawConfigParser() try: cfg.read(cons.CONFIG_PATH) except ConfigParser.MissingSectionHeaderError: print "? ConfigParser.MissingSectionHeaderError" section = "state" dad.file_dir = unicode(cfg.get(section, "file_dir"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "file_dir") else "" dad.file_name = unicode(cfg.get(section, "file_name"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "file_name") else "" dad.toolbar_visible = cfg.getboolean(section, "toolbar_visible") if cfg.has_option(section, "toolbar_visible") else True dad.win_is_maximized = cfg.getboolean(section, "win_is_maximized") if cfg.has_option(section, "win_is_maximized") else False # restore window size and position if cfg.has_option(section, "win_position_x") and cfg.has_option(section, "win_position_y"): dad.win_position = [cfg.getint(section, "win_position_x"), cfg.getint(section, "win_position_y")] dad.window.move(dad.win_position[0], dad.win_position[1]) else: dad.win_position = [10, 10] if dad.win_is_maximized: dad.window.maximize() elif cfg.has_option(section, "win_size_w") and cfg.has_option(section, "win_size_h"): win_size = [cfg.getint(section, "win_size_w"), cfg.getint(section, "win_size_h")] dad.window.resize(win_size[0], win_size[1]) dad.hpaned_pos = cfg.getint(section, "hpaned_pos") if cfg.has_option(section, "hpaned_pos") else 170 dad.tree_visible = cfg.getboolean(section, "tree_visible") if cfg.has_option(section, "tree_visible") else True if cfg.has_option(section, "node_path"): # restore the selected node dad.node_path = get_node_path_from_str(cfg.get(section, "node_path")) dad.cursor_position = cfg.getint(section, "cursor_position") if cfg.has_option(section, "cursor_position") else 0 else: dad.node_path = None dad.recent_docs = [] saved_from_gtkmm = False for i in range(cons.MAX_RECENT_DOCS): curr_key = "doc_%s" % i if cfg.has_option(section, curr_key): dad.recent_docs.append(unicode(cfg.get(section, curr_key), cons.STR_UTF8, cons.STR_IGNORE)) # supporting saved from gtkmm if not dad.file_name or saved_from_gtkmm is True: if i == 0: dad.file_name = os.path.basename(dad.recent_docs[0]) dad.file_dir = os.path.dirname(dad.recent_docs[0]) if cfg.has_option(section, "nodep_0"): saved_from_gtkmm = True dad.node_path = get_node_path_from_str(cfg.get(section, "nodep_0").replace(":", " ")) dad.cursor_position = cfg.getint(section, "curs_0") dad.expanded_collapsed_string = cfg.get(section, "expcol_0") elif i in (1,2,3): if cfg.has_option(section, "nodep_%s" % i): setattr(dad, "expcollnam%s" % i, os.path.basename(dad.recent_docs[i])) setattr(dad, "expcollstr%s" % i, cfg.get(section, "expcol_%s" % i)) setattr(dad, "expcollsel%s" % i, cfg.get(section, "nodep_%s" % i)) setattr(dad, "expcollcur%s" % i, cfg.get(section, "curs_%s" % i)) else: break dad.pick_dir_import = cfg.get(section, "pick_dir_import") if cfg.has_option(section, "pick_dir_import") else "" dad.pick_dir_export = cfg.get(section, "pick_dir_export") if cfg.has_option(section, "pick_dir_export") else "" dad.pick_dir_file = cfg.get(section, "pick_dir_file") if cfg.has_option(section, "pick_dir_file") else "" dad.pick_dir_img = cfg.get(section, "pick_dir_img") if cfg.has_option(section, "pick_dir_img") else "" dad.pick_dir_csv = cfg.get(section, "pick_dir_csv") if 
cfg.has_option(section, "pick_dir_csv") else "" dad.pick_dir_cbox = cfg.get(section, "pick_dir_cbox") if cfg.has_option(section, "pick_dir_cbox") else "" dad.link_type = cfg.get(section, "link_type") if cfg.has_option(section, "link_type") else cons.LINK_TYPE_WEBS dad.show_node_name_header = cfg.getboolean(section, "show_node_name_header") if cfg.has_option(section, "show_node_name_header") else True dad.nodes_on_node_name_header = cfg.getint(section, "nodes_on_node_name_header") if cfg.has_option(section, "nodes_on_node_name_header") else NODES_ON_NODE_NAME_HEADER_DEFAULT if cfg.has_option(section, "toolbar_icon_size"): dad.toolbar_icon_size = cfg.getint(section, "toolbar_icon_size") if dad.toolbar_icon_size not in ICONS_SIZE: dad.toolbar_icon_size = 1 else: dad.toolbar_icon_size = 1 dad.curr_colors = { 'f':gtk.gdk.color_parse(cfg.get(section, "fg")) if cfg.has_option(section, "fg") else None, 'b':gtk.gdk.color_parse(cfg.get(section, "bg")) if cfg.has_option(section, "bg") else None, 'n':gtk.gdk.color_parse(cfg.get(section, "nn")) if cfg.has_option(section, "nn") else None} section = "tree" dad.rest_exp_coll = cfg.getint(section, "rest_exp_coll") if cfg.has_option(section, "rest_exp_coll") else 0 if not hasattr(dad, "expanded_collapsed_string"): dad.expanded_collapsed_string = cfg.get(section, "expanded_collapsed_string") if cfg.has_option(section, "expanded_collapsed_string") else "" if not hasattr(dad, "expcollnam1"): dad.expcollnam1 = unicode(cfg.get(section, "expcollnam1"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "expcollnam1") else "" dad.expcollstr1 = cfg.get(section, "expcollstr1") if cfg.has_option(section, "expcollstr1") else "" dad.expcollsel1 = cfg.get(section, "expcollsel1") if cfg.has_option(section, "expcollsel1") else "" dad.expcollcur1 = cfg.getint(section, "expcollcur1") if cfg.has_option(section, "expcollcur1") else 0 if not hasattr(dad, "expcollnam2"): dad.expcollnam2 = unicode(cfg.get(section, "expcollnam2"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "expcollnam2") else "" dad.expcollstr2 = cfg.get(section, "expcollstr2") if cfg.has_option(section, "expcollstr2") else "" dad.expcollsel2 = cfg.get(section, "expcollsel2") if cfg.has_option(section, "expcollsel2") else "" dad.expcollcur2 = cfg.getint(section, "expcollcur2") if cfg.has_option(section, "expcollcur2") else 0 if not hasattr(dad, "expcollnam3"): dad.expcollnam3 = unicode(cfg.get(section, "expcollnam3"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "expcollnam3") else "" dad.expcollstr3 = cfg.get(section, "expcollstr3") if cfg.has_option(section, "expcollstr3") else "" dad.expcollsel3 = cfg.get(section, "expcollsel3") if cfg.has_option(section, "expcollsel3") else "" dad.expcollcur3 = cfg.getint(section, "expcollcur3") if cfg.has_option(section, "expcollcur3") else 0 dad.nodes_bookm_exp = cfg.getboolean(section, "nodes_bookm_exp") if cfg.has_option(section, "nodes_bookm_exp") else False dad.nodes_icons = cfg.get(section, "nodes_icons") if cfg.has_option(section, "nodes_icons") else "c" dad.aux_icon_hide = cfg.getboolean(section, "aux_icon_hide") if cfg.has_option(section, "aux_icon_hide") else False dad.default_icon_text = cfg.getint(section, "default_icon_text") if cfg.has_option(section, "default_icon_text") else cons.NODE_ICON_BULLET_ID dad.tree_right_side = cfg.getboolean(section, "tree_right_side") if cfg.has_option(section, "tree_right_side") else False dad.cherry_wrap_width = cfg.getint(section, "cherry_wrap_width") if cfg.has_option(section, 
"cherry_wrap_width") else 130 dad.tree_click_focus_text = cfg.getboolean(section, "tree_click_focus_text") if cfg.has_option(section, "tree_click_focus_text") else False dad.tree_click_expand = cfg.getboolean(section, "tree_click_expand") if cfg.has_option(section, "tree_click_expand") else False section = "editor" dad.syntax_highlighting = cfg.get(section, "syntax_highlighting") if cfg.has_option(section, "syntax_highlighting") else cons.RICH_TEXT_ID dad.auto_syn_highl = cfg.get(section, "auto_syn_highl") if cfg.has_option(section, "auto_syn_highl") else "sh" dad.style_scheme = cfg.get(section, "style_scheme") if cfg.has_option(section, "style_scheme") else cons.STYLE_SCHEME_DARK dad.enable_spell_check = cfg.getboolean(section, "enable_spell_check") if cfg.has_option(section, "enable_spell_check") else False dad.spell_check_lang = cfg.get(section, "spell_check_lang") if cfg.has_option(section, "spell_check_lang") else SPELL_CHECK_LANG_DEFAULT dad.show_line_numbers = cfg.getboolean(section, "show_line_numbers") if cfg.has_option(section, "show_line_numbers") else False dad.spaces_instead_tabs = cfg.getboolean(section, "spaces_instead_tabs") if cfg.has_option(section, "spaces_instead_tabs") else True dad.tabs_width = cfg.getint(section, "tabs_width") if cfg.has_option(section, "tabs_width") else 4 dad.anchor_size = cfg.getint(section, "anchor_size") if cfg.has_option(section, "anchor_size") else 16 dad.embfile_size = cfg.getint(section, "embfile_size") if cfg.has_option(section, "embfile_size") else 48 dad.embfile_show_filename = cfg.getboolean(section, "embfile_show_filename") if cfg.has_option(section, "embfile_show_filename") else True dad.embfile_max_size = cfg.getint(section, "embfile_max_size") if cfg.has_option(section, "embfile_max_size") else MAX_SIZE_EMBFILE_MB_DEFAULT dad.line_wrapping = cfg.getboolean(section, "line_wrapping") if cfg.has_option(section, "line_wrapping") else True dad.auto_smart_quotes = cfg.getboolean(section, "auto_smart_quotes") if cfg.has_option(section, "auto_smart_quotes") else True dad.triple_click_paragraph = cfg.getboolean(section, "triple_click_paragraph") if cfg.has_option(section, "triple_click_paragraph") else True dad.enable_symbol_autoreplace = cfg.getboolean(section, "enable_symbol_autoreplace") if cfg.has_option(section, "enable_symbol_autoreplace") else True dad.wrapping_indent = cfg.getint(section, "wrapping_indent") if cfg.has_option(section, "wrapping_indent") else -14 dad.auto_indent = cfg.getboolean(section, "auto_indent") if cfg.has_option(section, "auto_indent") else True dad.rt_show_white_spaces = cfg.getboolean(section, "rt_show_white_spaces") if cfg.has_option(section, "rt_show_white_spaces") else False dad.pt_show_white_spaces = cfg.getboolean(section, "pt_show_white_spaces") if cfg.has_option(section, "pt_show_white_spaces") else True dad.rt_highl_curr_line = cfg.getboolean(section, "rt_highl_curr_line") if cfg.has_option(section, "rt_highl_curr_line") else True dad.pt_highl_curr_line = cfg.getboolean(section, "pt_highl_curr_line") if cfg.has_option(section, "pt_highl_curr_line") else True dad.space_around_lines = cfg.getint(section, "space_around_lines") if cfg.has_option(section, "space_around_lines") else 0 dad.relative_wrapped_space = cfg.getint(section, "relative_wrapped_space") if cfg.has_option(section, "relative_wrapped_space") else 50 dad.h_rule = cfg.get(section, "h_rule") if cfg.has_option(section, "h_rule") else HORIZONTAL_RULE dad.special_chars = unicode(cfg.get(section, "special_chars"), cons.STR_UTF8, cons.STR_IGNORE) 
if cfg.has_option(section, "special_chars") else SPECIAL_CHARS_DEFAULT dad.selword_chars = unicode(cfg.get(section, "selword_chars"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "selword_chars") else SELWORD_CHARS_DEFAULT dad.chars_listbul = unicode(cfg.get(section, "chars_listbul"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "chars_listbul") else CHARS_LISTBUL_DEFAULT dad.chars_todo = unicode(cfg.get(section, "chars_todo"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "chars_todo") else CHARS_TODO_DEFAULT dad.chars_toc = unicode(cfg.get(section, "chars_toc"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "chars_toc") else CHARS_TOC_DEFAULT dad.chars_smart_dquote = unicode(cfg.get(section, "chars_smart_dquote"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "chars_smart_dquote") else CHARS_SMART_DQUOTE_DEFAULT dad.chars_smart_squote = unicode(cfg.get(section, "chars_smart_squote"), cons.STR_UTF8, cons.STR_IGNORE) if cfg.has_option(section, "chars_smart_squote") else CHARS_SMART_SQUOTE_DEFAULT if cfg.has_option(section, "latest_tag_prop") and cfg.has_option(section, "latest_tag_val"): dad.latest_tag[0] = cfg.get(section, "latest_tag_prop") dad.latest_tag[1] = cfg.get(section, "latest_tag_val") dad.timestamp_format = cfg.get(section, "timestamp_format") if cfg.has_option(section, "timestamp_format") else TIMESTAMP_FORMAT_DEFAULT dad.links_underline = cfg.getboolean(section, "links_underline") if cfg.has_option(section, "links_underline") else True dad.links_relative = cfg.getboolean(section, "links_relative") if cfg.has_option(section, "links_relative") else False if cfg.has_option(section, "weblink_custom_action"): temp_str = cfg.get(section, "weblink_custom_action") dad.weblink_custom_action = [True, temp_str[4:]] if temp_str[:4] == "True" else [False, temp_str[5:]] else: dad.weblink_custom_action = [False, LINK_CUSTOM_ACTION_DEFAULT_WEB] if cfg.has_option(section, "filelink_custom_action"): temp_str = cfg.get(section, "filelink_custom_action") dad.filelink_custom_action = [True, temp_str[4:]] if temp_str[:4] == "True" else [False, temp_str[5:]] else: dad.filelink_custom_action = [False, LINK_CUSTOM_ACTION_DEFAULT_FILE] if cfg.has_option(section, "folderlink_custom_action"): temp_str = cfg.get(section, "folderlink_custom_action") dad.folderlink_custom_action = [True, temp_str[4:]] if temp_str[:4] == "True" else [False, temp_str[5:]] else: dad.folderlink_custom_action = [False, LINK_CUSTOM_ACTION_DEFAULT_FILE] section = "codebox" if cfg.has_option(section, "codebox_width"): dad.codebox_width = cfg.getfloat(section, "codebox_width") else: dad.codebox_width = 700 if cfg.has_option(section, "codebox_height"): dad.codebox_height = cfg.getfloat(section, "codebox_height") else: dad.codebox_height = 100 dad.codebox_width_pixels = cfg.getboolean(section, "codebox_width_pixels") if cfg.has_option(section, "codebox_width_pixels") else True dad.codebox_line_num = cfg.getboolean(section, "codebox_line_num") if cfg.has_option(section, "codebox_line_num") else False dad.codebox_match_bra = cfg.getboolean(section, "codebox_match_bra") if cfg.has_option(section, "codebox_match_bra") else True dad.codebox_syn_highl = cfg.get(section, "codebox_syn_highl") if cfg.has_option(section, "codebox_syn_highl") else cons.PLAIN_TEXT_ID dad.codebox_auto_resize = cfg.getboolean(section, "codebox_auto_resize") if cfg.has_option(section, "codebox_auto_resize") else False section = "table" dad.table_rows = cfg.getint(section, "table_rows") if 
cfg.has_option(section, "table_rows") else 3 dad.table_columns = cfg.getint(section, "table_columns") if cfg.has_option(section, "table_columns") else 3 dad.table_column_mode = cfg.get(section, "table_column_mode") if cfg.has_option(section, "table_column_mode") else "rename" dad.table_col_min = cfg.getint(section, "table_col_min") if cfg.has_option(section, "table_col_min") else 40 dad.table_col_max = cfg.getint(section, "table_col_max") if cfg.has_option(section, "table_col_max") else 60 section = "fonts" dad.rt_font = cfg.get(section, "rt_font") if cfg.has_option(section, "rt_font") else "Sans 9" # default rich text font dad.pt_font = cfg.get(section, "pt_font") if cfg.has_option(section, "pt_font") else "Sans 9" # default plain text font dad.tree_font = cfg.get(section, "tree_font") if cfg.has_option(section, "tree_font") else "Sans 8" # default tree font dad.code_font = cfg.get(section, "code_font") if cfg.has_option(section, "code_font") else "Monospace 9" # default code font section = "colors" dad.rt_def_fg = cfg.get(section, "rt_def_fg") if cfg.has_option(section, "rt_def_fg") else cons.RICH_TEXT_DARK_FG dad.rt_def_bg = cfg.get(section, "rt_def_bg") if cfg.has_option(section, "rt_def_bg") else cons.RICH_TEXT_DARK_BG dad.tt_def_fg = cfg.get(section, "tt_def_fg") if cfg.has_option(section, "tt_def_fg") else cons.TREE_TEXT_LIGHT_FG dad.tt_def_bg = cfg.get(section, "tt_def_bg") if cfg.has_option(section, "tt_def_bg") else cons.TREE_TEXT_LIGHT_BG dad.monospace_bg = cfg.get(section, "monospace_bg") if cfg.has_option(section, "monospace_bg") else DEFAULT_MONOSPACE_BG if cfg.has_option(section, "palette_list"): dad.palette_list = cfg.get(section, "palette_list").split(":") else: dad.palette_list = COLOR_PALETTE_DEFAULT dad.col_link_webs = cfg.get(section, "col_link_webs") if cfg.has_option(section, "col_link_webs") else cons.COLOR_48_LINK_WEBS dad.col_link_node = cfg.get(section, "col_link_node") if cfg.has_option(section, "col_link_node") else cons.COLOR_48_LINK_NODE dad.col_link_file = cfg.get(section, "col_link_file") if cfg.has_option(section, "col_link_file") else cons.COLOR_48_LINK_FILE dad.col_link_fold = cfg.get(section, "col_link_fold") if cfg.has_option(section, "col_link_fold") else cons.COLOR_48_LINK_FOLD section = "misc" dad.toolbar_ui_list = cfg.get(section, "toolbar_ui_list").split(cons.CHAR_COMMA) if cfg.has_option(section, "toolbar_ui_list") else menus.TOOLBAR_VEC_DEFAULT dad.systray = cfg.getboolean(section, "systray") if cfg.has_option(section, "systray") else False dad.start_on_systray = cfg.getboolean(section, "start_on_systray") if cfg.has_option(section, "start_on_systray") else False dad.use_appind = cfg.getboolean(section, "use_appind") if cfg.has_option(section, "use_appind") else False if cfg.has_option(section, "autosave") and cfg.has_option(section, "autosave_val"): dad.autosave = [cfg.getboolean(section, "autosave"), cfg.getint(section, "autosave_val")] else: dad.autosave = [False, 5] dad.check_version = cfg.getboolean(section, "check_version") if cfg.has_option(section, "check_version") else False dad.word_count = cfg.getboolean(section, "word_count") if cfg.has_option(section, "word_count") else False dad.reload_doc_last = cfg.getboolean(section, "reload_doc_last") if cfg.has_option(section, "reload_doc_last") else True dad.enable_mod_time_sentinel = cfg.getboolean(section, "mod_time_sent") if cfg.has_option(section, "mod_time_sent") else False dad.backup_copy = cfg.getboolean(section, "backup_copy") if cfg.has_option(section, "backup_copy") else True 
dad.backup_num = cfg.getint(section, "backup_num") if cfg.has_option(section, "backup_num") else 3 dad.autosave_on_quit = cfg.getboolean(section, "autosave_on_quit") if cfg.has_option(section, "autosave_on_quit") else False dad.limit_undoable_steps = cfg.getint(section, "limit_undoable_steps") if cfg.has_option(section, "limit_undoable_steps") else 20 dad.journal_day_format = cfg.get(section, "journal_day_format") if cfg.has_option(section, "journal_day_format") else JOURNAL_DAY_FORMAT_DEFAULT #print "read", cons.CONFIG_PATH, "('%s', '%s')" % (dad.file_name, dad.file_dir) section = "keyboard" if cfg.has_section(section): for option in cfg.options(section): value = cfg.get(section, option).strip() dad.custom_kb_shortcuts[option] = value if value else None section = "codexec_term" if cfg.has_section(section): if cfg.has_option(section, "custom"): dad.custom_codexec_term = cfg.get(section, "custom") section = "codexec_type" if cfg.has_section(section): for option in cfg.options(section): dad.custom_codexec_type[option] = cfg.get(section, option) section = "codexec_ext" if cfg.has_section(section): for option in cfg.options(section): dad.custom_codexec_ext[option] = cfg.get(section, option) else: dad.file_dir = "" dad.file_name = "" dad.node_path = None dad.curr_colors = {'f':None, 'b':None, 'n':None} dad.syntax_highlighting = cons.RICH_TEXT_ID dad.auto_syn_highl = "sh" dad.style_scheme = cons.STYLE_SCHEME_DARK dad.tree_font = "Sans 8" # default tree font dad.rt_font = "Sans 9" # default rich text font dad.pt_font = "Sans 9" # default plain text font dad.code_font = "Monospace 9" # default code font dad.rt_def_fg = cons.RICH_TEXT_DARK_FG dad.rt_def_bg = cons.RICH_TEXT_DARK_BG dad.tt_def_fg = cons.TREE_TEXT_LIGHT_FG dad.tt_def_bg = cons.TREE_TEXT_LIGHT_BG dad.palette_list = COLOR_PALETTE_DEFAULT dad.col_link_webs = cons.COLOR_48_LINK_WEBS dad.col_link_node = cons.COLOR_48_LINK_NODE dad.col_link_file = cons.COLOR_48_LINK_FILE dad.col_link_fold = cons.COLOR_48_LINK_FOLD dad.h_rule = HORIZONTAL_RULE dad.special_chars = SPECIAL_CHARS_DEFAULT dad.selword_chars = SELWORD_CHARS_DEFAULT dad.chars_listbul = CHARS_LISTBUL_DEFAULT dad.chars_todo = CHARS_TODO_DEFAULT dad.chars_toc = CHARS_TOC_DEFAULT dad.chars_smart_dquote = CHARS_SMART_DQUOTE_DEFAULT dad.chars_smart_squote = CHARS_SMART_SQUOTE_DEFAULT dad.enable_spell_check = False dad.spell_check_lang = SPELL_CHECK_LANG_DEFAULT dad.show_line_numbers = False dad.spaces_instead_tabs = True dad.tabs_width = 4 dad.anchor_size = 16 dad.embfile_size = 48 dad.embfile_show_filename = True dad.embfile_max_size = MAX_SIZE_EMBFILE_MB_DEFAULT dad.line_wrapping = True dad.auto_smart_quotes = True dad.triple_click_paragraph = True dad.enable_symbol_autoreplace = True dad.wrapping_indent = -14 dad.auto_indent = True dad.toolbar_ui_list = menus.TOOLBAR_VEC_DEFAULT dad.systray = False dad.win_position = [10, 10] dad.autosave = [False, 5] dad.win_is_maximized = False dad.rest_exp_coll = 0 dad.expanded_collapsed_string = "" dad.expcollnam1 = "" dad.expcollnam2 = "" dad.expcollnam3 = "" dad.pick_dir_import = "" dad.pick_dir_export = "" dad.pick_dir_file = "" dad.pick_dir_img = "" dad.pick_dir_csv = "" dad.pick_dir_cbox = "" dad.link_type = cons.LINK_TYPE_WEBS dad.toolbar_icon_size = 1 dad.table_rows = 3 dad.table_columns = 3 dad.table_column_mode = "rename" dad.table_col_min = 40 dad.table_col_max = 60 dad.limit_undoable_steps = 20 dad.cherry_wrap_width = 130 dad.tree_click_focus_text = False dad.tree_click_expand = False dad.start_on_systray = False dad.use_appind = False 
dad.monospace_bg = DEFAULT_MONOSPACE_BG dad.links_underline = True dad.links_relative = False dad.weblink_custom_action = [False, LINK_CUSTOM_ACTION_DEFAULT_WEB] dad.filelink_custom_action = [False, LINK_CUSTOM_ACTION_DEFAULT_FILE] dad.folderlink_custom_action = [False, LINK_CUSTOM_ACTION_DEFAULT_FILE] dad.timestamp_format = TIMESTAMP_FORMAT_DEFAULT dad.codebox_width = 700 dad.codebox_height = 100 dad.codebox_width_pixels = True dad.codebox_line_num = False dad.codebox_match_bra = True dad.codebox_syn_highl = cons.PLAIN_TEXT_ID dad.codebox_auto_resize = False dad.check_version = False dad.word_count = False dad.reload_doc_last = True dad.enable_mod_time_sentinel = False dad.backup_copy = True dad.backup_num = 3 dad.autosave_on_quit = False dad.tree_right_side = False dad.aux_icon_hide = False dad.nodes_bookm_exp = False dad.rt_show_white_spaces = False dad.pt_show_white_spaces = True dad.rt_highl_curr_line = True dad.pt_highl_curr_line = True dad.space_around_lines = 0 dad.relative_wrapped_space = 50 dad.hpaned_pos = 170 dad.tree_visible = True dad.show_node_name_header = True dad.nodes_on_node_name_header = NODES_ON_NODE_NAME_HEADER_DEFAULT dad.nodes_icons = "c" dad.default_icon_text = cons.NODE_ICON_BULLET_ID dad.recent_docs = [] dad.toolbar_visible = True dad.journal_day_format = JOURNAL_DAY_FORMAT_DEFAULT print "missing", cons.CONFIG_PATH
[ "def", "config_file_load", "(", "dad", ")", ":", "dad", ".", "custom_kb_shortcuts", "=", "{", "}", "dad", ".", "custom_codexec_type", "=", "{", "}", "dad", ".", "custom_codexec_ext", "=", "{", "}", "dad", ".", "custom_codexec_term", "=", "None", "dad", ".", "latest_tag", "=", "[", "\"\"", ",", "\"\"", "]", "if", "os", ".", "path", ".", "isfile", "(", "cons", ".", "CONFIG_PATH", ")", ":", "cfg", "=", "ConfigParser", ".", "RawConfigParser", "(", ")", "try", ":", "cfg", ".", "read", "(", "cons", ".", "CONFIG_PATH", ")", "except", "ConfigParser", ".", "MissingSectionHeaderError", ":", "print", "\"? ConfigParser.MissingSectionHeaderError\"", "section", "=", "\"state\"", "dad", ".", "file_dir", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"file_dir\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"file_dir\"", ")", "else", "\"\"", "dad", ".", "file_name", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"file_name\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"file_name\"", ")", "else", "\"\"", "dad", ".", "toolbar_visible", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"toolbar_visible\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"toolbar_visible\"", ")", "else", "True", "dad", ".", "win_is_maximized", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"win_is_maximized\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"win_is_maximized\"", ")", "else", "False", "# restore window size and position", "if", "cfg", ".", "has_option", "(", "section", ",", "\"win_position_x\"", ")", "and", "cfg", ".", "has_option", "(", "section", ",", "\"win_position_y\"", ")", ":", "dad", ".", "win_position", "=", "[", "cfg", ".", "getint", "(", "section", ",", "\"win_position_x\"", ")", ",", "cfg", ".", "getint", "(", "section", ",", "\"win_position_y\"", ")", "]", "dad", ".", "window", ".", "move", "(", "dad", ".", "win_position", "[", "0", "]", ",", "dad", ".", "win_position", "[", "1", "]", ")", "else", ":", "dad", ".", "win_position", "=", "[", "10", ",", "10", "]", "if", "dad", ".", "win_is_maximized", ":", "dad", ".", "window", ".", "maximize", "(", ")", "elif", "cfg", ".", "has_option", "(", "section", ",", "\"win_size_w\"", ")", "and", "cfg", ".", "has_option", "(", "section", ",", "\"win_size_h\"", ")", ":", "win_size", "=", "[", "cfg", ".", "getint", "(", "section", ",", "\"win_size_w\"", ")", ",", "cfg", ".", "getint", "(", "section", ",", "\"win_size_h\"", ")", "]", "dad", ".", "window", ".", "resize", "(", "win_size", "[", "0", "]", ",", "win_size", "[", "1", "]", ")", "dad", ".", "hpaned_pos", "=", "cfg", ".", "getint", "(", "section", ",", "\"hpaned_pos\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"hpaned_pos\"", ")", "else", "170", "dad", ".", "tree_visible", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"tree_visible\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tree_visible\"", ")", "else", "True", "if", "cfg", ".", "has_option", "(", "section", ",", "\"node_path\"", ")", ":", "# restore the selected node", "dad", ".", "node_path", "=", "get_node_path_from_str", "(", "cfg", ".", "get", "(", "section", ",", "\"node_path\"", ")", ")", "dad", ".", "cursor_position", "=", "cfg", ".", "getint", "(", "section", ",", "\"cursor_position\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", 
"\"cursor_position\"", ")", "else", "0", "else", ":", "dad", ".", "node_path", "=", "None", "dad", ".", "recent_docs", "=", "[", "]", "saved_from_gtkmm", "=", "False", "for", "i", "in", "range", "(", "cons", ".", "MAX_RECENT_DOCS", ")", ":", "curr_key", "=", "\"doc_%s\"", "%", "i", "if", "cfg", ".", "has_option", "(", "section", ",", "curr_key", ")", ":", "dad", ".", "recent_docs", ".", "append", "(", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "curr_key", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", ")", "# supporting saved from gtkmm", "if", "not", "dad", ".", "file_name", "or", "saved_from_gtkmm", "is", "True", ":", "if", "i", "==", "0", ":", "dad", ".", "file_name", "=", "os", ".", "path", ".", "basename", "(", "dad", ".", "recent_docs", "[", "0", "]", ")", "dad", ".", "file_dir", "=", "os", ".", "path", ".", "dirname", "(", "dad", ".", "recent_docs", "[", "0", "]", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"nodep_0\"", ")", ":", "saved_from_gtkmm", "=", "True", "dad", ".", "node_path", "=", "get_node_path_from_str", "(", "cfg", ".", "get", "(", "section", ",", "\"nodep_0\"", ")", ".", "replace", "(", "\":\"", ",", "\" \"", ")", ")", "dad", ".", "cursor_position", "=", "cfg", ".", "getint", "(", "section", ",", "\"curs_0\"", ")", "dad", ".", "expanded_collapsed_string", "=", "cfg", ".", "get", "(", "section", ",", "\"expcol_0\"", ")", "elif", "i", "in", "(", "1", ",", "2", ",", "3", ")", ":", "if", "cfg", ".", "has_option", "(", "section", ",", "\"nodep_%s\"", "%", "i", ")", ":", "setattr", "(", "dad", ",", "\"expcollnam%s\"", "%", "i", ",", "os", ".", "path", ".", "basename", "(", "dad", ".", "recent_docs", "[", "i", "]", ")", ")", "setattr", "(", "dad", ",", "\"expcollstr%s\"", "%", "i", ",", "cfg", ".", "get", "(", "section", ",", "\"expcol_%s\"", "%", "i", ")", ")", "setattr", "(", "dad", ",", "\"expcollsel%s\"", "%", "i", ",", "cfg", ".", "get", "(", "section", ",", "\"nodep_%s\"", "%", "i", ")", ")", "setattr", "(", "dad", ",", "\"expcollcur%s\"", "%", "i", ",", "cfg", ".", "get", "(", "section", ",", "\"curs_%s\"", "%", "i", ")", ")", "else", ":", "break", "dad", ".", "pick_dir_import", "=", "cfg", ".", "get", "(", "section", ",", "\"pick_dir_import\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pick_dir_import\"", ")", "else", "\"\"", "dad", ".", "pick_dir_export", "=", "cfg", ".", "get", "(", "section", ",", "\"pick_dir_export\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pick_dir_export\"", ")", "else", "\"\"", "dad", ".", "pick_dir_file", "=", "cfg", ".", "get", "(", "section", ",", "\"pick_dir_file\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pick_dir_file\"", ")", "else", "\"\"", "dad", ".", "pick_dir_img", "=", "cfg", ".", "get", "(", "section", ",", "\"pick_dir_img\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pick_dir_img\"", ")", "else", "\"\"", "dad", ".", "pick_dir_csv", "=", "cfg", ".", "get", "(", "section", ",", "\"pick_dir_csv\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pick_dir_csv\"", ")", "else", "\"\"", "dad", ".", "pick_dir_cbox", "=", "cfg", ".", "get", "(", "section", ",", "\"pick_dir_cbox\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pick_dir_cbox\"", ")", "else", "\"\"", "dad", ".", "link_type", "=", "cfg", ".", "get", "(", "section", ",", "\"link_type\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"link_type\"", ")", "else", "cons", ".", "LINK_TYPE_WEBS", "dad", ".", 
"show_node_name_header", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"show_node_name_header\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"show_node_name_header\"", ")", "else", "True", "dad", ".", "nodes_on_node_name_header", "=", "cfg", ".", "getint", "(", "section", ",", "\"nodes_on_node_name_header\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"nodes_on_node_name_header\"", ")", "else", "NODES_ON_NODE_NAME_HEADER_DEFAULT", "if", "cfg", ".", "has_option", "(", "section", ",", "\"toolbar_icon_size\"", ")", ":", "dad", ".", "toolbar_icon_size", "=", "cfg", ".", "getint", "(", "section", ",", "\"toolbar_icon_size\"", ")", "if", "dad", ".", "toolbar_icon_size", "not", "in", "ICONS_SIZE", ":", "dad", ".", "toolbar_icon_size", "=", "1", "else", ":", "dad", ".", "toolbar_icon_size", "=", "1", "dad", ".", "curr_colors", "=", "{", "'f'", ":", "gtk", ".", "gdk", ".", "color_parse", "(", "cfg", ".", "get", "(", "section", ",", "\"fg\"", ")", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"fg\"", ")", "else", "None", ",", "'b'", ":", "gtk", ".", "gdk", ".", "color_parse", "(", "cfg", ".", "get", "(", "section", ",", "\"bg\"", ")", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"bg\"", ")", "else", "None", ",", "'n'", ":", "gtk", ".", "gdk", ".", "color_parse", "(", "cfg", ".", "get", "(", "section", ",", "\"nn\"", ")", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"nn\"", ")", "else", "None", "}", "section", "=", "\"tree\"", "dad", ".", "rest_exp_coll", "=", "cfg", ".", "getint", "(", "section", ",", "\"rest_exp_coll\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"rest_exp_coll\"", ")", "else", "0", "if", "not", "hasattr", "(", "dad", ",", "\"expanded_collapsed_string\"", ")", ":", "dad", ".", "expanded_collapsed_string", "=", "cfg", ".", "get", "(", "section", ",", "\"expanded_collapsed_string\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expanded_collapsed_string\"", ")", "else", "\"\"", "if", "not", "hasattr", "(", "dad", ",", "\"expcollnam1\"", ")", ":", "dad", ".", "expcollnam1", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"expcollnam1\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollnam1\"", ")", "else", "\"\"", "dad", ".", "expcollstr1", "=", "cfg", ".", "get", "(", "section", ",", "\"expcollstr1\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollstr1\"", ")", "else", "\"\"", "dad", ".", "expcollsel1", "=", "cfg", ".", "get", "(", "section", ",", "\"expcollsel1\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollsel1\"", ")", "else", "\"\"", "dad", ".", "expcollcur1", "=", "cfg", ".", "getint", "(", "section", ",", "\"expcollcur1\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollcur1\"", ")", "else", "0", "if", "not", "hasattr", "(", "dad", ",", "\"expcollnam2\"", ")", ":", "dad", ".", "expcollnam2", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"expcollnam2\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollnam2\"", ")", "else", "\"\"", "dad", ".", "expcollstr2", "=", "cfg", ".", "get", "(", "section", ",", "\"expcollstr2\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollstr2\"", ")", "else", "\"\"", "dad", ".", "expcollsel2", "=", "cfg", ".", "get", "(", "section", ",", 
"\"expcollsel2\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollsel2\"", ")", "else", "\"\"", "dad", ".", "expcollcur2", "=", "cfg", ".", "getint", "(", "section", ",", "\"expcollcur2\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollcur2\"", ")", "else", "0", "if", "not", "hasattr", "(", "dad", ",", "\"expcollnam3\"", ")", ":", "dad", ".", "expcollnam3", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"expcollnam3\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollnam3\"", ")", "else", "\"\"", "dad", ".", "expcollstr3", "=", "cfg", ".", "get", "(", "section", ",", "\"expcollstr3\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollstr3\"", ")", "else", "\"\"", "dad", ".", "expcollsel3", "=", "cfg", ".", "get", "(", "section", ",", "\"expcollsel3\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollsel3\"", ")", "else", "\"\"", "dad", ".", "expcollcur3", "=", "cfg", ".", "getint", "(", "section", ",", "\"expcollcur3\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"expcollcur3\"", ")", "else", "0", "dad", ".", "nodes_bookm_exp", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"nodes_bookm_exp\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"nodes_bookm_exp\"", ")", "else", "False", "dad", ".", "nodes_icons", "=", "cfg", ".", "get", "(", "section", ",", "\"nodes_icons\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"nodes_icons\"", ")", "else", "\"c\"", "dad", ".", "aux_icon_hide", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"aux_icon_hide\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"aux_icon_hide\"", ")", "else", "False", "dad", ".", "default_icon_text", "=", "cfg", ".", "getint", "(", "section", ",", "\"default_icon_text\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"default_icon_text\"", ")", "else", "cons", ".", "NODE_ICON_BULLET_ID", "dad", ".", "tree_right_side", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"tree_right_side\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tree_right_side\"", ")", "else", "False", "dad", ".", "cherry_wrap_width", "=", "cfg", ".", "getint", "(", "section", ",", "\"cherry_wrap_width\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"cherry_wrap_width\"", ")", "else", "130", "dad", ".", "tree_click_focus_text", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"tree_click_focus_text\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tree_click_focus_text\"", ")", "else", "False", "dad", ".", "tree_click_expand", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"tree_click_expand\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tree_click_expand\"", ")", "else", "False", "section", "=", "\"editor\"", "dad", ".", "syntax_highlighting", "=", "cfg", ".", "get", "(", "section", ",", "\"syntax_highlighting\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"syntax_highlighting\"", ")", "else", "cons", ".", "RICH_TEXT_ID", "dad", ".", "auto_syn_highl", "=", "cfg", ".", "get", "(", "section", ",", "\"auto_syn_highl\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"auto_syn_highl\"", ")", "else", "\"sh\"", "dad", ".", "style_scheme", "=", "cfg", ".", "get", "(", "section", ",", "\"style_scheme\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"style_scheme\"", ")", 
"else", "cons", ".", "STYLE_SCHEME_DARK", "dad", ".", "enable_spell_check", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"enable_spell_check\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"enable_spell_check\"", ")", "else", "False", "dad", ".", "spell_check_lang", "=", "cfg", ".", "get", "(", "section", ",", "\"spell_check_lang\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"spell_check_lang\"", ")", "else", "SPELL_CHECK_LANG_DEFAULT", "dad", ".", "show_line_numbers", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"show_line_numbers\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"show_line_numbers\"", ")", "else", "False", "dad", ".", "spaces_instead_tabs", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"spaces_instead_tabs\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"spaces_instead_tabs\"", ")", "else", "True", "dad", ".", "tabs_width", "=", "cfg", ".", "getint", "(", "section", ",", "\"tabs_width\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tabs_width\"", ")", "else", "4", "dad", ".", "anchor_size", "=", "cfg", ".", "getint", "(", "section", ",", "\"anchor_size\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"anchor_size\"", ")", "else", "16", "dad", ".", "embfile_size", "=", "cfg", ".", "getint", "(", "section", ",", "\"embfile_size\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"embfile_size\"", ")", "else", "48", "dad", ".", "embfile_show_filename", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"embfile_show_filename\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"embfile_show_filename\"", ")", "else", "True", "dad", ".", "embfile_max_size", "=", "cfg", ".", "getint", "(", "section", ",", "\"embfile_max_size\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"embfile_max_size\"", ")", "else", "MAX_SIZE_EMBFILE_MB_DEFAULT", "dad", ".", "line_wrapping", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"line_wrapping\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"line_wrapping\"", ")", "else", "True", "dad", ".", "auto_smart_quotes", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"auto_smart_quotes\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"auto_smart_quotes\"", ")", "else", "True", "dad", ".", "triple_click_paragraph", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"triple_click_paragraph\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"triple_click_paragraph\"", ")", "else", "True", "dad", ".", "enable_symbol_autoreplace", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"enable_symbol_autoreplace\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"enable_symbol_autoreplace\"", ")", "else", "True", "dad", ".", "wrapping_indent", "=", "cfg", ".", "getint", "(", "section", ",", "\"wrapping_indent\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"wrapping_indent\"", ")", "else", "-", "14", "dad", ".", "auto_indent", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"auto_indent\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"auto_indent\"", ")", "else", "True", "dad", ".", "rt_show_white_spaces", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"rt_show_white_spaces\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"rt_show_white_spaces\"", ")", "else", "False", "dad", ".", "pt_show_white_spaces", "=", "cfg", ".", "getboolean", "(", "section", ",", 
"\"pt_show_white_spaces\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pt_show_white_spaces\"", ")", "else", "True", "dad", ".", "rt_highl_curr_line", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"rt_highl_curr_line\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"rt_highl_curr_line\"", ")", "else", "True", "dad", ".", "pt_highl_curr_line", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"pt_highl_curr_line\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pt_highl_curr_line\"", ")", "else", "True", "dad", ".", "space_around_lines", "=", "cfg", ".", "getint", "(", "section", ",", "\"space_around_lines\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"space_around_lines\"", ")", "else", "0", "dad", ".", "relative_wrapped_space", "=", "cfg", ".", "getint", "(", "section", ",", "\"relative_wrapped_space\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"relative_wrapped_space\"", ")", "else", "50", "dad", ".", "h_rule", "=", "cfg", ".", "get", "(", "section", ",", "\"h_rule\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"h_rule\"", ")", "else", "HORIZONTAL_RULE", "dad", ".", "special_chars", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"special_chars\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"special_chars\"", ")", "else", "SPECIAL_CHARS_DEFAULT", "dad", ".", "selword_chars", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"selword_chars\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"selword_chars\"", ")", "else", "SELWORD_CHARS_DEFAULT", "dad", ".", "chars_listbul", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"chars_listbul\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"chars_listbul\"", ")", "else", "CHARS_LISTBUL_DEFAULT", "dad", ".", "chars_todo", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"chars_todo\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"chars_todo\"", ")", "else", "CHARS_TODO_DEFAULT", "dad", ".", "chars_toc", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"chars_toc\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"chars_toc\"", ")", "else", "CHARS_TOC_DEFAULT", "dad", ".", "chars_smart_dquote", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"chars_smart_dquote\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"chars_smart_dquote\"", ")", "else", "CHARS_SMART_DQUOTE_DEFAULT", "dad", ".", "chars_smart_squote", "=", "unicode", "(", "cfg", ".", "get", "(", "section", ",", "\"chars_smart_squote\"", ")", ",", "cons", ".", "STR_UTF8", ",", "cons", ".", "STR_IGNORE", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"chars_smart_squote\"", ")", "else", "CHARS_SMART_SQUOTE_DEFAULT", "if", "cfg", ".", "has_option", "(", "section", ",", "\"latest_tag_prop\"", ")", "and", "cfg", ".", "has_option", "(", "section", ",", "\"latest_tag_val\"", ")", ":", "dad", ".", "latest_tag", "[", "0", "]", "=", "cfg", ".", "get", "(", "section", ",", "\"latest_tag_prop\"", ")", "dad", 
".", "latest_tag", "[", "1", "]", "=", "cfg", ".", "get", "(", "section", ",", "\"latest_tag_val\"", ")", "dad", ".", "timestamp_format", "=", "cfg", ".", "get", "(", "section", ",", "\"timestamp_format\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"timestamp_format\"", ")", "else", "TIMESTAMP_FORMAT_DEFAULT", "dad", ".", "links_underline", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"links_underline\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"links_underline\"", ")", "else", "True", "dad", ".", "links_relative", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"links_relative\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"links_relative\"", ")", "else", "False", "if", "cfg", ".", "has_option", "(", "section", ",", "\"weblink_custom_action\"", ")", ":", "temp_str", "=", "cfg", ".", "get", "(", "section", ",", "\"weblink_custom_action\"", ")", "dad", ".", "weblink_custom_action", "=", "[", "True", ",", "temp_str", "[", "4", ":", "]", "]", "if", "temp_str", "[", ":", "4", "]", "==", "\"True\"", "else", "[", "False", ",", "temp_str", "[", "5", ":", "]", "]", "else", ":", "dad", ".", "weblink_custom_action", "=", "[", "False", ",", "LINK_CUSTOM_ACTION_DEFAULT_WEB", "]", "if", "cfg", ".", "has_option", "(", "section", ",", "\"filelink_custom_action\"", ")", ":", "temp_str", "=", "cfg", ".", "get", "(", "section", ",", "\"filelink_custom_action\"", ")", "dad", ".", "filelink_custom_action", "=", "[", "True", ",", "temp_str", "[", "4", ":", "]", "]", "if", "temp_str", "[", ":", "4", "]", "==", "\"True\"", "else", "[", "False", ",", "temp_str", "[", "5", ":", "]", "]", "else", ":", "dad", ".", "filelink_custom_action", "=", "[", "False", ",", "LINK_CUSTOM_ACTION_DEFAULT_FILE", "]", "if", "cfg", ".", "has_option", "(", "section", ",", "\"folderlink_custom_action\"", ")", ":", "temp_str", "=", "cfg", ".", "get", "(", "section", ",", "\"folderlink_custom_action\"", ")", "dad", ".", "folderlink_custom_action", "=", "[", "True", ",", "temp_str", "[", "4", ":", "]", "]", "if", "temp_str", "[", ":", "4", "]", "==", "\"True\"", "else", "[", "False", ",", "temp_str", "[", "5", ":", "]", "]", "else", ":", "dad", ".", "folderlink_custom_action", "=", "[", "False", ",", "LINK_CUSTOM_ACTION_DEFAULT_FILE", "]", "section", "=", "\"codebox\"", "if", "cfg", ".", "has_option", "(", "section", ",", "\"codebox_width\"", ")", ":", "dad", ".", "codebox_width", "=", "cfg", ".", "getfloat", "(", "section", ",", "\"codebox_width\"", ")", "else", ":", "dad", ".", "codebox_width", "=", "700", "if", "cfg", ".", "has_option", "(", "section", ",", "\"codebox_height\"", ")", ":", "dad", ".", "codebox_height", "=", "cfg", ".", "getfloat", "(", "section", ",", "\"codebox_height\"", ")", "else", ":", "dad", ".", "codebox_height", "=", "100", "dad", ".", "codebox_width_pixels", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"codebox_width_pixels\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"codebox_width_pixels\"", ")", "else", "True", "dad", ".", "codebox_line_num", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"codebox_line_num\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"codebox_line_num\"", ")", "else", "False", "dad", ".", "codebox_match_bra", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"codebox_match_bra\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"codebox_match_bra\"", ")", "else", "True", "dad", ".", "codebox_syn_highl", "=", "cfg", ".", "get", "(", "section", ",", 
"\"codebox_syn_highl\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"codebox_syn_highl\"", ")", "else", "cons", ".", "PLAIN_TEXT_ID", "dad", ".", "codebox_auto_resize", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"codebox_auto_resize\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"codebox_auto_resize\"", ")", "else", "False", "section", "=", "\"table\"", "dad", ".", "table_rows", "=", "cfg", ".", "getint", "(", "section", ",", "\"table_rows\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"table_rows\"", ")", "else", "3", "dad", ".", "table_columns", "=", "cfg", ".", "getint", "(", "section", ",", "\"table_columns\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"table_columns\"", ")", "else", "3", "dad", ".", "table_column_mode", "=", "cfg", ".", "get", "(", "section", ",", "\"table_column_mode\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"table_column_mode\"", ")", "else", "\"rename\"", "dad", ".", "table_col_min", "=", "cfg", ".", "getint", "(", "section", ",", "\"table_col_min\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"table_col_min\"", ")", "else", "40", "dad", ".", "table_col_max", "=", "cfg", ".", "getint", "(", "section", ",", "\"table_col_max\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"table_col_max\"", ")", "else", "60", "section", "=", "\"fonts\"", "dad", ".", "rt_font", "=", "cfg", ".", "get", "(", "section", ",", "\"rt_font\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"rt_font\"", ")", "else", "\"Sans 9\"", "# default rich text font", "dad", ".", "pt_font", "=", "cfg", ".", "get", "(", "section", ",", "\"pt_font\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"pt_font\"", ")", "else", "\"Sans 9\"", "# default plain text font", "dad", ".", "tree_font", "=", "cfg", ".", "get", "(", "section", ",", "\"tree_font\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tree_font\"", ")", "else", "\"Sans 8\"", "# default tree font", "dad", ".", "code_font", "=", "cfg", ".", "get", "(", "section", ",", "\"code_font\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"code_font\"", ")", "else", "\"Monospace 9\"", "# default code font", "section", "=", "\"colors\"", "dad", ".", "rt_def_fg", "=", "cfg", ".", "get", "(", "section", ",", "\"rt_def_fg\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"rt_def_fg\"", ")", "else", "cons", ".", "RICH_TEXT_DARK_FG", "dad", ".", "rt_def_bg", "=", "cfg", ".", "get", "(", "section", ",", "\"rt_def_bg\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"rt_def_bg\"", ")", "else", "cons", ".", "RICH_TEXT_DARK_BG", "dad", ".", "tt_def_fg", "=", "cfg", ".", "get", "(", "section", ",", "\"tt_def_fg\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tt_def_fg\"", ")", "else", "cons", ".", "TREE_TEXT_LIGHT_FG", "dad", ".", "tt_def_bg", "=", "cfg", ".", "get", "(", "section", ",", "\"tt_def_bg\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"tt_def_bg\"", ")", "else", "cons", ".", "TREE_TEXT_LIGHT_BG", "dad", ".", "monospace_bg", "=", "cfg", ".", "get", "(", "section", ",", "\"monospace_bg\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"monospace_bg\"", ")", "else", "DEFAULT_MONOSPACE_BG", "if", "cfg", ".", "has_option", "(", "section", ",", "\"palette_list\"", ")", ":", "dad", ".", "palette_list", "=", "cfg", ".", "get", "(", "section", ",", "\"palette_list\"", ")", ".", "split", "(", "\":\"", 
")", "else", ":", "dad", ".", "palette_list", "=", "COLOR_PALETTE_DEFAULT", "dad", ".", "col_link_webs", "=", "cfg", ".", "get", "(", "section", ",", "\"col_link_webs\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"col_link_webs\"", ")", "else", "cons", ".", "COLOR_48_LINK_WEBS", "dad", ".", "col_link_node", "=", "cfg", ".", "get", "(", "section", ",", "\"col_link_node\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"col_link_node\"", ")", "else", "cons", ".", "COLOR_48_LINK_NODE", "dad", ".", "col_link_file", "=", "cfg", ".", "get", "(", "section", ",", "\"col_link_file\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"col_link_file\"", ")", "else", "cons", ".", "COLOR_48_LINK_FILE", "dad", ".", "col_link_fold", "=", "cfg", ".", "get", "(", "section", ",", "\"col_link_fold\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"col_link_fold\"", ")", "else", "cons", ".", "COLOR_48_LINK_FOLD", "section", "=", "\"misc\"", "dad", ".", "toolbar_ui_list", "=", "cfg", ".", "get", "(", "section", ",", "\"toolbar_ui_list\"", ")", ".", "split", "(", "cons", ".", "CHAR_COMMA", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"toolbar_ui_list\"", ")", "else", "menus", ".", "TOOLBAR_VEC_DEFAULT", "dad", ".", "systray", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"systray\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"systray\"", ")", "else", "False", "dad", ".", "start_on_systray", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"start_on_systray\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"start_on_systray\"", ")", "else", "False", "dad", ".", "use_appind", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"use_appind\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"use_appind\"", ")", "else", "False", "if", "cfg", ".", "has_option", "(", "section", ",", "\"autosave\"", ")", "and", "cfg", ".", "has_option", "(", "section", ",", "\"autosave_val\"", ")", ":", "dad", ".", "autosave", "=", "[", "cfg", ".", "getboolean", "(", "section", ",", "\"autosave\"", ")", ",", "cfg", ".", "getint", "(", "section", ",", "\"autosave_val\"", ")", "]", "else", ":", "dad", ".", "autosave", "=", "[", "False", ",", "5", "]", "dad", ".", "check_version", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"check_version\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"check_version\"", ")", "else", "False", "dad", ".", "word_count", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"word_count\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"word_count\"", ")", "else", "False", "dad", ".", "reload_doc_last", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"reload_doc_last\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"reload_doc_last\"", ")", "else", "True", "dad", ".", "enable_mod_time_sentinel", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"mod_time_sent\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"mod_time_sent\"", ")", "else", "False", "dad", ".", "backup_copy", "=", "cfg", ".", "getboolean", "(", "section", ",", "\"backup_copy\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"backup_copy\"", ")", "else", "True", "dad", ".", "backup_num", "=", "cfg", ".", "getint", "(", "section", ",", "\"backup_num\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"backup_num\"", ")", "else", "3", "dad", ".", "autosave_on_quit", "=", "cfg", ".", "getboolean", "(", "section", 
",", "\"autosave_on_quit\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"autosave_on_quit\"", ")", "else", "False", "dad", ".", "limit_undoable_steps", "=", "cfg", ".", "getint", "(", "section", ",", "\"limit_undoable_steps\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"limit_undoable_steps\"", ")", "else", "20", "dad", ".", "journal_day_format", "=", "cfg", ".", "get", "(", "section", ",", "\"journal_day_format\"", ")", "if", "cfg", ".", "has_option", "(", "section", ",", "\"journal_day_format\"", ")", "else", "JOURNAL_DAY_FORMAT_DEFAULT", "#print \"read\", cons.CONFIG_PATH, \"('%s', '%s')\" % (dad.file_name, dad.file_dir)", "section", "=", "\"keyboard\"", "if", "cfg", ".", "has_section", "(", "section", ")", ":", "for", "option", "in", "cfg", ".", "options", "(", "section", ")", ":", "value", "=", "cfg", ".", "get", "(", "section", ",", "option", ")", ".", "strip", "(", ")", "dad", ".", "custom_kb_shortcuts", "[", "option", "]", "=", "value", "if", "value", "else", "None", "section", "=", "\"codexec_term\"", "if", "cfg", ".", "has_section", "(", "section", ")", ":", "if", "cfg", ".", "has_option", "(", "section", ",", "\"custom\"", ")", ":", "dad", ".", "custom_codexec_term", "=", "cfg", ".", "get", "(", "section", ",", "\"custom\"", ")", "section", "=", "\"codexec_type\"", "if", "cfg", ".", "has_section", "(", "section", ")", ":", "for", "option", "in", "cfg", ".", "options", "(", "section", ")", ":", "dad", ".", "custom_codexec_type", "[", "option", "]", "=", "cfg", ".", "get", "(", "section", ",", "option", ")", "section", "=", "\"codexec_ext\"", "if", "cfg", ".", "has_section", "(", "section", ")", ":", "for", "option", "in", "cfg", ".", "options", "(", "section", ")", ":", "dad", ".", "custom_codexec_ext", "[", "option", "]", "=", "cfg", ".", "get", "(", "section", ",", "option", ")", "else", ":", "dad", ".", "file_dir", "=", "\"\"", "dad", ".", "file_name", "=", "\"\"", "dad", ".", "node_path", "=", "None", "dad", ".", "curr_colors", "=", "{", "'f'", ":", "None", ",", "'b'", ":", "None", ",", "'n'", ":", "None", "}", "dad", ".", "syntax_highlighting", "=", "cons", ".", "RICH_TEXT_ID", "dad", ".", "auto_syn_highl", "=", "\"sh\"", "dad", ".", "style_scheme", "=", "cons", ".", "STYLE_SCHEME_DARK", "dad", ".", "tree_font", "=", "\"Sans 8\"", "# default tree font", "dad", ".", "rt_font", "=", "\"Sans 9\"", "# default rich text font", "dad", ".", "pt_font", "=", "\"Sans 9\"", "# default plain text font", "dad", ".", "code_font", "=", "\"Monospace 9\"", "# default code font", "dad", ".", "rt_def_fg", "=", "cons", ".", "RICH_TEXT_DARK_FG", "dad", ".", "rt_def_bg", "=", "cons", ".", "RICH_TEXT_DARK_BG", "dad", ".", "tt_def_fg", "=", "cons", ".", "TREE_TEXT_LIGHT_FG", "dad", ".", "tt_def_bg", "=", "cons", ".", "TREE_TEXT_LIGHT_BG", "dad", ".", "palette_list", "=", "COLOR_PALETTE_DEFAULT", "dad", ".", "col_link_webs", "=", "cons", ".", "COLOR_48_LINK_WEBS", "dad", ".", "col_link_node", "=", "cons", ".", "COLOR_48_LINK_NODE", "dad", ".", "col_link_file", "=", "cons", ".", "COLOR_48_LINK_FILE", "dad", ".", "col_link_fold", "=", "cons", ".", "COLOR_48_LINK_FOLD", "dad", ".", "h_rule", "=", "HORIZONTAL_RULE", "dad", ".", "special_chars", "=", "SPECIAL_CHARS_DEFAULT", "dad", ".", "selword_chars", "=", "SELWORD_CHARS_DEFAULT", "dad", ".", "chars_listbul", "=", "CHARS_LISTBUL_DEFAULT", "dad", ".", "chars_todo", "=", "CHARS_TODO_DEFAULT", "dad", ".", "chars_toc", "=", "CHARS_TOC_DEFAULT", "dad", ".", "chars_smart_dquote", "=", "CHARS_SMART_DQUOTE_DEFAULT", "dad", 
".", "chars_smart_squote", "=", "CHARS_SMART_SQUOTE_DEFAULT", "dad", ".", "enable_spell_check", "=", "False", "dad", ".", "spell_check_lang", "=", "SPELL_CHECK_LANG_DEFAULT", "dad", ".", "show_line_numbers", "=", "False", "dad", ".", "spaces_instead_tabs", "=", "True", "dad", ".", "tabs_width", "=", "4", "dad", ".", "anchor_size", "=", "16", "dad", ".", "embfile_size", "=", "48", "dad", ".", "embfile_show_filename", "=", "True", "dad", ".", "embfile_max_size", "=", "MAX_SIZE_EMBFILE_MB_DEFAULT", "dad", ".", "line_wrapping", "=", "True", "dad", ".", "auto_smart_quotes", "=", "True", "dad", ".", "triple_click_paragraph", "=", "True", "dad", ".", "enable_symbol_autoreplace", "=", "True", "dad", ".", "wrapping_indent", "=", "-", "14", "dad", ".", "auto_indent", "=", "True", "dad", ".", "toolbar_ui_list", "=", "menus", ".", "TOOLBAR_VEC_DEFAULT", "dad", ".", "systray", "=", "False", "dad", ".", "win_position", "=", "[", "10", ",", "10", "]", "dad", ".", "autosave", "=", "[", "False", ",", "5", "]", "dad", ".", "win_is_maximized", "=", "False", "dad", ".", "rest_exp_coll", "=", "0", "dad", ".", "expanded_collapsed_string", "=", "\"\"", "dad", ".", "expcollnam1", "=", "\"\"", "dad", ".", "expcollnam2", "=", "\"\"", "dad", ".", "expcollnam3", "=", "\"\"", "dad", ".", "pick_dir_import", "=", "\"\"", "dad", ".", "pick_dir_export", "=", "\"\"", "dad", ".", "pick_dir_file", "=", "\"\"", "dad", ".", "pick_dir_img", "=", "\"\"", "dad", ".", "pick_dir_csv", "=", "\"\"", "dad", ".", "pick_dir_cbox", "=", "\"\"", "dad", ".", "link_type", "=", "cons", ".", "LINK_TYPE_WEBS", "dad", ".", "toolbar_icon_size", "=", "1", "dad", ".", "table_rows", "=", "3", "dad", ".", "table_columns", "=", "3", "dad", ".", "table_column_mode", "=", "\"rename\"", "dad", ".", "table_col_min", "=", "40", "dad", ".", "table_col_max", "=", "60", "dad", ".", "limit_undoable_steps", "=", "20", "dad", ".", "cherry_wrap_width", "=", "130", "dad", ".", "tree_click_focus_text", "=", "False", "dad", ".", "tree_click_expand", "=", "False", "dad", ".", "start_on_systray", "=", "False", "dad", ".", "use_appind", "=", "False", "dad", ".", "monospace_bg", "=", "DEFAULT_MONOSPACE_BG", "dad", ".", "links_underline", "=", "True", "dad", ".", "links_relative", "=", "False", "dad", ".", "weblink_custom_action", "=", "[", "False", ",", "LINK_CUSTOM_ACTION_DEFAULT_WEB", "]", "dad", ".", "filelink_custom_action", "=", "[", "False", ",", "LINK_CUSTOM_ACTION_DEFAULT_FILE", "]", "dad", ".", "folderlink_custom_action", "=", "[", "False", ",", "LINK_CUSTOM_ACTION_DEFAULT_FILE", "]", "dad", ".", "timestamp_format", "=", "TIMESTAMP_FORMAT_DEFAULT", "dad", ".", "codebox_width", "=", "700", "dad", ".", "codebox_height", "=", "100", "dad", ".", "codebox_width_pixels", "=", "True", "dad", ".", "codebox_line_num", "=", "False", "dad", ".", "codebox_match_bra", "=", "True", "dad", ".", "codebox_syn_highl", "=", "cons", ".", "PLAIN_TEXT_ID", "dad", ".", "codebox_auto_resize", "=", "False", "dad", ".", "check_version", "=", "False", "dad", ".", "word_count", "=", "False", "dad", ".", "reload_doc_last", "=", "True", "dad", ".", "enable_mod_time_sentinel", "=", "False", "dad", ".", "backup_copy", "=", "True", "dad", ".", "backup_num", "=", "3", "dad", ".", "autosave_on_quit", "=", "False", "dad", ".", "tree_right_side", "=", "False", "dad", ".", "aux_icon_hide", "=", "False", "dad", ".", "nodes_bookm_exp", "=", "False", "dad", ".", "rt_show_white_spaces", "=", "False", "dad", ".", "pt_show_white_spaces", "=", "True", "dad", ".", "rt_highl_curr_line", "=", "True", "dad", 
".", "pt_highl_curr_line", "=", "True", "dad", ".", "space_around_lines", "=", "0", "dad", ".", "relative_wrapped_space", "=", "50", "dad", ".", "hpaned_pos", "=", "170", "dad", ".", "tree_visible", "=", "True", "dad", ".", "show_node_name_header", "=", "True", "dad", ".", "nodes_on_node_name_header", "=", "NODES_ON_NODE_NAME_HEADER_DEFAULT", "dad", ".", "nodes_icons", "=", "\"c\"", "dad", ".", "default_icon_text", "=", "cons", ".", "NODE_ICON_BULLET_ID", "dad", ".", "recent_docs", "=", "[", "]", "dad", ".", "toolbar_visible", "=", "True", "dad", ".", "journal_day_format", "=", "JOURNAL_DAY_FORMAT_DEFAULT", "print", "\"missing\"", ",", "cons", ".", "CONFIG_PATH" ]
https://github.com/giuspen/cherrytree/blob/84712f206478fcf9acf30174009ad28c648c6344/pygtk2/modules/config.py#L189-L533
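The config_file_load record above applies the same "use the stored option if present, otherwise a hard-coded default" pattern to every preference. A minimal sketch of that pattern, assuming Python 3's configparser and invented section/option names rather than CherryTree's actual cons.CONFIG_PATH layout:

# Minimal sketch only: invented option names, Python 3 configparser
# instead of the Python 2 ConfigParser used by CherryTree.
import configparser

def load_state(path):
    cfg = configparser.RawConfigParser()
    cfg.read(path)  # a missing file is silently ignored, as in config_file_load
    section = "state"
    prefs = {}
    # Every preference falls back to a default when the option is absent,
    # mirroring the repeated cfg.has_option(...) checks above.
    prefs["toolbar_visible"] = (cfg.getboolean(section, "toolbar_visible")
                                if cfg.has_option(section, "toolbar_visible") else True)
    prefs["hpaned_pos"] = (cfg.getint(section, "hpaned_pos")
                           if cfg.has_option(section, "hpaned_pos") else 170)
    prefs["file_dir"] = (cfg.get(section, "file_dir")
                         if cfg.has_option(section, "file_dir") else "")
    return prefs

print(load_state("config.cfg"))  # all defaults when config.cfg does not exist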
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/model_fitting/model_fitting_model.py
python
ModelFittingModel.current_result_table_index
(self)
return self.fitting_context.current_result_table_index
Returns the index of the currently selected result table.
Returns the index of the currently selected result table.
[ "Returns", "the", "index", "of", "the", "currently", "selected", "result", "table", "." ]
def current_result_table_index(self) -> int:
    """Returns the index of the currently selected result table."""
    return self.fitting_context.current_result_table_index
[ "def", "current_result_table_index", "(", "self", ")", "->", "int", ":", "return", "self", ".", "fitting_context", ".", "current_result_table_index" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqtinterfaces/mantidqtinterfaces/Muon/GUI/Common/fitting_widgets/model_fitting/model_fitting_model.py#L30-L32
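The property above is a thin delegation to the model's fitting context. A hypothetical, self-contained sketch of that pattern; the two classes below are stand-ins, not Mantid's real ModelFittingModel or its context:

# Stand-in classes for illustration only; Mantid's real objects carry far more state.
class FittingContext(object):
    def __init__(self):
        self.current_result_table_index = 0  # index of the selected results table

class ModelFittingModel(object):
    def __init__(self):
        self.fitting_context = FittingContext()

    @property
    def current_result_table_index(self) -> int:
        """Returns the index of the currently selected result table."""
        return self.fitting_context.current_result_table_index

model = ModelFittingModel()
print(model.current_result_table_index)  # -> 0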
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/msw/propgrid.py
python
PGProperty.SetAttribute
(*args, **kwargs)
return _propgrid.PGProperty_SetAttribute(*args, **kwargs)
SetAttribute(self, String name, wxVariant value)
SetAttribute(self, String name, wxVariant value)
[ "SetAttribute", "(", "self", "String", "name", "wxVariant", "value", ")" ]
def SetAttribute(*args, **kwargs):
    """SetAttribute(self, String name, wxVariant value)"""
    return _propgrid.PGProperty_SetAttribute(*args, **kwargs)
[ "def", "SetAttribute", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_propgrid", ".", "PGProperty_SetAttribute", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/msw/propgrid.py#L674-L676
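SetAttribute forwards to the SWIG wrapper and lets callers tune per-property behaviour by name. A hedged usage sketch, assuming a Classic-era wx.propgrid build and that the standard "Precision" attribute is available for float properties (both are assumptions, not guaranteed by the snippet above):

import wx
import wx.propgrid as wxpg  # import path assumed for wxPython Classic

app = wx.App(False)
frame = wx.Frame(None, title="SetAttribute sketch")
grid = wxpg.PropertyGrid(frame)

# Append a property, then adjust its rendering via a named attribute.
prop = grid.Append(wxpg.FloatProperty("Scale", value=1.0))
prop.SetAttribute("Precision", 2)  # plain Python values are wrapped as wxVariant

frame.Show()
app.MainLoop()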
pmq20/node-packer
12c46c6e44fbc14d9ee645ebd17d5296b324f7e0
current/tools/gyp/pylib/gyp/xcode_emulation.py
python
XcodeSettings.GetBundleJavaFolderPath
(self)
return os.path.join(self.GetBundleResourceFolder(), 'Java')
Returns the qualified path to the bundle's Java resource folder. E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles.
Returns the qualified path to the bundle's Java resource folder. E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles.
[ "Returns", "the", "qualified", "path", "to", "the", "bundle", "s", "Java", "resource", "folder", ".", "E", ".", "g", ".", "Chromium", ".", "app", "/", "Contents", "/", "Resources", "/", "Java", ".", "Only", "valid", "for", "bundles", "." ]
def GetBundleJavaFolderPath(self): """Returns the qualified path to the bundle's Java resource folder. E.g. Chromium.app/Contents/Resources/Java. Only valid for bundles.""" assert self._IsBundle() return os.path.join(self.GetBundleResourceFolder(), 'Java')
[ "def", "GetBundleJavaFolderPath", "(", "self", ")", ":", "assert", "self", ".", "_IsBundle", "(", ")", "return", "os", ".", "path", ".", "join", "(", "self", ".", "GetBundleResourceFolder", "(", ")", ",", "'Java'", ")" ]
https://github.com/pmq20/node-packer/blob/12c46c6e44fbc14d9ee645ebd17d5296b324f7e0/current/tools/gyp/pylib/gyp/xcode_emulation.py#L328-L332
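GetBundleJavaFolderPath is a thin path helper guarded by an assertion that the target is a bundle. The self-contained sketch below imitates that assert-then-join shape with an invented BundlePathsSketch class (not gyp's real XcodeSettings):

import os

class BundlePathsSketch:
    # Toy stand-in for the bundle-only path helpers in XcodeSettings.
    def __init__(self, is_bundle, resource_folder):
        self._bundle = is_bundle
        self._resource_folder = resource_folder

    def _IsBundle(self):
        return self._bundle

    def GetBundleResourceFolder(self):
        return self._resource_folder

    def GetBundleJavaFolderPath(self):
        # Only meaningful for bundles, hence the guard before joining.
        assert self._IsBundle()
        return os.path.join(self.GetBundleResourceFolder(), 'Java')

paths = BundlePathsSketch(True, 'Chromium.app/Contents/Resources')
print(paths.GetBundleJavaFolderPath())  # -> Chromium.app/Contents/Resources/Java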
mantidproject/mantid
03deeb89254ec4289edb8771e0188c2090a02f32
qt/python/mantidqt/mantidqt/widgets/superplot/view.py
python
SuperplotView.set_hold_button_size
(self, width, height)
Set the hold button fixed size. Args: width (int): button width height (int): button height
Set the hold button fixed size.
[ "Set", "the", "hold", "button", "fixed", "size", "." ]
def set_hold_button_size(self, width, height): """ Set the hold button fixed size. Args: width (int): button width height (int): button height """ self._bottom_view.holdButton.setFixedSize(width, height)
[ "def", "set_hold_button_size", "(", "self", ",", "width", ",", "height", ")", ":", "self", ".", "_bottom_view", ".", "holdButton", ".", "setFixedSize", "(", "width", ",", "height", ")" ]
https://github.com/mantidproject/mantid/blob/03deeb89254ec4289edb8771e0188c2090a02f32/qt/python/mantidqt/mantidqt/widgets/superplot/view.py#L423-L431
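set_hold_button_size just forwards the requested size to the Qt button owned by the bottom view. One way to exercise that forwarding without a running Qt session is to substitute a MagicMock for the bottom view (this mock-based harness is an assumption for illustration, not how Mantid itself tests the view):

from unittest.mock import MagicMock

class SuperplotViewSketch:
    # Mirrors only the forwarding behaviour of set_hold_button_size.
    def __init__(self, bottom_view):
        self._bottom_view = bottom_view

    def set_hold_button_size(self, width, height):
        self._bottom_view.holdButton.setFixedSize(width, height)

bottom_view = MagicMock()
view = SuperplotViewSketch(bottom_view)
view.set_hold_button_size(120, 30)
bottom_view.holdButton.setFixedSize.assert_called_once_with(120, 30)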
hanpfei/chromium-net
392cc1fa3a8f92f42e4071ab6e674d8e0482f83f
third_party/protobuf/python/google/protobuf/internal/python_message.py
python
_IsPresent
(item)
Given a (FieldDescriptor, value) tuple from _fields, return true if the value should be included in the list returned by ListFields().
Given a (FieldDescriptor, value) tuple from _fields, return true if the value should be included in the list returned by ListFields().
[ "Given", "a", "(", "FieldDescriptor", "value", ")", "tuple", "from", "_fields", "return", "true", "if", "the", "value", "should", "be", "included", "in", "the", "list", "returned", "by", "ListFields", "()", "." ]
def _IsPresent(item): """Given a (FieldDescriptor, value) tuple from _fields, return true if the value should be included in the list returned by ListFields().""" if item[0].label == _FieldDescriptor.LABEL_REPEATED: return bool(item[1]) elif item[0].cpp_type == _FieldDescriptor.CPPTYPE_MESSAGE: return item[1]._is_present_in_parent else: return True
[ "def", "_IsPresent", "(", "item", ")", ":", "if", "item", "[", "0", "]", ".", "label", "==", "_FieldDescriptor", ".", "LABEL_REPEATED", ":", "return", "bool", "(", "item", "[", "1", "]", ")", "elif", "item", "[", "0", "]", ".", "cpp_type", "==", "_FieldDescriptor", ".", "CPPTYPE_MESSAGE", ":", "return", "item", "[", "1", "]", ".", "_is_present_in_parent", "else", ":", "return", "True" ]
https://github.com/hanpfei/chromium-net/blob/392cc1fa3a8f92f42e4071ab6e674d8e0482f83f/third_party/protobuf/python/google/protobuf/internal/python_message.py#L785-L794
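_IsPresent distinguishes three cases: repeated fields count only when non-empty, message fields count only when marked present in the parent, and any other set scalar always counts. The sketch below reproduces that branching against invented FakeDescriptor/FakeMessage stand-ins, since the real descriptor constants live inside google.protobuf and are not part of the snippet above:

class FakeDescriptor:
    LABEL_REPEATED = 3
    CPPTYPE_MESSAGE = 10

    def __init__(self, label, cpp_type):
        self.label = label
        self.cpp_type = cpp_type

class FakeMessage:
    def __init__(self, present):
        self._is_present_in_parent = present

def is_present(item):
    # Same branching as _IsPresent, written against the fakes above.
    if item[0].label == FakeDescriptor.LABEL_REPEATED:
        return bool(item[1])
    elif item[0].cpp_type == FakeDescriptor.CPPTYPE_MESSAGE:
        return item[1]._is_present_in_parent
    else:
        return True

repeated = FakeDescriptor(FakeDescriptor.LABEL_REPEATED, 0)
message = FakeDescriptor(1, FakeDescriptor.CPPTYPE_MESSAGE)
scalar = FakeDescriptor(1, 5)

assert is_present((repeated, [])) is False               # empty repeated field skipped
assert is_present((message, FakeMessage(True))) is True  # submessage counted when set
assert is_present((scalar, 0)) is True                   # set scalars always listed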
wxWidgets/wxPython-Classic
19571e1ae65f1ac445f5491474121998c97a1bf0
src/osx_carbon/_core.py
python
Window.SetWindowStyleFlag
(*args, **kwargs)
return _core_.Window_SetWindowStyleFlag(*args, **kwargs)
SetWindowStyleFlag(self, long style) Sets the style of the window. Please note that some styles cannot be changed after the window creation and that Refresh() might need to be called after changing the others for the change to take place immediately.
SetWindowStyleFlag(self, long style)
[ "SetWindowStyleFlag", "(", "self", "long", "style", ")" ]
def SetWindowStyleFlag(*args, **kwargs): """ SetWindowStyleFlag(self, long style) Sets the style of the window. Please note that some styles cannot be changed after the window creation and that Refresh() might need to be called after changing the others for the change to take place immediately. """ return _core_.Window_SetWindowStyleFlag(*args, **kwargs)
[ "def", "SetWindowStyleFlag", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "return", "_core_", ".", "Window_SetWindowStyleFlag", "(", "*", "args", ",", "*", "*", "kwargs", ")" ]
https://github.com/wxWidgets/wxPython-Classic/blob/19571e1ae65f1ac445f5491474121998c97a1bf0/src/osx_carbon/_core.py#L10016-L10025
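As the docstring notes, some styles can only be set at creation time and others need a Refresh() to take effect. A hedged usage sketch for wxPython Classic (the specific BORDER_SUNKEN flag is just an example choice, not mandated by the snippet above):

import wx

app = wx.App(False)
frame = wx.Frame(None, title="SetWindowStyleFlag sketch")

# OR a border style into the current flags, then repaint so the change
# is visible immediately, as the docstring above recommends.
frame.SetWindowStyleFlag(frame.GetWindowStyleFlag() | wx.BORDER_SUNKEN)
frame.Refresh()

frame.Show()
app.MainLoop()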
stan-dev/math
5fd79f89933269a4ca4d8dd1fde2a36d53d4768c
lib/cpplint_1.4.5/cpplint.py
python
FlagCxx11Features
(filename, clean_lines, linenum, error)
Flag those c++11 features that we only allow in certain places. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found.
Flag those c++11 features that we only allow in certain places.
[ "Flag", "those", "c", "++", "11", "features", "that", "we", "only", "allow", "in", "certain", "places", "." ]
def FlagCxx11Features(filename, clean_lines, linenum, error): """Flag those c++11 features that we only allow in certain places. Args: filename: The name of the current file. clean_lines: A CleansedLines instance containing the file. linenum: The number of the line to check. error: The function to call with any errors found. """ line = clean_lines.elided[linenum] include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line) # Flag unapproved C++ TR1 headers. if include and include.group(1).startswith('tr1/'): error(filename, linenum, 'build/c++tr1', 5, ('C++ TR1 headers such as <%s> are unapproved.') % include.group(1)) # Flag unapproved C++11 headers. if include and include.group(1) in ('cfenv', 'condition_variable', 'fenv.h', 'future', 'mutex', 'thread', 'chrono', 'ratio', 'regex', 'system_error', ): error(filename, linenum, 'build/c++11', 5, ('<%s> is an unapproved C++11 header.') % include.group(1)) # The only place where we need to worry about C++11 keywords and library # features in preprocessor directives is in macro definitions. if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return # These are classes and free functions. The classes are always # mentioned as std::*, but we only catch the free functions if # they're not found by ADL. They're alphabetical by header. for top_name in ( # type_traits 'alignment_of', 'aligned_union', ): if Search(r'\bstd::%s\b' % top_name, line): error(filename, linenum, 'build/c++11', 5, ('std::%s is an unapproved C++11 class or function. Send c-style ' 'an example of where it would make your code more readable, and ' 'they may let you use it.') % top_name)
[ "def", "FlagCxx11Features", "(", "filename", ",", "clean_lines", ",", "linenum", ",", "error", ")", ":", "line", "=", "clean_lines", ".", "elided", "[", "linenum", "]", "include", "=", "Match", "(", "r'\\s*#\\s*include\\s+[<\"]([^<\"]+)[\">]'", ",", "line", ")", "# Flag unapproved C++ TR1 headers.", "if", "include", "and", "include", ".", "group", "(", "1", ")", ".", "startswith", "(", "'tr1/'", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++tr1'", ",", "5", ",", "(", "'C++ TR1 headers such as <%s> are unapproved.'", ")", "%", "include", ".", "group", "(", "1", ")", ")", "# Flag unapproved C++11 headers.", "if", "include", "and", "include", ".", "group", "(", "1", ")", "in", "(", "'cfenv'", ",", "'condition_variable'", ",", "'fenv.h'", ",", "'future'", ",", "'mutex'", ",", "'thread'", ",", "'chrono'", ",", "'ratio'", ",", "'regex'", ",", "'system_error'", ",", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++11'", ",", "5", ",", "(", "'<%s> is an unapproved C++11 header.'", ")", "%", "include", ".", "group", "(", "1", ")", ")", "# The only place where we need to worry about C++11 keywords and library", "# features in preprocessor directives is in macro definitions.", "if", "Match", "(", "r'\\s*#'", ",", "line", ")", "and", "not", "Match", "(", "r'\\s*#\\s*define\\b'", ",", "line", ")", ":", "return", "# These are classes and free functions. The classes are always", "# mentioned as std::*, but we only catch the free functions if", "# they're not found by ADL. They're alphabetical by header.", "for", "top_name", "in", "(", "# type_traits", "'alignment_of'", ",", "'aligned_union'", ",", ")", ":", "if", "Search", "(", "r'\\bstd::%s\\b'", "%", "top_name", ",", "line", ")", ":", "error", "(", "filename", ",", "linenum", ",", "'build/c++11'", ",", "5", ",", "(", "'std::%s is an unapproved C++11 class or function. Send c-style '", "'an example of where it would make your code more readable, and '", "'they may let you use it.'", ")", "%", "top_name", ")" ]
https://github.com/stan-dev/math/blob/5fd79f89933269a4ca4d8dd1fde2a36d53d4768c/lib/cpplint_1.4.5/cpplint.py#L6110-L6159
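FlagCxx11Features mixes include checks with keyword checks, and the include part is easy to isolate: match the #include line, then flag TR1 headers and a fixed set of C++11 headers. The stand-alone sketch below re-implements only that include scan with its own error callback; it is a simplified illustration, not cpplint's actual entry point or data structures:

import re

UNAPPROVED_CPP11_HEADERS = {
    'cfenv', 'condition_variable', 'fenv.h', 'future', 'mutex',
    'thread', 'chrono', 'ratio', 'regex', 'system_error',
}

def flag_unapproved_includes(filename, lines, error):
    # Re-implements just the include checks from FlagCxx11Features.
    for linenum, line in enumerate(lines):
        include = re.match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line)
        if not include:
            continue
        header = include.group(1)
        if header.startswith('tr1/'):
            error(filename, linenum, 'build/c++tr1', 5,
                  'C++ TR1 headers such as <%s> are unapproved.' % header)
        elif header in UNAPPROVED_CPP11_HEADERS:
            error(filename, linenum, 'build/c++11', 5,
                  '<%s> is an unapproved C++11 header.' % header)

def print_error(filename, linenum, category, confidence, message):
    print('%s:%d:  %s  [%s] [%d]' % (filename, linenum, message, category, confidence))

flag_unapproved_includes(
    'demo.cc',
    ['#include <vector>', '#include <thread>', '#include <tr1/memory>'],
    print_error)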